aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--.coafile34
-rw-r--r--.gitattributes1
-rw-r--r--.gitignore3
-rw-r--r--.gitreview1
-rw-r--r--.readthedocs.yaml18
-rw-r--r--.yamllint16
-rw-r--r--INFO.yaml128
-rw-r--r--README.md17
-rw-r--r--S3Ptools/locust_Grafana.sh2
-rw-r--r--S3Ptools/locustfile.py8
-rw-r--r--bootstrap/README.md8
-rw-r--r--bootstrap/codesearch/README.rst78
-rw-r--r--bootstrap/codesearch/Vagrantfile40
-rwxr-xr-xbootstrap/codesearch/create_config.py97
-rw-r--r--bootstrap/codesearch/test-requirements.txt2
-rw-r--r--bootstrap/codesearch/tox.ini14
-rw-r--r--bootstrap/jenkins/README.md2
-rwxr-xr-xbootstrap/jenkins/scripts/gen-java-jobs.py14
-rw-r--r--bootstrap/jenkins/vagrant/jjb/docker-jobs.yaml134
-rw-r--r--bootstrap/jenkins/vagrant/jjb/java-jobs.yaml914
-rw-r--r--bootstrap/vagrant-minimal-onap/HACKING.rst20
-rw-r--r--bootstrap/vagrant-minimal-onap/README.rst137
-rw-r--r--bootstrap/vagrant-minimal-onap/Vagrantfile345
-rw-r--r--bootstrap/vagrant-minimal-onap/config/cluster.yml48
-rw-r--r--bootstrap/vagrant-minimal-onap/config/dot_env5
-rw-r--r--bootstrap/vagrant-minimal-onap/config/local.conf5
-rw-r--r--bootstrap/vagrant-minimal-onap/operator/.gitignore5
-rw-r--r--bootstrap/vagrant-minimal-onap/tools/config/95silent-approval2
-rw-r--r--bootstrap/vagrant-minimal-onap/tools/config/dot_curlrc8
-rw-r--r--bootstrap/vagrant-minimal-onap/tools/config/dot_wgetrc2
-rwxr-xr-xbootstrap/vagrant-minimal-onap/tools/get_customization_scripts.sh5
-rw-r--r--[-rwxr-xr-x]bootstrap/vagrant-minimal-onap/tools/get_helm.sh (renamed from test/security/k8s/tools/casablanca/get_ranchercli.sh)21
-rwxr-xr-xbootstrap/vagrant-minimal-onap/tools/get_kubectl.sh41
-rwxr-xr-xbootstrap/vagrant-minimal-onap/tools/get_rke.sh39
-rw-r--r--bootstrap/vagrant-minimal-onap/tools/imported/openstack-k8s-controlnode.sh31
-rw-r--r--bootstrap/vagrant-minimal-onap/tools/imported/openstack-k8s-workernode.sh34
-rwxr-xr-xbootstrap/vagrant-minimal-onap/tools/setup_kubectl.sh52
-rw-r--r--bootstrap/vagrant-onap/README.md1
-rw-r--r--deployment/Azure_ARM_Template/arm_cluster_deploy_beijing.json79
-rw-r--r--deployment/Azure_ARM_Template/arm_cluster_deploy_parameters.json20
-rw-r--r--deployment/README.md8
-rw-r--r--deployment/aks/README.md90
-rw-r--r--deployment/aks/cloud.conf3
-rw-r--r--deployment/aks/cloud.conf.example2
-rwxr-xr-xdeployment/aks/cloud.sh96
-rwxr-xr-xdeployment/aks/create_aks.sh4
-rwxr-xr-xdeployment/aks/create_devstack.sh21
-rwxr-xr-xdeployment/aks/create_onap.sh30
-rwxr-xr-xdeployment/aks/post-install/000_bootstrap_onap.sh (renamed from deployment/aks/bootstrap_onap.sh)2
-rwxr-xr-xdeployment/aks/post-install/bootstrap/bootstrap.sh (renamed from deployment/aks/post-install/bootstrap.sh)0
-rwxr-xr-xdeployment/aks/post-install/bootstrap/create_az.sh (renamed from deployment/aks/post-install/create_az.sh)0
-rwxr-xr-xdeployment/aks/post-install/bootstrap/create_clli.sh (renamed from deployment/aks/post-install/create_clli.sh)0
-rwxr-xr-xdeployment/aks/post-install/bootstrap/create_cloud_region.sh (renamed from deployment/aks/post-install/create_cloud_region.sh)0
-rwxr-xr-xdeployment/aks/post-install/bootstrap/create_cloud_region_relationship.sh (renamed from deployment/aks/post-install/create_cloud_region_relationship.sh)0
-rwxr-xr-xdeployment/aks/post-install/bootstrap/create_cloud_region_subscriber_relationship.sh (renamed from deployment/aks/post-install/create_cloud_region_subscriber_relationship.sh)0
-rwxr-xr-xdeployment/aks/post-install/bootstrap/create_cloud_site.sh (renamed from deployment/aks/post-install/create_cloud_site.sh)10
-rwxr-xr-xdeployment/aks/post-install/bootstrap/create_customer.sh (renamed from deployment/aks/post-install/create_customer.sh)0
-rwxr-xr-xdeployment/aks/post-install/bootstrap/create_lob.sh (renamed from deployment/aks/post-install/create_lob.sh)0
-rwxr-xr-xdeployment/aks/post-install/bootstrap/create_owning_entity.sh (renamed from deployment/aks/post-install/create_owning_entity.sh)0
-rwxr-xr-xdeployment/aks/post-install/bootstrap/create_platform.sh (renamed from deployment/aks/post-install/create_platform.sh)0
-rwxr-xr-xdeployment/aks/post-install/bootstrap/create_project.sh (renamed from deployment/aks/post-install/create_project.sh)0
-rwxr-xr-xdeployment/aks/post-install/bootstrap/create_service_type.sh (renamed from deployment/aks/post-install/create_service_type.sh)0
-rwxr-xr-xdeployment/aks/post-install/bootstrap/create_subscription.sh (renamed from deployment/aks/post-install/create_subscription.sh)0
-rwxr-xr-x[-rw-r--r--]deployment/aks/post_install.sh (renamed from test/mocks/pnfsimulator/netconfsimulator/netconf/__init__.py)34
-rwxr-xr-xdeployment/aks/pre_install.sh108
-rwxr-xr-xdeployment/aks/util/create_integration_override.sh3
-rwxr-xr-xdeployment/aks/util/create_openstack_cli.sh8
-rwxr-xr-xdeployment/aks/util/create_robot_config.sh3
-rw-r--r--deployment/aks/util/integration_override.template1
-rw-r--r--deployment/heat/onap-rke/env/windriver/onap-oom.env14
-rw-r--r--deployment/heat/onap-rke/nfs_vm_entrypoint.sh6
-rw-r--r--deployment/heat/onap-rke/onap-oom.yaml17
-rw-r--r--deployment/heat/onap-rke/parts/onap-oom-1.yaml10
-rw-r--r--deployment/heat/onap-rke/parts/onap-oom-3.yaml1
-rw-r--r--deployment/heat/onap-rke/policy-staging-image-override.yaml14
-rwxr-xr-xdeployment/heat/onap-rke/scripts/cleanup.sh12
-rwxr-xr-xdeployment/heat/onap-rke/scripts/redeploy-module.sh4
-rw-r--r--deployment/heat/onap-rke/sdc-staging-image-override.yaml13
-rw-r--r--deployment/heat/onap-rke/staging-image-override.yaml193
-rwxr-xr-xdeployment/heat/onap-rke/uui-staging-image-override.yaml5
-rw-r--r--deployment/noheat/README.rst48
-rw-r--r--deployment/noheat/cluster-rke/ansible/create.yml63
-rw-r--r--deployment/noheat/cluster-rke/ansible/group_vars/all.yml.sm-onap11
l---------deployment/noheat/cluster-rke/ansible/group_vars/all/all.yml1
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/create_bastion/tasks/main.yml35
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/deps/defaults/main.yml11
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/deps/files/envoyfilter-case.yml41
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/deps/tasks/cert-manager.yml17
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/deps/tasks/istio.yml55
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/deps/tasks/main.yml19
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/deps/tasks/metallb.yml51
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/deps/tasks/prometheus.yml13
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/deps/tasks/strimzi.yml15
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/oom/tasks/main.yml66
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_docker/defaults/main.yml3
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_docker/handlers/main.yml5
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_docker/tasks/main.yml26
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_docker/tasks/packages.yml41
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_docker/vars/main.yml8
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_helm/defaults/main.yml7
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_helm/handlers/main.yml5
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/cm.yml45
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/helm.yml35
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/main.yml12
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_helm/templates/chartmuseum.service.j213
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_k8s/defaults/main.yml3
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/kubectl.yml13
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/main.yml16
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/rke.yml25
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_k8s/templates/cluster.yml.j252
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_nfs/defaults/main.yml2
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_nfs/tasks/main.yml59
-rw-r--r--deployment/noheat/cluster-rke/ansible/roles/setup_nfs/templates/exports.j21
-rw-r--r--deployment/noheat/common-vars.yml2
-rw-r--r--deployment/noheat/deploy-all.yml9
-rw-r--r--deployment/noheat/devstack/ansible/create.yml43
-rw-r--r--deployment/noheat/devstack/ansible/group_vars/all/all.yml3
-rw-r--r--deployment/noheat/devstack/ansible/templates/local.conf.j25
-rw-r--r--deployment/noheat/infra-openstack/HACKING.rst30
-rw-r--r--deployment/noheat/infra-openstack/README.rst34
-rw-r--r--deployment/noheat/infra-openstack/ansible/create.yml136
-rw-r--r--deployment/noheat/infra-openstack/ansible/destroy.yml15
-rw-r--r--deployment/noheat/infra-openstack/ansible/group_vars/all.yml.sample63
-rw-r--r--deployment/noheat/infra-openstack/ansible/group_vars/all.yml.sm-onap86
l---------deployment/noheat/infra-openstack/ansible/group_vars/all/all.yml1
-rw-r--r--deployment/noheat/infra-openstack/ansible/group_vars/all/openstack.yml6
-rw-r--r--deployment/noheat/infra-openstack/ansible/operator-requirements.yml8
-rw-r--r--deployment/noheat/infra-openstack/ansible/roles/create_hosts/tasks/create_host.yml33
-rw-r--r--deployment/noheat/infra-openstack/ansible/roles/create_hosts/tasks/main.yml5
-rw-r--r--deployment/noheat/infra-openstack/ansible/roles/create_keypair/tasks/main.yml25
-rw-r--r--deployment/noheat/infra-openstack/ansible/roles/create_network/tasks/create_network.yml28
-rw-r--r--deployment/noheat/infra-openstack/ansible/roles/create_network/tasks/main.yml6
-rw-r--r--deployment/noheat/infra-openstack/ansible/roles/create_securitygroup/tasks/create_securitygroup.yml23
-rw-r--r--deployment/noheat/infra-openstack/ansible/roles/create_securitygroup/tasks/main.yml6
-rw-r--r--deployment/noheat/infra-openstack/ansible/roles/destroy_hosts/tasks/destroy_host.yml5
-rw-r--r--deployment/noheat/infra-openstack/ansible/roles/destroy_hosts/tasks/main.yml5
-rw-r--r--deployment/noheat/infra-openstack/ansible/roles/destroy_keypair/tasks/main.yml12
-rw-r--r--deployment/noheat/infra-openstack/ansible/roles/destroy_network/tasks/destroy_network.yml10
-rw-r--r--deployment/noheat/infra-openstack/ansible/roles/destroy_network/tasks/main.yml6
-rw-r--r--deployment/noheat/infra-openstack/ansible/roles/destroy_securitygroup/tasks/destroy_securitygroup.yml5
-rw-r--r--deployment/noheat/infra-openstack/ansible/roles/destroy_securitygroup/tasks/main.yml6
-rw-r--r--deployment/noheat/infra-openstack/ansible/templates/clouds.yaml.j211
-rw-r--r--deployment/noheat/infra-openstack/ansible/templates/inventory.ini.j253
-rw-r--r--deployment/noheat/infra-openstack/ansible/templates/openstack.yml.j25
-rw-r--r--deployment/noheat/infra-openstack/vagrant/Vagrantfile167
-rw-r--r--deployment/noheat/infra-openstack/vagrant/config/clouds.yaml12
-rw-r--r--deployment/noheat/infra-openstack/vagrant/config/local.conf6
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/Makefile12
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/README.rst31
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/create_host.stderr (renamed from test/mocks/datafilecollector-testharness/simulator-group/consul/consul_config.json)0
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/create_host.stdout1
-rwxr-xr-xdeployment/noheat/infra-openstack/vagrant/test/create_host.test27
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/create_keypair.stderr (renamed from test/mocks/pnfsimulator/netconfsimulator/ftpes/files/ftpes-noone.txt)0
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/create_keypair.stdout1
-rwxr-xr-xdeployment/noheat/infra-openstack/vagrant/test/create_keypair.test27
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/create_network.stderr (renamed from test/mocks/pnfsimulator/netconfsimulator/ftpes/files/onap/ftpes-onap.txt)0
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/create_network.stdout1
-rwxr-xr-xdeployment/noheat/infra-openstack/vagrant/test/create_network.test27
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.stderr (renamed from test/mocks/pnfsimulator/netconfsimulator/src/test/resources/application-it.properties)0
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.stdout1
-rwxr-xr-xdeployment/noheat/infra-openstack/vagrant/test/create_securitygroup.test27
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/destroy_host.stderr (renamed from test/vcpe/aaiutil.py)0
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/destroy_host.stdout1
-rwxr-xr-xdeployment/noheat/infra-openstack/vagrant/test/destroy_host.test22
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.stderr0
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.stdout1
-rwxr-xr-xdeployment/noheat/infra-openstack/vagrant/test/destroy_keypair.test22
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/destroy_network.stderr0
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/destroy_network.stdout1
-rwxr-xr-xdeployment/noheat/infra-openstack/vagrant/test/destroy_network.test22
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.stderr0
-rw-r--r--deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.stdout1
-rwxr-xr-xdeployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.test22
-rw-r--r--deployment/noheat/requirements.txt3
-rw-r--r--deployment/noheat/requirements.yml10
-rw-r--r--deployment/onap-lab-ci/README.md1
-rw-r--r--deployment/onap-lab-ci/jjb/jobs.yaml233
-rw-r--r--docs/.gitignore3
-rw-r--r--docs/_static/css/ribbon.css63
-rwxr-xr-xdocs/_static/favicon.icobin0 -> 2102 bytes
-rw-r--r--docs/_static/logo_onap_2017.pngbin0 -> 12278 bytes
-rw-r--r--docs/conf.py72
-rw-r--r--docs/docs_5G_Bulk_PM.rst17
-rw-r--r--docs/docs_5G_Configuration_over_NETCONF.rst12
-rw-r--r--docs/docs_5G_NRM_Configuration.rst41
-rw-r--r--docs/docs_5G_PNF_Software_Upgrade.rst77
-rw-r--r--docs/docs_5G_PNF_Software_Upgrade_With_Schema_Update.rst114
-rw-r--r--docs/docs_5G_PNF_Software_Upgrade_ansible_with_EM.rst35
-rw-r--r--docs/docs_5G_PNF_Software_Upgrade_direct_netconf_yang.rst37
-rw-r--r--docs/docs_5G_PNF_Software_Upgrade_netconf_with_EM.rst35
-rw-r--r--docs/docs_5G_oof_pci.rst43
-rw-r--r--docs/docs_5G_oof_son.rst128
-rw-r--r--docs/docs_5g_pnf_pnp.rst72
-rw-r--r--docs/docs_5g_rtpm.rst12
-rw-r--r--docs/docs_BBS.rst302
-rw-r--r--docs/docs_CCVPN.rst554
-rw-r--r--docs/docs_CM_flexible_designer_orchestrator.rst6
-rw-r--r--docs/docs_CM_schedule_optimizer.rst23
-rw-r--r--docs/docs_E2E_network_slicing.rst638
-rw-r--r--docs/docs_NFV_Testing_Automatic_Platform_Requirements_User_Guide.rst130
-rw-r--r--docs/docs_StndDefined_Events_Collection_Mechanism.rst97
-rw-r--r--docs/docs_Testing_5G_PNF_Software_Upgrade_With_Schema_Update.rst189
-rw-r--r--docs/docs_pnf_onboarding_preonboarding.rst29
-rw-r--r--docs/docs_postman.rst190
-rw-r--r--docs/docs_robot.rst2
-rw-r--r--docs/docs_scaleout.rst541
-rw-r--r--docs/docs_usecases.rst52
-rw-r--r--docs/docs_usecases_release.rst34
-rw-r--r--docs/docs_vCPE with Tosca VNF.rst159
-rw-r--r--docs/docs_vCPE.rst107
-rw-r--r--docs/docs_vCPE_tosca_local.rst210
-rw-r--r--docs/docs_vCPE_with_Tosca_VNF.rst190
-rw-r--r--[-rwxr-xr-x]docs/docs_vFWDT.rst673
-rw-r--r--docs/docs_vFW_CNF_CDS.rst1903
-rw-r--r--docs/docs_vfw.rst13
-rw-r--r--docs/docs_vfwHPA.rst231
-rw-r--r--docs/docs_vfw_edgex_k8s.rst19
-rw-r--r--docs/docs_vipsec.rst62
-rw-r--r--docs/docs_vlb.rst39
-rw-r--r--docs/files/CI/ONAP_CI_0.pngbin0 -> 193773 bytes
-rw-r--r--docs/files/CI/ONAP_CI_1.pngbin0 -> 158090 bytes
-rw-r--r--docs/files/CI/ONAP_CI_10.pngbin0 -> 80878 bytes
-rw-r--r--docs/files/CI/ONAP_CI_2.pngbin0 -> 68472 bytes
-rw-r--r--docs/files/CI/ONAP_CI_3.pngbin0 -> 212584 bytes
-rw-r--r--docs/files/CI/ONAP_CI_4.pngbin0 -> 134170 bytes
-rw-r--r--docs/files/CI/ONAP_CI_5.pngbin0 -> 35394 bytes
-rw-r--r--docs/files/CI/ONAP_CI_6.pngbin0 -> 54172 bytes
-rw-r--r--docs/files/CI/ONAP_CI_7.pngbin0 -> 65675 bytes
-rwxr-xr-xdocs/files/CI/ONAP_CI_8.pngbin0 -> 84910 bytes
-rw-r--r--docs/files/CI/ONAP_CI_9.pngbin0 -> 157463 bytes
-rw-r--r--docs/files/bbs/BBS_dcae-ves-collector_config.pngbin0 -> 591369 bytes
-rw-r--r--docs/files/csv/release-demo-features.csv5
-rw-r--r--docs/files/csv/release-integration-features.csv5
-rw-r--r--docs/files/csv/release-integration-ref.csv39
-rw-r--r--docs/files/csv/release-oparent-features.csv4
-rw-r--r--docs/files/csv/release-pythonsdk-features.csv2
-rw-r--r--docs/files/csv/release-testsuite-features.csv2
-rw-r--r--docs/files/csv/repo-archived.csv10
-rw-r--r--docs/files/csv/repo-demo.csv2
-rw-r--r--docs/files/csv/repo-integration-external.csv2
-rw-r--r--docs/files/csv/repo-integration.csv13
-rw-r--r--docs/files/csv/repo-oparent.csv3
-rw-r--r--docs/files/csv/repo-pipelines.csv4
-rw-r--r--docs/files/csv/repo-simulators.csv13
-rw-r--r--docs/files/csv/repo-testsuite.csv10
-rw-r--r--docs/files/csv/s3p-instantiation.csv6
-rw-r--r--docs/files/csv/s3p-sdc.csv6
-rw-r--r--docs/files/csv/simulators.csv6
-rw-r--r--docs/files/csv/stability_basic_vm.csv11
-rw-r--r--docs/files/csv/stability_cluster_metric_cpu.csv2
-rw-r--r--docs/files/csv/stability_cluster_metric_memory.csv2
-rw-r--r--docs/files/csv/stability_cluster_metric_network.csv2
-rw-r--r--docs/files/csv/stability_top10_cpu.csv11
-rw-r--r--docs/files/csv/stability_top10_memory.csv11
-rw-r--r--docs/files/csv/stability_top10_net.csv11
-rw-r--r--docs/files/csv/tests-healthcheck.csv11
-rw-r--r--docs/files/csv/tests-infrastructure-healthcheck.csv4
-rw-r--r--docs/files/csv/tests-security.csv5
-rw-r--r--docs/files/csv/tests-smoke.csv11
-rw-r--r--docs/files/csv/usecases-deprecated.csv28
-rw-r--r--docs/files/csv/usecases-functional-requirements.csv3
-rw-r--r--docs/files/csv/usecases-non-functional-requirements.csv5
-rw-r--r--docs/files/csv/usecases-old-valid.csv6
-rw-r--r--docs/files/csv/usecases.csv4
-rwxr-xr-xdocs/files/dt-use-case.pngbin240228 -> 154683 bytes
-rw-r--r--docs/files/ns_automation/ns_automation_sdc_suffix.pngbin0 -> 17065 bytes
-rw-r--r--docs/files/ns_automation/ns_automation_suc.pngbin0 -> 170864 bytes
-rw-r--r--docs/files/ns_automation/ns_automation_test_class.pngbin0 -> 120331 bytes
-rw-r--r--docs/files/s3p/basic_vm_duration.pngbin0 -> 36201 bytes
-rw-r--r--docs/files/s3p/basic_vm_duration_histo.pngbin0 -> 29154 bytes
-rw-r--r--docs/files/s3p/daily_frankfurt1.pngbin0 -> 59945 bytes
-rw-r--r--docs/files/s3p/daily_frankfurt2.pngbin0 -> 24219 bytes
-rw-r--r--docs/files/s3p/guilin_daily_healthcheck.pngbin0 -> 20733 bytes
-rw-r--r--docs/files/s3p/guilin_daily_infrastructure_healthcheck.pngbin0 -> 19414 bytes
-rw-r--r--docs/files/s3p/guilin_daily_security.pngbin0 -> 10143 bytes
-rw-r--r--docs/files/s3p/guilin_daily_smoke.pngbin0 -> 17422 bytes
-rw-r--r--docs/files/s3p/honolulu_daily_healthcheck.pngbin0 -> 19579 bytes
-rw-r--r--docs/files/s3p/honolulu_daily_infrastructure_healthcheck.pngbin0 -> 24545 bytes
-rw-r--r--docs/files/s3p/honolulu_daily_security.pngbin0 -> 20443 bytes
-rw-r--r--docs/files/s3p/honolulu_daily_smoke.pngbin0 -> 26012 bytes
-rw-r--r--docs/files/s3p/honolulu_sdc_stability.pngbin0 -> 204363 bytes
-rw-r--r--docs/files/s3p/honolulu_sdc_stability_resources.pngbin0 -> 49466 bytes
-rw-r--r--docs/files/s3p/honolulu_so_stability_1_duration.pngbin0 -> 35364 bytes
-rw-r--r--docs/files/s3p/honolulu_so_stability_5.pngbin0 -> 129331 bytes
-rw-r--r--docs/files/s3p/honolulu_weekly_cpu.pngbin0 -> 263761 bytes
-rw-r--r--docs/files/s3p/honolulu_weekly_memory.pngbin0 -> 299476 bytes
-rw-r--r--docs/files/s3p/istanbul-dashboard.pngbin0 -> 60652 bytes
-rw-r--r--docs/files/s3p/istanbul_daily_healthcheck.pngbin0 -> 21941 bytes
-rw-r--r--docs/files/s3p/istanbul_daily_infrastructure_healthcheck.pngbin0 -> 21499 bytes
-rw-r--r--docs/files/s3p/istanbul_daily_security.pngbin0 -> 16609 bytes
-rw-r--r--docs/files/s3p/istanbul_daily_smoke.pngbin0 -> 21629 bytes
-rw-r--r--docs/files/s3p/istanbul_instantiation_stability_10.pngbin0 -> 90935 bytes
-rw-r--r--docs/files/s3p/istanbul_resiliency.pngbin0 -> 15880 bytes
-rw-r--r--docs/files/s3p/istanbul_sdc_stability.pngbin0 -> 75166 bytes
-rwxr-xr-xdocs/files/s3p/jakarta-dashboard.pngbin0 -> 59919 bytes
-rw-r--r--docs/files/s3p/stability_sdnc_memory.pngbin0 -> 22416 bytes
-rw-r--r--docs/files/scaleout/12.pngbin188128 -> 0 bytes
-rw-r--r--docs/files/scaleout/13.pngbin235213 -> 0 bytes
-rw-r--r--docs/files/scaleout/14.pngbin230675 -> 0 bytes
-rw-r--r--docs/files/scaleout/15.pngbin410181 -> 0 bytes
-rw-r--r--docs/files/scaleout/16.pngbin394513 -> 0 bytes
-rw-r--r--docs/files/scaleout/17.pngbin466637 -> 0 bytes
-rw-r--r--docs/files/scaleout/18.pngbin726385 -> 0 bytes
-rw-r--r--docs/files/scaleout/19.pngbin354411 -> 0 bytes
-rw-r--r--docs/files/scaleout/20.pngbin230514 -> 0 bytes
-rw-r--r--docs/files/scaleout/clamp/1.pngbin0 -> 314501 bytes
-rw-r--r--docs/files/scaleout/clamp/10.pngbin0 -> 552939 bytes
-rw-r--r--docs/files/scaleout/clamp/11.pngbin0 -> 535871 bytes
-rw-r--r--docs/files/scaleout/clamp/12.pngbin0 -> 441614 bytes
-rw-r--r--docs/files/scaleout/clamp/13.pngbin0 -> 456595 bytes
-rw-r--r--docs/files/scaleout/clamp/14.pngbin0 -> 463167 bytes
-rw-r--r--docs/files/scaleout/clamp/15.pngbin0 -> 498045 bytes
-rw-r--r--docs/files/scaleout/clamp/16.pngbin0 -> 555752 bytes
-rw-r--r--docs/files/scaleout/clamp/2.pngbin0 -> 322447 bytes
-rw-r--r--docs/files/scaleout/clamp/3.pngbin0 -> 500921 bytes
-rw-r--r--docs/files/scaleout/clamp/4.pngbin0 -> 542819 bytes
-rw-r--r--docs/files/scaleout/clamp/5.pngbin0 -> 494548 bytes
-rw-r--r--docs/files/scaleout/clamp/6.pngbin0 -> 460774 bytes
-rw-r--r--docs/files/scaleout/clamp/7.pngbin0 -> 437053 bytes
-rw-r--r--docs/files/scaleout/clamp/8.pngbin0 -> 508377 bytes
-rw-r--r--docs/files/scaleout/clamp/9.pngbin0 -> 538782 bytes
-rw-r--r--docs/files/scaleout/dcae_blueprint.yaml174
-rw-r--r--docs/files/scaleout/k8s-tca-clamp-policy-05162019.yaml175
-rw-r--r--docs/files/scaleout/latest-tca-guilin.yaml141
-rw-r--r--docs/files/simulators/NF-Simulator.pngbin0 -> 48925 bytes
-rw-r--r--docs/files/softwareUpgrade/APIDecisionTree.pngbin0 -> 54629 bytes
-rw-r--r--docs/files/softwareUpgrade/DirectNetconfYangInterface.pngbin0 -> 22980 bytes
-rw-r--r--docs/files/softwareUpgrade/OnboardingCsar.pngbin0 -> 247900 bytes
-rw-r--r--docs/files/softwareUpgrade/SWUPWorkflow.pngbin0 -> 79409 bytes
-rw-r--r--docs/files/softwareUpgrade/SchemaUpdate.pngbin0 -> 31529 bytes
-rw-r--r--docs/files/softwareUpgrade/ServiceLevelUpgrade.pngbin0 -> 106771 bytes
-rw-r--r--docs/files/softwareUpgrade/ServiceLevelWorkflow.pngbin0 -> 75772 bytes
-rw-r--r--docs/files/softwareUpgrade/WorkflowView.pngbin0 -> 47692 bytes
-rw-r--r--docs/files/softwareUpgrade/serviceModelVersions.pngbin0 -> 374401 bytes
-rw-r--r--docs/files/softwareUpgrade/verifyPNF.pngbin0 -> 348103 bytes
-rw-r--r--docs/files/softwareUpgrade/workflowList.pngbin0 -> 244285 bytes
-rw-r--r--docs/files/tests/test-basic-cnf.pngbin0 -> 56334 bytes
-rw-r--r--docs/files/tests/test-certif.pngbin0 -> 109106 bytes
-rw-r--r--docs/files/tests/test-dashboard.pngbin0 -> 91334 bytes
-rw-r--r--docs/files/tests/test-onap-helm.pngbin0 -> 43068 bytes
-rw-r--r--docs/files/tests/test-onap-k8s.pngbin0 -> 69369 bytes
-rwxr-xr-xdocs/files/vFW_CNF_CDS/Instantiation_topology.pngbin0 -> 162060 bytes
-rw-r--r--docs/files/vFW_CNF_CDS/Native_Helm_Flow.pngbin0 -> 123903 bytes
-rw-r--r--docs/files/vFW_CNF_CDS/healthcheck.pngbin0 -> 68856 bytes
-rw-r--r--docs/files/vFW_CNF_CDS/helm-overrides-steps.pngbin0 -> 112118 bytes
-rw-r--r--docs/files/vFW_CNF_CDS/helm-overrides.pngbin0 -> 7966 bytes
-rw-r--r--docs/files/vFW_CNF_CDS/k8s-resources-response.json843
-rw-r--r--docs/files/vFW_CNF_CDS/postman.zipbin0 -> 12414 bytes
-rw-r--r--docs/files/vFW_CNF_CDS/profile-templating.pngbin0 -> 121959 bytes
-rw-r--r--docs/files/vFW_CNF_CDS/scenarios.pngbin0 -> 71874 bytes
-rw-r--r--docs/files/vFW_CNF_CDS/status-response.json1213
-rwxr-xr-xdocs/files/vFW_CNF_CDS/vFW_CNF_CDS_Flow.pngbin0 -> 104595 bytes
-rwxr-xr-xdocs/files/vFW_CNF_CDS/vFW_Instance_In_Kubernetes.pngbin0 -> 56062 bytes
-rw-r--r--docs/files/vFW_CNF_CDS/vfw-generic-vnf-aai.json167
-rw-r--r--docs/files/vFW_CNF_CDS/vpkg-vf-module-aai.json133
-rw-r--r--docs/files/vLBMS_report.json2
-rw-r--r--docs/files/vcpe_tosca/create_image.pngbin0 -> 32427 bytes
-rw-r--r--docs/files/vcpe_tosca/create_project.pngbin0 -> 121527 bytes
-rw-r--r--docs/files/vcpe_tosca/create_user.pngbin0 -> 92034 bytes
-rw-r--r--docs/files/vcpe_tosca/customer_service.pngbin0 -> 463138 bytes
-rw-r--r--docs/files/vcpe_tosca/image.pngbin0 -> 36508 bytes
-rw-r--r--docs/files/vcpe_tosca/manage_project_user.pngbin0 -> 59063 bytes
-rw-r--r--docs/files/vcpe_tosca/ns_active.pngbin0 -> 360996 bytes
-rw-r--r--docs/files/vcpe_tosca/ns_create.pngbin0 -> 382016 bytes
-rw-r--r--docs/files/vcpe_tosca/ns_create_input.pngbin0 -> 352714 bytes
-rw-r--r--docs/files/vcpe_tosca/ns_delete.pngbin0 -> 375869 bytes
-rw-r--r--docs/files/vcpe_tosca/ns_deleted.pngbin0 -> 484945 bytes
-rw-r--r--docs/files/vcpe_tosca/ns_deleting.pngbin0 -> 468194 bytes
-rw-r--r--docs/files/vcpe_tosca/ns_instance.pngbin0 -> 465672 bytes
-rw-r--r--docs/files/vcpe_tosca/ns_package_list.pngbin0 -> 316778 bytes
-rw-r--r--docs/files/vcpe_tosca/ns_package_onboard.pngbin0 -> 335609 bytes
-rw-r--r--docs/files/vcpe_tosca/ns_vnf_heal.pngbin0 -> 370868 bytes
-rw-r--r--docs/files/vcpe_tosca/ns_vnf_healed.pngbin0 -> 497264 bytes
-rw-r--r--docs/files/vcpe_tosca/ns_vnf_healing.pngbin0 -> 503302 bytes
-rw-r--r--docs/files/vcpe_tosca/ns_vnf_list.pngbin0 -> 497187 bytes
-rw-r--r--docs/files/vcpe_tosca/sdc.pngbin0 -> 263081 bytes
-rw-r--r--docs/files/vcpe_tosca/vim.pngbin0 -> 194469 bytes
-rw-r--r--docs/files/vcpe_tosca/vnfm.pngbin0 -> 177001 bytes
-rw-r--r--docs/files/vfw-1-preload.json313
-rw-r--r--docs/files/vfw-2-preload.json313
-rw-r--r--docs/files/vfwdt-aai-postman.json426
-rw-r--r--docs/files/vfwdt-general-workflow-sd.pngbin0 -> 158564 bytes
-rw-r--r--docs/files/vfwdt-identification-workflow-sd.pngbin0 -> 75840 bytes
-rw-r--r--docs/files/vfwdt-td-workflow-sd.pngbin0 -> 200932 bytes
-rw-r--r--docs/files/vfwdt-upgrade-workflow-sd.pngbin0 -> 143490 bytes
-rw-r--r--docs/files/vfwdt-workflow-general.pngbin0 -> 14271 bytes
-rw-r--r--docs/files/vfwdt-workflow-traffic.pngbin0 -> 16021 bytes
-rw-r--r--docs/files/vfwdt-workflow-upgrade.pngbin0 -> 16124 bytes
-rw-r--r--docs/files/vpkg-preload.json313
-rw-r--r--docs/files/windriver/windriver_CPU.pngbin0 -> 43249 bytes
-rw-r--r--docs/files/windriver/windriver_RAM.pngbin0 -> 52154 bytes
-rw-r--r--docs/files/windriver/windriver_disks.pngbin0 -> 67839 bytes
-rw-r--r--docs/files/windriver/windriver_servers.pngbin0 -> 131462 bytes
-rw-r--r--docs/files/windriver/windrivers_servers2.pngbin0 -> 44165 bytes
-rw-r--r--docs/heat.rst236
-rw-r--r--docs/index.rst14
-rw-r--r--docs/integration-CICD.rst53
-rw-r--r--docs/integration-labs.rst38
-rw-r--r--docs/integration-missions.rst44
-rw-r--r--docs/integration-repositories.rst115
-rw-r--r--docs/integration-resources.rst16
-rw-r--r--docs/integration-s3p.rst276
-rw-r--r--docs/integration-simulators.rst111
-rw-r--r--docs/integration-tests.rst159
-rw-r--r--docs/integration-tooling.rst214
-rw-r--r--docs/onap-integration-ci.rst131
-rw-r--r--docs/onap-oom-heat.rst21
-rw-r--r--docs/release-notes.rst275
-rw-r--r--docs/requirements-docs.txt8
-rw-r--r--docs/schema-update-apis.csv49
-rw-r--r--docs/simulators/nf_simulator.rst148
-rw-r--r--docs/tox.ini31
-rw-r--r--docs/usecases/deprecated_usecases.rst28
-rw-r--r--docs/usecases/release_automated_usecases.rst37
-rw-r--r--docs/usecases/release_non_functional_requirements.rst15
-rw-r--r--docs/usecases/release_requirements.rst15
-rw-r--r--docs/usecases/release_usecases.rst37
-rw-r--r--documentation/api-dependencies/README.md4
-rw-r--r--pipelines/docker-onap-k8s-toolbox/Dockerfile20
-rw-r--r--pipelines/docker-onap-k8s-toolbox/README.md4
-rw-r--r--pipelines/docker-onap-k8s-toolbox/container-tag.yaml1
-rw-r--r--ptl/edit_committers_info/README.md73
-rw-r--r--ptl/edit_committers_info/edit_committers_list.py588
-rw-r--r--ptl/edit_committers_info/requirements.txt3
-rw-r--r--ptl/edit_committers_info/tox.ini17
-rw-r--r--test/README.md8
-rw-r--r--test/csit/README.md2
-rw-r--r--test/ete/scripts/probe-onap.py2
-rw-r--r--test/hpa_automation/heat/README.md42
-rwxr-xr-xtest/hpa_automation/heat/hpa_automation.py18
-rwxr-xr-xtest/hpa_automation/heat/hpa_automation_config.json353
-rw-r--r--test/hpa_automation/heat/insert_policy_models_heat.py10
-rwxr-xr-xtest/hpa_automation/tosca/hpa_automation.py99
-rwxr-xr-xtest/hpa_automation/tosca/vcpe_config.json412
-rwxr-xr-xtest/hpa_automation/tosca/vcpe_vgw_config.json316
-rw-r--r--test/legal/docker_license_analysis/Dockerfile.sample2
-rw-r--r--test/legal/docker_license_analysis/README.rst80
-rw-r--r--test/legal/docker_license_analysis/Vagrantfile102
-rwxr-xr-xtest/legal/docker_license_analysis/tools/analysis.sh31
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/pom.xml81
-rwxr-xr-x[-rw-r--r--]test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/AaiSimulatorApplication.java (renamed from test/mocks/pnfsimulator/pnfsimulator/integration/src/main/java/org/onap/pnfsimulator/integration/Main.java)35
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/configration/ApplicationConfigration.java109
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/configration/WebSecurityConfigImpl.java49
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/AaiSimulatorController.java51
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/BusinessController.java356
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/CloudRegionsController.java396
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ExternalSystemEsrController.java175
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/GenericVnfsController.java215
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/LinesOfBusinessController.java138
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/NodesController.java115
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/OwningEntityController.java137
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/PlatformController.java134
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/PnfsController.java159
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ProjectController.java137
-rw-r--r--test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ServiceDesignAndCreationController.java74
-rwxr-xr-x[-rw-r--r--]test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/exception/InvalidRestRequestException.java (renamed from test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/ValidationException.java)23
-rwxr-xr-x[-rw-r--r--]test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/exception/RestProcessingException.java (renamed from test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/NoRopFilesException.java)23
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/CloudRegionKey.java85
-rwxr-xr-x[-rw-r--r--]test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/Format.java (renamed from test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/model/LoadModelResponse.java)41
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/NodeServiceInstance.java139
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/Results.java67
-rwxr-xr-x[-rw-r--r--]test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/Clearable.java (renamed from test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapter.java)20
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CloudRegionCacheServiceProvider.java69
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CloudRegionCacheServiceProviderImpl.java471
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CustomerCacheServiceProvider.java65
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CustomerCacheServiceProviderImpl.java365
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ExternalSystemCacheServiceProvider.java48
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ExternalSystemCacheServiceProviderImpl.java209
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/GenericVnfCacheServiceProvider.java53
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/GenericVnfCacheServiceProviderImpl.java258
-rwxr-xr-x[-rw-r--r--]test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/HttpRestServiceProvider.java (renamed from test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/viewmodel/KeyValuePair.java)30
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/HttpRestServiceProviderImpl.java98
-rwxr-xr-x[-rw-r--r--]test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/LinesOfBusinessCacheServiceProvider.java (renamed from test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/SearchExp.java)38
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/LinesOfBusinessCacheServiceProviderImpl.java111
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/NodesCacheServiceProvider.java41
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/NodesCacheServiceProviderImpl.java104
-rwxr-xr-x[-rw-r--r--]test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/OwnEntityCacheServiceProvider.java (renamed from test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/filesystem/WatcherConfig.java)35
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/OwnEntityCacheServiceProviderImpl.java138
-rwxr-xr-x[-rw-r--r--]test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PlatformCacheServiceProvider.java (renamed from test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/viewmodel/FlatTemplateContent.java)38
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PlatformCacheServiceProviderImpl.java110
-rwxr-xr-x[-rw-r--r--]test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PnfCacheServiceProvider.java (renamed from test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/db/Storage.java)30
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PnfCacheServiceProviderImpl.java154
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ProjectCacheServiceProvider.java40
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ProjectCacheServiceProviderImpl.java140
-rwxr-xr-x[-rw-r--r--]test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/CacheName.java (renamed from test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/TemplateRequest.java)38
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/Constants.java153
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/HttpServiceUtils.java107
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestError.java59
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestErrorBuilder.java59
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestErrorResponseUtils.java57
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/ServiceException.java88
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/ShallowBeanCopy.java89
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/main/resources/application.yaml25
-rw-r--r--test/mocks/aai-simulator/aai-sim/src/main/resources/keystore/keystore.p12bin0 -> 5526 bytes
-rw-r--r--test/mocks/aai-simulator/aai-sim/src/main/resources/truststore/truststore.jksbin0 -> 3202 bytes
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/AaiSimulatorControllerTest.java65
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/AbstractSpringBootTest.java65
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/BusinessControllerTest.java397
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/CloudRegionsControllerTest.java420
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ExternalSystemEsrControllerTest.java233
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/GenericVnfsControllerTest.java430
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/LinesOfBusinessControllerTest.java143
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/NodesControllerTest.java156
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/OwningEntityControllerTest.java199
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/PlatformControllerTest.java142
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/PnfsControllerTest.java72
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ProjectControllerTest.java205
-rw-r--r--test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ServiceDesignAndCreationControllerTest.java67
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/configuration/TestRestTemplateConfigration.java80
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestConstants.java120
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestRestTemplateService.java79
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestUtils.java186
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-customer.json73
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-project-relation-ship.json3
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-project.json3
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/cloud-region-related-link.json3
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/cloud-region.json10
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/esr-system-info.json12
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/esr-vnfm.json5
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-orch-status-update.json4
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-related-link.json3
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-relationship.json17
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf.json14
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/line-of-business-related-link.json3
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/line-of-business.json3
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/owning-entity-relation-ship.json3
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/owning-entity.json4
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/platform-related-link.json3
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/platform.json3
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/pnf.json16
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/relation-ship.json3
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-Instance-relationShip.json17
-rw-r--r--test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-design-and-creation-responses/a51e2bef-961c-496f-b235-b4540400e885.xml17
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-instance-orch-status-update.json5
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-instance.json9
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-subscription.json3
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/tenant-relationship.json25
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/tenant.json4
-rwxr-xr-xtest/mocks/aai-simulator/aai-sim/src/test/resources/test-data/vServer.json26
-rwxr-xr-xtest/mocks/aai-simulator/common/pom.xml38
-rwxr-xr-xtest/mocks/aai-simulator/common/src/main/java/org/onap/simulator/cache/provider/AbstractCacheServiceProvider.java54
-rwxr-xr-xtest/mocks/aai-simulator/common/src/main/java/org/onap/simulator/configuration/SimulatorSecurityConfigurer.java65
-rwxr-xr-xtest/mocks/aai-simulator/common/src/main/java/org/onap/simulator/model/User.java101
-rwxr-xr-xtest/mocks/aai-simulator/common/src/main/java/org/onap/simulator/model/UserCredentials.java66
-rwxr-xr-xtest/mocks/aai-simulator/common/src/test/java/org/onap/so/simulator/model/PojoClassesTest.java60
-rwxr-xr-xtest/mocks/aai-simulator/package/docker/pom.xml87
-rwxr-xr-xtest/mocks/aai-simulator/package/docker/src/main/docker/docker-files/Dockerfile.aai-simulator-base-image34
-rwxr-xr-xtest/mocks/aai-simulator/package/docker/src/main/docker/docker-files/configs/logging/logback-spring.xml46
-rwxr-xr-xtest/mocks/aai-simulator/package/docker/src/main/docker/docker-files/scripts/start-app.sh69
-rwxr-xr-xtest/mocks/aai-simulator/package/pom.xml15
-rwxr-xr-xtest/mocks/aai-simulator/pom.xml84
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh8
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh12
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC1.sh2
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC10.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC100.sh6
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC11.sh8
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC12.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC13.sh8
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC14.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC15.sh8
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC2.sh2
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC20.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC200.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC21.sh8
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC210.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC220.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC3.sh2
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC30.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC31.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC32.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC33.sh6
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC4.sh8
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC40.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC400.sh87
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC401.sh87
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC402.sh87
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC403.sh87
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC404.sh87
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC5.sh8
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC50.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC6.sh8
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC60.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC61.sh12
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC7.sh78
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC70.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC71.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC8.sh78
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC80.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC81.sh4
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC9.sh78
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/FTC90.sh6
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh8
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh4
-rw-r--r--test/mocks/datafilecollector-testharness/auto-test/README.md65
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh29
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh8
-rwxr-xr-xtest/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh4
-rw-r--r--test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/key.pem28
-rw-r--r--test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/keystore.pem26
-rw-r--r--test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/truststore.pem28
-rw-r--r--test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.p12bin0 -> 2857 bytes
-rw-r--r--test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.pass1
-rw-r--r--test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.p12bin0 -> 1530 bytes
-rw-r--r--test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.pass1
-rw-r--r--test/mocks/datafilecollector-testharness/certservice/merger/merge-certs.env4
-rw-r--r--test/mocks/datafilecollector-testharness/common/README.md182
-rw-r--r--test/mocks/datafilecollector-testharness/common/test_env.sh48
-rwxr-xr-xtest/mocks/datafilecollector-testharness/common/testcase_common.sh189
-rw-r--r--test/mocks/datafilecollector-testharness/dr-sim/Dockerfile4
-rw-r--r--test/mocks/datafilecollector-testharness/dr-sim/README.md118
-rw-r--r--test/mocks/datafilecollector-testharness/dr-sim/docker-compose.yml10
-rw-r--r--test/mocks/datafilecollector-testharness/dr-sim/package.json40
-rw-r--r--test/mocks/datafilecollector-testharness/ftpes-sftp-server/.gitignore (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/.gitignore)0
-rw-r--r--test/mocks/datafilecollector-testharness/ftpes-sftp-server/Dockerfile-ftpes (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/Dockerfile-ftps)0
-rw-r--r--test/mocks/datafilecollector-testharness/ftpes-sftp-server/README.md (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/README.md)22
-rw-r--r--test/mocks/datafilecollector-testharness/ftpes-sftp-server/configuration/vsftpd_ssl.conf (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/configuration/vsftpd_ssl.conf)0
-rw-r--r--test/mocks/datafilecollector-testharness/ftpes-sftp-server/docker-compose.yml (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/docker-compose.yml)5
-rwxr-xr-xtest/mocks/datafilecollector-testharness/ftpes-sftp-server/prepare.sh (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/prepare.sh)0
-rw-r--r--test/mocks/datafilecollector-testharness/ftpes-sftp-server/test_cases.yml (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/test_cases.yml)8
-rw-r--r--test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/README.md (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/README.md)0
-rw-r--r--test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/dfc.crt (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/dfc.crt)0
-rw-r--r--test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.crt (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.crt)0
-rw-r--r--test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.key (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.key)0
-rw-r--r--test/mocks/datafilecollector-testharness/http-https-server/.gitignore2
-rw-r--r--test/mocks/datafilecollector-testharness/http-https-server/README.md34
-rw-r--r--test/mocks/datafilecollector-testharness/http-https-server/docker-compose.yml24
-rwxr-xr-xtest/mocks/datafilecollector-testharness/http-https-server/prepare.sh49
-rw-r--r--test/mocks/datafilecollector-testharness/http-https-server/test_cases.yml16
-rwxr-xr-xtest/mocks/datafilecollector-testharness/mr-sim/Dockerfile4
-rw-r--r--test/mocks/datafilecollector-testharness/mr-sim/README.md128
-rw-r--r--test/mocks/datafilecollector-testharness/mr-sim/docker-compose.yml6
-rw-r--r--test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py1590
-rwxr-xr-xtest/mocks/datafilecollector-testharness/mr-sim/setup.sh4
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/.gitignore3
-rwxr-xr-x[-rw-r--r--]test/mocks/datafilecollector-testharness/simulator-group/README.md132
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM.json32
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM_feed2_CTR.json43
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR.json34
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json54
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM.json32
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_MEAS.json32
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_secureMR.json32
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed3_PM_CTR.json43
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/consul/c13_feed2_CTR.json32
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/consul/c14_feed3_LOG.json32
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/consul/c15_feed1_PM_feed4_TEST.json43
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json43
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_config.hcl13
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_localhost_config.hcl11
-rwxr-xr-xtest/mocks/datafilecollector-testharness/simulator-group/consul_config.sh34
-rwxr-xr-xtest/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh2
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM.yaml28
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM_feed2_CTR.yaml37
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR.yaml29
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR_feed3_LOG_TEMP.yaml46
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM.yaml28
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml29
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS.yaml28
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS_no_strict_host_key_checking.yaml28
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_secureMR.yaml28
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed3_PM_CTR.yaml37
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c13_feed2_CTR.yaml28
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c14_feed3_LOG.yaml28
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c15_feed1_PM_feed4_TEST.yaml37
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c16_feed4_TEST_feed5_TEMP.yaml37
-rwxr-xr-xtest/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh31
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml184
-rwxr-xr-xtest/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh16
-rwxr-xr-xtest/mocks/datafilecollector-testharness/simulator-group/setup-http-files-for-image.sh64
-rwxr-xr-xtest/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh29
-rwxr-xr-xtest/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh193
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/tls/cacert.pem40
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/tls/cert.jksbin0 -> 5400 bytes
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/tls/cert.p12bin0 -> 2857 bytes
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/tls/cert.pem103
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/tls/jks.pass1
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/tls/key.pem32
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/tls/p12.pass1
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/tls/trust.jksbin0 -> 3066 bytes
-rw-r--r--test/mocks/datafilecollector-testharness/simulator-group/tls/trust.pass1
-rwxr-xr-xtest/mocks/emssimulator/install.sh14
-rwxr-xr-xtest/mocks/emssimulator/pnf-sw-upgrade-cba-builder/build-cba-for-pnf-sw-upgrade-with-em.sh77
-rw-r--r--test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/PNF_SW_UPGRADE_WITH_EM.json.patch148
-rw-r--r--test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/RestconfSoftwareUpgrade.kt.patch192
-rw-r--r--test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/TOSCA.meta.patch13
-rw-r--r--test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/pnf-software-upgrade-mapping.json.patch17
-rw-r--r--test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/pnf-swug-activate-ne-sw-template.vtl.patch19
-rw-r--r--test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/pnf-swug-download-ne-sw-template.vtl.patch34
-rw-r--r--test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/resources_definition_types.json.patch14
-rw-r--r--test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/restconf-mount-template.vtl.patch16
-rw-r--r--test/mocks/emssimulator/swm-netconf/docker-compose.yml13
-rw-r--r--test/mocks/emssimulator/swm-netconf/pnf-swm/LICENSE13
-rw-r--r--test/mocks/emssimulator/swm-netconf/pnf-swm/LICENSE-2 (renamed from test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/IncrementProvider.java)14
-rw-r--r--test/mocks/emssimulator/swm-netconf/pnf-swm/README9
-rw-r--r--test/mocks/emssimulator/swm-netconf/pnf-swm/data.xml13
-rw-r--r--test/mocks/emssimulator/swm-netconf/pnf-swm/model.yang86
-rwxr-xr-xtest/mocks/emssimulator/swm-netconf/pnf-swm/subscriber.py214
-rwxr-xr-xtest/mocks/emssimulator/swm/activateNESw131
-rw-r--r--test/mocks/emssimulator/swm/activate_n_e_sw.py141
-rw-r--r--test/mocks/emssimulator/swm/conf.py41
-rwxr-xr-xtest/mocks/emssimulator/swm/downloadNESw170
-rw-r--r--test/mocks/emssimulator/swm/download_n_e_sw.py169
-rw-r--r--test/mocks/emssimulator/swm/ems_db/ne_info_table.json44
-rw-r--r--test/mocks/emssimulator/swm/ems_util.py35
-rwxr-xr-xtest/mocks/emssimulator/swm/installNESw202
-rw-r--r--test/mocks/emssimulator/swm/install_n_e_sw.py234
-rw-r--r--test/mocks/emssimulator/swm/pnf_simulators/192.168.1.1/opt/install/v1/installed_sw.json10
-rw-r--r--test/mocks/emssimulator/swm/pnf_simulators/192.168.1.2/opt/install/v1/installed_sw.json10
-rw-r--r--test/mocks/emssimulator/swm/pnf_simulators/192.168.1.2/opt/install/v2/installed_sw.json10
-rwxr-xr-xtest/mocks/emssimulator/swm/swFallback123
-rw-r--r--test/mocks/emssimulator/swm/sw_fallback.py129
-rwxr-xr-xtest/mocks/emssimulator/swm/upgrade-post-check78
-rwxr-xr-xtest/mocks/emssimulator/swm/upgrade-pre-check69
-rw-r--r--test/mocks/emssimulator/swm/upgrade_post_check.py76
-rw-r--r--test/mocks/emssimulator/swm/upgrade_pre_check.py65
-rw-r--r--test/mocks/hvvessimulator/hvves_sim.yaml3
-rw-r--r--test/mocks/mass-pnf-sim/.gitignore4
-rw-r--r--test/mocks/mass-pnf-sim/README.md48
-rwxr-xr-xtest/mocks/mass-pnf-sim/clean.sh8
-rwxr-xr-xtest/mocks/mass-pnf-sim/diagnostic.sh30
-rwxr-xr-xtest/mocks/mass-pnf-sim/mass-pnf-sim.py226
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/README.md39
-rwxr-xr-xtest/mocks/mass-pnf-sim/pnf-sim-lightweight/ROP_file_creator.sh59
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/config.json15
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/config.yml8
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/netconf.env10
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/vsftpd_ssl-TEMPLATE.conf59
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/deployment/PnP_PNF_sim_heat_template.yml118
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/deployment/PnP_PNF_sim_heat_template_Ubuntu_16_04.yml165
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/docker-compose-template.yml64
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/json_schema/input_validator.json113
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/json_schema/output_validator_ves_schema_30.0.1.json2432
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/netconf/pnf-simulator.data.xml4
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/netconf/pnf-simulator.yang9
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/pom.xml345
-rwxr-xr-xtest/mocks/mass-pnf-sim/pnf-sim-lightweight/simulator.sh265
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/assembly/resources.xml75
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/ConfigurationProvider.java28
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/FileProvider.java41
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/Main.java53
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/PnfSimConfig.java57
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/logback.xml50
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/logging/MDCVariables.java35
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/JSONObjectFactory.java133
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/MessageConstants.java95
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/MessageProvider.java88
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfConfigurationCheckingTask.java62
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorService.java71
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceConfiguration.java114
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationCache.java34
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationReader.java42
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationWriter.java51
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConnectionParams.java45
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/SimulatorController.java232
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/util/DateUtil.java35
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/util/ResponseBuilder.java62
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/Simulator.java213
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/SimulatorFactory.java54
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapter.java26
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImpl.java89
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/JSONValidator.java66
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/resources/application.properties6
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/message/JSONObjectFactoryTest.java95
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/message/MessageProviderTest.java122
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfConfigurationCheckingTaskTest.java95
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceConfigurationTest.java72
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceTest.java73
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationCacheTest.java38
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationReaderTest.java70
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationWriterTest.java67
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/SimulatorControllerTest.java226
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/util/DateUtilTest.java38
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/util/ResponseBuilderTest.java65
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/SimulatorFactoryTest.java66
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/SimulatorTest.java204
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/TestMessages.java116
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImplTest.java71
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/validation/JSONValidatorTest.java123
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/invalid-test-schema.json19
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/logback-test.xml49
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validCommonEventHeaderParams.json5
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validNotificationParams.json4
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validPnfRegistrationParams.json10
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validSimulatorParams.json4
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/valid-test-schema.json19
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/templates/.gitattributes1
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/templates/file_template_new.xml.gzbin188547 -> 0 bytes
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/README.md4
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/dfc.crt19
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/ftp.crt19
-rw-r--r--test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/ftp.key27
-rw-r--r--test/mocks/mass-pnf-sim/requirements.txt2
-rwxr-xr-xtest/mocks/mass-pnf-sim/setup.sh13
-rw-r--r--test/mocks/netconf-pnp-simulator/README.md9
-rw-r--r--test/mocks/netconf-pnp-simulator/docs/README.rst113
-rw-r--r--test/mocks/netconf-pnp-simulator/docs/examples/mynetconf/data.json10
-rw-r--r--test/mocks/netconf-pnp-simulator/docs/examples/mynetconf/docker-compose.yml12
-rw-r--r--test/mocks/netconf-pnp-simulator/docs/examples/mynetconf/model.yang29
-rwxr-xr-xtest/mocks/netconf-pnp-simulator/docs/examples/mynetconf/subscriber.py136
-rw-r--r--test/mocks/netconf-pnp-simulator/docs/images/Architecture.pngbin0 -> 58061 bytes
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/Dockerfile204
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/LICENSE13
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/common.sh155
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/config/modules/turing-machine/startup.xml72
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/config/modules/turing-machine/turing-machine.yang262
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/config/ssh/id_rsa27
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/config/ssh/id_rsa.pub1
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/config/tls/ca.pem (renamed from test/mocks/pnfsimulator/netconfsimulator/tls/ca.crt)0
-rwxr-xr-xtest/mocks/netconf-pnp-simulator/engine/config/tls/netopeer2-client.sh95
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/config/tls/server_cert.pem (renamed from test/mocks/pnfsimulator/netconfsimulator/tls/server_cert.crt)0
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/config/tls/server_key.pem (renamed from test/mocks/pnfsimulator/netconfsimulator/tls/server_key.pem)0
-rwxr-xr-xtest/mocks/netconf-pnp-simulator/engine/configure-modules.sh102
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/container-tag.yaml1
-rwxr-xr-x[-rw-r--r--]test/mocks/netconf-pnp-simulator/engine/entrypoint.sh (renamed from test/mocks/pnfsimulator/simulator-cli/requirements.txt)28
-rwxr-xr-xtest/mocks/netconf-pnp-simulator/engine/generic_subscriber.py133
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/patches/Netopeer2/01-fix-grep-count.patch35
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/patches/Netopeer2/02-zlog.patch105
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/patches/libnetconf2/01-configurable-PYTHON_MODULE_PATH.patch14
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/patches/libnetconf2/02-fix-missing-include-dir.patch11
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/patches/libnetconf2/03-fix-missing-pthread_rwlockattr_setkind_np.patch20
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/patches/libnetconf2/04-io-log.patch27
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/patches/libyang/01-configurable-PYTHON_MODULE_PATH.patch17
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/patches/supervisor/01-std-log-format.patch26
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/patches/sysrepo/01-configurable-PYTHON_MODULE_PATH.patch21
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/patches/sysrepo/02-zlog.patch172
-rwxr-xr-x[-rw-r--r--]test/mocks/netconf-pnp-simulator/engine/reconfigure-ssh.sh (renamed from test/mocks/pnfsimulator/simulator-cli/cli/client/__init__.py)23
-rwxr-xr-xtest/mocks/netconf-pnp-simulator/engine/reconfigure-tls.sh31
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/supervisord.conf (renamed from test/mocks/pnfsimulator/simulator-cli/setup.py)54
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/templates/ietf-keystore.xml20
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/templates/ietf-netconf-server.xml27
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/templates/ietf-system.xml12
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/README2
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/README2
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/ca.pem24
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/client_cert.pem24
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/client_key.pem27
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/server_cert.pem24
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/server_key.pem27
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/README1
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/ca.pem21
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/ca_key.pem28
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/client_cert.pem21
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/client_key.pem27
-rwxr-xr-xtest/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/openssl_2way_auth.sh84
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/server_cert.pem21
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/server_key.pem27
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/nctest.py50
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/settings.py8
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/test_basic_operations.py49
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/test_tls.py115
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tests/test_turing_machine.py130
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/tox.ini39
-rw-r--r--test/mocks/netconf-pnp-simulator/engine/zlog.conf7
-rw-r--r--test/mocks/netconf-pnp-simulator/modules/docker-compose.yml12
-rw-r--r--test/mocks/netconf-pnp-simulator/modules/pnf-sw-upgrade/LICENSE13
-rw-r--r--test/mocks/netconf-pnp-simulator/modules/pnf-sw-upgrade/pnf-sw-upgrade.yang78
-rw-r--r--test/mocks/netconf-pnp-simulator/modules/pnf-sw-upgrade/startup.xml12
-rwxr-xr-xtest/mocks/netconf-pnp-simulator/modules/pnf-sw-upgrade/subscriber.py209
-rw-r--r--test/mocks/pmsh-pnf-sim/docker-compose/FileReadyEvent.json34
-rw-r--r--test/mocks/pmsh-pnf-sim/docker-compose/app_config/logger_config.yaml27
-rw-r--r--test/mocks/pmsh-pnf-sim/docker-compose/app_config/pnfconfig.py5
-rw-r--r--test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/ca.pem24
-rw-r--r--test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/client_cert.pem24
-rw-r--r--test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/client_key.pem27
-rw-r--r--test/mocks/pmsh-pnf-sim/docker-compose/docker-compose.yml33
-rw-r--r--test/mocks/pmsh-pnf-sim/docker-compose/pnf-subscriptions.yang47
-rw-r--r--test/mocks/pmsh-pnf-sim/docker-compose/pnf.py113
-rw-r--r--test/mocks/pmsh-pnf-sim/docker-compose/requirements.txt3
-rw-r--r--test/mocks/pmsh-pnf-sim/docker-compose/schedulepmjob.py28
-rw-r--r--test/mocks/pmsh-pnf-sim/docker-compose/sftp/pm.xml41
-rw-r--r--test/mocks/pmsh-pnf-sim/docker-compose/startup.xml26
-rwxr-xr-xtest/mocks/pmsh-pnf-sim/docker-compose/subscriber.py105
-rw-r--r--test/mocks/pnf-onboarding/README.md22
-rw-r--r--test/mocks/pnf-onboarding/src/main/resources/csarContent/Definitions/etsi_nfv_sol001_pnfd_2_5_1_types.yaml60
-rw-r--r--test/mocks/pnf-onboarding/src/main/resources/csarContent/Definitions/etsi_nfv_sol001_vnfd_2_5_1_types.yaml375
-rw-r--r--test/mocks/pnf-onboarding/src/main/resources/csarContent/Definitions/pnf_main_descriptor.yaml6
-rw-r--r--test/mocks/pnf-onboarding/src/main/resources/csarContent/Files/Events/MyPnf_Pnf_v1.yaml20
-rw-r--r--test/mocks/pnf-onboarding/src/main/resources/csarContent/Files/Measurements/PM_Dictionary.yaml282
-rw-r--r--test/mocks/pnfsimulator/.gitignore3
-rw-r--r--test/mocks/pnfsimulator/checkstyle-suppressions.xml46
-rw-r--r--test/mocks/pnfsimulator/deployment/assembly.xml35
-rw-r--r--test/mocks/pnfsimulator/deployment/pom.xml52
-rw-r--r--test/mocks/pnfsimulator/deployment/src/MANIFEST.json17
-rw-r--r--test/mocks/pnfsimulator/deployment/src/simulators_heat_template.env10
-rw-r--r--test/mocks/pnfsimulator/deployment/src/simulators_heat_template.yaml147
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/README.md276
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/config/netconf.env5
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/docker-compose.yml96
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/docker/Dockerfile4
-rwxr-xr-xtest/mocks/pnfsimulator/netconfsimulator/ftpes/tls/pure-ftpd.pem49
-rwxr-xr-xtest/mocks/pnfsimulator/netconfsimulator/ftpes/userpass/pureftpd.passwd1
-rwxr-xr-xtest/mocks/pnfsimulator/netconfsimulator/netconf/initialize_netopeer.sh59
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netconf/load_server_certs.xml40
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netconf/newmodel.xml24
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netconf/newmodel.yang9
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netconf/pnf-simulator.data.xml24
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netconf/pnf-simulator.yang9
-rwxr-xr-xtest/mocks/pnfsimulator/netconfsimulator/netconf/set-up-xmls.py153
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netconf/test_yang_loader_server.py121
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netconf/tls_listen.xml48
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netconf/yang_loader_server.py172
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/Application.cpp74
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/Application.h43
-rwxr-xr-xtest/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/CMakeLists.txt20
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/KafkaWrapper.cpp105
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/KafkaWrapper.h44
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/SysrepoCallback.cpp108
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/SysrepoCallback.h42
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/main.cpp48
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo.h2015
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Connection.hpp63
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Internal.hpp80
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Session.hpp245
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Struct.hpp514
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Sysrepo.hpp177
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Tree.hpp176
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Xpath.hpp97
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/plugins.h139
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/trees.h226
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/values.h196
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/xpath.h232
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/pom.xml276
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/Configuration.java34
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/Main.java31
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/SwaggerConfig.java43
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/Config.java70
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/MessageDTO.java31
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/StoreController.java59
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/StoreService.java91
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/listener/KafkaListenerEntry.java36
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/listener/KafkaListenerHandler.java67
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/model/KafkaMessage.java37
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/NetconfController.java111
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfBeanConfiguration.java60
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationEditor.java50
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationReader.java57
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationService.java76
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationTO.java32
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConnectionParams.java37
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfSessionHelper.java37
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/model/NetconfModelLoaderService.java104
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/websocket/EndpointConfig.java46
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/websocket/NetconfEndpoint.java95
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/websocket/message/NetconfMessageEncoder.java34
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/websocket/message/NetconfMessageListener.java51
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/main/resources/application.properties8
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/java/integration/NetconfFunctionsIT.java211
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/java/integration/NetconfSimulatorClient.java150
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/kafka/EmbeddedKafkaConfig.java69
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/kafka/StoreControllerTest.java86
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/kafka/StoreServiceTest.java103
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/kafka/listener/KafkaListenerHandlerTest.java87
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/NetconfControllerTest.java172
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationEditorTest.java69
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationReaderTest.java94
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationServiceTest.java102
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/model/NetconfModelLoaderServiceTest.java121
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/websocket/NetconfEndpointTest.java135
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/websocket/message/NetconfMessageListenerTest.java73
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/resources/initialConfig.xml23
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/resources/invalidXmlFile.xml23
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/resources/newYangModel.yang8
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/resources/updatedConfig.xml24
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/src/test/resources/updatedConfigForCmHistory.xml24
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/ssh/ssh_host_rsa_key49
-rw-r--r--test/mocks/pnfsimulator/netconfsimulator/ssh/ssh_host_rsa_key.pub1
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/.gitignore4
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/README.md299
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/config/config.json9
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/db/pnf_simulator.js28
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/deployment/PnP_PNF_sim_heat_template.yml118
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/docker-compose.yml36
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/docker/Dockerfile8
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/docker/certificates/AAF_RootCA.crt31
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/docker/certificates/dcaelocal.crt20
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/pom.xml130
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/main/java/org/onap/pnfsimulator/integration/VesSimulatorController.java59
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/main/java/org/onap/pnfsimulator/integration/VesSimulatorService.java36
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/main/resources/application.properties1
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/main/resources/keystorebin2196 -> 0 bytes
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/BasicAvailabilityTest.java235
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/OptionalTemplatesTest.java204
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/SearchInTemplatesTest.java269
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/TemplatesManagementTest.java175
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/TestConfiguration.java36
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/suites/DockerBasedTestsSuite.java46
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/application.properties6
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/notification.json45
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/overwrite_template.json6
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/overwritten_template.json6
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/registration.json36
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/complicated_template.json43
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/simple_template.json12
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/template_with_array.json23
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/template_with_booleans.json12
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/template_with_floats.json13
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/template_with_ints.json12
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/upload_template.json6
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/pom.xml364
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/assembly/resources.xml57
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/Main.java57
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/SwaggerConfig.java43
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/db/Row.java34
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/event/EventData.java76
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/event/EventDataRepository.java26
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/event/EventDataService.java63
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/filesystem/WatcherEventProcessor.java110
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/filesystem/WatcherService.java44
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/filesystem/WatcherThread.java81
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/logging/MDCVariables.java35
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/SimulatorController.java211
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/TemplateController.java105
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/FullEvent.java48
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/SimulatorParams.java46
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/SimulatorRequest.java51
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/util/DateUtil.java35
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/util/JsonObjectDeserializer.java42
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/util/ResponseBuilder.java62
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/DBTemplateReader.java49
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/EventNotFoundException.java28
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/FilesystemTemplateReader.java54
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/IncrementProviderImpl.java47
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/JsonTokenProcessor.java134
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/KeywordsExtractor.java118
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/KeywordsHandler.java72
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/KeywordsValueProvider.java80
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/SimulatorService.java117
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/TemplatePatcher.java53
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/TemplateReader.java28
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImpl.java95
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/client/utils/ssl/SslSupportLevel.java81
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/keywords/Keyword.java74
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/keywords/NonParameterKeyword.java65
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/keywords/SingleParameterKeyword.java73
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/keywords/TwoParameterKeyword.java80
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/scheduler/EventJob.java88
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/scheduler/EventScheduler.java116
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/scheduler/QuartzConfiguration.java38
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulatorconfig/SimulatorConfig.java49
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulatorconfig/SimulatorConfigRepository.java26
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulatorconfig/SimulatorConfigService.java52
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/FsToDbTemplateSynchronizer.java74
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/Template.java92
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/TemplateRepository.java26
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/TemplateService.java81
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/IllegalJsonValueException.java28
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/JsonUtils.java104
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/TemplateSearchHelper.java95
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/handler/PrimitiveValueCriteriaBuilder.java103
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/resources/application.properties7
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/main/resources/logback.xml70
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/event/EventDataServiceTest.java133
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/filesystem/InMemoryTemplateStorage.java71
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/filesystem/WatcherEventProcessorTest.java124
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/SimulatorControllerTest.java211
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/TemplateControllerTest.java256
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/util/DateUtilTest.java38
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/util/ResponseBuilderTest.java65
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/IncrementProviderImplTest.java78
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorInvalidRandomIntegerTest.java67
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorInvalidRandomStringTest.java67
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorInvalidTimestampTest.java65
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidRandomIntegerTest.java66
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidRandomPrimitiveIntegerTest.java66
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidRandomStringTest.java69
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidTimestampPrimitiveTest.java66
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidTimestampTest.java67
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsHandlerTest.java304
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsValueProviderTest.java81
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/SimulatorServiceTest.java226
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/TemplatePatcherTest.java164
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/TemplateReaderTest.java51
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImplTest.java97
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/client/utils/ssl/SslSupportLevelTest.java52
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/scheduler/EventJobTest.java90
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/scheduler/EventSchedulerTest.java143
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulatorconfig/SimulatorConfigServiceTest.java104
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/template/TemplateServiceTest.java152
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/template/search/JsonUtilsTest.java166
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/template/search/TemplateSearchHelperTest.java160
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/template/search/handler/PrimitiveValueCriteriaBuilderTest.java75
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/resources/application.properties1
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/resources/logback-test.xml69
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/filesystem/test1.json8
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/invalidJsonStructureEvent.json1
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/validExampleMeasurementEvent.json89
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/templates/measurement.json45
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/templates/notification.json40
-rw-r--r--test/mocks/pnfsimulator/pnfsimulator/templates/registration.json33
-rw-r--r--test/mocks/pnfsimulator/pom.xml77
-rw-r--r--test/mocks/pnfsimulator/simulator-cli/.gitignore7
-rw-r--r--test/mocks/pnfsimulator/simulator-cli/README.md304
-rw-r--r--test/mocks/pnfsimulator/simulator-cli/cli/client/tailf_client.py59
-rw-r--r--test/mocks/pnfsimulator/simulator-cli/cli/data/logging.ini20
-rwxr-xr-xtest/mocks/pnfsimulator/simulator-cli/cli/netconf_simulator.py278
-rwxr-xr-xtest/mocks/pnfsimulator/simulator-cli/cli/pnf_simulator.py374
-rw-r--r--test/mocks/pnfsimulator/simulator-cli/tests/resources/notification.json15
-rw-r--r--test/mocks/pnfsimulator/simulator-cli/tests/test_netconf_simulator.py165
-rw-r--r--test/mocks/pnfsimulator/simulator-cli/tests/test_pnf_simulator.py270
-rw-r--r--test/mocks/pnfsimulator/simulator-cli/tests/test_tailf_client.py47
-rw-r--r--test/mocks/prov-mns-provider/Dockerfile13
-rw-r--r--test/mocks/prov-mns-provider/README.txt60
-rw-r--r--test/mocks/prov-mns-provider/docker-compose.yaml9
-rw-r--r--test/mocks/prov-mns-provider/src/ConfigInfo.json5
-rw-r--r--test/mocks/prov-mns-provider/src/DefinedNRMFunction.json11
-rw-r--r--test/mocks/prov-mns-provider/src/ProvMnSProvider.py282
-rw-r--r--test/mocks/prov-mns-provider/src/UserInfo.json4
-rw-r--r--test/mocks/prov-mns-provider/src/preSetMOI.json18
-rw-r--r--test/mocks/prov-mns-provider/src/requirements.txt2
l---------test/mocks/prov-mns-provider/src/tests/ConfigInfo.json1
l---------test/mocks/prov-mns-provider/src/tests/DefinedNRMFunction.json1
l---------test/mocks/prov-mns-provider/src/tests/ProvMnSProvider.py1
l---------test/mocks/prov-mns-provider/src/tests/UserInfo.json1
-rw-r--r--test/mocks/prov-mns-provider/src/tests/common.py37
l---------test/mocks/prov-mns-provider/src/tests/preSetMOI.json1
-rw-r--r--test/mocks/prov-mns-provider/src/tests/test-requirements.txt2
-rw-r--r--test/mocks/prov-mns-provider/src/tests/test_invalid_requests.py49
-rw-r--r--test/mocks/prov-mns-provider/src/tests/test_rest_api_endpoints.py35
-rw-r--r--test/mocks/prov-mns-provider/src/tox.ini10
-rw-r--r--test/mocks/ran-nssmf-simulator/.gitignore4
-rw-r--r--test/mocks/ran-nssmf-simulator/Dockerfile25
-rw-r--r--test/mocks/ran-nssmf-simulator/README.md19
-rw-r--r--test/mocks/ran-nssmf-simulator/RanNssmfSimulator/AuthManager.py127
-rw-r--r--test/mocks/ran-nssmf-simulator/RanNssmfSimulator/MainApp.py150
-rw-r--r--test/mocks/ran-nssmf-simulator/RanNssmfSimulator/NssManager.py87
-rw-r--r--test/mocks/ran-nssmf-simulator/RanNssmfSimulator/SliceDataType.py75
-rw-r--r--test/mocks/ran-nssmf-simulator/RanNssmfSimulator/__init__.py0
-rw-r--r--test/mocks/ran-nssmf-simulator/RanNssmfSimulator/etc/auth.json7
-rw-r--r--test/mocks/ran-nssmf-simulator/RanNssmfSimulator/utils.py44
-rw-r--r--test/mocks/ran-nssmf-simulator/container-tag.yaml1
-rw-r--r--test/mocks/ran-nssmf-simulator/main.py23
-rw-r--r--test/mocks/ran-nssmf-simulator/requirements.txt3
-rw-r--r--test/mocks/ran-nssmf-simulator/setup.py35
-rw-r--r--test/mocks/ran-nssmf-simulator/test-requirements.txt2
-rw-r--r--test/mocks/ran-nssmf-simulator/test/conftest.py13
-rw-r--r--test/mocks/ran-nssmf-simulator/test/test_auth.json7
-rw-r--r--test/mocks/ran-nssmf-simulator/test/test_main.py10
-rw-r--r--test/mocks/ran-nssmf-simulator/test/test_settings.py6
-rw-r--r--test/mocks/ran-nssmf-simulator/tox.ini10
-rw-r--r--test/onaptests_bench/MANIFEST.in2
-rw-r--r--test/onaptests_bench/requirements.txt6
-rw-r--r--test/onaptests_bench/setup.cfg22
-rw-r--r--test/onaptests_bench/setup.py (renamed from test/mocks/pnfsimulator/simulator-cli/tests/__init__.py)16
-rw-r--r--test/onaptests_bench/src/onaptests_bench/__init__.py (renamed from test/mocks/pnfsimulator/simulator-cli/cli/__init__.py)10
-rw-r--r--test/onaptests_bench/src/onaptests_bench/artifacts/settings.py81
-rw-r--r--test/onaptests_bench/src/onaptests_bench/launcher.py287
-rw-r--r--test/onaptests_bench/src/onaptests_bench/reporting.py351
-rw-r--r--test/onaptests_bench/src/onaptests_bench/templates/base.html.j2231
-rw-r--r--test/onaptests_bench/src/onaptests_bench/templates/onaptests_bench.html.j279
-rw-r--r--test/onaptests_bench/test-requirements.txt6
-rw-r--r--test/onaptests_bench/tox.ini15
-rw-r--r--test/postman/01_Onboard_Vendor.postman_collection.json363
-rw-r--r--test/postman/02_Onboard_VSP_part1.postman_collection.json424
-rw-r--r--test/postman/03_Onboard_VSP_part2.postman_collection.json87
-rw-r--r--test/postman/04_Onboard_VSP_part3.postman_collection.json398
-rw-r--r--test/postman/05_Onboard_VF.postman_collection.json632
-rw-r--r--test/postman/06_Onboard_Service.postman_collection.json1410
-rw-r--r--test/postman/06_Onboard_Service_Macro.postman_collection.json1149
-rw-r--r--test/postman/07_Declare_owningEntity_LineOfBusiness_project_platform.postman_collection.json302
-rw-r--r--test/postman/08_Declare_Customer_Service_Subscription_Cloud.postman_collection.json1647
-rw-r--r--test/postman/10_instantiate_service_vnf_vfmodule.postman_collection.json1360
-rw-r--r--test/postman/11_delete_instances.postman_collection.json759
-rw-r--r--test/postman/12_NS_Instance.postman_collection.json234
-rw-r--r--test/postman/13_Ns_package_onboard.postman_collection.json223
-rw-r--r--test/postman/14_Vnf_package_onboard.postman_collection.json185
-rw-r--r--test/postman/15_Pnf_package_onboard.postman_collection.json244
-rw-r--r--test/postman/16_Catalog_package_onboard.postman_collection.json163
-rw-r--r--test/postman/globals.postman_globals.json8
-rwxr-xr-xtest/postman/images/collection-detail-test.pngbin123615 -> 0 bytes
-rwxr-xr-xtest/postman/images/collection-detail.pngbin83671 -> 0 bytes
-rwxr-xr-xtest/postman/images/collections.pngbin81167 -> 0 bytes
-rwxr-xr-xtest/postman/images/globals.pngbin111790 -> 0 bytes
-rwxr-xr-xtest/postman/images/import.pngbin83172 -> 0 bytes
-rwxr-xr-xtest/postman/images/run.pngbin93958 -> 0 bytes
-rwxr-xr-xtest/postman/images/zipfile.pngbin97349 -> 0 bytes
-rw-r--r--test/postman/integration_test_urls.postman_environment.json410
-rwxr-xr-xtest/s3p/collector/get_resource_stats.py7
-rw-r--r--test/s3p/generator/locustfile.py125
-rw-r--r--test/s3p/util/docker_util.py22
-rw-r--r--test/security/check_certificates/MANIFEST.in1
-rw-r--r--test/security/check_certificates/check_certificates/__init__.py0
-rw-r--r--test/security/check_certificates/check_certificates/check_certificates_validity.py326
-rw-r--r--test/security/check_certificates/check_certificates/templates/base.html.j2231
-rw-r--r--test/security/check_certificates/check_certificates/templates/cert-internal.html.j2129
-rw-r--r--test/security/check_certificates/check_certificates/templates/cert-nodeports.html.j2129
-rw-r--r--test/security/check_certificates/requirements.txt3
-rw-r--r--test/security/check_certificates/setup.cfg3
-rw-r--r--test/security/check_certificates/setup.py5
-rw-r--r--test/security/check_certificates/test-requirements.txt6
-rw-r--r--test/security/check_certificates/tox.ini9
-rwxr-xr-xtest/security/check_for_http_endpoints.sh79
-rwxr-xr-xtest/security/check_for_ingress_and_nodeports.py339
-rwxr-xr-xtest/security/check_for_jdwp.sh138
-rwxr-xr-xtest/security/check_for_nonssl_endpoints.sh126
-rw-r--r--test/security/check_versions/.gitignore4
-rw-r--r--test/security/check_versions/README.md92
-rw-r--r--test/security/check_versions/pyproject.toml24
-rw-r--r--test/security/check_versions/requirements.txt7
-rw-r--r--test/security/check_versions/tests/conftest.py12
-rw-r--r--test/security/check_versions/tests/test_gather_containers_informations.py38
-rw-r--r--test/security/check_versions/tests/test_list_all_containers.py52
-rw-r--r--test/security/check_versions/tests/test_main.py80
-rw-r--r--test/security/check_versions/tests/test_sync_post_namespaced_pod_exec.py74
-rw-r--r--test/security/check_versions/tests/test_verify_versions_acceptability.py54
-rw-r--r--test/security/check_versions/tox.ini19
-rw-r--r--test/security/check_versions/versions/__init__.py0
-rw-r--r--test/security/check_versions/versions/k8s_bin_versions_inspector.py769
-rw-r--r--test/security/check_versions/versions/k8s_bin_versions_inspector_test_case.py116
-rw-r--r--test/security/check_versions/versions/reporting.py265
-rw-r--r--test/security/check_versions/versions/templates/base.html.j2232
-rw-r--r--test/security/check_versions/versions/templates/versions.html.j285
-rw-r--r--test/security/k8s/README56
-rw-r--r--[l---------]test/security/k8s/README.rst46
-rw-r--r--test/security/k8s/src/check/check.go32
-rw-r--r--test/security/k8s/src/check/cmd/check/check.go23
-rw-r--r--test/security/k8s/src/check/errors.go10
-rw-r--r--test/security/k8s/src/check/rancher/rancher.go118
-rw-r--r--test/security/k8s/src/check/raw/raw.go10
-rw-r--r--test/security/k8s/src/check/validators/master/api/api_test.go69
-rw-r--r--test/security/k8s/src/check/validators/master/controllermanager/controllermanager.go15
-rw-r--r--test/security/k8s/src/check/validators/master/controllermanager/controllermanager_test.go55
-rw-r--r--test/security/k8s/src/check/validators/master/master.go3
-rw-r--r--test/security/k8s/src/check/validators/master/scheduler/scheduler_test.go11
-rwxr-xr-xtest/security/k8s/tools/casablanca/get_customization_scripts.sh5
-rw-r--r--test/security/k8s/tools/casablanca/imported/openstack-k8s-node.sh46
-rw-r--r--test/security/k8s/tools/casablanca/imported/openstack-rancher.sh51
-rw-r--r--test/security/k8s/vagrant/casablanca/Vagrantfile48
-rw-r--r--test/security/k8s/vagrant/dublin/cluster.yml1
-rw-r--r--test/security/requirements.txt2
-rw-r--r--test/security/setup.py28
-rw-r--r--test/security/sslendpoints/.dockerignore5
-rw-r--r--test/security/sslendpoints/.gitignore2
-rw-r--r--test/security/sslendpoints/Dockerfile11
-rw-r--r--test/security/sslendpoints/Makefile47
-rw-r--r--test/security/sslendpoints/README.rst135
-rw-r--r--test/security/sslendpoints/go.mod16
-rw-r--r--test/security/sslendpoints/go.sum168
-rw-r--r--test/security/sslendpoints/main.go174
-rw-r--r--test/security/sslendpoints/ports/ports.go66
-rw-r--r--test/security/sslendpoints/ports/ports_suite_test.go13
-rw-r--r--test/security/sslendpoints/ports/ports_test.go423
-rw-r--r--test/security/tox.ini9
-rwxr-xr-xtest/vcpe/bin/setup.sh15
-rwxr-xr-xtest/vcpe/cleanvGMUX.py12
-rw-r--r--test/vcpe/clouds.yaml.example23
-rwxr-xr-xtest/vcpe/config_sdnc_so.py67
-rwxr-xr-xtest/vcpe/csar_parser.py4
-rwxr-xr-xtest/vcpe/get_info.py24
-rwxr-xr-xtest/vcpe/healthcheck-k8s.py1
-rwxr-xr-xtest/vcpe/loop.py18
-rwxr-xr-xtest/vcpe/preload.py98
-rw-r--r--test/vcpe/preload_templates/simple_neutron_heat.yaml6
-rw-r--r--test/vcpe/preload_templates/template.network.json2
-rw-r--r--test/vcpe/preload_templates/template.networkgra.json33
-rw-r--r--test/vcpe/preload_templates/template.vcpe_bng_vfmodule.json338
-rw-r--r--test/vcpe/preload_templates/template.vcpe_bnggra_vfmodule.json339
-rw-r--r--test/vcpe/preload_templates/template.vcpe_brgemu_vfmodule.json268
-rw-r--r--test/vcpe/preload_templates/template.vcpe_brgemugra_vfmodule.json277
-rw-r--r--test/vcpe/preload_templates/template.vcpe_gmux_vfmodule.json314
-rw-r--r--test/vcpe/preload_templates/template.vcpe_gmuxgra_vfmodule.json315
-rw-r--r--test/vcpe/preload_templates/template.vcpe_gwgra_vfmodule.json263
-rw-r--r--test/vcpe/preload_templates/template.vcpe_infra_vfmodule.json354
-rw-r--r--test/vcpe/preload_templates/template.vcpe_infragra_vfmodule.json358
-rw-r--r--test/vcpe/preload_templates/template.vcpe_vgw_vfmodule.json254
-rw-r--r--test/vcpe/preload_templates/template_aai_region_data.json11
-rw-r--r--test/vcpe/preload_templates/template_sniro_data.json97
-rw-r--r--test/vcpe/preload_templates/template_sniro_request.json24
-rwxr-xr-xtest/vcpe/sdcutils.py8
-rw-r--r--test/vcpe/setup.py41
-rwxr-xr-xtest/vcpe/soutils.py34
-rw-r--r--test/vcpe/tests/test_imports.py17
-rw-r--r--test/vcpe/tox.ini15
-rwxr-xr-xtest/vcpe/vcpe.py153
-rwxr-xr-xtest/vcpe/vcpe_custom_service.py8
-rwxr-xr-xtest/vcpe/vcpecommon.py224
-rw-r--r--test/vcpe/vcpeconfig-oom_disabled.yaml65
-rw-r--r--test/vcpe/vcpeconfig.yaml65
-rw-r--r--test/vcpe_tosca/local/config/vcpe_config.json94
-rw-r--r--test/vcpe_tosca/local/csar/infra.csarbin0 -> 25035 bytes
-rw-r--r--test/vcpe_tosca/local/csar/ns.csarbin0 -> 3488 bytes
-rw-r--r--test/vcpe_tosca/local/csar/vbng.csarbin0 -> 24656 bytes
-rw-r--r--test/vcpe_tosca/local/csar/vbrgemu.csarbin0 -> 23810 bytes
-rw-r--r--test/vcpe_tosca/local/csar/vgmux.csarbin0 -> 24267 bytes
-rw-r--r--test/vcpe_tosca/local/csar/vgw.csarbin0 -> 24309 bytes
-rwxr-xr-xtest/vcpe_tosca/local/scripts/install-alpine.sh76
-rw-r--r--test/vcpe_tosca/local/vcpe_tosca_test.py651
-rw-r--r--test/xtesting/robot/Dockerfile26
-rw-r--r--test/xtesting/robot/README.md93
-rw-r--r--test/xtesting/robot/testcases.yaml94
-rw-r--r--test/xtesting/robot/thirdparty-requirements.txt15
-rw-r--r--tox.ini56
1293 files changed, 46443 insertions, 48117 deletions
diff --git a/.coafile b/.coafile
new file mode 100644
index 000000000..81bf44021
--- /dev/null
+++ b/.coafile
@@ -0,0 +1,34 @@
+[yaml]
+bears = YAMLLintBear
+yamllint_config = .yamllint
+ignore =
+ .tox/**,
+ # Doesn't look like a valid YAML although it pretends one
+ deployment/heat/onap-rke/parts/onap-oom-2.yaml
+
+[json]
+bears = JSONFormatBear
+json_sort = False
+indent_size = 2
+ignore =
+ .tox/**,
+ # This one is empty
+ test/mocks/datafilecollector-testharness/simulator-group/consul/consul_config.json
+
+[py]
+bears = PyLintBear
+pylint_disable = all
+pylint_enable =
+ bad-indentation, trailing-whitespace, unused-wildcard-import, unused-import,
+ unnecessary-semicolon, unnecessary-semicolon, undefined-variable,
+ syntax-error, unused-variable, using-constant-test,unused-argument,
+ len-as-condition, trailing-newlines, missing-final-newline, reimported,
+ too-many-function-args, singleton-comparison
+ignore =
+ .tox/**
+
+[md]
+bears = MarkdownBear
+ignore =
+ .tox/**
+
diff --git a/.gitattributes b/.gitattributes
index 3185c448b..2eb674959 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -2,3 +2,4 @@
*.png binary
*.jpg binary
*.xlsx binary
+*.csar -text
diff --git a/.gitignore b/.gitignore
index 6d453634b..ecf7f10ea 100644
--- a/.gitignore
+++ b/.gitignore
@@ -30,3 +30,6 @@ csit/
*.class
*.csar
benchmark/
+.tox/
+build/
+deployment/noheat/**/artifacts/*
diff --git a/.gitreview b/.gitreview
index 6e77300ea..476096444 100644
--- a/.gitreview
+++ b/.gitreview
@@ -2,3 +2,4 @@
host=gerrit.onap.org
port=29418
project=integration.git
+defaultbranch=master
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
new file mode 100644
index 000000000..f56b3b71f
--- /dev/null
+++ b/.readthedocs.yaml
@@ -0,0 +1,18 @@
+---
+# .readthedocs.yml
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+# Required
+
+version: 2
+build:
+ os: ubuntu-20.04
+ tools:
+ python: "3.8"
+
+python:
+ install:
+ - requirements: docs/requirements-docs.txt
+
+sphinx:
+ configuration: docs/conf.py
diff --git a/.yamllint b/.yamllint
new file mode 100644
index 000000000..ab5bdc378
--- /dev/null
+++ b/.yamllint
@@ -0,0 +1,16 @@
+---
+
+extends: default
+
+rules:
+ line-length: disable
+ truthy: disable
+ braces:
+ max-spaces-inside: 1
+ brackets:
+ max-spaces-inside: 1
+ comments-indentation: disable
+ comments: disable
+ document-start: disable
+ indentation:
+ indent-sequences: whatever
diff --git a/INFO.yaml b/INFO.yaml
index dded2f874..e5873860e 100644
--- a/INFO.yaml
+++ b/INFO.yaml
@@ -4,11 +4,11 @@ project_creation_date: '2017-02-10'
lifecycle_state: 'Incubation'
project_category: ''
project_lead: &onap_releng_ptl
- name: 'Morgan Richomme'
- email: 'morgan.richomme@orange.com'
- id: 'mrichomme'
- company: 'Orange'
- timezone: 'France/Paris'
+ name: 'Marek Szwalkiewicz'
+ email: 'marek.szwalkiewicz@external.t-mobile.pl'
+ company: 'T-Mobile'
+ id: 'mszwalkiewicz'
+ timezone: 'Europe/Warsaw'
primary_contact: *onap_releng_ptl
issue_tracking:
type: 'jira'
@@ -26,52 +26,43 @@ meetings:
server: 'n/a'
channel: 'n/a'
repeats: 'weekly'
- time: '14:00 UTC'
+ time: '13:00 UTC'
repositories:
- 'integration'
committers:
- <<: *onap_releng_ptl
- - name: 'Christophe Closset'
- email: 'cc697w@intl.att.com'
- company: 'ATT'
- id: 'ChrisC'
- timezone: 'Belgium/Namur'
- - name: 'Daniel Rose'
- email: 'DR695H@att.com'
- company: 'ATT'
- id: 'DR695H'
- timezone: 'America/New_York'
- - name: 'Gary Wu'
- email: 'gary.i.wu@huawei.com'
- company: 'Huawei'
- id: 'gwu'
- timezone: 'America/Santa_Clara'
- name: 'Catherine Lefevre'
email: 'cl664y@att.com'
company: 'AT&T'
id: 'Katel34'
timezone: 'Europe/Belgium'
- - name: 'Marco Platania'
- email: 'platania@research.att.com'
- company: 'ATT'
- id: 'platania'
- timezone: 'America/New_York'
- - name: 'Brian Freeman'
- email: 'bf1936@att.com'
- company: 'AT&T'
- id: 'bdfreeman1421'
- timezone: 'America/New_York'
- - name: 'Mariusz Wagner'
- email: 'mariusz.wagner@nokia.com'
- company: 'Nokia'
- id: 'mwagner9'
- timezone: 'Poland/Wroclaw'
- - name: 'Morgan Richomme'
- email: 'morgan.richomme@orange.com'
- company: 'orange'
- id: 'mrichomme'
- timezone: 'France/Paris'
+ - name: 'Krzysztof Kuzmicki'
+ email: 'krzysztof.kuzmicki@nokia.com'
+ company: 'nokia'
+ id: 'kkuzmick'
+ timezone: 'Europe/Warsaw'
+ - name: 'Andreas Geissler'
+ email: 'andreas-geissler@telekom.de'
+ company: 'Deutsche Telekom'
+ id: 'andreasgeissler'
+ timezone: 'Europe/Berlin'
+ - name: 'Michal Jaggielo'
+ email: 'Michal.Jagiello@t-mobile.pl'
+ company: 'T-Mobile'
+ id: 'MichalJagielloTMPL'
+ timezone: 'Europe/Warsaw'
+ - name: 'Lukasz Rajewski'
+ email: 'lukasz.rajewski@t-mobile.pl'
+ company: 'T-Mobile'
+ id: 'rajewluk'
+ timezone: 'Europe/Warsaw'
+ - name: 'Fiachra Corcoran'
+ email: 'fiachra.corcoran@est.tech'
+ company: 'Ericsson'
+ id: 'efiacor'
+ timezone: 'Europe/Dublin'
tsc:
+ # yamllint disable rule:line-length
approval: 'https://lists.onap.org/pipermail/onap-tsc'
changes:
- type: 'Addition'
@@ -80,3 +71,58 @@ tsc:
- type: 'Addition'
name: 'Morgan Richomme'
link: 'https://wiki.onap.org/display/DW/TSC+2019-10-17'
+ - type: 'Addition'
+ name: 'Bartek, Marcin, Eric'
+ link: 'https://lists.onap.org/g/onap-tsc/message/5772'
+ - type: 'Addition'
+ name: 'Krzysztof Kuzmicki'
+ link: 'https://lists.onap.org/g/onap-tsc/topic/onap_integration_committer/73303463'
+ - type: 'Addition'
+ name: 'Pawel Wieczorek'
+ link: 'https://lists.onap.org/g/onap-tsc/topic/onap_integration_committer/73303462'
+ - type: 'Addition'
+ name: 'Andreas Geissler'
+ link: 'https://lists.onap.org/g/onap-tsc/topic/onap_integration_committer/73303461'
+ - type: 'Addition'
+ name: 'Michal Jaggiello'
+ link: 'https://lists.onap.org/g/onap-tsc/message/7102'
+ - type: 'Addition'
+ name: 'Lukasz Rajewski'
+ link: 'https://lists.onap.org/g/onap-tsc/message/7102'
+ - type: 'Addition'
+ name: 'Thierry Hardy'
+ link: 'https://lists.onap.org/g/onap-tsc/message/7102'
+ - type: 'Addition'
+ name: 'Lasse Kaihlavirta'
+ link: 'https://lists.onap.org/g/onap-tsc/message/7280'
+ - type: 'Addition'
+ name: 'Illia Halych'
+ link: 'https://lists.onap.org/g/onap-tsc/message/7696'
+ - type: 'Addition'
+ name: 'Bartosz Gardziejewski, Alexander Mazuruk'
+ link: 'https://lists.onap.org/g/onap-tsc/message/7893'
+ - type: 'Deletion'
+ name: 'Thierry Hardy, Lasse Kaihlavirta'
+ link: 'https://lists.onap.org/g/onap-tsc/message/7893'
+ - type: 'Deletion'
+ name: 'Marcin Przybysz'
+ link: 'https://lists.onap.org/g/onap-tsc/message/7893'
+ - type: 'Deletion'
+ name: 'Bartosz Gardziejewski'
+ link: 'https://lists.onap.org/g/onap-tsc/message/8286'
+ - type: 'Deletion'
+ name: 'Christophe Closset, Brian Freeman'
+ link: 'https://lists.onap.org/g/onap-tsc/message/8449'
+ - type: 'Addition'
+ name: 'Maciej Lisowski, Marcin Sebastian Krasowski'
+ link: 'https://lists.onap.org/g/onap-tsc/message/8449'
+ - type: 'Addition'
+ name: 'Fiachra Corcoran'
+ link: 'https://lists.onap.org/g/onap-tsc/message/8634'
+ - type: 'Deletion'
+ name: 'Morgan Richomme, Bartek Grzybowski, Illia Halych, Alexander Mazuruk,
+ Maciej Lisowski, Marcin Sebastian Krasowski'
+ link: 'https://lists.onap.org/g/onap-tsc/message/9017'
+ - type: 'Addition'
+ name: 'Marek Szwalkiewicz'
+ link: 'https://lists.onap.org/g/onap-tsc/message/9275'
diff --git a/README.md b/README.md
index b6c71420e..1953e2705 100644
--- a/README.md
+++ b/README.md
@@ -1,14 +1,19 @@
-
# ONAP Integration
## Description
-Responsible for the integration framework / automated tools, code and scripts, best practice guidance related to cross-project Continuous System Integration Testing (CSIT), and delivery of the ONAP project.
-
-See https://wiki.onap.org/display/DW/Integration+Project for additional details.
+Responsible for:
+- the integration repositories (integration, testsuite, demo)
+- automated tools
+- tests, code and scripts
+- baseline images
+- best practice guidance for usecases
+- Continuous System Integration Testing (CSIT)
+- CI/CD
-## Sub-projects
+## References
-See respective directories for additional details about each sub-project.
+Wiki page: <https://wiki.onap.org/display/DW/Integration+Project>
+Official documentation: <https://docs.onap.org/projects/onap-integration/en/guilin/index.html?highlight=integration>
diff --git a/S3Ptools/locust_Grafana.sh b/S3Ptools/locust_Grafana.sh
index 5fa6fdcdd..5168f0e92 100644
--- a/S3Ptools/locust_Grafana.sh
+++ b/S3Ptools/locust_Grafana.sh
@@ -1,7 +1,7 @@
#!/bin/bash
#grafana install for the use of locust
# localgosh:80(grafana) & localhost:81
-pip install docker==3.1.4
+pip install --no-cache-dir docker==3.1.4
git clone https://github.com/kamon-io/docker-grafana-graphite.git
cd docker-grafana-graphite
make up
diff --git a/S3Ptools/locustfile.py b/S3Ptools/locustfile.py
index 5fe5b3786..91cd49974 100644
--- a/S3Ptools/locustfile.py
+++ b/S3Ptools/locustfile.py
@@ -1,15 +1,15 @@
import random
import string
from locust import HttpLocust, TaskSet, task
-
+
class UserBehavior(TaskSet):
def on_start(self):
""" on_start is called when a Locust start before any task is scheduled """
self.init()
-
+
def init(self):
pass
-
+
@task(1)
def DCI(self):
method = "POST"
@@ -20,7 +20,7 @@ class UserBehavior(TaskSet):
print(data)
response = self.client.request(method, url, headers=headers, data=data)
print(response.json())
-
+
class WebsiteUser(HttpLocust):
task_set = UserBehavior
host = "http://10.0.5.1:8080"
diff --git a/bootstrap/README.md b/bootstrap/README.md
index bad686157..6c6553de8 100644
--- a/bootstrap/README.md
+++ b/bootstrap/README.md
@@ -1,11 +1,11 @@
-
# ONAP Integration - Bootstrap
## Description
-* A framework to automatically install and test a set of base infrastructure components and environments for new developers.
+A framework to automatically install and test a set of base infrastructure components and environments for new developers.
## Sub-components
-* jenkins - A set of vagrant scripts that will set up a simple Jenkins environment with jobs configured to build all ONAP java code and docker images.
-
+- codesearch: A set of vagrant scripts that will set up Hound daemon configured to search through all ONAP repositories.
+- jenkins: A set of vagrant scripts that will set up a simple Jenkins environment with jobs configured to build all ONAP java code and docker images.
+- vagrant-minimal-onap: A set of vagrant scripts that will set up minimal ONAP environment for research purposes.
diff --git a/bootstrap/codesearch/README.rst b/bootstrap/codesearch/README.rst
new file mode 100644
index 000000000..d534e28cf
--- /dev/null
+++ b/bootstrap/codesearch/README.rst
@@ -0,0 +1,78 @@
+============================================
+ ONAP Integration > Bootstrap > Code search
+============================================
+
+This directory contains a set of Vagrant scripts that will automatically set up a Hound_ instance
+with config generator to index all ONAP code.
+
+This is intended to show a beginning ONAP developer how to set up and configure an environment that
+allows to search through ONAP code repositories quickly. It is not intended to be used as
+a production code search solution.
+
+`Upstream Docker image` has not been used due to lack of project activity. This environment
+(together with daemon configuration generator) might be migrated to a new Docker image recipe in
+future, though.
+
+.. _Hound: https://github.com/hound-search/hound
+.. _`Upstream Docker image`: https://hub.docker.com/r/etsy/hound
+
+
+Prerequisites
+-------------
+
+Virtualisation provider
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Provided vagrantfile is generic enough that it should work with any Vagrant provider.
+It has been tested using default VirtualBox provider and also libvirt_ provider with vagrant-libvirt_ plugin.
+Plugin documentation provides detailed `installation instructions`_ that will guide through the process.
+
+.. note::
+ Remember to uncomment `deb-src` repositories for `apt-get build-dep` step on Debian/Ubuntu.
+
+.. _libvirt: https://libvirt.org
+.. _vagrant-libvirt: https://github.com/vagrant-libvirt/vagrant-libvirt
+.. _`installation instructions`: https://github.com/vagrant-libvirt/vagrant-libvirt#installation
+
+Virtual machine manager
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Environment has been tested using latest Vagrant_ as of writing this documentation (`v2.2.16`_). Some
+features (e.g. triggers_) might not be supported on older versions.
+
+.. _Vagrant: https://www.vagrantup.com/downloads.html
+.. _`v2.2.16`: https://github.com/hashicorp/vagrant/blob/v2.2.16/CHANGELOG.md
+.. _triggers: https://www.vagrantup.com/docs/triggers/
+
+
+Running
+-------
+
+If using vagrant-libvirt provider additional `--provider` flag or setting `VAGRANT_DEFAULT_PROVIDER` environmental
+variable might be required in case there are multiple providers available.
+
+.. note::
+ One of the following commands should be executed depending on the provider you'd like to use. Run it within the
+ directory where `Vagrantfile` is stored (`integration/bootstrap/codesearch`).
+
+.. code-block:: sh
+
+ vagrant up --provider=libvirt # to leverage vagrant-libvirt provider
+ vagrant up # to leverage default VirtualBox provider
+
+This will:
+
+#. Start and prepare virtual machine
+#. Generate configuration files
+#. Run Hound instance as a tmux_ session named `codesearch`
+
+At any time you can reload or stop and later start the box, it's set up to automatically run the hound process.
+
+.. _tmux: https://github.com/tmux/tmux/wiki
+
+
+Usage
+-----
+
+Once ready (cloning repositories and building index might initially take some time) code search will
+be available at http://localhost:6080
diff --git a/bootstrap/codesearch/Vagrantfile b/bootstrap/codesearch/Vagrantfile
new file mode 100644
index 000000000..ba2339d0c
--- /dev/null
+++ b/bootstrap/codesearch/Vagrantfile
@@ -0,0 +1,40 @@
+# -*- mode: ruby -*-
+# -*- coding: utf-8 -*-
+
+nameserver = "8.8.8.8"
+synced_folder = "/vagrant"
+houndd_bin = "${HOME}/go/bin/houndd"
+houndd_config = "${HOME}/config.json"
+onap_git = "git.onap.org"
+
+$replace_dns = <<-SCRIPT
+ ns="$1"
+ # persist resolver settings
+ sed -i "s/addresses:.*/addresses: [${ns}]/" /etc/netplan/01-netcfg.yaml
+ # setup resolver for current boot session
+ resolvectl dns eth0 ${ns}
+SCRIPT
+
+Vagrant.configure("2") do |config|
+ config.vm.box = "generic/ubuntu2004"
+ config.vm.synced_folder ".", synced_folder, type: "rsync", rsync__exclude: "Vagrantfile"
+ config.vm.network "forwarded_port", guest: 6080, host: 6080
+ config.vm.provision "replace_dns", type: :shell, inline: $replace_dns, args: nameserver
+ config.vm.provision "dependencies", type: :shell, inline: <<-SHELL
+ export DEBIAN_FRONTEND=noninteractive
+ apt-get update
+ apt-get install --assume-yes --quiet golang tmux
+ SHELL
+ config.vm.provision "binaries", type: :shell, privileged: false, inline: <<-SHELL
+ export GOPATH="${HOME}/go"
+ go get -u github.com/hound-search/hound/cmds/...
+ SHELL
+ config.vm.provision "generate_config", type: :shell do |s|
+ s.privileged = false
+ s.inline = "python3 #{synced_folder}/create_config.py --git ${1} > #{houndd_config}"
+ s.args = [onap_git]
+ end
+ config.vm.provision "run_codesearch", type: :shell, run: "always", privileged: false, inline: <<-SHELL
+ tmux new -d -s codesearch #{houndd_bin} -conf #{houndd_config}
+ SHELL
+end
diff --git a/bootstrap/codesearch/create_config.py b/bootstrap/codesearch/create_config.py
new file mode 100755
index 000000000..b881476e7
--- /dev/null
+++ b/bootstrap/codesearch/create_config.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+"""Create configuration for code search."""
+
+import argparse
+import json
+import urllib.request
+import sys
+
+DEFAULT_GERRIT = "gerrit.onap.org"
+API_PREFIX = "/r"
+API_PROJECTS = "/projects/"
+
+MAGIC_PREFIX = ")]}'"
+
+CODE_LOCATION = "{path}{anchor}"
+GITWEB_ANCHOR = "#l{line}"
+GIT_ANCHOR = "#n{line}"
+
+DEFAULT_POLL = 3600
+
+def get_projects_list(gerrit):
+ """Request list of all available projects from ONAP Gerrit."""
+ resp = urllib.request.urlopen("https://{}{}{}".format(gerrit, API_PREFIX, API_PROJECTS))
+ resp_body = resp.read()
+
+ no_magic = resp_body[len(MAGIC_PREFIX):]
+ decoded = no_magic.decode("utf-8")
+ projects = json.loads(decoded)
+
+ return projects.keys()
+
+
+def create_repos_list(projects, gerrit, ssh, git, poll):
+ """Create a map of all projects to their repositories' URLs."""
+ gerrit_url = "https://{}{}".format(gerrit, API_PREFIX)
+ git_url = "git://{}".format(git)
+ gerrit_project_url_base = "{}/{{}}.git".format(gerrit_url)
+ gitweb_code_url_base = "{}/gitweb?p={{}}.git;hb=HEAD;a=blob;f=".format(gerrit_url)
+ git_project_url_base = "{}/{{}}.git".format(git_url)
+
+ repos_list = {}
+ for project in projects:
+ project_url = gerrit_project_url_base.format(project)
+ code_url = gitweb_code_url_base.format(project) + CODE_LOCATION
+ anchor = GITWEB_ANCHOR
+
+ if ssh and len(ssh) == 2:
+ user, port = ssh[0], ssh[1]
+ project_url = "ssh://{}@{}:{}/{}.git".format(user, gerrit, port, project)
+ if git:
+ code_url = "https://{}/{}/tree/".format(git, project) + CODE_LOCATION
+ project_url = git_project_url_base.format(project)
+ anchor = GIT_ANCHOR
+
+ repos_list[project] = {
+ "url": project_url,
+ "url-pattern": {
+ "base-url": code_url,
+ "anchor": anchor,
+ "ms-between-poll": poll * 1000
+ }
+ }
+
+ return repos_list
+
+
+def parse_arguments():
+ """Return parsed command-line arguments."""
+ parser = argparse.ArgumentParser(description=__doc__)
+ group = parser.add_mutually_exclusive_group()
+ parser.add_argument('--gerrit', help='Gerrit address', default=DEFAULT_GERRIT)
+ group.add_argument('--ssh', help='SSH information for Gerrit access: user, port', nargs=2)
+ group.add_argument('--git', help='External git address. Does not support --ssh')
+ parser.add_argument('--poll-interval', help='Repositories polling interval in seconds', type=int, default=DEFAULT_POLL)
+
+ return parser.parse_args()
+
+
+def main():
+ """Main entry point for the script."""
+ arguments = parse_arguments()
+
+ projects = get_projects_list(arguments.gerrit)
+ repos = create_repos_list(projects, arguments.gerrit, arguments.ssh, arguments.git, arguments.poll_interval)
+ config = {
+ "max-concurrent-indexers": 2,
+ "dbpath": "data",
+ "health-check-uri": "/healthz",
+ "repos": repos
+ }
+ print(json.dumps(config, sort_keys=True, indent=4, separators=(',', ': ')))
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/bootstrap/codesearch/test-requirements.txt b/bootstrap/codesearch/test-requirements.txt
new file mode 100644
index 000000000..897e59f51
--- /dev/null
+++ b/bootstrap/codesearch/test-requirements.txt
@@ -0,0 +1,2 @@
+flake8
+pylint
diff --git a/bootstrap/codesearch/tox.ini b/bootstrap/codesearch/tox.ini
new file mode 100644
index 000000000..42089bb93
--- /dev/null
+++ b/bootstrap/codesearch/tox.ini
@@ -0,0 +1,14 @@
+[tox]
+envlist = pep8, pylint
+skipsdist = true
+modules = create_config
+
+[testenv]
+basepython = python3.8
+deps = -r{toxinidir}/test-requirements.txt
+
+[testenv:pep8]
+commands = flake8 --max-line-length 100
+
+[testenv:pylint]
+commands = pylint -f parseable {[tox]modules}
diff --git a/bootstrap/jenkins/README.md b/bootstrap/jenkins/README.md
index b9cb8645f..170134d4f 100644
--- a/bootstrap/jenkins/README.md
+++ b/bootstrap/jenkins/README.md
@@ -1,4 +1,3 @@
-
# ONAP Integration > Bootstrap > Jenkins
This directory contains a set of vagrant scripts that will automatically set up a Jenkins instance
@@ -9,4 +8,3 @@ can successfully build ONAP code from scratch. It is not intended to be used as
Jenkins CI/CD environment.
NOTE: the Jenkins instance is by default NOT SECURED, with the default admin user and password as "jenkins".
-
diff --git a/bootstrap/jenkins/scripts/gen-java-jobs.py b/bootstrap/jenkins/scripts/gen-java-jobs.py
index 0fc538886..b4546c00f 100755
--- a/bootstrap/jenkins/scripts/gen-java-jobs.py
+++ b/bootstrap/jenkins/scripts/gen-java-jobs.py
@@ -12,11 +12,11 @@ import fileinput
import os
import subprocess
-print """- project:
+print("""- project:
name: onap-java
jobs:
- 'java-{project}'
- project:"""
+ project:""")
for line in fileinput.input():
repo = line.strip()
@@ -28,9 +28,9 @@ for line in fileinput.input():
project = repo.replace("/", "_")
if pompath:
project += "_" + pompath.replace("/", "_")
- print " - '{}':".format(project)
- print " repo: '{}'".format(repo)
- if pompath:
- print " pom: '{}/pom.xml'".format(pompath)
+ print(" - '{}':".format(project))
+ print(" repo: '{}'".format(repo))
+ if pompath:
+ print(" pom: '{}/pom.xml'".format(pompath))
else:
- print " pom: 'pom.xml'"
+ print(" pom: 'pom.xml'")
diff --git a/bootstrap/jenkins/vagrant/jjb/docker-jobs.yaml b/bootstrap/jenkins/vagrant/jjb/docker-jobs.yaml
index 5925b5d01..928267434 100644
--- a/bootstrap/jenkins/vagrant/jjb/docker-jobs.yaml
+++ b/bootstrap/jenkins/vagrant/jjb/docker-jobs.yaml
@@ -1,76 +1,76 @@
- project:
name: onap-docker-with-pom
jobs:
- - 'docker-with-pom'
+ - 'docker-with-pom'
project:
- - 'aai_aai-service_ajsc-aai':
- repo: 'aai/aai-service'
- pom: 'ajsc-aai/pom.xml'
- mvn-options: 'docker:build'
- - 'aai_data-router':
- repo: 'aai/data-router'
- pom: 'pom.xml'
- mvn-options: 'docker:build'
- - 'aai_model-loader':
- repo: 'aai/model-loader'
- pom: 'pom.xml'
- mvn-options: 'docker:build'
- - 'aai_resources_aai-resources':
- repo: 'aai/resources'
- pom: 'aai-resources/pom.xml'
- mvn-options: 'docker:build'
- - 'aai_search-data-service':
- repo: 'aai/search-data-service'
- mvn-options: 'docker:build'
- - 'aai_sparky-be':
- repo: 'aai/sparky-be'
- mvn-options: 'docker:build'
- - 'aai_traversal_aai-traversal':
- repo: 'aai/traversal'
- pom: 'aai-traversal/pom.xml'
- mvn-options: 'docker:build'
- - 'appc':
- repo: 'appc/deployment'
- mvn-options: '-P docker'
- - 'dcae_dcae-controller':
- repo: 'dcae/demo'
- shell-cmd: 'bash dcae-demo-controller/src/main/docker-build/build.sh'
- - 'dcae_dcae-dmaapbc':
- repo: 'dcae/dmaapbc'
- shell-cmd: 'docker build -f ./Dockerfile .'
- - 'dcae_dcae-inventory':
- repo: 'dcae/dcae-inventory'
- mvn-options: 'docker:build'
- - 'mso':
- repo: 'mso'
- pom: 'packages/docker/pom.xml'
- mvn-options: '-P docker'
- - 'policy':
- repo: 'policy/docker'
- shell-cmd: './docker_build.sh'
- - 'portal_ecomp-portal-widget-ms':
- repo: 'portal'
- pom: 'ecomp-portal-widget-ms/pom.xml'
- mvn-options: 'docker:build'
- - 'sdc':
- repo: 'sdc'
- pom: 'pom.xml'
- mvn-options: '-P docker'
- - 'sdnc':
- repo: 'sdnc/oam'
- mvn-options: '-P docker'
- - 'vid':
- repo: 'vid'
- mvn-options: '-P docker'
+ - 'aai_aai-service_ajsc-aai':
+ repo: 'aai/aai-service'
+ pom: 'ajsc-aai/pom.xml'
+ mvn-options: 'docker:build'
+ - 'aai_data-router':
+ repo: 'aai/data-router'
+ pom: 'pom.xml'
+ mvn-options: 'docker:build'
+ - 'aai_model-loader':
+ repo: 'aai/model-loader'
+ pom: 'pom.xml'
+ mvn-options: 'docker:build'
+ - 'aai_resources_aai-resources':
+ repo: 'aai/resources'
+ pom: 'aai-resources/pom.xml'
+ mvn-options: 'docker:build'
+ - 'aai_search-data-service':
+ repo: 'aai/search-data-service'
+ mvn-options: 'docker:build'
+ - 'aai_sparky-be':
+ repo: 'aai/sparky-be'
+ mvn-options: 'docker:build'
+ - 'aai_traversal_aai-traversal':
+ repo: 'aai/traversal'
+ pom: 'aai-traversal/pom.xml'
+ mvn-options: 'docker:build'
+ - 'appc':
+ repo: 'appc/deployment'
+ mvn-options: '-P docker'
+ - 'dcae_dcae-controller':
+ repo: 'dcae/demo'
+ shell-cmd: 'bash dcae-demo-controller/src/main/docker-build/build.sh'
+ - 'dcae_dcae-dmaapbc':
+ repo: 'dcae/dmaapbc'
+ shell-cmd: 'docker build -f ./Dockerfile .'
+ - 'dcae_dcae-inventory':
+ repo: 'dcae/dcae-inventory'
+ mvn-options: 'docker:build'
+ - 'mso':
+ repo: 'mso'
+ pom: 'packages/docker/pom.xml'
+ mvn-options: '-P docker'
+ - 'policy':
+ repo: 'policy/docker'
+ shell-cmd: './docker_build.sh'
+ - 'portal_ecomp-portal-widget-ms':
+ repo: 'portal'
+ pom: 'ecomp-portal-widget-ms/pom.xml'
+ mvn-options: 'docker:build'
+ - 'sdc':
+ repo: 'sdc'
+ pom: 'pom.xml'
+ mvn-options: '-P docker'
+ - 'sdnc':
+ repo: 'sdnc/oam'
+ mvn-options: '-P docker'
+ - 'vid':
+ repo: 'vid'
+ mvn-options: '-P docker'
- project:
name: onap-docker-without-pom
jobs:
- - 'docker-without-pom'
+ - 'docker-without-pom'
project:
- - 'dcae_dcae-orch-dispatcher':
- repo: 'dcae/orch-dispatcher'
- shell-cmd: 'docker build -f ./Dockerfile .'
- - 'portal':
- repo: 'portal'
- shell-cmd: 'git -C ecompsdkos pull || git clone $GIT_NO_PROJECT/ecompsdkos; git -C dmaapbc pull || git clone $GIT_NO_PROJECT/ui/dmaapbc dmaapbc; cd deliveries; chmod +x *.sh; ./run.sh'
+ - 'dcae_dcae-orch-dispatcher':
+ repo: 'dcae/orch-dispatcher'
+ shell-cmd: 'docker build -f ./Dockerfile .'
+ - 'portal':
+ repo: 'portal'
+ shell-cmd: 'git -C ecompsdkos pull || git clone $GIT_NO_PROJECT/ecompsdkos; git -C dmaapbc pull || git clone $GIT_NO_PROJECT/ui/dmaapbc dmaapbc; cd deliveries; chmod +x *.sh; ./run.sh'
diff --git a/bootstrap/jenkins/vagrant/jjb/java-jobs.yaml b/bootstrap/jenkins/vagrant/jjb/java-jobs.yaml
index 0cbaf5e5f..c75c69bbe 100644
--- a/bootstrap/jenkins/vagrant/jjb/java-jobs.yaml
+++ b/bootstrap/jenkins/vagrant/jjb/java-jobs.yaml
@@ -1,461 +1,461 @@
- project:
name: onap-java
jobs:
- - 'java-{project}'
+ - 'java-{project}'
project:
- - 'aaf_authz':
- repo: 'aaf/authz'
- pom: 'pom.xml'
- - 'aaf_cadi':
- repo: 'aaf/cadi'
- pom: 'pom.xml'
- - 'aaf_inno':
- repo: 'aaf/inno'
- pom: 'pom.xml'
- - 'aaf_luaplugin':
- repo: 'aaf/luaplugin'
- pom: 'pom.xml'
- - 'aai_aai-common':
- repo: 'aai/aai-common'
- pom: 'pom.xml'
- - 'aai_aai-service':
- repo: 'aai/aai-service'
- pom: 'pom.xml'
- - 'aai_babel':
- repo: 'aai/babel'
- pom: 'pom.xml'
- - 'aai_champ':
- repo: 'aai/champ'
- pom: 'pom.xml'
- - 'aai_data-router':
- repo: 'aai/data-router'
- pom: 'pom.xml'
- - 'aai_esr-gui':
- repo: 'aai/esr-gui'
- pom: 'pom.xml'
- - 'aai_esr-server':
- repo: 'aai/esr-server'
- pom: 'pom.xml'
- - 'aai_gizmo':
- repo: 'aai/gizmo'
- pom: 'pom.xml'
- - 'aai_logging-service':
- repo: 'aai/logging-service'
- pom: 'pom.xml'
- - 'aai_model-loader':
- repo: 'aai/model-loader'
- pom: 'pom.xml'
- - 'aai_resources':
- repo: 'aai/resources'
- pom: 'pom.xml'
- - 'aai_rest-client':
- repo: 'aai/rest-client'
- pom: 'pom.xml'
- - 'aai_router-core':
- repo: 'aai/router-core'
- pom: 'pom.xml'
- - 'aai_search-data-service':
- repo: 'aai/search-data-service'
- pom: 'pom.xml'
- - 'aai_sparky-be':
- repo: 'aai/sparky-be'
- pom: 'pom.xml'
- - 'aai_sparky-fe':
- repo: 'aai/sparky-fe'
- pom: 'pom.xml'
- - 'aai_traversal':
- repo: 'aai/traversal'
- pom: 'pom.xml'
- - 'appc':
- repo: 'appc'
- pom: 'pom.xml'
- - 'appc_deployment':
- repo: 'appc/deployment'
- pom: 'pom.xml'
- - 'ccsdk_dashboard':
- repo: 'ccsdk/dashboard'
- pom: 'pom.xml'
- - 'ccsdk_distribution':
- repo: 'ccsdk/distribution'
- pom: 'pom.xml'
- - 'ccsdk_parent':
- repo: 'ccsdk/parent'
- pom: 'pom.xml'
- - 'ccsdk_platform_blueprints':
- repo: 'ccsdk/platform/blueprints'
- pom: 'pom.xml'
- - 'ccsdk_platform_nbapi':
- repo: 'ccsdk/platform/nbapi'
- pom: 'pom.xml'
- - 'ccsdk_platform_plugins':
- repo: 'ccsdk/platform/plugins'
- pom: 'pom.xml'
- - 'ccsdk_sli_adaptors':
- repo: 'ccsdk/sli/adaptors'
- pom: 'pom.xml'
- - 'ccsdk_sli_core':
- repo: 'ccsdk/sli/core'
- pom: 'pom.xml'
- - 'ccsdk_sli_northbound':
- repo: 'ccsdk/sli/northbound'
- pom: 'pom.xml'
- - 'ccsdk_sli_plugins':
- repo: 'ccsdk/sli/plugins'
- pom: 'pom.xml'
- - 'ccsdk_storage_esaas':
- repo: 'ccsdk/storage/esaas'
- pom: 'pom.xml'
- - 'ccsdk_storage_pgaas':
- repo: 'ccsdk/storage/pgaas'
- pom: 'pom.xml'
- - 'ccsdk_utils':
- repo: 'ccsdk/utils'
- pom: 'pom.xml'
- - 'clamp':
- repo: 'clamp'
- pom: 'pom.xml'
- - 'cli':
- repo: 'cli'
- pom: 'pom.xml'
- - 'dcae_apod_analytics':
- repo: 'dcae/apod/analytics'
- pom: 'pom.xml'
- - 'dcae_collectors_ves':
- repo: 'dcae/collectors/ves'
- pom: 'pom.xml'
- - 'dcae_controller':
- repo: 'dcae/controller'
- pom: 'pom.xml'
- - 'dcae_controller_analytics':
- repo: 'dcae/controller/analytics'
- pom: 'pom.xml'
- - 'dcae_dcae-inventory':
- repo: 'dcae/dcae-inventory'
- pom: 'pom.xml'
- - 'dcae_demo':
- repo: 'dcae/demo'
- pom: 'pom.xml'
- - 'dcae_dmaapbc':
- repo: 'dcae/dmaapbc'
- pom: 'pom.xml'
- - 'dcae_operation_utils':
- repo: 'dcae/operation/utils'
- pom: 'pom.xml'
- - 'dcae_pgaas_cdf_src_cdf-prop-value_cdf-prop-value':
- repo: 'dcae/pgaas'
- pom: 'cdf/src/cdf-prop-value/cdf-prop-value/pom.xml'
- - 'dcae_pgaas_cdf_src_cdf-prop-value_cdf-util':
- repo: 'dcae/pgaas'
- pom: 'cdf/src/cdf-prop-value/cdf-util/pom.xml'
- - 'dcaegen2':
- repo: 'dcaegen2'
- pom: 'pom.xml'
- - 'dcaegen2_analytics':
- repo: 'dcaegen2/analytics'
- pom: 'pom.xml'
- - 'dcaegen2_analytics_tca':
- repo: 'dcaegen2/analytics/tca'
- pom: 'pom.xml'
- - 'dcaegen2_collectors':
- repo: 'dcaegen2/collectors'
- pom: 'pom.xml'
- - 'dcaegen2_collectors_snmptrap':
- repo: 'dcaegen2/collectors/snmptrap'
- pom: 'pom.xml'
- - 'dcaegen2_collectors_ves':
- repo: 'dcaegen2/collectors/ves'
- pom: 'pom.xml'
- - 'dcaegen2_deployments':
- repo: 'dcaegen2/deployments'
- pom: 'pom.xml'
- - 'dcaegen2_platform':
- repo: 'dcaegen2/platform'
- pom: 'pom.xml'
- - 'dcaegen2_platform_blueprints':
- repo: 'dcaegen2/platform/blueprints'
- pom: 'pom.xml'
- - 'dcaegen2_platform_cdapbroker':
- repo: 'dcaegen2/platform/cdapbroker'
- pom: 'pom.xml'
- - 'dcaegen2_platform_cli':
- repo: 'dcaegen2/platform/cli'
- pom: 'pom.xml'
- - 'dcaegen2_platform_configbinding':
- repo: 'dcaegen2/platform/configbinding'
- pom: 'pom.xml'
- - 'dcaegen2_platform_deployment-handler':
- repo: 'dcaegen2/platform/deployment-handler'
- pom: 'pom.xml'
- - 'dcaegen2_platform_inventory-api':
- repo: 'dcaegen2/platform/inventory-api'
- pom: 'pom.xml'
- - 'dcaegen2_platform_plugins':
- repo: 'dcaegen2/platform/plugins'
- pom: 'pom.xml'
- - 'dcaegen2_platform_policy-handler':
- repo: 'dcaegen2/platform/policy-handler'
- pom: 'pom.xml'
- - 'dcaegen2_platform_servicechange-handler':
- repo: 'dcaegen2/platform/servicechange-handler'
- pom: 'pom.xml'
- - 'dcaegen2_utils':
- repo: 'dcaegen2/utils'
- pom: 'pom.xml'
- - 'demo':
- repo: 'demo'
- pom: 'pom.xml'
- - 'dmaap_buscontroller':
- repo: 'dmaap/buscontroller'
- pom: 'pom.xml'
- - 'dmaap_datarouter':
- repo: 'dmaap/datarouter'
- pom: 'pom.xml'
- - 'dmaap_dbcapi':
- repo: 'dmaap/dbcapi'
- pom: 'pom.xml'
- - 'dmaap_messagerouter_dmaapclient':
- repo: 'dmaap/messagerouter/dmaapclient'
- pom: 'pom.xml'
- - 'dmaap_messagerouter_messageservice':
- repo: 'dmaap/messagerouter/messageservice'
- pom: 'pom.xml'
- - 'dmaap_messagerouter_mirroragent':
- repo: 'dmaap/messagerouter/mirroragent'
- pom: 'pom.xml'
- - 'dmaap_messagerouter_msgrtr':
- repo: 'dmaap/messagerouter/msgrtr'
- pom: 'pom.xml'
- - 'ecompsdkos_ecomp-sdk':
- repo: 'ecompsdkos'
- pom: 'ecomp-sdk/pom.xml'
- - 'holmes_common':
- repo: 'holmes/common'
- pom: 'pom.xml'
- - 'holmes_dsa':
- repo: 'holmes/dsa'
- pom: 'pom.xml'
- - 'holmes_engine-management':
- repo: 'holmes/engine-management'
- pom: 'pom.xml'
- - 'holmes_rule-management':
- repo: 'holmes/rule-management'
- pom: 'pom.xml'
- - 'integration_test_mocks_sniroemulator':
- repo: 'integration'
- pom: 'test/mocks/sniroemulator/pom.xml'
- - 'integration_version-manifest':
- repo: 'integration'
- pom: 'version-manifest/pom.xml'
- - 'modeling_toscaparsers_javatoscachecker':
- repo: 'modeling/toscaparsers'
- pom: 'javatoscachecker/pom.xml'
- - 'modeling_toscaparsers_nfvparser':
- repo: 'modeling/toscaparsers'
- pom: 'nfvparser/pom.xml'
- - 'msb_apigateway':
- repo: 'msb/apigateway'
- pom: 'pom.xml'
- - 'msb_discovery':
- repo: 'msb/discovery'
- pom: 'pom.xml'
- - 'msb_java-sdk':
- repo: 'msb/java-sdk'
- pom: 'pom.xml'
- - 'msb_swagger-sdk':
- repo: 'msb/swagger-sdk'
- pom: 'pom.xml'
- - 'mso':
- repo: 'mso'
- pom: 'pom.xml'
- - 'mso_libs':
- repo: 'mso/libs'
- pom: 'pom.xml'
- - 'multicloud_framework':
- repo: 'multicloud/framework'
- pom: 'pom.xml'
- - 'multicloud_openstack':
- repo: 'multicloud/openstack'
- pom: 'pom.xml'
- - 'multicloud_openstack_vmware':
- repo: 'multicloud/openstack/vmware'
- pom: 'pom.xml'
- - 'multicloud_openstack_windriver':
- repo: 'multicloud/openstack/windriver'
- pom: 'pom.xml'
- - 'ncomp_cdap':
- repo: 'ncomp/cdap'
- pom: 'pom.xml'
- - 'ncomp_core':
- repo: 'ncomp/core'
- pom: 'pom.xml'
- - 'ncomp_docker':
- repo: 'ncomp/docker'
- pom: 'pom.xml'
- - 'ncomp_maven':
- repo: 'ncomp/maven'
- pom: 'pom.xml'
- - 'ncomp_openstack':
- repo: 'ncomp/openstack'
- pom: 'pom.xml'
- - 'ncomp_sirius_manager':
- repo: 'ncomp/sirius/manager'
- pom: 'pom.xml'
- - 'ncomp_utils':
- repo: 'ncomp/utils'
- pom: 'pom.xml'
- - 'oom_registrator':
- repo: 'oom/registrator'
- pom: 'pom.xml'
- - 'oparent':
- repo: 'oparent'
- pom: 'pom.xml'
- - 'policy_api':
- repo: 'policy/api'
- pom: 'pom.xml'
- - 'policy_common':
- repo: 'policy/common'
- pom: 'pom.xml'
- - 'policy_docker':
- repo: 'policy/docker'
- pom: 'pom.xml'
- - 'policy_drools-applications':
- repo: 'policy/drools-applications'
- pom: 'pom.xml'
- - 'policy_drools-pdp':
- repo: 'policy/drools-pdp'
- pom: 'pom.xml'
- - 'policy_engine':
- repo: 'policy/engine'
- pom: 'pom.xml'
- - 'policy_gui':
- repo: 'policy/gui'
- pom: 'pom.xml'
- - 'policy_pap':
- repo: 'policy/pap'
- pom: 'pom.xml'
- - 'policy_pdp':
- repo: 'policy/pdp'
- pom: 'pom.xml'
- - 'portal':
- repo: 'portal'
- pom: 'pom.xml'
- - 'portal_sdk_ecomp-sdk':
- repo: 'portal/sdk'
- pom: 'ecomp-sdk/pom.xml'
- - 'sdc':
- repo: 'sdc'
- pom: 'pom.xml'
- - 'sdc_jtosca':
- repo: 'sdc/jtosca'
- pom: 'pom.xml'
- - 'sdc_sdc-distribution-client':
- repo: 'sdc/sdc-distribution-client'
- pom: 'pom.xml'
- - 'sdc_sdc-docker-base':
- repo: 'sdc/sdc-docker-base'
- pom: 'pom.xml'
- - 'sdc_sdc-titan-cassandra':
- repo: 'sdc/sdc-titan-cassandra'
- pom: 'pom.xml'
- - 'sdc_sdc-tosca':
- repo: 'sdc/sdc-tosca'
- pom: 'pom.xml'
- - 'sdc_sdc-workflow-designer':
- repo: 'sdc/sdc-workflow-designer'
- pom: 'pom.xml'
- - 'sdnc_adaptors':
- repo: 'sdnc/adaptors'
- pom: 'pom.xml'
- - 'sdnc_core':
- repo: 'sdnc/core'
- pom: 'pom.xml'
- - 'sdnc_northbound':
- repo: 'sdnc/northbound'
- pom: 'pom.xml'
- - 'sdnc_oam':
- repo: 'sdnc/oam'
- pom: 'pom.xml'
- - 'sdnc_plugins':
- repo: 'sdnc/plugins'
- pom: 'pom.xml'
- - 'so':
- repo: 'so'
- pom: 'pom.xml'
- - 'so_libs':
- repo: 'so/libs'
- pom: 'pom.xml'
- - 'testsuite_heatbridge':
- repo: 'testsuite/heatbridge'
- pom: 'pom.xml'
- - 'ui_dmaapbc':
- repo: 'ui/dmaapbc'
- pom: 'pom.xml'
- - 'usecase-ui':
- repo: 'usecase-ui'
- pom: 'pom.xml'
- - 'usecase-ui_server':
- repo: 'usecase-ui/server'
- pom: 'pom.xml'
- - 'vfc_gvnfm_vnflcm':
- repo: 'vfc/gvnfm/vnflcm'
- pom: 'pom.xml'
- - 'vfc_gvnfm_vnfmgr':
- repo: 'vfc/gvnfm/vnfmgr'
- pom: 'pom.xml'
- - 'vfc_gvnfm_vnfres':
- repo: 'vfc/gvnfm/vnfres'
- pom: 'pom.xml'
- - 'vfc_nfvo_catalog':
- repo: 'vfc/nfvo/catalog'
- pom: 'pom.xml'
- - 'vfc_nfvo_driver_ems':
- repo: 'vfc/nfvo/driver/ems'
- pom: 'pom.xml'
- - 'vfc_nfvo_driver_sfc':
- repo: 'vfc/nfvo/driver/sfc'
- pom: 'pom.xml'
- - 'vfc_nfvo_driver_vnfm_gvnfm':
- repo: 'vfc/nfvo/driver/vnfm/gvnfm'
- pom: 'pom.xml'
- - 'vfc_nfvo_driver_vnfm_svnfm':
- repo: 'vfc/nfvo/driver/vnfm/svnfm'
- pom: 'pom.xml'
- - 'vfc_nfvo_lcm':
- repo: 'vfc/nfvo/lcm'
- pom: 'pom.xml'
- - 'vfc_nfvo_resmanagement':
- repo: 'vfc/nfvo/resmanagement'
- pom: 'pom.xml'
- - 'vfc_nfvo_wfengine':
- repo: 'vfc/nfvo/wfengine'
- pom: 'pom.xml'
- - 'vid':
- repo: 'vid'
- pom: 'pom.xml'
- - 'vid_asdcclient':
- repo: 'vid/asdcclient'
- pom: 'pom.xml'
- - 'vnfsdk_compliance_veslibrary_ves_clibrary_VESreporting_vFW':
- repo: 'vnfsdk/compliance'
- pom: 'veslibrary/ves_clibrary/VESreporting_vFW/pom.xml'
- - 'vnfsdk_compliance_veslibrary_ves_clibrary_VESreporting_vLB':
- repo: 'vnfsdk/compliance'
- pom: 'veslibrary/ves_clibrary/VESreporting_vLB/pom.xml'
- - 'vnfsdk_compliance_veslibrary_ves_javalibrary_evel_javalib2':
- repo: 'vnfsdk/compliance'
- pom: 'veslibrary/ves_javalibrary/evel_javalib2/pom.xml'
- - 'vnfsdk_functest':
- repo: 'vnfsdk/functest'
- pom: 'pom.xml'
- - 'vnfsdk_lctest':
- repo: 'vnfsdk/lctest'
- pom: 'pom.xml'
- - 'vnfsdk_pkgtools':
- repo: 'vnfsdk/pkgtools'
- pom: 'pom.xml'
- - 'vnfsdk_refrepo':
- repo: 'vnfsdk/refrepo'
- pom: 'pom.xml'
- - 'vnfsdk_validation':
- repo: 'vnfsdk/validation'
- pom: 'pom.xml'
+ - 'aaf_authz':
+ repo: 'aaf/authz'
+ pom: 'pom.xml'
+ - 'aaf_cadi':
+ repo: 'aaf/cadi'
+ pom: 'pom.xml'
+ - 'aaf_inno':
+ repo: 'aaf/inno'
+ pom: 'pom.xml'
+ - 'aaf_luaplugin':
+ repo: 'aaf/luaplugin'
+ pom: 'pom.xml'
+ - 'aai_aai-common':
+ repo: 'aai/aai-common'
+ pom: 'pom.xml'
+ - 'aai_aai-service':
+ repo: 'aai/aai-service'
+ pom: 'pom.xml'
+ - 'aai_babel':
+ repo: 'aai/babel'
+ pom: 'pom.xml'
+ - 'aai_champ':
+ repo: 'aai/champ'
+ pom: 'pom.xml'
+ - 'aai_data-router':
+ repo: 'aai/data-router'
+ pom: 'pom.xml'
+ - 'aai_esr-gui':
+ repo: 'aai/esr-gui'
+ pom: 'pom.xml'
+ - 'aai_esr-server':
+ repo: 'aai/esr-server'
+ pom: 'pom.xml'
+ - 'aai_gizmo':
+ repo: 'aai/gizmo'
+ pom: 'pom.xml'
+ - 'aai_logging-service':
+ repo: 'aai/logging-service'
+ pom: 'pom.xml'
+ - 'aai_model-loader':
+ repo: 'aai/model-loader'
+ pom: 'pom.xml'
+ - 'aai_resources':
+ repo: 'aai/resources'
+ pom: 'pom.xml'
+ - 'aai_rest-client':
+ repo: 'aai/rest-client'
+ pom: 'pom.xml'
+ - 'aai_router-core':
+ repo: 'aai/router-core'
+ pom: 'pom.xml'
+ - 'aai_search-data-service':
+ repo: 'aai/search-data-service'
+ pom: 'pom.xml'
+ - 'aai_sparky-be':
+ repo: 'aai/sparky-be'
+ pom: 'pom.xml'
+ - 'aai_sparky-fe':
+ repo: 'aai/sparky-fe'
+ pom: 'pom.xml'
+ - 'aai_traversal':
+ repo: 'aai/traversal'
+ pom: 'pom.xml'
+ - 'appc':
+ repo: 'appc'
+ pom: 'pom.xml'
+ - 'appc_deployment':
+ repo: 'appc/deployment'
+ pom: 'pom.xml'
+ - 'ccsdk_dashboard':
+ repo: 'ccsdk/dashboard'
+ pom: 'pom.xml'
+ - 'ccsdk_distribution':
+ repo: 'ccsdk/distribution'
+ pom: 'pom.xml'
+ - 'ccsdk_parent':
+ repo: 'ccsdk/parent'
+ pom: 'pom.xml'
+ - 'ccsdk_platform_blueprints':
+ repo: 'ccsdk/platform/blueprints'
+ pom: 'pom.xml'
+ - 'ccsdk_platform_nbapi':
+ repo: 'ccsdk/platform/nbapi'
+ pom: 'pom.xml'
+ - 'ccsdk_platform_plugins':
+ repo: 'ccsdk/platform/plugins'
+ pom: 'pom.xml'
+ - 'ccsdk_sli_adaptors':
+ repo: 'ccsdk/sli/adaptors'
+ pom: 'pom.xml'
+ - 'ccsdk_sli_core':
+ repo: 'ccsdk/sli/core'
+ pom: 'pom.xml'
+ - 'ccsdk_sli_northbound':
+ repo: 'ccsdk/sli/northbound'
+ pom: 'pom.xml'
+ - 'ccsdk_sli_plugins':
+ repo: 'ccsdk/sli/plugins'
+ pom: 'pom.xml'
+ - 'ccsdk_storage_esaas':
+ repo: 'ccsdk/storage/esaas'
+ pom: 'pom.xml'
+ - 'ccsdk_storage_pgaas':
+ repo: 'ccsdk/storage/pgaas'
+ pom: 'pom.xml'
+ - 'ccsdk_utils':
+ repo: 'ccsdk/utils'
+ pom: 'pom.xml'
+ - 'clamp':
+ repo: 'clamp'
+ pom: 'pom.xml'
+ - 'cli':
+ repo: 'cli'
+ pom: 'pom.xml'
+ - 'dcae_apod_analytics':
+ repo: 'dcae/apod/analytics'
+ pom: 'pom.xml'
+ - 'dcae_collectors_ves':
+ repo: 'dcae/collectors/ves'
+ pom: 'pom.xml'
+ - 'dcae_controller':
+ repo: 'dcae/controller'
+ pom: 'pom.xml'
+ - 'dcae_controller_analytics':
+ repo: 'dcae/controller/analytics'
+ pom: 'pom.xml'
+ - 'dcae_dcae-inventory':
+ repo: 'dcae/dcae-inventory'
+ pom: 'pom.xml'
+ - 'dcae_demo':
+ repo: 'dcae/demo'
+ pom: 'pom.xml'
+ - 'dcae_dmaapbc':
+ repo: 'dcae/dmaapbc'
+ pom: 'pom.xml'
+ - 'dcae_operation_utils':
+ repo: 'dcae/operation/utils'
+ pom: 'pom.xml'
+ - 'dcae_pgaas_cdf_src_cdf-prop-value_cdf-prop-value':
+ repo: 'dcae/pgaas'
+ pom: 'cdf/src/cdf-prop-value/cdf-prop-value/pom.xml'
+ - 'dcae_pgaas_cdf_src_cdf-prop-value_cdf-util':
+ repo: 'dcae/pgaas'
+ pom: 'cdf/src/cdf-prop-value/cdf-util/pom.xml'
+ - 'dcaegen2':
+ repo: 'dcaegen2'
+ pom: 'pom.xml'
+ - 'dcaegen2_analytics':
+ repo: 'dcaegen2/analytics'
+ pom: 'pom.xml'
+ - 'dcaegen2_analytics_tca':
+ repo: 'dcaegen2/analytics/tca'
+ pom: 'pom.xml'
+ - 'dcaegen2_collectors':
+ repo: 'dcaegen2/collectors'
+ pom: 'pom.xml'
+ - 'dcaegen2_collectors_snmptrap':
+ repo: 'dcaegen2/collectors/snmptrap'
+ pom: 'pom.xml'
+ - 'dcaegen2_collectors_ves':
+ repo: 'dcaegen2/collectors/ves'
+ pom: 'pom.xml'
+ - 'dcaegen2_deployments':
+ repo: 'dcaegen2/deployments'
+ pom: 'pom.xml'
+ - 'dcaegen2_platform':
+ repo: 'dcaegen2/platform'
+ pom: 'pom.xml'
+ - 'dcaegen2_platform_blueprints':
+ repo: 'dcaegen2/platform/blueprints'
+ pom: 'pom.xml'
+ - 'dcaegen2_platform_cdapbroker':
+ repo: 'dcaegen2/platform/cdapbroker'
+ pom: 'pom.xml'
+ - 'dcaegen2_platform_cli':
+ repo: 'dcaegen2/platform/cli'
+ pom: 'pom.xml'
+ - 'dcaegen2_platform_configbinding':
+ repo: 'dcaegen2/platform/configbinding'
+ pom: 'pom.xml'
+ - 'dcaegen2_platform_deployment-handler':
+ repo: 'dcaegen2/platform/deployment-handler'
+ pom: 'pom.xml'
+ - 'dcaegen2_platform_inventory-api':
+ repo: 'dcaegen2/platform/inventory-api'
+ pom: 'pom.xml'
+ - 'dcaegen2_platform_plugins':
+ repo: 'dcaegen2/platform/plugins'
+ pom: 'pom.xml'
+ - 'dcaegen2_platform_policy-handler':
+ repo: 'dcaegen2/platform/policy-handler'
+ pom: 'pom.xml'
+ - 'dcaegen2_platform_servicechange-handler':
+ repo: 'dcaegen2/platform/servicechange-handler'
+ pom: 'pom.xml'
+ - 'dcaegen2_utils':
+ repo: 'dcaegen2/utils'
+ pom: 'pom.xml'
+ - 'demo':
+ repo: 'demo'
+ pom: 'pom.xml'
+ - 'dmaap_buscontroller':
+ repo: 'dmaap/buscontroller'
+ pom: 'pom.xml'
+ - 'dmaap_datarouter':
+ repo: 'dmaap/datarouter'
+ pom: 'pom.xml'
+ - 'dmaap_dbcapi':
+ repo: 'dmaap/dbcapi'
+ pom: 'pom.xml'
+ - 'dmaap_messagerouter_dmaapclient':
+ repo: 'dmaap/messagerouter/dmaapclient'
+ pom: 'pom.xml'
+ - 'dmaap_messagerouter_messageservice':
+ repo: 'dmaap/messagerouter/messageservice'
+ pom: 'pom.xml'
+ - 'dmaap_messagerouter_mirroragent':
+ repo: 'dmaap/messagerouter/mirroragent'
+ pom: 'pom.xml'
+ - 'dmaap_messagerouter_msgrtr':
+ repo: 'dmaap/messagerouter/msgrtr'
+ pom: 'pom.xml'
+ - 'ecompsdkos_ecomp-sdk':
+ repo: 'ecompsdkos'
+ pom: 'ecomp-sdk/pom.xml'
+ - 'holmes_common':
+ repo: 'holmes/common'
+ pom: 'pom.xml'
+ - 'holmes_dsa':
+ repo: 'holmes/dsa'
+ pom: 'pom.xml'
+ - 'holmes_engine-management':
+ repo: 'holmes/engine-management'
+ pom: 'pom.xml'
+ - 'holmes_rule-management':
+ repo: 'holmes/rule-management'
+ pom: 'pom.xml'
+ - 'integration_test_mocks_sniroemulator':
+ repo: 'integration'
+ pom: 'test/mocks/sniroemulator/pom.xml'
+ - 'integration_version-manifest':
+ repo: 'integration'
+ pom: 'version-manifest/pom.xml'
+ - 'modeling_toscaparsers_javatoscachecker':
+ repo: 'modeling/toscaparsers'
+ pom: 'javatoscachecker/pom.xml'
+ - 'modeling_toscaparsers_nfvparser':
+ repo: 'modeling/toscaparsers'
+ pom: 'nfvparser/pom.xml'
+ - 'msb_apigateway':
+ repo: 'msb/apigateway'
+ pom: 'pom.xml'
+ - 'msb_discovery':
+ repo: 'msb/discovery'
+ pom: 'pom.xml'
+ - 'msb_java-sdk':
+ repo: 'msb/java-sdk'
+ pom: 'pom.xml'
+ - 'msb_swagger-sdk':
+ repo: 'msb/swagger-sdk'
+ pom: 'pom.xml'
+ - 'mso':
+ repo: 'mso'
+ pom: 'pom.xml'
+ - 'mso_libs':
+ repo: 'mso/libs'
+ pom: 'pom.xml'
+ - 'multicloud_framework':
+ repo: 'multicloud/framework'
+ pom: 'pom.xml'
+ - 'multicloud_openstack':
+ repo: 'multicloud/openstack'
+ pom: 'pom.xml'
+ - 'multicloud_openstack_vmware':
+ repo: 'multicloud/openstack/vmware'
+ pom: 'pom.xml'
+ - 'multicloud_openstack_windriver':
+ repo: 'multicloud/openstack/windriver'
+ pom: 'pom.xml'
+ - 'ncomp_cdap':
+ repo: 'ncomp/cdap'
+ pom: 'pom.xml'
+ - 'ncomp_core':
+ repo: 'ncomp/core'
+ pom: 'pom.xml'
+ - 'ncomp_docker':
+ repo: 'ncomp/docker'
+ pom: 'pom.xml'
+ - 'ncomp_maven':
+ repo: 'ncomp/maven'
+ pom: 'pom.xml'
+ - 'ncomp_openstack':
+ repo: 'ncomp/openstack'
+ pom: 'pom.xml'
+ - 'ncomp_sirius_manager':
+ repo: 'ncomp/sirius/manager'
+ pom: 'pom.xml'
+ - 'ncomp_utils':
+ repo: 'ncomp/utils'
+ pom: 'pom.xml'
+ - 'oom_registrator':
+ repo: 'oom/registrator'
+ pom: 'pom.xml'
+ - 'oparent':
+ repo: 'oparent'
+ pom: 'pom.xml'
+ - 'policy_api':
+ repo: 'policy/api'
+ pom: 'pom.xml'
+ - 'policy_common':
+ repo: 'policy/common'
+ pom: 'pom.xml'
+ - 'policy_docker':
+ repo: 'policy/docker'
+ pom: 'pom.xml'
+ - 'policy_drools-applications':
+ repo: 'policy/drools-applications'
+ pom: 'pom.xml'
+ - 'policy_drools-pdp':
+ repo: 'policy/drools-pdp'
+ pom: 'pom.xml'
+ - 'policy_engine':
+ repo: 'policy/engine'
+ pom: 'pom.xml'
+ - 'policy_gui':
+ repo: 'policy/gui'
+ pom: 'pom.xml'
+ - 'policy_pap':
+ repo: 'policy/pap'
+ pom: 'pom.xml'
+ - 'policy_pdp':
+ repo: 'policy/pdp'
+ pom: 'pom.xml'
+ - 'portal':
+ repo: 'portal'
+ pom: 'pom.xml'
+ - 'portal_sdk_ecomp-sdk':
+ repo: 'portal/sdk'
+ pom: 'ecomp-sdk/pom.xml'
+ - 'sdc':
+ repo: 'sdc'
+ pom: 'pom.xml'
+ - 'sdc_jtosca':
+ repo: 'sdc/jtosca'
+ pom: 'pom.xml'
+ - 'sdc_sdc-distribution-client':
+ repo: 'sdc/sdc-distribution-client'
+ pom: 'pom.xml'
+ - 'sdc_sdc-docker-base':
+ repo: 'sdc/sdc-docker-base'
+ pom: 'pom.xml'
+ - 'sdc_sdc-titan-cassandra':
+ repo: 'sdc/sdc-titan-cassandra'
+ pom: 'pom.xml'
+ - 'sdc_sdc-tosca':
+ repo: 'sdc/sdc-tosca'
+ pom: 'pom.xml'
+ - 'sdc_sdc-workflow-designer':
+ repo: 'sdc/sdc-workflow-designer'
+ pom: 'pom.xml'
+ - 'sdnc_adaptors':
+ repo: 'sdnc/adaptors'
+ pom: 'pom.xml'
+ - 'sdnc_core':
+ repo: 'sdnc/core'
+ pom: 'pom.xml'
+ - 'sdnc_northbound':
+ repo: 'sdnc/northbound'
+ pom: 'pom.xml'
+ - 'sdnc_oam':
+ repo: 'sdnc/oam'
+ pom: 'pom.xml'
+ - 'sdnc_plugins':
+ repo: 'sdnc/plugins'
+ pom: 'pom.xml'
+ - 'so':
+ repo: 'so'
+ pom: 'pom.xml'
+ - 'so_libs':
+ repo: 'so/libs'
+ pom: 'pom.xml'
+ - 'testsuite_heatbridge':
+ repo: 'testsuite/heatbridge'
+ pom: 'pom.xml'
+ - 'ui_dmaapbc':
+ repo: 'ui/dmaapbc'
+ pom: 'pom.xml'
+ - 'usecase-ui':
+ repo: 'usecase-ui'
+ pom: 'pom.xml'
+ - 'usecase-ui_server':
+ repo: 'usecase-ui/server'
+ pom: 'pom.xml'
+ - 'vfc_gvnfm_vnflcm':
+ repo: 'vfc/gvnfm/vnflcm'
+ pom: 'pom.xml'
+ - 'vfc_gvnfm_vnfmgr':
+ repo: 'vfc/gvnfm/vnfmgr'
+ pom: 'pom.xml'
+ - 'vfc_gvnfm_vnfres':
+ repo: 'vfc/gvnfm/vnfres'
+ pom: 'pom.xml'
+ - 'vfc_nfvo_catalog':
+ repo: 'vfc/nfvo/catalog'
+ pom: 'pom.xml'
+ - 'vfc_nfvo_driver_ems':
+ repo: 'vfc/nfvo/driver/ems'
+ pom: 'pom.xml'
+ - 'vfc_nfvo_driver_sfc':
+ repo: 'vfc/nfvo/driver/sfc'
+ pom: 'pom.xml'
+ - 'vfc_nfvo_driver_vnfm_gvnfm':
+ repo: 'vfc/nfvo/driver/vnfm/gvnfm'
+ pom: 'pom.xml'
+ - 'vfc_nfvo_driver_vnfm_svnfm':
+ repo: 'vfc/nfvo/driver/vnfm/svnfm'
+ pom: 'pom.xml'
+ - 'vfc_nfvo_lcm':
+ repo: 'vfc/nfvo/lcm'
+ pom: 'pom.xml'
+ - 'vfc_nfvo_resmanagement':
+ repo: 'vfc/nfvo/resmanagement'
+ pom: 'pom.xml'
+ - 'vfc_nfvo_wfengine':
+ repo: 'vfc/nfvo/wfengine'
+ pom: 'pom.xml'
+ - 'vid':
+ repo: 'vid'
+ pom: 'pom.xml'
+ - 'vid_asdcclient':
+ repo: 'vid/asdcclient'
+ pom: 'pom.xml'
+ - 'vnfsdk_compliance_veslibrary_ves_clibrary_VESreporting_vFW':
+ repo: 'vnfsdk/compliance'
+ pom: 'veslibrary/ves_clibrary/VESreporting_vFW/pom.xml'
+ - 'vnfsdk_compliance_veslibrary_ves_clibrary_VESreporting_vLB':
+ repo: 'vnfsdk/compliance'
+ pom: 'veslibrary/ves_clibrary/VESreporting_vLB/pom.xml'
+ - 'vnfsdk_compliance_veslibrary_ves_javalibrary_evel_javalib2':
+ repo: 'vnfsdk/compliance'
+ pom: 'veslibrary/ves_javalibrary/evel_javalib2/pom.xml'
+ - 'vnfsdk_functest':
+ repo: 'vnfsdk/functest'
+ pom: 'pom.xml'
+ - 'vnfsdk_lctest':
+ repo: 'vnfsdk/lctest'
+ pom: 'pom.xml'
+ - 'vnfsdk_pkgtools':
+ repo: 'vnfsdk/pkgtools'
+ pom: 'pom.xml'
+ - 'vnfsdk_refrepo':
+ repo: 'vnfsdk/refrepo'
+ pom: 'pom.xml'
+ - 'vnfsdk_validation':
+ repo: 'vnfsdk/validation'
+ pom: 'pom.xml'
diff --git a/bootstrap/vagrant-minimal-onap/HACKING.rst b/bootstrap/vagrant-minimal-onap/HACKING.rst
new file mode 100644
index 000000000..dcac46586
--- /dev/null
+++ b/bootstrap/vagrant-minimal-onap/HACKING.rst
@@ -0,0 +1,20 @@
+=========================
+ Development environment
+=========================
+
+Modifications made within this environment focus mainly on adjusting environment and override files
+[#]_ located in the `oom` repository. This repository is cloned to the Operator's machine and used
+during initial provisioning. Editing said files on the host machine (instead of using remote editor
+within `vagrant ssh operator` or Emacs TRAMP) requires synchronizing them from guest (operator) to
+host using reverse_ SSHFS [#]_.
+
+When the Operator's machine is up, the repository content is available in the `./oom` directory on
+the host. It vanishes if the machine is halted, but by then it is no longer relevant.
+
+.. [#] Used by `helm deploy` command
+.. [#] Other mechanisms_ considered: rsync (unidirectional, synchronized on machine reload) and NFS
+ (requires privilege_ elevation to edit host configuration files for synchronization)
+
+.. _reverse: https://github.com/dustymabe/vagrant-sshfs#options-specific-to-reverse-mounting-guesthost-mount
+.. _mechanisms: https://github.com/vagrant-libvirt/vagrant-libvirt#synced-folders
+.. _privilege: https://www.vagrantup.com/docs/synced-folders/nfs.html
diff --git a/bootstrap/vagrant-minimal-onap/README.rst b/bootstrap/vagrant-minimal-onap/README.rst
new file mode 100644
index 000000000..8a9850d4e
--- /dev/null
+++ b/bootstrap/vagrant-minimal-onap/README.rst
@@ -0,0 +1,137 @@
+=====================================================
+ ONAP Integration > Bootstrap > Vagrant Minimal ONAP
+=====================================================
+
+This directory contains a set of Vagrant scripts that will automatically set up:
+
+- Devstack,
+- RKE-based Kubernetes cluster (single control plane node and single worker plane node),
+- Operator's machine with configured tools (kubectl, helm).
+
+This is intended to show a beginning ONAP operator how to set up and configure an environment that
+can successfully deploy a minimal ONAP instance from scratch. Its main purposes are ONAP demos and
+proofs of concept. It is not intended to be used as a production ONAP environment.
+
+NOTE: the Devstack instance is NOT SECURED, with default credentials:
+
++-------+----------------+
+| User | Password |
++-------+----------------+
+| admin | default123456! |
++-------+----------------+
+| demo | default123456! |
++-------+----------------+
+
+
+Quickstart
+----------
+
+The following set of commands can be used to prepare a machine running Ubuntu 18.04 for this setup:
+
+.. code-block:: sh
+
+ sudo sed -i'.bak' 's/^#.*deb-src/deb-src/' /etc/apt/sources.list
+ sudo apt-get update
+ sudo apt-get build-dep vagrant ruby-libvirt
+ sudo apt-get install qemu libvirt-bin ebtables dnsmasq-base
+ sudo apt-get install libxslt-dev libxml2-dev libvirt-dev zlib1g-dev ruby-dev
+
+ sudo apt-get install sshfs
+
+ wget https://releases.hashicorp.com/vagrant/2.2.7/vagrant_2.2.7_x86_64.deb
+ sudo dpkg -i vagrant_2.2.7_x86_64.deb
+
+ vagrant plugin install vagrant-libvirt
+ vagrant plugin install vagrant-sshfs
+
+ sudo mv /etc/apt/sources.list{.bak,}
+ rm vagrant_2.2.7_x86_64.deb
+
+
+Requirements
+------------
+
++-------------+-----+-------+---------+
+| Machine | CPU | RAM | Storage |
++-------------+-----+-------+---------+
+| Operator | 1 | 1GiB | 32GiB |
++-------------+-----+-------+---------+
+| Devstack | 1 | 4GiB | 32GiB |
++-------------+-----+-------+---------+
+| K8s Control | 1 | 1GiB | 32GiB |
++-------------+-----+-------+---------+
+| K8s Worker | 8 | 64GiB | 64GiB |
++-------------+-----+-------+---------+
+| TOTAL | 11 | 70GiB | 160GiB |
++-------------+-----+-------+---------+
+
+The table above is based on current experience and may be subject to change.
+
+
+Prerequisites
+-------------
+
+Virtualisation provider
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Environment has been tested using libvirt_ provider with vagrant-libvirt_ plugin. Plugin
+documentation provides detailed `installation instructions`_ that will guide through the process.
+
+.. note::
+ Remember to uncomment `deb-src` repositories for `apt-get build-dep` step on Debian/Ubuntu.
+
+.. _libvirt: https://libvirt.org
+.. _vagrant-libvirt: https://github.com/vagrant-libvirt/vagrant-libvirt
+.. _`installation instructions`: https://github.com/vagrant-libvirt/vagrant-libvirt#installation
+
+Virtual machine manager
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Environment has been tested using latest Vagrant_ as of writing this documentation (`v2.2.6`_). Some
+features (e.g. triggers_) might not be supported on older versions.
+
+.. _Vagrant: https://www.vagrantup.com/downloads.html
+.. _`v2.2.6`: https://github.com/hashicorp/vagrant/blob/v2.2.6/CHANGELOG.md#226-october-14-2019
+.. _triggers: https://www.vagrantup.com/docs/triggers/
+
+Synced Folders
+~~~~~~~~~~~~~~
+
+Environment uses reverse-SSHFS-based file synchronization for applying non-upstream changes. This
+requires installing vagrant-sshfs_ plugin and presence of `sshfs` package on the host system.
+
+.. _vagrant-sshfs: https://github.com/dustymabe/vagrant-sshfs#install-plugin
+
+
+Running
+-------
+
+An additional `--provider` flag or setting the `VAGRANT_DEFAULT_PROVIDER` environment variable might
+be useful in case there are multiple providers available.
+
+.. note::
+   The following command should be executed within the directory where `Vagrantfile` is stored
+ (`integration/bootstrap/vagrant-minimal-onap`).
+
+.. code-block:: sh
+
+ vagrant up --provider=libvirt
+
+
+Usage
+-----
+
+Once ready (bringing up machines might initially take some time), tools for cluster management will
+be available on Operator's machine. It can be accessed by executing:
+
+.. code-block:: sh
+
+ vagrant ssh operator
+
+Although appropriate context is set for `kubectl` on login, when interacting with the cluster the
+`onap` namespace has to be explicitly specified. Example:
+
+.. code-block:: sh
+
+ # Operator's machine shell
+ kubectl -nonap get pods
diff --git a/bootstrap/vagrant-minimal-onap/Vagrantfile b/bootstrap/vagrant-minimal-onap/Vagrantfile
new file mode 100644
index 000000000..e0ddafd9d
--- /dev/null
+++ b/bootstrap/vagrant-minimal-onap/Vagrantfile
@@ -0,0 +1,345 @@
+# -*- mode: ruby -*-
+# -*- coding: utf-8 -*-
+
+host_ip = "192.168.121.1"
+operator_key = "${HOME}/.ssh/onap-key"
+vagrant_user = "vagrant"
+vagrant_password = "vagrant"
+synced_folder_main = "/vagrant"
+synced_folder_config = "#{synced_folder_main}/config"
+synced_folder_tools_config = "#{synced_folder_main}/tools/config"
+os_config = "#{synced_folder_config}/local.conf"
+os_env = "#{synced_folder_config}/dot_env"
+cluster_yml = "cluster.yml"
+apt_prefs_dir = "/etc/apt/apt.conf.d"
+apt_prefs = "95silent-approval"
+helm_timeout = "1800"
+helm_master_password = "vagrant"
+
+vm_memory = 1 * 1024
+vm_memory_os = 4 * 1024
+vm_memory_onap = 64 * 1024
+vm_cpu = 1
+vm_cpus = 8
+vm_box = "generic/ubuntu1804"
+vm_box_op = "generic/ubuntu2004"
+vm_disk = 32
+vm_disk_onap = 64
+vm_storage_pool = "default"
+
+operation = {
+ name: 'operator',
+ hostname: 'operator',
+ ip: '172.17.4.254',
+ box: vm_box_op,
+ cpus: vm_cpu,
+ memory: vm_memory,
+ disk: vm_disk,
+ pool: vm_storage_pool
+}
+devstack = {
+ name: 'devstack',
+ hostname: 'devstack',
+ ip: '172.17.4.200',
+ box: vm_box,
+ cpus: vm_cpu,
+ memory: vm_memory_os,
+ disk: vm_disk,
+ pool: vm_storage_pool
+}
+control = {
+ name: 'control',
+ hostname: 'control',
+ ip: '172.17.4.100',
+ box: vm_box,
+ cpus: vm_cpu,
+ memory: vm_memory,
+ disk: vm_disk,
+ pool: vm_storage_pool
+}
+worker = {
+ name: 'worker',
+ hostname: 'worker',
+ ip: '172.17.4.101',
+ box: vm_box,
+ cpus: vm_cpus,
+ memory: vm_memory_onap,
+ disk: vm_disk_onap,
+ pool: vm_storage_pool
+}
+
+cluster = [] << control << worker
+all = cluster.dup << operation << devstack
+
+operation_post_msg = "Run: \"vagrant provision #{operation[:name]} --provision-with=rke_up,setup_kubectl,setup_helm_cluster,setup_helm_repo,deploy_onap\" to complete ONAP deployment"
+
+$replace_dns = <<-SCRIPT
+ HOST_IP="$1"
+ rm -f /etc/resolv.conf # drop its dynamic management by systemd-resolved
+ echo nameserver "$HOST_IP" | tee /etc/resolv.conf
+SCRIPT
+
+$enable_ipv6 = <<-SCRIPT
+ sed -i'' 's/net.ipv6.conf.all.disable_ipv6.*$/net.ipv6.conf.all.disable_ipv6 = 0/' /etc/sysctl.conf
+ sysctl -p
+SCRIPT
+
+$setup_devstack = <<-SCRIPT
+ CONFIG="$1"
+ git clone https://opendev.org/openstack/devstack
+ cd devstack
+ cp "$CONFIG" .
+ ./stack.sh
+SCRIPT
+
+$add_to_docker_group = <<-SCRIPT
+ USER="$1"
+ echo "Adding ${USER} to 'docker' group"
+ usermod -aG docker "$USER"
+SCRIPT
+
+$setup_debconf = <<-SCRIPT
+ echo "Setting debconf frontend to noninteractive"
+ sed -i'.orig' '/^Config:/a Frontend: noninteractive' /etc/debconf.conf
+SCRIPT
+
+$install_sshpass = <<-SCRIPT
+ apt-get update
+ echo "Installing 'sshpass'"
+ apt-get install sshpass
+SCRIPT
+
+$install_make = <<-SCRIPT
+ apt-get update
+ echo "Installing 'make'"
+ apt-get install make
+SCRIPT
+
+$install_docker = <<-SCRIPT
+ apt-get update
+ echo "Setting up 'docker' repository"
+ apt-get install \
+ apt-transport-https \
+ ca-certificates \
+ curl \
+ gnupg-agent \
+ software-properties-common
+ curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
+ add-apt-repository \
+ "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
+ $(lsb_release -cs) \
+ stable"
+ apt-get update
+
+ echo "Installing 'docker'"
+ apt-get install docker-ce docker-ce-cli containerd.io
+SCRIPT
+
+$resize_disk = <<-SCRIPT
+ DEV=${1:-vda}
+ PART=${2:-3}
+ echo ", +" | sfdisk -N "$PART" "/dev/$DEV" --no-reread
+ partprobe
+ resize2fs "/dev/${DEV}${PART}"
+SCRIPT
+
+$generate_key = <<-SCRIPT
+ KEY_FILE="$1"
+ echo "Generating SSH key (${KEY_FILE})"
+ ssh-keygen -q -b 4096 -t rsa -f "$KEY_FILE" -N ""
+SCRIPT
+
+$deploy_key = <<-SCRIPT
+ KEY="$1"
+ USER="$2"
+ PASS="$PASSWORD"
+ IPS="$3"
+ echo "Deploying ${KEY} for ${USER}"
+ for ip in $IPS; do
+ echo "on ${ip}"
+ sshpass -p "$PASS" ssh-copy-id -o StrictHostKeyChecking=no -i "$KEY" "${USER}@${ip}"
+ done
+SCRIPT
+
+$link_dotfiles = <<-SCRIPT
+ SYNC_DIR="$1"
+ for rc in ${SYNC_DIR}/dot_*; do
+ src="$rc"
+ dst="${HOME}/.${rc##*dot_}"
+ echo "Symlinking ${src} to ${dst}"
+ ln -sf "$src" "$dst"
+ done
+SCRIPT
+
+$link_file = <<-SCRIPT
+ SYNC_DIR="$1"
+ FILE="$2"
+ src="${SYNC_DIR}/${FILE}"
+ dst="$3"
+ echo "Symlinking ${src} to ${dst}"
+ ln -sf "$src" "$dst"
+SCRIPT
+
+$rke_up = "rke up"
+$rke_down = "rke remove --force || true" # best effort
+
+$get_oom = <<-SCRIPT
+ BRANCH="${1:-7.0.0-ONAP}"
+ REPO="${2:-https://git.onap.org/oom}"
+ git clone -b "$BRANCH" "$REPO" --recurse-submodules
+SCRIPT
+
+$get_helm_plugins = "mkdir -p ${HOME}/.helm && cp -R ${HOME}/oom/kubernetes/helm/plugins/ ${HOME}/.helm"
+
+$setup_helm_cluster = <<-SCRIPT
+ export KUBECONFIG="${HOME}/.kube/config.onap"
+ kubectl config use-context onap
+ kubectl -n kube-system create serviceaccount tiller
+ kubectl create clusterrolebinding tiller --clusterrole=cluster-admin --serviceaccount=kube-system:tiller
+ helm init --stable-repo-url https://charts.helm.sh/stable --service-account tiller
+ kubectl -n kube-system rollout status deploy/tiller-deploy
+SCRIPT
+
+# FIXME: replace sleep command with helm repo readiness probe
+$setup_helm_repo = <<-SCRIPT
+ helm serve &
+ sleep 3
+ helm repo add local http://127.0.0.1:8879
+ make -e SKIP_LINT=TRUE -C ${HOME}/oom/kubernetes all
+SCRIPT
+
+$deploy_onap = <<-SCRIPT
+ OVERRIDE="${1:-${HOME}/oom/kubernetes/onap/resources/environments/core-onap.yaml}"
+
+ ENV="${2:-#{os_env}}"
+ export $(cat "$ENV" | xargs)
+
+ TIMEOUT="${3:-#{helm_timeout}}"
+ MASTER_PASSWORD="${4:-#{helm_master_password}}"
+
+ encrypt () {
+ KEY="${HOME}/oom/kubernetes/so/resources/config/mso/encryption.key"
+ echo -n "$1" \
+ | openssl aes-128-ecb -e -K `cat "$KEY"` -nosalt \
+ | xxd -c 256 -p
+ }
+
+ export OPENSTACK_ENCRYPTED_PASSWORD="$(encrypt $OPENSTACK_PASSWORD)"
+
+ export KUBECONFIG="${HOME}/.kube/config.onap"
+
+ helm deploy core local/onap --verbose \
+ --set global.masterPassword="$MASTER_PASSWORD" \
+ --namespace onap --timeout "$TIMEOUT" -f "$OVERRIDE"
+SCRIPT
+
+Vagrant.configure('2') do |config|
+ all.each do |machine|
+ config.vm.define machine[:name] do |config|
+ config.vm.box = machine[:box]
+ config.vm.hostname = machine[:hostname]
+
+ config.vm.provider :virtualbox do |v|
+ v.name = machine[:name]
+ v.memory = machine[:memory]
+ v.cpus = machine[:cpus]
+ end
+
+ config.vm.provider :libvirt do |v|
+ v.memory = machine[:memory]
+ v.cpus = machine[:cpus]
+ v.machine_virtual_size = machine[:disk] # set at VM creation
+ v.storage_pool_name = machine[:pool]
+ end
+
+ config.vm.network :private_network, ip: machine[:ip]
+ config.vm.provision "replace_dns", type: :shell, run: "always", inline: $replace_dns, args: host_ip
+
+ if machine[:name] == 'devstack'
+ config.vm.synced_folder ".", synced_folder_main, type: "rsync", rsync__exclude: "Vagrantfile"
+
+ config.vm.provision "enable_ipv6", type: :shell, run: "always", inline: $enable_ipv6
+ config.vm.provision "setup_devstack", type: :shell, privileged: false, inline: $setup_devstack, args: os_config
+ end
+
+ if machine[:name] == 'control'
+ config.vm.provision "customize_control", type: :shell, path: "tools/imported/openstack-k8s-controlnode.sh"
+ config.vm.provision "fix_groups_control", type: :shell, inline: $add_to_docker_group, args: vagrant_user
+ end
+
+ if machine[:name] == 'worker'
+ config.vm.provision "customize_worker", type: :shell, path: "tools/imported/openstack-k8s-workernode.sh"
+ config.vm.provision "fix_groups_worker", type: :shell, inline: $add_to_docker_group, args: vagrant_user
+ config.vm.provision "resize_disk", type: :shell, inline: $resize_disk
+ end
+
+ if machine[:name] == 'operator'
+ config.vm.synced_folder ".", synced_folder_main, type: "rsync", rsync__exclude: ["Vagrantfile", "operator"]
+ config.vm.synced_folder "~/.ssh", "/home/#{vagrant_user}/.ssh", type: "rsync", rsync__exclude: "authorized_keys"
+ config.vm.synced_folder "./operator", "/home/#{vagrant_user}", type: "sshfs", reverse: true, sshfs_opts_append: "-o nonempty"
+
+ config.vm.provision "setup_debconf", type: :shell, inline: $setup_debconf
+ config.vm.provision "link_apt_prefs", type: :shell, run: "always" do |s|
+ s.inline = $link_file
+ s.args = [synced_folder_tools_config, apt_prefs, apt_prefs_dir]
+ end
+ config.vm.provision "link_dotfiles_root", type: :shell, run: "always" do |s|
+ s.inline = $link_dotfiles
+ s.args = synced_folder_tools_config
+ end
+ config.vm.provision "link_dotfiles_user", type: :shell, run: "always" do |s|
+ s.privileged = false
+ s.inline = $link_dotfiles
+ s.args = synced_folder_tools_config
+ end
+
+ config.vm.provision "install_docker", type: :shell, inline: $install_docker
+ config.vm.provision "fix_groups_operator", type: :shell, inline: $add_to_docker_group, args: vagrant_user
+ config.vm.provision "install_sshpass", type: :shell, inline: $install_sshpass
+ config.vm.provision "generate_key", type: :shell, privileged: false, inline: $generate_key, args: operator_key
+
+ ips = ""
+ cluster.each { |node| ips << node[:ip] << " " }
+ config.vm.provision "deploy_key", type: :shell do |s|
+ s.privileged = false
+ s.inline = $deploy_key
+ s.args = [operator_key, vagrant_user, ips]
+ s.env = {'PASSWORD': vagrant_password}
+ end
+
+ config.vm.provision "get_rke", type: :shell, path: "tools/get_rke.sh"
+ config.vm.provision "link_cluster_yml", type: :shell, run: "always" do |s|
+ s.privileged = false
+ s.inline = $link_file
+ s.args = [synced_folder_config, cluster_yml, "$HOME"]
+ end
+
+ config.vm.post_up_message = operation_post_msg
+ config.vm.provision "rke_up", type: :shell, run: "never", privileged: false, inline: $rke_up
+ config.trigger.before :destroy do |trigger|
+ trigger.warn = "Removing cluster"
+ trigger.run_remote = {privileged: false, inline: $rke_down}
+ end
+
+ config.vm.provision "get_kubectl", type: :shell, path: "tools/get_kubectl.sh"
+ config.vm.provision "setup_kubectl", type: :shell, run: "never" do |s|
+ s.privileged = false
+ s.path = "tools/setup_kubectl.sh"
+ end
+ config.vm.provision "get_helm", type: :shell, path: "tools/get_helm.sh"
+ config.vm.provision "get_oom", type: :shell do |s|
+ s.privileged = false
+ s.inline = $get_oom
+ end
+ config.vm.provision "get_helm_plugins", type: :shell, privileged: false, inline: $get_helm_plugins
+ config.vm.provision "install_make", type: :shell, inline: $install_make
+ config.vm.provision "setup_helm_cluster", type: :shell, run: "never", privileged: false, inline: $setup_helm_cluster
+ config.vm.provision "setup_helm_repo", type: :shell, run: "never", privileged: false, inline: $setup_helm_repo
+ config.vm.provision "deploy_onap", type: :shell, run: "never" do |s|
+ s.privileged = false
+ s.inline = $deploy_onap
+ end
+ end
+ end
+ end
+end
diff --git a/bootstrap/vagrant-minimal-onap/config/cluster.yml b/bootstrap/vagrant-minimal-onap/config/cluster.yml
new file mode 100644
index 000000000..45ca6b6d5
--- /dev/null
+++ b/bootstrap/vagrant-minimal-onap/config/cluster.yml
@@ -0,0 +1,48 @@
+# An example of a Kubernetes cluster for ONAP
+ssh_key_path: &ssh_key_path "~/.ssh/onap-key"
+nodes:
+- address: 172.17.4.100
+ port: "22"
+ role:
+ - controlplane
+ - etcd
+ hostname_override: "onap-control-1"
+ user: vagrant
+ ssh_key_path: *ssh_key_path
+- address: 172.17.4.101
+ port: "22"
+ role:
+ - worker
+ hostname_override: "onap-k8s-1"
+ user: vagrant
+ ssh_key_path: *ssh_key_path
+services:
+ kube-api:
+ service_cluster_ip_range: 10.43.0.0/16
+ pod_security_policy: false
+ always_pull_images: false
+ kube-controller:
+ cluster_cidr: 10.42.0.0/16
+ service_cluster_ip_range: 10.43.0.0/16
+ kubelet:
+ cluster_domain: cluster.local
+ cluster_dns_server: 10.43.0.10
+ fail_swap_on: false
+network:
+ plugin: canal
+authentication:
+ strategy: x509
+ssh_agent_auth: false
+authorization:
+ mode: rbac
+ignore_docker_version: false
+kubernetes_version: "v1.15.11-rancher1-2"
+private_registries:
+- url: nexus3.onap.org:10001
+ user: docker
+ password: docker
+ is_default: true
+cluster_name: "onap"
+restore:
+ restore: false
+ snapshot_name: ""
diff --git a/bootstrap/vagrant-minimal-onap/config/dot_env b/bootstrap/vagrant-minimal-onap/config/dot_env
new file mode 100644
index 000000000..4e87c2a20
--- /dev/null
+++ b/bootstrap/vagrant-minimal-onap/config/dot_env
@@ -0,0 +1,5 @@
+OPENSTACK_USER_NAME='demo'
+OPENSTACK_REGION='RegionOne'
+OPENSTACK_KEYSTONE_URL='http://172.17.4.200/identity'
+OPENSTACK_TENANT_NAME='demo'
+OPENSTACK_PASSWORD='default123456!'
diff --git a/bootstrap/vagrant-minimal-onap/config/local.conf b/bootstrap/vagrant-minimal-onap/config/local.conf
new file mode 100644
index 000000000..f8912868a
--- /dev/null
+++ b/bootstrap/vagrant-minimal-onap/config/local.conf
@@ -0,0 +1,5 @@
+[[local|localrc]]
+ADMIN_PASSWORD=default123456!
+DATABASE_PASSWORD=$ADMIN_PASSWORD
+RABBIT_PASSWORD=$ADMIN_PASSWORD
+SERVICE_PASSWORD=$ADMIN_PASSWORD
diff --git a/bootstrap/vagrant-minimal-onap/operator/.gitignore b/bootstrap/vagrant-minimal-onap/operator/.gitignore
new file mode 100644
index 000000000..260825bbc
--- /dev/null
+++ b/bootstrap/vagrant-minimal-onap/operator/.gitignore
@@ -0,0 +1,5 @@
+# This file is intended to store (almost) empty directory for reverse-SSHFS mount point
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
diff --git a/bootstrap/vagrant-minimal-onap/tools/config/95silent-approval b/bootstrap/vagrant-minimal-onap/tools/config/95silent-approval
new file mode 100644
index 000000000..dadbfbd86
--- /dev/null
+++ b/bootstrap/vagrant-minimal-onap/tools/config/95silent-approval
@@ -0,0 +1,2 @@
+Quiet "1";
+APT::Get::Assume-Yes "true";
diff --git a/bootstrap/vagrant-minimal-onap/tools/config/dot_curlrc b/bootstrap/vagrant-minimal-onap/tools/config/dot_curlrc
new file mode 100644
index 000000000..ecf9792f5
--- /dev/null
+++ b/bootstrap/vagrant-minimal-onap/tools/config/dot_curlrc
@@ -0,0 +1,8 @@
+# Disable progress meter
+--silent
+# Show error messages
+--show-error
+# Fail silently on server errors
+--fail
+# Follow redirections
+--location
diff --git a/bootstrap/vagrant-minimal-onap/tools/config/dot_wgetrc b/bootstrap/vagrant-minimal-onap/tools/config/dot_wgetrc
new file mode 100644
index 000000000..ac472b77a
--- /dev/null
+++ b/bootstrap/vagrant-minimal-onap/tools/config/dot_wgetrc
@@ -0,0 +1,2 @@
+# Turn off output
+quiet = on
diff --git a/bootstrap/vagrant-minimal-onap/tools/get_customization_scripts.sh b/bootstrap/vagrant-minimal-onap/tools/get_customization_scripts.sh
new file mode 100755
index 000000000..a99b10288
--- /dev/null
+++ b/bootstrap/vagrant-minimal-onap/tools/get_customization_scripts.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+wget \
+ 'https://docs.onap.org/en/dublin/_downloads/4d5001735f875448b25f11e270d5bc5a/openstack-k8s-controlnode.sh' \
+ 'https://docs.onap.org/en/dublin/_downloads/53998444dcd1b6a8b7396f7f2d35d21e/openstack-k8s-workernode.sh'
diff --git a/test/security/k8s/tools/casablanca/get_ranchercli.sh b/bootstrap/vagrant-minimal-onap/tools/get_helm.sh
index 8ffbc5f58..1cd2fc42e 100755..100644
--- a/test/security/k8s/tools/casablanca/get_ranchercli.sh
+++ b/bootstrap/vagrant-minimal-onap/tools/get_helm.sh
@@ -1,9 +1,9 @@
#!/usr/bin/env bash
#
-# @file test/security/k8s/tools/casablanca/get_ranchercli.sh
+# @file bootstrap/vagrant-minimal-onap/tools/get_helm.sh
# @author Pawel Wieczorek <p.wieczorek2@samsung.com>
-# @brief Utility for obtaining Rancher CLI tool
+# @brief Utility for obtaining helm tool
#
# Dependencies:
@@ -15,11 +15,14 @@
# Script expects to be run with administrative privileges for accessing /usr/local/bin
#
# Usage:
-# # ./get_ranchercli.sh [VERSION [ARCH [SYSTEM]]]
+# # ./get_helm.sh [VERSION [ARCH [SYSTEM]]]
#
# Constants
-DEFAULT_VERSION='v0.6.12'
+BINARY='helm'
+INSTALL_DIR='/usr/local/bin/'
+
+DEFAULT_VERSION='v2.16.10'
DEFAULT_ARCH='amd64'
DEFAULT_SYSTEM='linux'
@@ -28,9 +31,9 @@ VERSION="${1:-$DEFAULT_VERSION}"
ARCH="${2:-$DEFAULT_ARCH}"
SYSTEM="${3:-$DEFAULT_SYSTEM}"
-ARCHIVE="rancher-${SYSTEM}-${ARCH}-${VERSION}.tar.gz"
-DIRECTORY="rancher-${VERSION}"
-URL="https://releases.rancher.com/cli/${VERSION}/${ARCHIVE}"
+URL="https://storage.googleapis.com/kubernetes-helm/${BINARY}-${VERSION}-${SYSTEM}-${ARCH}.tar.gz"
+ARCHIVE="${URL##*/}"
+DIR="${SYSTEM}-${ARCH}"
# Prerequistes
@@ -38,8 +41,8 @@ wget "$URL"
tar xf "$ARCHIVE"
# Installation
-mv "${DIRECTORY}/rancher" /usr/local/bin/
+mv "${DIR}/${BINARY}" "$INSTALL_DIR"
# Cleanup
-rmdir "$DIRECTORY"
rm "$ARCHIVE"
+rm -r "$DIR"
diff --git a/bootstrap/vagrant-minimal-onap/tools/get_kubectl.sh b/bootstrap/vagrant-minimal-onap/tools/get_kubectl.sh
new file mode 100755
index 000000000..752c286c2
--- /dev/null
+++ b/bootstrap/vagrant-minimal-onap/tools/get_kubectl.sh
@@ -0,0 +1,41 @@
+#!/usr/bin/env bash
+
+#
+# @file bootstrap/vagrant-minimal-onap/tools/get_kubectl.sh
+# @author Pawel Wieczorek <p.wieczorek2@samsung.com>
+# @brief Utility for obtaining kubectl tool
+#
+
+# Dependencies:
+# wget
+# coreutils
+#
+# Privileges:
+# Script expects to be run with administrative privileges for accessing /usr/local/bin
+#
+# Usage:
+# # ./get_kubectl.sh [VERSION [ARCH [SYSTEM]]]
+#
+
+# Constants
+BINARY='kubectl'
+INSTALL_DIR='/usr/local/bin/'
+
+DEFAULT_VERSION='v1.13.5'
+DEFAULT_ARCH='amd64'
+DEFAULT_SYSTEM='linux'
+
+# Variables
+VERSION="${1:-$DEFAULT_VERSION}"
+ARCH="${2:-$DEFAULT_ARCH}"
+SYSTEM="${3:-$DEFAULT_SYSTEM}"
+
+URL="https://storage.googleapis.com/kubernetes-release/release/${VERSION}/bin/${SYSTEM}/${ARCH}/${BINARY}"
+
+
+# Prerequisites
+wget "$URL"
+chmod +x "$BINARY"
+
+# Installation
+mv "$BINARY" "$INSTALL_DIR"
diff --git a/bootstrap/vagrant-minimal-onap/tools/get_rke.sh b/bootstrap/vagrant-minimal-onap/tools/get_rke.sh
new file mode 100755
index 000000000..1aed2dc02
--- /dev/null
+++ b/bootstrap/vagrant-minimal-onap/tools/get_rke.sh
@@ -0,0 +1,39 @@
+#!/usr/bin/env bash
+
+#
+# @file bootstrap/vagrant-minimal-onap/tools/get_rke.sh
+# @author Pawel Wieczorek <p.wieczorek2@samsung.com>
+# @brief Utility for obtaining RKE tool
+#
+
+# Dependencies:
+# wget
+# coreutils
+#
+# Privileges:
+# Script expects to be run with administrative privileges for accessing /usr/local/bin
+#
+# Usage:
+# # ./get_rke.sh [VERSION [ARCH [SYSTEM]]]
+#
+
+# Constants
+DEFAULT_VERSION='v1.0.6'
+DEFAULT_ARCH='amd64'
+DEFAULT_SYSTEM='linux'
+
+# Variables
+VERSION="${1:-$DEFAULT_VERSION}"
+ARCH="${2:-$DEFAULT_ARCH}"
+SYSTEM="${3:-$DEFAULT_SYSTEM}"
+
+BINARY="rke_${SYSTEM}-${ARCH}"
+URL="https://github.com/rancher/rke/releases/download/${VERSION}/${BINARY}"
+
+
+# Prerequisites
+wget "$URL"
+chmod +x "$BINARY"
+
+# Installation
+mv "$BINARY" "/usr/local/bin/${BINARY%%_*}" # this also renames binary to "rke"
diff --git a/bootstrap/vagrant-minimal-onap/tools/imported/openstack-k8s-controlnode.sh b/bootstrap/vagrant-minimal-onap/tools/imported/openstack-k8s-controlnode.sh
new file mode 100644
index 000000000..1d230c2da
--- /dev/null
+++ b/bootstrap/vagrant-minimal-onap/tools/imported/openstack-k8s-controlnode.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+
+DOCKER_VERSION=18.09.5
+
+apt-get update
+
+curl https://releases.rancher.com/install-docker/$DOCKER_VERSION.sh | sh
+mkdir -p /etc/systemd/system/docker.service.d/
+cat > /etc/systemd/system/docker.service.d/docker.conf << EOF
+[Service]
+ExecStart=
+ExecStart=/usr/bin/dockerd -H fd:// --insecure-registry=nexus3.onap.org:10001
+EOF
+
+sudo usermod -aG docker ubuntu
+
+systemctl daemon-reload
+systemctl restart docker
+apt-mark hold docker-ce
+
+IP_ADDR=`ip address |grep ens|grep inet|awk '{print $2}'| awk -F / '{print $1}'`
+HOSTNAME=`hostname`
+
+echo "$IP_ADDR $HOSTNAME" >> /etc/hosts
+
+docker login -u docker -p docker nexus3.onap.org:10001
+
+sudo apt-get install make -y
+
+
+exit 0
diff --git a/bootstrap/vagrant-minimal-onap/tools/imported/openstack-k8s-workernode.sh b/bootstrap/vagrant-minimal-onap/tools/imported/openstack-k8s-workernode.sh
new file mode 100644
index 000000000..3f32d050a
--- /dev/null
+++ b/bootstrap/vagrant-minimal-onap/tools/imported/openstack-k8s-workernode.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+DOCKER_VERSION=18.09.5
+
+apt-get update
+
+curl https://releases.rancher.com/install-docker/$DOCKER_VERSION.sh | sh
+mkdir -p /etc/systemd/system/docker.service.d/
+cat > /etc/systemd/system/docker.service.d/docker.conf << EOF
+[Service]
+ExecStart=
+ExecStart=/usr/bin/dockerd -H fd:// --insecure-registry=nexus3.onap.org:10001
+EOF
+
+sudo usermod -aG docker ubuntu
+
+systemctl daemon-reload
+systemctl restart docker
+apt-mark hold docker-ce
+
+IP_ADDR=`ip address |grep ens|grep inet|awk '{print $2}'| awk -F / '{print $1}'`
+HOSTNAME=`hostname`
+
+echo "$IP_ADDR $HOSTNAME" >> /etc/hosts
+
+docker login -u docker -p docker nexus3.onap.org:10001
+
+sudo apt-get install make -y
+
+# install nfs
+sudo apt-get install nfs-common -y
+
+
+exit 0
diff --git a/bootstrap/vagrant-minimal-onap/tools/setup_kubectl.sh b/bootstrap/vagrant-minimal-onap/tools/setup_kubectl.sh
new file mode 100755
index 000000000..bbd31a930
--- /dev/null
+++ b/bootstrap/vagrant-minimal-onap/tools/setup_kubectl.sh
@@ -0,0 +1,52 @@
+#!/usr/bin/env bash
+
+#
+# @file bootstrap/vagrant-minimal-onap/tools/setup_kubectl.sh
+# @author Pawel Wieczorek <p.wieczorek2@samsung.com>
+# @brief Utility for setting up kubectl tool for Dublin cluster
+#
+
+# Dependencies:
+# coreutils
+#
+# Privileges:
+# Script expects to be run with administrative privileges for accessing /usr/local/bin
+#
+# Usage:
+# # ./setup_kubectl.sh [RKE_CONFIG [KUBE_DIR [KUBE_CONFIG [KUBE_CONTEXT]]]]
+#
+
+# Constants
+BASHRC='.bashrc'
+BASH_ALIASES='.bash_aliases'
+USE_ONAP_ALIAS='useonap'
+
+DEFAULT_RKE_CONFIG='kube_config_cluster.yml'
+DEFAULT_KUBE_DIR='.kube'
+DEFAULT_KUBE_CONFIG='config.onap'
+DEFAULT_KUBE_CONTEXT='onap'
+
+# Variables
+RKE_CONFIG="${1:-$DEFAULT_RKE_CONFIG}"
+KUBE_DIR="${2:-${HOME}/${DEFAULT_KUBE_DIR}}"
+KUBE_CONFIG="${3:-$DEFAULT_KUBE_CONFIG}"
+KUBE_CONTEXT="${4:-$DEFAULT_KUBE_CONTEXT}"
+
+USE_ONAP="f() { export KUBECONFIG=${KUBE_DIR}/${KUBE_CONFIG}; kubectl config use-context ${KUBE_CONTEXT}; }; f"
+USE_ONAP_CONFIG="$(cat<<CONFIG
+
+# Use ONAP context for kubectl utility (defined in ${HOME}/${BASH_ALIASES})
+${USE_ONAP_ALIAS}
+CONFIG
+)"
+
+
+# Prerequisites
+mkdir -p "$KUBE_DIR"
+echo "alias ${USE_ONAP_ALIAS}='${USE_ONAP}'" >> "${HOME}/${BASH_ALIASES}"
+
+# Setup
+cp "$RKE_CONFIG" "${KUBE_DIR}/${KUBE_CONFIG}"
+
+# Post-setup
+echo "$USE_ONAP_CONFIG" >> "${HOME}/${BASHRC}"
diff --git a/bootstrap/vagrant-onap/README.md b/bootstrap/vagrant-onap/README.md
index 8a8d1bd9b..edfbe85dc 100644
--- a/bootstrap/vagrant-onap/README.md
+++ b/bootstrap/vagrant-onap/README.md
@@ -1,4 +1,3 @@
# Deprecated
ONAP on Vagrant tool's code has been migrated to [Devtool repo](https://git.onap.org/integration/devtool/)
-
diff --git a/deployment/Azure_ARM_Template/arm_cluster_deploy_beijing.json b/deployment/Azure_ARM_Template/arm_cluster_deploy_beijing.json
index ed4dc10c2..cc5885c34 100644
--- a/deployment/Azure_ARM_Template/arm_cluster_deploy_beijing.json
+++ b/deployment/Azure_ARM_Template/arm_cluster_deploy_beijing.json
@@ -6,7 +6,10 @@
"type": "string",
"defaultValue": "16.04.0-LTS",
"allowedValues": [
- "12.04.5-LTS","14.04.5-LTS","15.10","16.04.0-LTS"
+ "12.04.5-LTS",
+ "14.04.5-LTS",
+ "15.10",
+ "16.04.0-LTS"
],
"metadata": {
"description": "The OS"
@@ -23,7 +26,7 @@
},
"privateIPAddress": {
"type": "string",
- "defaultValue": "10.0.0.2",
+ "defaultValue": "10.0.0.2",
"metadata": {
"description": "Static Private IP will be assigned to the machine"
}
@@ -47,7 +50,7 @@
"Standard_E8_v3",
"Standard_E2_v3",
"Standard_D1",
- "Standard_D16s_v3",
+ "Standard_D16s_v3",
"Standard_D4_v2",
"Standard_D32s_v3",
"Standard_E16_v3",
@@ -65,11 +68,12 @@
"description": "Unique DNS label to assign DNS name"
}
}
-
},
"variables": {
"dnsLabelPrefix": "[concat('dns-',uniquestring(resourceGroup().id))]",
- "customData": [ "userdata.txt" ],
+ "customData": [
+ "userdata.txt"
+ ],
"vmName": "[concat('k8s-host-', substring(uniquestring(resourceGroup().id),0,4))]",
"adminUsername": "[concat('ubuntu')]",
"adminPassword": "Qwertyuiop@@1",
@@ -84,7 +88,7 @@
"subnetPrefix": "10.0.0.0/24",
"storageAccountType": "Standard_LRS",
"publicIPAddressType": "Dynamic",
- "publicIPAddressName": "nicLoop100",
+ "publicIPAddressName": "nicLoop100",
"virtualNetworkName": "[concat('VNET-',variables('vmName'))]",
"subnetRef": "[resourceId('Microsoft.Network/virtualNetworks/subnets/', variables('virtualNetworkName'), variables('subnetName'))]",
"networkSecurityGroupName": "[concat(variables('vmName'), '_obrien_local_nsg')]",
@@ -125,7 +129,7 @@
},
"properties": {
"securityRules": [
- {
+ {
"name": "SSHAllowAny",
"properties": {
"description": "SSHAllowAny",
@@ -181,7 +185,6 @@
"direction": "Inbound"
}
},
-
{
"name": "port_10249-10255_172",
"properties": {
@@ -306,7 +309,7 @@
"location": "[resourceGroup().location]",
"copy": {
"name": "nicLoop",
- "count": "[parameters('numberOfVms')]"
+ "count": "[parameters('numberOfVms')]"
},
"properties": {
"publicIPAllocationMethod": "Dynamic",
@@ -378,45 +381,51 @@
}
}
},
- {
- "apiVersion": "2015-06-15",
- "type": "Microsoft.Compute/virtualMachines/extensions",
- "name": "[concat(variables('vmName'), '0','/onap')]",
- "location": "[resourceGroup().location]",
- "dependsOn": ["virtualMachineLoop"],
- "properties": {
+ {
+ "apiVersion": "2015-06-15",
+ "type": "Microsoft.Compute/virtualMachines/extensions",
+ "name": "[concat(variables('vmName'), '0','/onap')]",
+ "location": "[resourceGroup().location]",
+ "dependsOn": [
+ "virtualMachineLoop"
+ ],
+ "properties": {
"publisher": "Microsoft.Azure.Extensions",
"type": "CustomScript",
"typeHandlerVersion": "2.0",
"autoUpgradeMinorVersion": true,
"settings": {
- "fileUris": [ "https://git.onap.org/integration/plain/deployment/Azure_ARM_Template/scripts/azure-rancher-server.sh" ],
- "commandToExecute": "[concat('./' , parameters('masterScriptName'),' ',reference(variables('publicIPAddressName')).dnsSettings.fqdn,' ',parameters('privateIPAddress'),' ',parameters('numberOfVms'))]"
- }
+ "fileUris": [
+ "https://git.onap.org/integration/plain/deployment/Azure_ARM_Template/scripts/azure-rancher-server.sh"
+ ],
+ "commandToExecute": "[concat('./' , parameters('masterScriptName'),' ',reference(variables('publicIPAddressName')).dnsSettings.fqdn,' ',parameters('privateIPAddress'),' ',parameters('numberOfVms'))]"
}
- },
- {
- "apiVersion": "2015-06-15",
- "type": "Microsoft.Compute/virtualMachines/extensions",
- "name": "[concat(variables('vmName'), copyindex(1),'/onap')]",
- "copy": {
+ }
+ },
+ {
+ "apiVersion": "2015-06-15",
+ "type": "Microsoft.Compute/virtualMachines/extensions",
+ "name": "[concat(variables('vmName'), copyindex(1),'/onap')]",
+ "copy": {
"name": "virtualMachineExtnLoop",
- "count": "[sub(parameters('numberOfVms'),1)]"
+ "count": "[sub(parameters('numberOfVms'),1)]"
},
- "location": "[resourceGroup().location]",
- "dependsOn": [
- "virtualMachineLoop"
- ],
- "properties": {
+ "location": "[resourceGroup().location]",
+ "dependsOn": [
+ "virtualMachineLoop"
+ ],
+ "properties": {
"publisher": "Microsoft.Azure.Extensions",
"type": "CustomScript",
"typeHandlerVersion": "2.0",
"autoUpgradeMinorVersion": true,
"settings": {
- "fileUris": [ "https://git.onap.org/integration/plain/deployment/Azure_ARM_Template/scripts/azure-k8s-node.sh" ],
- "commandToExecute": "[concat('./' , parameters('nodeScriptName'),' ',concat(parameters('privateIPAddress'),'0'))]"
- }
+ "fileUris": [
+ "https://git.onap.org/integration/plain/deployment/Azure_ARM_Template/scripts/azure-k8s-node.sh"
+ ],
+ "commandToExecute": "[concat('./' , parameters('nodeScriptName'),' ',concat(parameters('privateIPAddress'),'0'))]"
}
- }
+ }
+ }
]
}
diff --git a/deployment/Azure_ARM_Template/arm_cluster_deploy_parameters.json b/deployment/Azure_ARM_Template/arm_cluster_deploy_parameters.json
index 081ef7ffc..a823bb6ea 100644
--- a/deployment/Azure_ARM_Template/arm_cluster_deploy_parameters.json
+++ b/deployment/Azure_ARM_Template/arm_cluster_deploy_parameters.json
@@ -2,10 +2,20 @@
"$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
"contentVersion": "1.0.0.0",
"parameters": {
- "numberOfVms": { "value": 12 },
- "vmSize": { "value": "Standard_D4_v2" },
- "masterScriptName": { "value": "azure-rancher-server.sh" },
- "nodeScriptName": { "value": "azure-k8s-node.sh" },
- "dnslabel": { "value": "ranchercluster" }
+ "numberOfVms": {
+ "value": 12
+ },
+ "vmSize": {
+ "value": "Standard_D4_v2"
+ },
+ "masterScriptName": {
+ "value": "azure-rancher-server.sh"
+ },
+ "nodeScriptName": {
+ "value": "azure-k8s-node.sh"
+ },
+ "dnslabel": {
+ "value": "ranchercluster"
+ }
}
}
diff --git a/deployment/README.md b/deployment/README.md
index e1b8b8748..a2f432a28 100644
--- a/deployment/README.md
+++ b/deployment/README.md
@@ -1,8 +1,8 @@
-
# ONAP Integration - Deployment
## Description
-* Heat templates and scripts for automatic deployments for system testing and continuous integration test flows
-* Sample OPENRC and heat environment settings files for ONAP deployment in ONAP External Labs
-
+- Heat templates and scripts for automatic deployments for system testing and continuous integration test flows
+- Sample OPENRC and heat environment settings files for ONAP deployment in ONAP External Labs
+- Ansible roles and sample playbooks for automatic deployments for system testing and continuous
+ integration test flows (if Heat is unavailable)
diff --git a/deployment/aks/README.md b/deployment/aks/README.md
index 4eb37cbc3..383fb9982 100644
--- a/deployment/aks/README.md
+++ b/deployment/aks/README.md
@@ -6,14 +6,12 @@ Copyright 2019 AT&T Intellectual Property. All rights reserved.
This file is licensed under the CREATIVE COMMONS ATTRIBUTION 4.0 INTERNATIONAL LICENSE
-Full license text at https://creativecommons.org/licenses/by/4.0/legalcode
-
+Full license text at <https://creativecommons.org/licenses/by/4.0/legalcode>
## About
ONAP on AKS will orchestrate an Azure Kubernetes Service (AKS) deployment, a DevStack deployment, an ONAP + NFS deployment, as well as configuration to link the Azure resources together. After ONAP is installed, a cloud region will also be added to ONAP with the new DevStack details that can be used to instantiate a VNF.
-
### Pre-Reqs
The following software is required to be installed:
@@ -22,12 +20,11 @@ The following software is required to be installed:
- [helm](https://helm.sh/docs/using_helm/)
- [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/)
- [azure command line](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli-apt?view=azure-cli-latest)
-- make, openjdk-8-jdk, openjdk-8-jre (``apt-get update && apt-get install make openjdk-8-jre openjdk-8-jdk``)
+- make, openjdk-8-jdk, openjdk-8-jre (`apt-get update && apt-get install make openjdk-8-jre openjdk-8-jdk`)
Check the [OOM Cloud Setup Guide](https://docs.onap.org/en/latest/submodules/oom.git/docs/oom_cloud_setup_guide.html#cloud-setup-guide-label) for the versions of kubectl and helm to use.
-After installing the above software, run ``az login`` and follow the instructions to finalize the azure command line installation. **You'll need to be either an owner or co-owner of the azure subscription, or some of the deployment steps may not complete successfully**. If you have multiple azure subscriptions, use ``az account set --subscription <subscription name>`` prior to running ``az login`` so that resources are deployed to the correct subscription. See [the azure docs](https://docs.microsoft.com/en-us/cli/azure/get-started-with-azure-cli?view=azure-cli-latest) for more details on using the azure command line.
-
+After installing the above software, run `az login` and follow the instructions to finalize the azure command line installation. **You'll need to be either an owner or co-owner of the azure subscription, or some of the deployment steps may not complete successfully**. If you have multiple azure subscriptions, use `az account set --subscription <subscription name>` prior to running `az login` so that resources are deployed to the correct subscription. See [the azure docs](https://docs.microsoft.com/en-us/cli/azure/get-started-with-azure-cli?view=azure-cli-latest) for more details on using the azure command line.
### The following resources will be created in Azure
@@ -35,14 +32,11 @@ After installing the above software, run ``az login`` and follow the instruction
- VM running NFS server application
- VM running latest DevStack version
-
## Usage
-
### cloud.sh
-
-``cloud.sh`` is the main driver script, and deploys a Kubernetes Cluster (AKS), DevStack, NFS, and bootstraps ONAP with configuration needed to instantiate a VNF. The script creates ONAP in "about" an hour.
+`cloud.sh` is the main driver script, and deploys a Kubernetes Cluster (AKS), DevStack, NFS, and bootstraps ONAP with configuration needed to instantiate a VNF. The script creates ONAP in "about" an hour.
```
@@ -54,6 +48,8 @@ options:
-f, --no-prompt executes with no prompt for confirmation
-n, --no-install don't install ONAP
-o, --override create integration override for robot configuration
+-d, --no-validate don't validate pre-reqs before executing deployment
+-p, --post-install execute post-install scripts
-h, --help provide brief overview of script
This script deploys a cloud environment in Azure.
@@ -74,17 +70,14 @@ It:
$ ./cloud.sh --override
```
-
### cloud.conf
+This file contains the parameters that will be used when executing `cloud.sh`. The parameter `BUILD` will be generated at runtime.
-This file contains the parameters that will be used when executing ``cloud.sh``. The parameter ``BUILD`` will be generated at runtime.
-
-For an example with all of the parameters filled out, check [here](./cloud.conf.example). You can copy this and modify to suit your deployment. The parameters that MUST be modified from ``cloud.conf.example`` are ``USER_PUBLIC_IP_PREFIX`` and ``BUILD_DIR``.
+For an example with all of the parameters filled out, check [here](./cloud.conf.example). You can copy this and modify to suit your deployment. The parameters that MUST be modified from `cloud.conf.example` are `USER_PUBLIC_IP_PREFIX` and `BUILD_DIR`.
All other parameters will work out of the box, however you can also customize them to suit your own deployment. See below for a description of the available parameters and how they're used.
-
```
# The variable $BUILD will be generated dynamically when this file is sourced
@@ -133,6 +126,7 @@ DEVSTACK_ADMIN_USER= User name that will be created on DevStack VM. Use th
DEVSTACK_VNET_NAME= Name of VNET that DevStack VM will attach to.
DEVSTACK_SUBNET_NAME= Name of SUBNET attached to DEVSTACK_VNET_NAME.
DEVSTACK_DISK_SIZE= Size of OS Disk for DevStack VM.
+DEVSTACK_BRANCH= Branch to use when installing devstack.
OPENSTACK_USER= User name that will be added to OpenStack after devstack has finished installing. This is also the username that will be used to create a cloud site in ONAP SO.
OPENSTACK_PASS= Password to use for OPENSTACK_USER.
OPENSTACK_TENANT= Tenant name that will be added to OpenStack after devstack has finished installing. This is also the username that will be used to create a cloud site in ONAP SO.
@@ -163,30 +157,64 @@ DOCKER_REPOSITORY= Image repository url to pull ONAP images to use for i
### Integration Override
-When you execute ``cloud.sh``, you have the option to create an ``integration-override.yaml`` file that will be used during ``helm deploy ...`` to install ONAP. This is done by passing the ``--override`` flag to cloud.sh.
-
-The template used to create the override file is ``./util/integration-override.template``, and is invoked by ``./util/create_robot_config.sh``. It's very possible this isn't complete or sufficient for how you'd like to customize your deployment. You can update the template file and/or the script to provide additional customization for your ONAP install.
+When you execute `cloud.sh`, you have the option to create an `integration-override.yaml` file that will be used during `helm deploy ...` to install ONAP. This is done by passing the `--override` flag to cloud.sh.
+The template used to create the override file is `./util/integration-override.template`, and is invoked by `./util/create_robot_config.sh`. It's very possible this isn't complete or sufficient for how you'd like to customize your deployment. You can update the template file and/or the script to provide additional customization for your ONAP install.
### OOM Overrides
-In ``cloud.conf``, there's a parameter ``OOM_OVERRIDES`` available that's used to provide command line overrides to ``helm deploy``. This uses the standard helm syntax, so if you're using it the value should look like ``OOM_OVERRIDES="--set vid.enabled=false,so.image=abc"``. If you don't want to override anything, just set this value to an empty string.
+In `cloud.conf`, there's a parameter `OOM_OVERRIDES` available that's used to provide command line overrides to `helm deploy`. This uses the standard helm syntax, so if you're using it the value should look like `OOM_OVERRIDES="--set vid.enabled=false,so.image=abc"`. If you don't want to override anything, just set this value to an empty string.
+### Pre Install
-## Post Deployment
+When you run `cloud.sh` it will execute `pre_install.sh` first, which checks a few things:
+
+- It checks you have the correct pre-reqs installed. So, it'll make sure you have kubectl, azure cli, helm, etc...
+- It checks that the version of kubernetes in `cloud.conf` is available in Azure.
+- It checks that the version of azure cli is >= the baselined version (you can check this version by looking at the top of `pre_install.sh`). The Azure cli introduces changes in minor versions that aren't backwards compatible.
+- It checks that the version of kubectl installed is at **MOST** 1 minor version different than the version of kubernetes in `cloud.conf`.
+
+If you would like to skip `pre_install.sh` and run the deployment anyways, pass the flag `--no-validate` to `cloud.sh`, like this:
+
+```
+$ ./cloud.sh --no-validate
+
+```
+
+### Post Install
-After ONAP and DevStack are deployed, there will be a ``deployment.notes`` file with instructions on how to access the various components. The ``BUILD_DIR`` specified in ``cloud.conf`` will contain a new ssh key, kubeconfig, and other deployment artifacts as well.
+After ONAP is deployed, you have the option of executing an arbitrary set of post-install scripts. This is enabled by passing the `--post-install` flag to `cloud.sh`, like this:
-All of the access information below will be in ``deployment.notes``.
+```
+$ ./cloud.sh --post-install
+
+```
+
+These post-install scripts need to be executable from the command line, and will be provided two parameters that they can use to perform their function:
+
+- /path/to/onap.conf : This is created during the deployment, and has various ONAP and OpenStack parameters.
+- /path/to/cloud.conf : this is the same `cloud.conf` that's used during the original deployment.
+
+Your post-install scripts can disregard these parameters, or source them and use the parameters as-needed.
+
+Included with this repo is one post-install script (`000_bootstrap_onap.sh`) that bootstraps AAI, VID, and SO with cloud and customer details so that ONAP is ready to model and instantiate a VNF.
+
+In order to include other custom post-install scripts, simply put them in the `post-install` directory, and make sure to set its mode to executable. They are executed in alphabetical order.
+
+## Post Deployment
+
+After ONAP and DevStack are deployed, there will be a `deployment.notes` file with instructions on how to access the various components. The `BUILD_DIR` specified in `cloud.conf` will contain a new ssh key, kubeconfig, and other deployment artifacts as well.
+All of the access information below will be in `deployment.notes`.
### Kubernetes Access
To access the Kubernetes dashboard:
-``az aks browse --resource-group $AKS_RESOURCE_GROUP_NAME --name $AKS_NAME``
+`az aks browse --resource-group $AKS_RESOURCE_GROUP_NAME --name $AKS_NAME`
To use kubectl:
+
```
export KUBECONFIG=$BUILD_DIR/kubeconfig
@@ -199,28 +227,25 @@ kubectl ...
To access Horizon:
Find the public IP address via the Azure portal, and go to
-``http://$DEVSTACK_PUBLIC_IP``
+`http://$DEVSTACK_PUBLIC_IP`
SSH access to DevStack node:
-``ssh -i $BUILD_DIR/id_rsa ${DEVSTACK_ADMIN_USER}@${DEVSTACK_PUBLIC_IP}``
+`ssh -i $BUILD_DIR/id_rsa ${DEVSTACK_ADMIN_USER}@${DEVSTACK_PUBLIC_IP}`
OpenStack cli access:
There's an openstack cli pod that's created in the default kubernetes default namespace. To use it, run:
-``kubectl exec $OPENSTACK_CLI_POD -- sh -lc "<openstack command>"``
-
+`kubectl exec $OPENSTACK_CLI_POD -- sh -lc "<openstack command>"`
### NFS Access
-``ssh -i $BUILD_DIR/id_rsa ${NFS_ADMIN_USER}@${NFS_PUBLIC_IP}``
-
+`ssh -i $BUILD_DIR/id_rsa ${NFS_ADMIN_USER}@${NFS_PUBLIC_IP}`
## Deleting the deployment
-After deployment, there will be a script named ``$BUILD_DIR/clean.sh`` that can be used to delete the resource groups that were created during deployment. This script is not required; you can always just navigate to the Azure portal to delete the resource groups manually.
-
+After deployment, there will be a script named `$BUILD_DIR/clean.sh` that can be used to delete the resource groups that were created during deployment. This script is not required; you can always just navigate to the Azure portal to delete the resource groups manually.
## Running the scripts separately
@@ -228,7 +253,6 @@ Below are instructions for how to create DevStack, NFS, or AKS cluster separatel
**NOTE: The configuration to link components together (network peering, route table modification, NFS setup, etc...) and the onap-bootstrap will not occur if you run the scripts separately**
-
### DevStack creation
```
@@ -262,7 +286,6 @@ additional options:
```
-
### NFS Creation
```
@@ -292,7 +315,6 @@ additional options:
```
-
### AKS Creation
```
diff --git a/deployment/aks/cloud.conf b/deployment/aks/cloud.conf
index 0865d7ae6..6223670e5 100644
--- a/deployment/aks/cloud.conf
+++ b/deployment/aks/cloud.conf
@@ -58,6 +58,7 @@ DEVSTACK_ADMIN_USER=
DEVSTACK_VNET_NAME=
DEVSTACK_SUBNET_NAME=
DEVSTACK_DISK_SIZE=
+DEVSTACK_BRANCH=
OPENSTACK_USER=
OPENSTACK_PASS=
OPENSTACK_TENANT=
@@ -83,3 +84,5 @@ OOM_BRANCH=
CHART_VERSION=
OOM_OVERRIDES=
DOCKER_REPOSITORY=
+MASTER_PASSWORD=
+
diff --git a/deployment/aks/cloud.conf.example b/deployment/aks/cloud.conf.example
index d3f5087ba..71bc7bf03 100644
--- a/deployment/aks/cloud.conf.example
+++ b/deployment/aks/cloud.conf.example
@@ -58,6 +58,7 @@ DEVSTACK_ADMIN_USER="onap_user"
DEVSTACK_VNET_NAME=$BUILD"-DEVSTACKVNET"
DEVSTACK_SUBNET_NAME=$BUILD"-DEVSTACKSUBNET"
DEVSTACK_DISK_SIZE=512
+DEVSTACK_BRANCH="stable/stein"
OPENSTACK_USER="onap_user"
OPENSTACK_PASS="supersecret"
OPENSTACK_TENANT="onap_project"
@@ -83,3 +84,4 @@ OOM_BRANCH="master"
CHART_VERSION="5.0.0"
OOM_OVERRIDES=""
DOCKER_REPOSITORY="nexus3.onap.org:10001"
+MASTER_PASSWORD=test123
diff --git a/deployment/aks/cloud.sh b/deployment/aks/cloud.sh
index ac471a91b..a1cbfe888 100755
--- a/deployment/aks/cloud.sh
+++ b/deployment/aks/cloud.sh
@@ -17,6 +17,8 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
NO_PROMPT=0
NO_INSTALL=0
+NO_VALIDATE=0
+POST_INSTALL=0
OVERRIDE=0
OPENSTACK_CLI_POD="os-cli-0"
@@ -38,6 +40,8 @@ while test $# -gt 0; do
echo "-f, --no-prompt executes with no prompt for confirmation"
echo "-n, --no-install don't install ONAP"
echo "-o, --override create integration override for robot configuration"
+          echo "-d, --no-validate don't validate pre-reqs before executing deployment"
+ echo "-p, --post-install execute post-install scripts"
echo "-h, --help provide brief overview of script"
echo " "
echo "This script deploys a cloud environment in Azure."
@@ -62,6 +66,14 @@ while test $# -gt 0; do
shift
OVERRIDE=1
;;
+ -d|--no-validate)
+ shift
+ NO_VALIDATE=1
+ ;;
+ -p|--post-install)
+ shift
+ POST_INSTALL=1
+ ;;
*)
echo "Unknown Argument. Try running with --help ."
exit 0
@@ -69,6 +81,13 @@ while test $# -gt 0; do
esac
done
+if [ $NO_VALIDATE = 0 ]; then
+ $DIR/pre_install.sh "$AKS_K8_VERSION" "$LOCATION"
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+fi
+
cat <<EOF
Here are the parameters to be used in this build:
@@ -114,6 +133,7 @@ DEVSTACK_ADMIN_USER = "$DEVSTACK_ADMIN_USER"
DEVSTACK_VNET_NAME = "$DEVSTACK_VNET_NAME"
DEVSTACK_SUBNET_NAME = "$DEVSTACK_SUBNET_NAME"
DEVSTACK_DISK_SIZE = "$DEVSTACK_DISK_SIZE"
+DEVSTACK_BRANCH = "$DEVSTACK_BRANCH"
OPENSTACK_USER = "$OPENSTACK_USER"
OPENSTACK_PASS = "$OPENSTACK_PASS"
OPENSTACK_TENANT = "$OPENSTACK_TENANT"
@@ -139,6 +159,7 @@ OOM_BRANCH = "$OOM_BRANCH"
CHART_VERSION = "$CHART_VERSION"
OOM_OVERRIDES = "$OOM_OVERRIDES"
DOCKER_REPOSITORY = "$DOCKER_REPOSITORY"
+MASTER_PASSWORD = "$MASTER_PASSWORD"
EOF
@@ -190,6 +211,7 @@ $DIR/create_devstack.sh --name "$DEVSTACK_NAME" \
--openstack-password "$OPENSTACK_PASS" \
--openstack-tenant "$OPENSTACK_TENANT" \
--image-list "$IMAGE_LIST" \
+ --devstack-branch "$DEVSTACK_BRANCH" \
--no-prompt
@@ -317,6 +339,7 @@ $DIR/util/create_openstack_cli.sh "$BUILD_DIR/kubeconfig" \
"$BUILD_DIR/openstack_rc" \
"$OPENSTACK_CLI_POD"
+
if [ $OVERRIDE = 1 ]; then
$DIR/util/create_integration_override.sh "$BUILD_DIR" \
@@ -325,52 +348,27 @@ $DIR/util/create_integration_override.sh "$BUILD_DIR" \
"$DOCKER_REPOSITORY" \
"$NFS_PRIVATE_IP" \
"$AKS_PUBLIC_IP_ADDRESS" \
- "$BUILD_DIR/kubeconfig"
+ "$BUILD_DIR/kubeconfig" \
+ "$MASTER_PASSWORD"
fi
+
if [ $NO_INSTALL = 0 ]; then
### Starting OOM install ###
echo "Installing ONAP..."
$DIR/create_onap.sh "$BUILD" \
"$BUILD_DIR/kubeconfig" \
- "$NFS_PRIVATE_IP" \
"$OOM_BRANCH" \
"$BUILD_DIR" \
"$CHART_VERSION" \
- "$OOM_OVERRIDES"
-
-### Starting OOM install ###
-echo "Configuring ONAP..."
-
-cat > "$BUILD_DIR/onap.conf" <<EOF
-export CLLI=$CLLI
-export CLOUD_OWNER=$CLOUD_OWNER
-export CLOUD_REGION=$CLOUD_REGION
-export OPENSTACK_IP=$DEVSTACK_PRIVATE_IP
-export OPENSTACK_USER=$OPENSTACK_USER
-export OPENSTACK_PASS=$OPENSTACK_PASS
-export OPENSTACK_TENANT=$OPENSTACK_TENANT
-export OPENSTACK_REGION=$OPENSTACK_REGION
-export CUSTOMER=$CUSTOMER
-export SUBSCRIBER=$SUBSCRIBER
-export SERVICE_TYPE=$SERVICE_TYPE
-export AZ=$AZ
-export OE=$OE
-export LOB=$LOB
-export PLATFORM=$PLATFORM
-export PROJECT=$PROJECT
-export OS_ID=$OS_ID
-export OS_TENANT_ROLE=$OS_TENANT_ROLE
-export OS_KEYSTONE=$OS_KEYSTONE
-export KUBECONFIG=$BUILD_DIR/kubeconfig
-EOF
-
-$DIR/bootstrap_onap.sh "$BUILD_DIR/onap.conf"
+ "$OOM_OVERRIDES" \
+ "$MASTER_PASSWORD"
fi
+
set +x
cat > "$BUILD_DIR/deployment.notes" <<EOF
@@ -422,3 +420,39 @@ $AKS_PUBLIC_IP_ADDRESS policy.api.simpledemo.onap.org
EOF
cat "$BUILD_DIR/deployment.notes"
+
+
+if [ $POST_INSTALL = 1 ]; then
+
+echo "Executing post installation scripts..."
+sleep 3
+
+cat > "$BUILD_DIR/onap.conf" <<EOF
+export CLLI=$CLLI
+export CLOUD_OWNER=$CLOUD_OWNER
+export CLOUD_REGION=$CLOUD_REGION
+export OPENSTACK_IP=$DEVSTACK_PRIVATE_IP
+export OPENSTACK_USER=$OPENSTACK_USER
+export OPENSTACK_PASS=$OPENSTACK_PASS
+export OPENSTACK_TENANT=$OPENSTACK_TENANT
+export OPENSTACK_REGION=$OPENSTACK_REGION
+export CUSTOMER=$CUSTOMER
+export SUBSCRIBER=$SUBSCRIBER
+export SERVICE_TYPE=$SERVICE_TYPE
+export AZ=$AZ
+export OE=$OE
+export LOB=$LOB
+export PLATFORM=$PLATFORM
+export PROJECT=$PROJECT
+export OS_ID=$OS_ID
+export OS_TENANT_ROLE=$OS_TENANT_ROLE
+export OS_KEYSTONE=$OS_KEYSTONE
+export KUBECONFIG=$BUILD_DIR/kubeconfig
+export NFS_PRIVATE_IP=$NFS_PRIVATE_IP
+export DEVSTACK_PRIVATE_IP=$DEVSTACK_PRIVATE_IP
+export PRIVATE_KEY=$PRIVATE_KEY
+EOF
+
+$DIR/post_install.sh "$BUILD_DIR/onap.conf" "$DIR/cloud.conf"
+
+fi
diff --git a/deployment/aks/create_aks.sh b/deployment/aks/create_aks.sh
index 23f6ae341..cd4680dff 100755
--- a/deployment/aks/create_aks.sh
+++ b/deployment/aks/create_aks.sh
@@ -216,7 +216,9 @@ az aks create --name "$AKS_NAME" \
--dns-service-ip "$AKS_DNS_IP" \
--admin-username "$AKS_ADMIN_USER" \
--ssh-key-value "$PUBLIC_KEY" \
- --vnet-subnet-id "$AKS_SUBNET_ID"
+ --vnet-subnet-id "$AKS_SUBNET_ID" \
+ --vm-set-type "AvailabilitySet" \
+ --load-balancer-sku "basic"
echo ""
AKS_MANAGEMENT_RESOURCE_GROUP_NAME=`az group list --query "[?starts_with(name, 'MC_${AKS_RESOURCE_GROUP_NAME}')].name | [0]" --output tsv`
diff --git a/deployment/aks/create_devstack.sh b/deployment/aks/create_devstack.sh
index fc136f00a..87c76a1a8 100755
--- a/deployment/aks/create_devstack.sh
+++ b/deployment/aks/create_devstack.sh
@@ -35,6 +35,7 @@ OPENSTACK_USER=
OPENSTACK_PASS=
OS_PROJECT_NAME=
IMAGE_LIST=
+DEVSTACK_BRANCH=
function check_required_parameter() {
# arg1 = parameter
@@ -84,6 +85,7 @@ while test $# -gt 0; do
echo "--openstack-username default user name for openstack [optional]"
echo "--openstack-password default password for openstack [optional]"
echo "--openstack-tenant default tenant name for openstack [optional]"
+ echo "--devstack-branch branch to use for devstack install [optional]"
echo ""
exit 0
;;
@@ -176,6 +178,11 @@ while test $# -gt 0; do
OS_PROJECT_NAME=$1
shift
;;
+ --devstack-branch)
+ shift
+ DEVSTACK_BRANCH=$1
+ shift
+ ;;
*)
echo "Unknown Argument $1. Try running with --help."
exit 0
@@ -201,7 +208,7 @@ OPENSTACK_USER=$(check_optional_paramater "$OPENSTACK_USER" "admin")
OPENSTACK_PASS=$(check_optional_paramater "$OPENSTACK_PASS" "secret")
OS_PROJECT_NAME=$(check_optional_paramater "$OS_PROJECT_NAME" "admin")
IMAGE_LIST=$(check_optional_paramater "$IMAGE_LIST" "")
-
+DEVSTACK_BRANCH=$(check_optional_paramater "$DEVSTACK_BRANCH" "master")
if [ $NO_PROMPT = 0 ]; then
read -p "Would you like to proceed? [y/n]" -n 1 -r
@@ -238,6 +245,7 @@ cat > $DATA_FILE <<EOF
package_upgrade: true
packages:
- resolvconf
+ - python3-dev
users:
- default
- name: stack
@@ -253,7 +261,7 @@ write_files:
DEBIAN_FRONTEND=noninteractive sudo apt-get install -qqy git || sudo yum install -qy git
sudo chown stack:stack /home/stack
cd /home/stack
- git clone https://git.openstack.org/openstack-dev/devstack
+ git clone -b $DEVSTACK_BRANCH https://git.openstack.org/openstack-dev/devstack
cd devstack
cat > local.conf <<EOF
[[local|localrc]]
@@ -271,8 +279,8 @@ write_files:
enable_service h-eng h-api h-api-cfn h-api-cw
disable_service tempest
- enable_plugin heat https://git.openstack.org/openstack/heat
- enable_plugin heat-dashboard https://opendev.org/openstack/heat-dashboard
+ enable_plugin heat https://git.openstack.org/openstack/heat $DEVSTACK_BRANCH
+ enable_plugin heat-dashboard https://opendev.org/openstack/heat-dashboard $DEVSTACK_BRANCH
## Neutron options
Q_USE_SECGROUP=True
@@ -292,7 +300,9 @@ write_files:
PUBLIC_BRIDGE=br-ex
OVS_BRIDGE_MAPPINGS=public:br-ex
- [[post-config|\$NOVA_CONF]]
+ USE_PYTHON3=True
+
+ [[post-config|/etc/nova/nova.conf]]
[libvirt]
cpu_mode = host-passthrough
@@ -364,4 +374,3 @@ DEVSTACK_NIC_ID=`az vm nic list --resource-group ${DEVSTACK_RG} --vm-name ${DEVS
### Enabling IP Forwarding on DEVSTACK vnic ###
az network nic update --ids "$DEVSTACK_NIC_ID" --ip-forwarding
-
diff --git a/deployment/aks/create_onap.sh b/deployment/aks/create_onap.sh
index 4abddd64f..958874403 100755
--- a/deployment/aks/create_onap.sh
+++ b/deployment/aks/create_onap.sh
@@ -17,11 +17,11 @@ set -x
BUILD_NAME=$1
KUBECONFIG=$2
-NFS_SERVER_IP=$3
-OOM_BRANCH=$4
-BUILD_DIR=$5
-CHART_VERSION=$6
-OOM_OVERRIDES=$7
+OOM_BRANCH=$3
+BUILD_DIR=$4
+CHART_VERSION=$5
+OOM_OVERRIDES=$6
+MASTER_PASSWORD=$7
pushd .
@@ -30,13 +30,10 @@ cd $BUILD_DIR
export KUBECONFIG="$KUBECONFIG"
kubectl get nodes
-COUNTER=0
-until [ $COUNTER -ge 10 ]; do
-
echo "overriding default storage class for AKS"
kubectl delete sc default
sleep 1
-cat <<EOF | kubectl apply -f -
+cat > "$BUILD_DIR/tmp-sc.yaml" <<EOF
apiVersion: storage.k8s.io/v1
kind: StorageClass
metadata:
@@ -50,14 +47,7 @@ reclaimPolicy: Delete
volumeBindingMode: Immediate
EOF
-if [ $? -eq 0 ]; then
- COUNTER=10
-else
- COUNTER=$((COUNTER +1))
-fi
-
-sleep 5
-done
+kubectl replace -f "$BUILD_DIR/tmp-sc.yaml" --force
git clone -b "$OOM_BRANCH" http://gerrit.onap.org/r/oom --recurse-submodules
@@ -84,13 +74,13 @@ helm repo add stable "https://kubernetes-charts.storage.googleapis.com/"
cp -R helm/plugins/ ~/.helm
-make all
+make all -e SKIP_LINT=TRUE
if [ $? -ne 0 ]; then
echo "Failed building helm charts, exiting..."
exit 1
fi
-make onap
+make onap -e SKIP_LINT=TRUE
if [ $? -ne 0 ]; then
echo "Failed building helm charts, exiting..."
exit 1
@@ -103,7 +93,7 @@ fi
helm repo remove stable
build_name=`echo "$BUILD_NAME" | tr '[:upper:]' '[:lower:]'`
-helm deploy "$build_name" local/onap --version v"$CHART_VERSION" "$OOM_OVERRIDES" --namespace onap "$TEMPLATE_OVERRIDES"
+helm deploy "$build_name" local/onap --version v"$CHART_VERSION" --set "global.masterPassword=$MASTER_PASSWORD" "$OOM_OVERRIDES" --namespace onap "$TEMPLATE_OVERRIDES"
kubectl get pods --namespace onap
diff --git a/deployment/aks/bootstrap_onap.sh b/deployment/aks/post-install/000_bootstrap_onap.sh
index 99661a9b6..9904a54d2 100755
--- a/deployment/aks/bootstrap_onap.sh
+++ b/deployment/aks/post-install/000_bootstrap_onap.sh
@@ -24,7 +24,7 @@ fi
. $CONF
-kubectl create configmap onap-bootstrap --from-file=$DIR/post-install/ --from-file=kubeconfig=$KUBECONFIG --from-file=onap.conf=$CONF
+kubectl create configmap onap-bootstrap --from-file=$DIR/bootstrap/ --from-file=kubeconfig=$KUBECONFIG --from-file=onap.conf=$CONF
cat <<EOF | kubectl apply -f -
apiVersion: v1
diff --git a/deployment/aks/post-install/bootstrap.sh b/deployment/aks/post-install/bootstrap/bootstrap.sh
index 646a502e2..646a502e2 100755
--- a/deployment/aks/post-install/bootstrap.sh
+++ b/deployment/aks/post-install/bootstrap/bootstrap.sh
diff --git a/deployment/aks/post-install/create_az.sh b/deployment/aks/post-install/bootstrap/create_az.sh
index 1e0407c12..1e0407c12 100755
--- a/deployment/aks/post-install/create_az.sh
+++ b/deployment/aks/post-install/bootstrap/create_az.sh
diff --git a/deployment/aks/post-install/create_clli.sh b/deployment/aks/post-install/bootstrap/create_clli.sh
index dec8b8b06..dec8b8b06 100755
--- a/deployment/aks/post-install/create_clli.sh
+++ b/deployment/aks/post-install/bootstrap/create_clli.sh
diff --git a/deployment/aks/post-install/create_cloud_region.sh b/deployment/aks/post-install/bootstrap/create_cloud_region.sh
index dd83cb387..dd83cb387 100755
--- a/deployment/aks/post-install/create_cloud_region.sh
+++ b/deployment/aks/post-install/bootstrap/create_cloud_region.sh
diff --git a/deployment/aks/post-install/create_cloud_region_relationship.sh b/deployment/aks/post-install/bootstrap/create_cloud_region_relationship.sh
index 7acdea9d2..7acdea9d2 100755
--- a/deployment/aks/post-install/create_cloud_region_relationship.sh
+++ b/deployment/aks/post-install/bootstrap/create_cloud_region_relationship.sh
diff --git a/deployment/aks/post-install/create_cloud_region_subscriber_relationship.sh b/deployment/aks/post-install/bootstrap/create_cloud_region_subscriber_relationship.sh
index d37b5653f..d37b5653f 100755
--- a/deployment/aks/post-install/create_cloud_region_subscriber_relationship.sh
+++ b/deployment/aks/post-install/bootstrap/create_cloud_region_subscriber_relationship.sh
diff --git a/deployment/aks/post-install/create_cloud_site.sh b/deployment/aks/post-install/bootstrap/create_cloud_site.sh
index 0253b0e16..daa4dc21d 100755
--- a/deployment/aks/post-install/create_cloud_site.sh
+++ b/deployment/aks/post-install/bootstrap/create_cloud_site.sh
@@ -31,7 +31,7 @@ popd
echo $SO_ENCRYPTED_KEY
-MARIADBPOD_STATUS=`kubectl -n onap get pods | grep mariadb-galera-mariadb-galera | head -1 | awk '{print $3}'`
+MARIADBPOD_STATUS=`kubectl -n onap get pods | grep mariadb-galera | head -1 | awk '{print $3}'`
COUNTER=0
until [ "$MARIADBPOD_STATUS" = "Running" ] || [ $COUNTER -gt 120 ]; do
@@ -40,11 +40,13 @@ COUNTER=$((COUNTER +1))
sleep 10
done
-MARIADBPOD=`kubectl -n onap get pods | grep mariadb-galera-mariadb-galera | head -1 | awk '{print $1}'`
+MARIADBPOD=`kubectl -n onap get pods | grep mariadb-galera | head -1 | awk '{print $1}'`
+MARIADBSECRET=`kubectl -n onap get secrets | grep mariadb-galera-db-root-password | head -1 | awk '{print $1}'`
+MARIADBPASSWORD=`kubectl -n onap get secret $MARIADBSECRET -o jsonpath="{.data.password}" | base64 -d`
COMMAND="INSERT INTO identity_services (id, identity_url, mso_id, mso_pass, admin_tenant, member_role, tenant_metadata, identity_server_type, identity_authentication_type, project_domain_name, user_domain_name) VALUES (\"$OS_ID\", \"http://$OPENSTACK_IP/identity/v3\", \"$OPENSTACK_USER\", \"$SO_ENCRYPTED_KEY\", \"$OPENSTACK_TENANT\", \"$OS_TENANT_ROLE\", 0, \"$OS_KEYSTONE\", \"USERNAME_PASSWORD\", \"default\", \"default\");"
-kubectl -n onap exec -it $MARIADBPOD -- bash -c "mysql -u root --password=secretpassword --database=catalogdb --execute='$COMMAND'"
+kubectl -n onap exec -it $MARIADBPOD -- bash -c "mysql -u root --password='$MARIADBPASSWORD' --database=catalogdb --execute='$COMMAND'"
COMMAND="INSERT INTO cloud_sites (id, region_id, identity_service_id, cloud_version, clli) VALUES (\"$CLOUD_REGION\", \"$OPENSTACK_REGION\", \"$OS_ID\", \"2.5\", \"$CLOUD_REGION\");"
-kubectl -n onap exec -it $MARIADBPOD -- bash -c "mysql -u root --password=secretpassword --database=catalogdb --execute='$COMMAND'"
+kubectl -n onap exec -it $MARIADBPOD -- bash -c "mysql -u root --password='$MARIADBPASSWORD' --database=catalogdb --execute='$COMMAND'"
diff --git a/deployment/aks/post-install/create_customer.sh b/deployment/aks/post-install/bootstrap/create_customer.sh
index 0beebe97a..0beebe97a 100755
--- a/deployment/aks/post-install/create_customer.sh
+++ b/deployment/aks/post-install/bootstrap/create_customer.sh
diff --git a/deployment/aks/post-install/create_lob.sh b/deployment/aks/post-install/bootstrap/create_lob.sh
index a14367ddf..a14367ddf 100755
--- a/deployment/aks/post-install/create_lob.sh
+++ b/deployment/aks/post-install/bootstrap/create_lob.sh
diff --git a/deployment/aks/post-install/create_owning_entity.sh b/deployment/aks/post-install/bootstrap/create_owning_entity.sh
index eba395591..eba395591 100755
--- a/deployment/aks/post-install/create_owning_entity.sh
+++ b/deployment/aks/post-install/bootstrap/create_owning_entity.sh
diff --git a/deployment/aks/post-install/create_platform.sh b/deployment/aks/post-install/bootstrap/create_platform.sh
index 46dc1b4e6..46dc1b4e6 100755
--- a/deployment/aks/post-install/create_platform.sh
+++ b/deployment/aks/post-install/bootstrap/create_platform.sh
diff --git a/deployment/aks/post-install/create_project.sh b/deployment/aks/post-install/bootstrap/create_project.sh
index 7b455e3b0..7b455e3b0 100755
--- a/deployment/aks/post-install/create_project.sh
+++ b/deployment/aks/post-install/bootstrap/create_project.sh
diff --git a/deployment/aks/post-install/create_service_type.sh b/deployment/aks/post-install/bootstrap/create_service_type.sh
index c712c53f6..c712c53f6 100755
--- a/deployment/aks/post-install/create_service_type.sh
+++ b/deployment/aks/post-install/bootstrap/create_service_type.sh
diff --git a/deployment/aks/post-install/create_subscription.sh b/deployment/aks/post-install/bootstrap/create_subscription.sh
index 4bb2e6412..4bb2e6412 100755
--- a/deployment/aks/post-install/create_subscription.sh
+++ b/deployment/aks/post-install/bootstrap/create_subscription.sh
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netconf/__init__.py b/deployment/aks/post_install.sh
index 0f144c21e..c48f9d5c7 100644..100755
--- a/test/mocks/pnfsimulator/netconfsimulator/netconf/__init__.py
+++ b/deployment/aks/post_install.sh
@@ -1,19 +1,31 @@
-###
-# ============LICENSE_START=======================================================
-# Simulator
-# ================================================================================
-# Copyright (C) 2019 Nokia. All rights reserved.
-# ================================================================================
+#!/bin/bash
+# Copyright 2019 AT&T Intellectual Property. All rights reserved.
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-# ============LICENSE_END=========================================================
-###
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+
+ONAP_CONF=$1
+CLOUD_CONF=$2
+
+pushd .
+
+cd $DIR/post-install
+
+for filename in *; do
+ if [ -f $filename ]; then
+ ./$filename "$ONAP_CONF" "$CLOUD_CONF"
+ fi
+done
+
+popd \ No newline at end of file
diff --git a/deployment/aks/pre_install.sh b/deployment/aks/pre_install.sh
new file mode 100755
index 000000000..6bbbab9b6
--- /dev/null
+++ b/deployment/aks/pre_install.sh
@@ -0,0 +1,108 @@
+#!/bin/bash
+# Copyright 2019 AT&T Intellectual Property. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+KUBE_VERSION=$1
+LOCATION=$2
+
+COMMANDS="kubectl helm make java az"
+
+CLI_MAJOR="2"
+CLI_MINOR="0"
+CLI_INC="75"
+
+function check_requirement() {
+ req=$1
+
+ command -v $1
+ if [ $? -ne 0 ]; then
+ echo "$1 was not found on machine. Please install it before proceeding."
+ exit 1
+ fi
+}
+
+echo "Checking requirements are installed..."
+
+for req in $COMMANDS; do
+ check_requirement $req
+done
+
+echo "Checking K8 version is available in Azure..."
+if [ -z "$KUBE_VERSION" ]; then
+ echo "K8 version not provided in cloud.conf."
+ echo "Update cloud.conf with the desired version."
+ exit 1
+fi
+
+if [ -z "$LOCATION" ]; then
+ echo "Location not provided in cloud.conf."
+ echo "Update cloud.conf with the desired location."
+ exit 1
+fi
+
+supported_k8_versions=`az aks get-versions --location $LOCATION --output json --query 'orchestrators[].orchestratorVersion'`
+echo $supported_k8_versions | grep -q $KUBE_VERSION
+if [ $? -ne 0 ]; then
+ echo "K8 version $KUBE_VERSION is not supported in location $LOCATION"
+ echo "The supported versions are $supported_k8_versions."
+ echo "Update cloud.conf with a supported version."
+ exit 1
+fi
+
+echo "Checking Azure CLI version..."
+installed_cli_version=`az --version | grep -e "^azure-cli" | awk '{print $2}'`
+installed_major=`echo $installed_cli_version | cut -d "." -f 1`
+installed_minor=`echo $installed_cli_version | cut -d "." -f 2`
+installed_inc=`echo $installed_cli_version | cut -d "." -f 3`
+
+if [ $installed_major -lt $CLI_MAJOR ]; then
+ echo "Azure cli version is out of date."
+ echo "Major version required is $CLI_MAJOR but $installed_major is installed."
+ exit 1
+fi
+
+if [ $installed_minor -lt $CLI_MINOR ]; then
+ echo "Azure cli version is out of date."
+ echo "Minor version required is $CLI_MINOR but $installed_minor is installed."
+ exit 1
+fi
+
+if [ $installed_inc -lt $CLI_INC ]; then
+ echo "Azure cli version is out of date."
+ echo "Incremental version required is $CLI_INC but $installed_inc is installed."
+ exit 1
+fi
+
+echo "Checking kubectl version is compatible with the K8 version..."
+kubectl_version=`kubectl version --client --short | awk '{print $3}'`
+kubectl_major=`echo $kubectl_version | cut -d "." -f 1 | sed 's/v//'`
+kubectl_minor=`echo $kubectl_version | cut -d "." -f 2`
+k8_major=`echo $KUBE_VERSION | cut -d "." -f 1`
+k8_minor=`echo $KUBE_VERSION | cut -d "." -f 2`
+
+if [ $kubectl_major -ne $k8_major ]; then
+ echo "kubectl major version $kubectl_major doesn't equal kubernetes server version $k8_major"
+ exit 1
+fi
+
+minor_difference=`echo "$(($kubectl_minor-$k8_minor))"`
+minor_abs_diff=`echo $minor_difference | tr -d -`
+if [ $minor_abs_diff -gt 1 ]; then
+ echo "The difference between k8 minor version $KUBE_VERSION and kubectl minor version $kubectl_version is greater than 1"
+ echo "Kubernetes supports kubectl within 1 minor version."
+ exit 1
+fi
+
+echo "All requirements satisfied..."
+sleep 1
diff --git a/deployment/aks/util/create_integration_override.sh b/deployment/aks/util/create_integration_override.sh
index 15d853918..88c6f9f3b 100755
--- a/deployment/aks/util/create_integration_override.sh
+++ b/deployment/aks/util/create_integration_override.sh
@@ -22,6 +22,7 @@ DOCKER_REPOSITORY=$4
NFS_IP_ADDR=$5
K8S_01_VM_IP=$6
KUBECONFIG=$7
+MASTER_PASSWORD=$8
. $OPENSTACK_RC
@@ -72,4 +73,4 @@ OS_PROJECT_ID=`kubectl exec $OPENSTACK_CLI_POD -- sh -lc "openstack project show
echo "export OS_PROJECT_ID=$OS_PROJECT_ID" >> "$OPENSTACK_RC"
-$DIR/create_robot_config.sh "$OPENSTACK_RC" "$BUILD_DIR/openstack_params.conf" "$BUILD_DIR" "$DIR/integration_override.template"
+$DIR/create_robot_config.sh "$OPENSTACK_RC" "$BUILD_DIR/openstack_params.conf" "$BUILD_DIR" "$DIR/integration_override.template" "$MASTER_PASSWORD"
diff --git a/deployment/aks/util/create_openstack_cli.sh b/deployment/aks/util/create_openstack_cli.sh
index 01ae88673..3e69ac403 100755
--- a/deployment/aks/util/create_openstack_cli.sh
+++ b/deployment/aks/util/create_openstack_cli.sh
@@ -39,14 +39,14 @@ spec:
args:
- -c
- apk update && \
- apk add python && \
- apk add py-pip && \
- apk add python-dev && \
+ apk add python3 && \
+ apk add py3-pip && \
+ apk add python3-dev && \
apk add gcc && \
apk add musl-dev && \
apk add libffi-dev && \
apk add openssl-dev && \
- pip install python-openstackclient && \
+ pip3 install --no-cache-dir python-openstackclient && \
sh -c 'echo ". /openstack/openstack_rc" >> /root/.profile; while true; do sleep 60; done;'
restartPolicy: Never
volumes:
diff --git a/deployment/aks/util/create_robot_config.sh b/deployment/aks/util/create_robot_config.sh
index dea67b858..9c1e15e1b 100755
--- a/deployment/aks/util/create_robot_config.sh
+++ b/deployment/aks/util/create_robot_config.sh
@@ -17,6 +17,7 @@ OPENSTACK_RC=$1
OPENSTACK_PARAM=$2
BUILD_DIR=$3
INTEGRATION_TEMPLATE=$4
+MASTER_PASSWORD=$5
if [ "$OPENSTACK_RC" == "" ]
then
@@ -71,6 +72,8 @@ sed -ir -e "s/__sec_group__/$OS_SEC_GROUP/" $template
sed -ir -e "s/\${OS_UBUNTU_14_IMAGE}/$OS_UBUNTU_14_IMAGE/" $template
sed -ir -e "s/\${OS_UBUNTU_16_IMAGE}/$OS_UBUNTU_16_IMAGE/" $template
+sed -ir -e "s/\${MASTER_PASSWORD}/$MASTER_PASSWORD/" $template
+
sed -ir -e "s/__nfs_ip_addr__/$NFS_IP_ADDR/" $template
sed -ir -e "s/__k8s_01_vm_ip__/$K8S_01_VM_IP/" $template
diff --git a/deployment/aks/util/integration_override.template b/deployment/aks/util/integration_override.template
index 5f24824a3..108357d6d 100644
--- a/deployment/aks/util/integration_override.template
+++ b/deployment/aks/util/integration_override.template
@@ -1,6 +1,7 @@
global:
repository: __docker_proxy__
pullPolicy: IfNotPresent
+ masterPassword: "${MASTER_PASSWORD}"
robot:
enabled: true
flavor: large
diff --git a/deployment/heat/onap-rke/env/windriver/onap-oom.env b/deployment/heat/onap-rke/env/windriver/onap-oom.env
index 6c02a0d4a..cb749e63c 100644
--- a/deployment/heat/onap-rke/env/windriver/onap-oom.env
+++ b/deployment/heat/onap-rke/env/windriver/onap-oom.env
@@ -2,8 +2,11 @@ parameters:
ubuntu_1804_image: ubuntu-18.04
- apt_proxy: 10.12.5.2:8000
- docker_proxy: 10.12.5.2:5000
+# apt_proxy: 10.12.5.2:8000
+# docker_proxy: 10.12.5.2:5000
+
+ apt_proxy: ""
+ docker_proxy: nexus3.onap.org:10001
nfs_vm_flavor: m1.lm.xlarge
k8s_vm_flavor: m1.lm.xlarge
@@ -22,6 +25,9 @@ parameters:
global:
repository: __docker_proxy__
pullPolicy: IfNotPresent
+ masterPassword: secretpassword
+ addTestingComponents: true
+ cmpv2Enabled: true
robot:
enabled: true
flavor: large
@@ -41,6 +47,7 @@ parameters:
openStackSecurityGroup: "__sec_group__"
openStackOamNetworkCidrPrefix: "10.0"
dcaeCollectorIp: "__k8s_01_vm_ip__"
+ kubernetesExternalIp: "__k8s_01_vm_ip__"
vnfPubKey: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDKXDgoo3+WOqcUG8/5uUbk81+yczgwC4Y8ywTmuQqbNxlY1oQ0YxdMUqUnhitSXs5S/yRuAVOYHwGg2mCs20oAINrP+mxBI544AMIb9itPjCtgqtE2EWo6MmnFGbHB4Sx3XioE7F4VPsh7japsIwzOjbrQe+Mua1TGQ5d4nfEOQaaglXLLPFfuc7WbhbJbK6Q7rHqZfRcOwAMXgDoBqlyqKeiKwnumddo2RyNT8ljYmvB6buz7KnMinzo7qB0uktVT05FH9Rg0CTWH5norlG5qXgP2aukL0gk1ph8iAt7uYLf1ktp+LJI2gaF6L0/qli9EmVCSLr1uJ38Q8CBflhkh"
demoArtifactsVersion: "1.6.0-SNAPSHOT"
demoArtifactsRepoUrl: "https://nexus.onap.org/content/repositories/releases"
@@ -212,4 +219,5 @@ parameters:
enabled: true
cds:
enabled: true
-
+ platform:
+ enabled: true
diff --git a/deployment/heat/onap-rke/nfs_vm_entrypoint.sh b/deployment/heat/onap-rke/nfs_vm_entrypoint.sh
index 842ce4082..7f5463041 100644
--- a/deployment/heat/onap-rke/nfs_vm_entrypoint.sh
+++ b/deployment/heat/onap-rke/nfs_vm_entrypoint.sh
@@ -213,7 +213,7 @@ sleep 10
cd ~/oom/kubernetes/
helm repo add local http://127.0.0.1:8879
helm repo list
-make all
+SKIP_LINT=TRUE make all
helm search -l | grep local
# install helm deploy plugin
@@ -226,9 +226,9 @@ fi
# Deploy ONAP
if [ ! -z "__additional_override__" ]; then
- helm deploy dev local/onap -f ~/oom/kubernetes/onap/resources/environments/public-cloud.yaml -f ~/integration-override.yaml -f __additional_override__ --namespace $NAMESPACE -–verbose
+ helm deploy dev local/onap -f ~/oom/kubernetes/onap/resources/environments/public-cloud.yaml -f ~/integration-override.yaml -f __additional_override__ --namespace $NAMESPACE --verbose
else
- helm deploy dev local/onap -f ~/oom/kubernetes/onap/resources/environments/public-cloud.yaml -f ~/integration-override.yaml --namespace $NAMESPACE -–verbose
+ helm deploy dev local/onap -f ~/oom/kubernetes/onap/resources/environments/public-cloud.yaml -f ~/integration-override.yaml --namespace $NAMESPACE --verbose
fi
diff --git a/deployment/heat/onap-rke/onap-oom.yaml b/deployment/heat/onap-rke/onap-oom.yaml
index effcd9151..d76b5c21d 100644
--- a/deployment/heat/onap-rke/onap-oom.yaml
+++ b/deployment/heat/onap-rke/onap-oom.yaml
@@ -30,12 +30,12 @@ parameters:
oam_ext_network_host_route:
type: json
description: >
- host routes
- "destination": '10.12.0.0/16'
- "nexthop": '10.100.0.1'
+ host routes
+ "destination": '10.12.0.0/16'
+ "nexthop": '10.100.0.1'
default:
- "destination": '10.12.0.0/16'
- "nexthop": '10.100.0.1'
+ "destination": '10.12.0.0/16'
+ "nexthop": '10.100.0.1'
ubuntu_1804_image:
type: string
@@ -83,11 +83,11 @@ parameters:
kubectl_version:
type: string
- default: "1.15.3"
+ default: "1.15.11"
helm_version:
type: string
- default: "2.14.2"
+ default: "2.16.6"
helm_deploy_delay:
type: string
@@ -237,7 +237,7 @@ resources:
template:
get_file: nfs_vm_entrypoint.sh
params:
- __additional_override__ : { get_param: additional_override}
+ __additional_override__: { get_param: additional_override}
__nfs_volume_id__: { get_resource: nfs_volume }
__docker_proxy__: { get_param: docker_proxy }
__apt_proxy__: { get_param: apt_proxy }
@@ -1217,4 +1217,3 @@ outputs:
orch_3_vm_private_ip:
description: The private IP address of the orch_3 instance
value: { get_attr: [orch_3_floating_ip, fixed_ip_address] }
-
diff --git a/deployment/heat/onap-rke/parts/onap-oom-1.yaml b/deployment/heat/onap-rke/parts/onap-oom-1.yaml
index eaa7159c1..13888e95f 100644
--- a/deployment/heat/onap-rke/parts/onap-oom-1.yaml
+++ b/deployment/heat/onap-rke/parts/onap-oom-1.yaml
@@ -25,12 +25,12 @@ parameters:
oam_ext_network_host_route:
type: json
description: >
- host routes
- "destination": '10.12.0.0/16'
- "nexthop": '10.100.0.1'
+ host routes
+ "destination": '10.12.0.0/16'
+ "nexthop": '10.100.0.1'
default:
- "destination": '10.12.0.0/16'
- "nexthop": '10.100.0.1'
+ "destination": '10.12.0.0/16'
+ "nexthop": '10.100.0.1'
ubuntu_1804_image:
type: string
diff --git a/deployment/heat/onap-rke/parts/onap-oom-3.yaml b/deployment/heat/onap-rke/parts/onap-oom-3.yaml
index b70150240..62e3ee29b 100644
--- a/deployment/heat/onap-rke/parts/onap-oom-3.yaml
+++ b/deployment/heat/onap-rke/parts/onap-oom-3.yaml
@@ -9,4 +9,3 @@ outputs:
nfs_vm_private_ip:
description: The private IP address of the nfs instance
value: { get_attr: [nfs_floating_ip, fixed_ip_address] }
-
diff --git a/deployment/heat/onap-rke/policy-staging-image-override.yaml b/deployment/heat/onap-rke/policy-staging-image-override.yaml
new file mode 100644
index 000000000..cf35cea6b
--- /dev/null
+++ b/deployment/heat/onap-rke/policy-staging-image-override.yaml
@@ -0,0 +1,14 @@
+---
+policy:
+ policy-pap:
+ image: onap/policy-pap:2.3-SNAPSHOT-latest
+ policy-xacml-pdp:
+ image: onap/policy-xacml-pdp:2.3-SNAPSHOT-latest
+ policy-drools-pdp:
+ image: onap/policy-pdpd-cl:1.7-SNAPSHOT-latest
+ policy-apex-pdp:
+ image: onap/policy-apex-pdp:2.4-SNAPSHOT-latest
+ policy-api:
+ image: onap/policy-api:2.3-SNAPSHOT-latest
+ policy-distribution:
+ image: onap/policy-distribution:2.4-SNAPSHOT-latest
diff --git a/deployment/heat/onap-rke/scripts/cleanup.sh b/deployment/heat/onap-rke/scripts/cleanup.sh
index b65c8097c..9a35791f5 100755
--- a/deployment/heat/onap-rke/scripts/cleanup.sh
+++ b/deployment/heat/onap-rke/scripts/cleanup.sh
@@ -31,19 +31,25 @@ fi
if [ $COMPONENT == "sdc" ]; then
for keyspace in sdctitan sdcrepository sdcartifact sdccomponent sdcaudit workflow dox zusammen_dox zusammen_workflow ; do
- kubectl -n $NAMESPACE exec dev-cassandra-cassandra-0 -- cqlsh -u cassandra -p cassandra --request-timeout=30 -e "drop keyspace ${keyspace}"
+ kubectl -n $NAMESPACE exec $DEPLOYMENT-cassandra-0 -- cqlsh -u cassandra -p cassandra --request-timeout=30 -e "drop keyspace ${keyspace}"
done
fi
if [ $COMPONENT == "so" ]; then
for database in camundabpmn catalogdb requestdb; do
- kubectl -n $NAMESPACE exec dev-mariadb-galera-mariadb-galera-0 -- mysql -uroot -psecretpassword -e "drop database ${database}"
+ kubectl -n $NAMESPACE exec $DEPLOYMENT-mariadb-galera-0 -- mysql -uroot -psecretpassword -e "drop database ${database}"
done
fi
if [ $COMPONENT == "sdnc" ]; then
for database in sdnctl; do
- kubectl -n $NAMESPACE exec dev-mariadb-galera-mariadb-galera-0 -- mysql -uroot -psecretpassword -e "drop database ${database}"
+ kubectl -n $NAMESPACE exec $DEPLOYMENT-mariadb-galera-0 -- mysql -uroot -psecretpassword -e "drop database ${database}"
+ done
+fi
+
+if [ $COMPONENT == "aai" ]; then
+ for keyspace in aaigraph ; do
+ kubectl -n $NAMESPACE exec dev-cassandra-cassandra-0 -- cqlsh -u cassandra -p cassandra --request-timeout=30 -e "drop keyspace ${keyspace}"
done
fi
diff --git a/deployment/heat/onap-rke/scripts/redeploy-module.sh b/deployment/heat/onap-rke/scripts/redeploy-module.sh
index 749739a06..740a17396 100755
--- a/deployment/heat/onap-rke/scripts/redeploy-module.sh
+++ b/deployment/heat/onap-rke/scripts/redeploy-module.sh
@@ -22,7 +22,7 @@ helm delete $deploy --purge
echo "Wait for 5 seconds before cleaning up deployment resource ..."
sleep 5
echo "Cleaning up deployment resource ..."
-/root/integration/deployment/heat/onap-rke/scripts/cleanup.sh $module
+/root/integration/deployment/heat/onap-rke/scripts/cleanup.sh $module onap dev
echo "Wait for 5 seconds before cleaning up deployment file system ..."
sleep 5
echo "Cleaning up deployment file system ..."
@@ -31,7 +31,7 @@ echo "Wait for 5 seconds before make $module and make onap ..."
sleep 5
echo "making $module and making onap ..."
make $module
-make onap
+make onap
echo "Wait for 5 seconds before deploying $deploy ..."
sleep 5
echo "Deploying $deploy ..."
diff --git a/deployment/heat/onap-rke/sdc-staging-image-override.yaml b/deployment/heat/onap-rke/sdc-staging-image-override.yaml
new file mode 100644
index 000000000..4d44f581a
--- /dev/null
+++ b/deployment/heat/onap-rke/sdc-staging-image-override.yaml
@@ -0,0 +1,13 @@
+---
+sdc:
+ sdc-onboarding-be:
+ image: onap/sdc-onboard-backend:1.6.5
+ onboardingInitImage: onap/sdc-onboard-cassandra-init:1.6.5
+ sdc-fe:
+ image: onap/sdc-frontend:1.6.5
+ sdc-be:
+ image: onap/sdc-backend:1.6.5
+ backendInitImage: onap/sdc-backend-init:1.6.5
+ sdc-cs:
+ image: onap/sdc-cassandra:1.6.5
+ cassandraInitImage: onap/sdc-cassandra-init:1.6.5
diff --git a/deployment/heat/onap-rke/staging-image-override.yaml b/deployment/heat/onap-rke/staging-image-override.yaml
index add178a8f..b5440f169 100644
--- a/deployment/heat/onap-rke/staging-image-override.yaml
+++ b/deployment/heat/onap-rke/staging-image-override.yaml
@@ -1,104 +1,123 @@
+---
+aai:
+ aai-babel:
+ image: onap/babel:1.6-STAGING-latest
+ aai-data-router:
+ image: onap/data-router:1.6-STAGING-latest
+ aai-graphadmin:
+ image: onap/aai-graphadmin:1.6-STAGING-latest
+ aai-modelloader:
+ image: onap/model-loader:1.6-STAGING-latest
+ aai-resources:
+ image: onap/aai-resources:1.6-STAGING-latest
+ aai-schema-service:
+ image: onap/aai-schema-service:1.6-STAGING-latest
+ aai-search-data:
+ image: onap/search-data-service:1.6-STAGING-latest
+ aai-sparky-be:
+ image: onap/sparky-be:1.6-STAGING-latest
+ aai-traversal:
+ image: onap/aai-traversal:1.6-STAGING-latest
appc:
- image: onap/appc-image:1.7.0-SNAPSHOT-latest
+ image: onap/appc-image:1.7.1-SNAPSHOT-latest
appc-cdt:
- image: onap/appc-cdt-image:1.7.0-SNAPSHOT-latest
+ image: onap/appc-cdt-image:1.7.1-SNAPSHOT-latest
cds:
- cds-command-executor:
- image: onap/ccsdk-commandexecutor:0.7.0-STAGING-latest
- cds-blueprints-processor:
- image: onap/ccsdk-blueprintsprocessor:0.7.0-SNAPSHOT-latest
- cds-ui:
- image: onap/ccsdk-cds-ui-server:0.7.0-SNAPSHOT-latest
- cds-sdc-listener:
- image: onap/ccsdk-sdclistener:0.7.0-STAGING-latest
+ cds-command-executor:
+ image: onap/ccsdk-commandexecutor:0.7.0-STAGING-latest
+ cds-blueprints-processor:
+ image: onap/ccsdk-blueprintsprocessor:0.7.0-STAGING-latest
+ cds-ui:
+ image: onap/ccsdk-cds-ui-server:0.7.0-STAGING-latest
+ cds-sdc-listener:
+ image: onap/ccsdk-sdclistener:0.7.0-STAGING-latest
common:
- dgbuilder:
- image: onap/ccsdk-dgbuilder-image:0.7.0-STAGING-latest
+ dgbuilder:
+ image: onap/ccsdk-dgbuilder-image:0.7.2
dcaegen2:
- dcae-bootstrap:
- image: onap/org.onap.dcaegen2.deployments.k8s-bootstrap-container:1.6.5-STAGING-latest
- componentImages:
- prh: onap/org.onap.dcaegen2.services.prh.prh-app-server:1.4.0
+ dcae-bootstrap:
+ image: onap/org.onap.dcaegen2.deployments.k8s-bootstrap-container:1.12.3-STAGING-latest
+ componentImages:
+ prh: onap/org.onap.dcaegen2.services.prh.prh-app-server:1.5.2
+ dcae-dashboard:
+ image: onap/org.onap.ccsdk.dashboard.ccsdk-app-os:1.3.2
nbi:
- image: onap/externalapi/nbi:5.0.1-latest
+ image: onap/externalapi/nbi:6.0.0
policy:
- image: onap/policy-pe:1.6.0-SNAPSHOT
- policy-pap:
- image: onap/policy-pap:2.2.0-SNAPSHOT
- brmsgw:
- image: onap/policy-pe:1.6.0-SNAPSHOT
- policy-xacml-pdp:
- image: onap/policy-xacml-pdp:2.2.0-SNAPSHOT
- drools:
- image: onap/policy-pdpd-cl:1.6.0-SNAPSHOT
- pdp:
- image: onap/policy-pe:1.6.0-SNAPSHOT
- policy-apex-pdp:
- image: onap/policy-apex-pdp:2.3.0-SNAPSHOT
- policy-api:
- image: onap/policy-api:2.2.0-SNAPSHOT
- policy-distribution:
- image: onap/policy-distribution:2.3.0-SNAPSHOT
+ policy-pap:
+ image: onap/policy-pap:2.3-SNAPSHOT-latest
+ policy-xacml-pdp:
+ image: onap/policy-xacml-pdp:2.3-SNAPSHOT-latest
+ policy-drools-pdp:
+ image: onap/policy-pdpd-cl:1.7-SNAPSHOT-latest
+ policy-apex-pdp:
+ image: onap/policy-apex-pdp:2.4-SNAPSHOT-latest
+ policy-api:
+ image: onap/policy-api:2.3-SNAPSHOT-latest
+ policy-distribution:
+ image: onap/policy-distribution:2.4-SNAPSHOT-latest
sdc:
- sdc-onboarding-be:
- image: onap/sdc-onboard-backend:1.6-STAGING-latest
- onboardingInitImage: onap/sdc-onboard-cassandra-init:1.6-STAGING-latest
- sdc-es:
- image: onap/sdc-elasticsearch:1.6-STAGING-latest
- elasticInitImage: onap/sdc-init-elasticsearch:1.6-STAGING-latest
- sdc-fe:
- image: onap/sdc-frontend:1.6-STAGING-latest
- sdc-be:
- image: onap/sdc-backend:1.6-STAGING-latest
- backendInitImage: onap/sdc-backend-init:1.6-STAGING-latest
- sdc-kb:
- image: onap/sdc-kibana:1.6-STAGING-latest
- sdc-cs:
- image: onap/sdc-cassandra:1.6-STAGING-latest
- cassandraInitImage: onap/sdc-cassandra-init:1.6-STAGING-latest
+ sdc-onboarding-be:
+ image: onap/sdc-onboard-backend:1.6.3
+ onboardingInitImage: onap/sdc-onboard-cassandra-init:1.6.3
+ sdc-fe:
+ image: onap/sdc-frontend:1.6.3
+ sdc-be:
+ image: onap/sdc-backend:1.6.3
+ backendInitImage: onap/sdc-backend-init:1.6.3
+ sdc-kb:
+ image: onap/sdc-kibana:1.6.3
+ sdc-cs:
+ image: onap/sdc-cassandra:1.6.3
+ cassandraInitImage: onap/sdc-cassandra-init:1.6.3
+ sdc-wfd-be:
+ image: onap/workflow-backend:1.6.1
+ configInitImage: onap/workflow-init:1.6.1
+ sdc-wfd-fe:
+ image: onap/workflow-frontend:1.6.1
robot:
image: onap/testsuite:1.6.0-STAGING-latest
sdnc:
- image: onap/sdnc-image:1.8.0-STAGING-latest
- sdnc-ansible-server:
- image: onap/sdnc-ansible-server-image:1.8.0-STAGING-latest
- dmaap-listener:
- image: onap/sdnc-dmaap-listener-image:1.8.0-STAGING-latest
- sdnc-portal:
- image: onap/admportal-sdnc-image:1.8.0-STAGING-latest
- ueb-listener:
- image: onap/sdnc-ueb-listener-image:1.8.0-STAGING-latest
+ image: onap/sdnc-image:1.8.0-STAGING-latest
+ sdnc-ansible-server:
+ image: onap/sdnc-ansible-server-image:1.8.0-STAGING-latest
+ dmaap-listener:
+ image: onap/sdnc-dmaap-listener-image:1.8.0-STAGING-latest
+ sdnc-portal:
+ image: onap/admportal-sdnc-image:1.8.0-STAGING-latest
+ ueb-listener:
+ image: onap/sdnc-ueb-listener-image:1.8.0-STAGING-latest
so:
- image: onap/so/api-handler-infra:1.5.3-STAGING-latest
- so-monitoring:
- image: onap/so/so-monitoring:1.5.3-STAGING-latest
- so-bpmn-infra:
- image: onap/so/bpmn-infra:1.5.3-STAGING-latest
- so-openstack-adapter:
- image: onap/so/openstack-adapter:1.5.3-STAGING-latest
- so-catalog-db-adapter:
- image: onap/so/catalog-db-adapter:1.5.3-STAGING-latest
- so-vnfm-adapter:
- image: onap/so/vnfm-adapter:1.5.3-STAGING-latest
- so-sdnc-adapter:
- image: onap/so/sdnc-adapter:1.5.3-STAGING-latest
- so-vfc-adapter:
- image: onap/so/vfc-adapter:1.5.3-STAGING-latest
- so-sdc-controller:
- image: onap/so/sdc-controller:1.5.3-STAGING-latest
- so-request-db-adapter:
- image: onap/so/request-db-adapter:1.5.3-STAGING-latest
+ image: onap/so/api-handler-infra:1.6.1-STAGING-latest
+ so-monitoring:
+ image: onap/so/so-monitoring:1.6.1-STAGING-latest
+ so-bpmn-infra:
+ image: onap/so/bpmn-infra:1.6.1-STAGING-latest
+ so-openstack-adapter:
+ image: onap/so/openstack-adapter:1.6.1-STAGING-latest
+ so-catalog-db-adapter:
+ image: onap/so/catalog-db-adapter:1.6.1-STAGING-latest
+ so-vnfm-adapter:
+ image: onap/so/vnfm-adapter:1.6.1-STAGING-latest
+ so-sdnc-adapter:
+ image: onap/so/sdnc-adapter:1.6.1-STAGING-latest
+ so-vfc-adapter:
+ image: onap/so/vfc-adapter:1.6.1-STAGING-latest
+ so-sdc-controller:
+ image: onap/so/sdc-controller:1.6.1-STAGING-latest
+ so-request-db-adapter:
+ image: onap/so/request-db-adapter:1.6.1-STAGING-latest
vid:
- image: onap/vid:6.0.0-latest
+ image: onap/vid:6.0-STAGING-latest
portal:
- portal-app:
- image: onap/portal-app:2.6.0-STAGING-latest
- portal-mariadb:
- image: onap/portal-db:2.6.0-STAGING-latest
- portal-sdk:
- image: onap/portal-sdk:2.6.0-STAGING-latest
- portal-widget:
- image: onap/portal-wms:2.6.0-STAGING-latest
+ portal-app:
+ image: onap/portal-app:3.2.0-STAGING-latest
+ portal-mariadb:
+ image: onap/portal-db:3.2.0-STAGING-latest
+ portal-sdk:
+ image: onap/portal-sdk:3.2.0-STAGING-latest
+ portal-widget:
+ image: onap/portal-wms:3.2.0-STAGING-latest
diff --git a/deployment/heat/onap-rke/uui-staging-image-override.yaml b/deployment/heat/onap-rke/uui-staging-image-override.yaml
new file mode 100755
index 000000000..7d717f54b
--- /dev/null
+++ b/deployment/heat/onap-rke/uui-staging-image-override.yaml
@@ -0,0 +1,5 @@
+---
+uui:
+ image: onap/usecase-ui:3.0.3
+ uui-server:
+ image: onap/usecase-ui-server:3.0.3
diff --git a/deployment/noheat/README.rst b/deployment/noheat/README.rst
new file mode 100644
index 000000000..a75ad2bb2
--- /dev/null
+++ b/deployment/noheat/README.rst
@@ -0,0 +1,48 @@
+================================
+ ONAP on Openstack without Heat
+================================
+
+Ansible roles and sample playbooks for automatic deployments for system testing and continuous
+integration test flows. These will orchestrate Openstack virtual machines setup for a Kubernetes
+cluster, a Rancher Kubernetes Engine (RKE) deployment, a DevStack deployment and an ONAP deployment.
+
+They will be used in the Service Mesh lab.
+
+Prerequisites
+-------------
+
+Infrastructure
+~~~~~~~~~~~~~~
+
+- OpenStack cloud (no Heat support required)
+
+Configuration
+~~~~~~~~~~~~~
+
+- OpenStack ``clouds.yaml`` file
+
+Dependencies
+~~~~~~~~~~~~
+
+- Required python packages (including Ansible) can be found in ``requirements.txt`` pip file.
+ Tested on Python 3.8.10.
+- Ansible required collections & roles can be found in ``requirements.yml`` file for installation
+ with ansible-galaxy tool.
+
+Expected output
+---------------
+
+Ephemeral (disposable) ONAP instance.
+
+Running
+-------
+
+There are 4 playbooks available:
+
+- infra-openstack/ansible/create.yml: creates and prepares OpenStack VMs, generates inventory.
+ Must be run as a first playbook. Run on your machine.
+- devstack/ansible/create.yml: deploys Devstack on appropriate VM. Run on jumphost VM (operator0).
+- cluster-rke/ansible/create.yml: deploys NFS, k8s, helm charts and ONAP. Run on jumphost VM.
+- deploy-all.yml: runs above playbooks. Run on your machine.
+
+User may run deploy-all.yml or manually run infra-openstack, devstack and cluster-rke playbooks.
diff --git a/deployment/noheat/cluster-rke/ansible/create.yml b/deployment/noheat/cluster-rke/ansible/create.yml
new file mode 100644
index 000000000..920db966d
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/create.yml
@@ -0,0 +1,63 @@
+---
+- name: Update packages
+ hosts: operator
+ become: true
+ tasks:
+ - name: Update ca-certificates
+ package:
+ name: ca-certificates
+ state: latest
+- name: Install NFS
+ hosts: all
+ become: yes
+ roles:
+ - role: setup_nfs
+- name: Set up bastion node for ONAP Docker registry
+ hosts: "nfs0"
+ become: yes
+ roles:
+ - role: create_bastion
+ destination: "{{ nexus }}"
+- name: Add bastion information to the cluster nodes
+ hosts: control,workers
+ become: yes
+ tasks:
+ - name: Add cluster hostnames to /etc/hosts file
+ lineinfile:
+ path: /etc/hosts
+ line: "{{ hostvars['nfs0']['ansible_default_ipv4']['address'] }} {{ item }}"
+ loop:
+ - "nexus3.onap.org"
+- name: Install Docker
+ become: yes
+ hosts: operator,control,workers
+ roles:
+ - role: setup_docker
+- name: Deploy k8s
+ hosts: operator0
+ vars_files:
+ - ~/common-vars.yml
+ roles:
+ - role: setup_k8s
+- name: Download OOM
+ hosts: operator0
+ tasks:
+ - name: Clone OOM
+ git:
+ repo: "https://git.onap.org/oom"
+ dest: "{{ oom_dir }}"
+ version: "{{ onap_branch }}"
+- name: Install Helm
+ hosts: operator0
+ roles:
+ - role: setup_helm
+- name: Install metallb, cert-manager and prometheus
+ hosts: operator0
+ gather_facts: false
+ roles:
+ - role: deps
+- name: Deploy sm-onap
+ hosts: operator0
+ gather_facts: false
+ roles:
+ - role: oom
diff --git a/deployment/noheat/cluster-rke/ansible/group_vars/all.yml.sm-onap b/deployment/noheat/cluster-rke/ansible/group_vars/all.yml.sm-onap
new file mode 100644
index 000000000..9fb3313ee
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/group_vars/all.yml.sm-onap
@@ -0,0 +1,11 @@
+---
+nexus:
+ address: 199.204.45.137
+ port: 10001
+oom_dir: "{{ ansible_user_dir }}/oom"
+onap_branch: "master"
+override_file: "{{ oom_dir }}/kubernetes/onap/resources/overrides/sm-onap.yaml"
+integration_dir: "{{ ansible_user_dir }}/integration"
+prometheus_enabled: true
+metallb_enabled: true
+istio_enabled: true
diff --git a/deployment/noheat/cluster-rke/ansible/group_vars/all/all.yml b/deployment/noheat/cluster-rke/ansible/group_vars/all/all.yml
new file mode 120000
index 000000000..206526103
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/group_vars/all/all.yml
@@ -0,0 +1 @@
+../all.yml.sm-onap \ No newline at end of file
diff --git a/deployment/noheat/cluster-rke/ansible/roles/create_bastion/tasks/main.yml b/deployment/noheat/cluster-rke/ansible/roles/create_bastion/tasks/main.yml
new file mode 100644
index 000000000..8189968c4
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/create_bastion/tasks/main.yml
@@ -0,0 +1,35 @@
+- name: Add cluster hostnames to /etc/hosts file
+ lineinfile:
+ path: /etc/hosts
+ line: "{{ ansible_default_ipv4.address + ' ' + ansible_hostname }}"
+
+- name: Enable IP forwarding
+ ansible.posix.sysctl:
+ name: net.ipv4.ip_forward
+ value: '1'
+ sysctl_set: yes
+
+- name: Create PREROUTING rule
+ ansible.builtin.iptables:
+ table: nat
+ chain: PREROUTING
+ protocol: tcp
+ destination_port: "{{ destination.port }}"
+ jump: DNAT
+ to_destination: "{{ destination.address }}:{{ destination.port }}"
+
+- name: Create OUTPUT rule
+ ansible.builtin.iptables:
+ table: nat
+ chain: OUTPUT
+ protocol: tcp
+ destination: "{{ ansible_default_ipv4.address }}"
+ destination_port: "{{ destination.port }}"
+ jump: DNAT
+ to_destination: "{{ destination.address }}"
+
+- name: Enable masquerading
+ ansible.builtin.iptables:
+ table: nat
+ chain: POSTROUTING
+ jump: MASQUERADE
diff --git a/deployment/noheat/cluster-rke/ansible/roles/deps/defaults/main.yml b/deployment/noheat/cluster-rke/ansible/roles/deps/defaults/main.yml
new file mode 100644
index 000000000..6a3594628
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/deps/defaults/main.yml
@@ -0,0 +1,11 @@
+---
+cert_manager_version: "1.5.5"
+prometheus_enabled: true
+prometheus_version: "19.3.0"
+metallb_enabled: true
+metallb_version: "0.13.7"
+metallb_addresses: "192.168.1.129-192.168.1.255"
+istio_enabled: true
+istio_version: "1.14.5"
+strimzi_enabled: true
+strimzi_version: "0.31.1"
diff --git a/deployment/noheat/cluster-rke/ansible/roles/deps/files/envoyfilter-case.yml b/deployment/noheat/cluster-rke/ansible/roles/deps/files/envoyfilter-case.yml
new file mode 100644
index 000000000..8edcf09c5
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/deps/files/envoyfilter-case.yml
@@ -0,0 +1,41 @@
+---
+apiVersion: networking.istio.io/v1alpha3
+kind: EnvoyFilter
+metadata:
+ name: header-casing
+ namespace: istio-config
+spec:
+ configPatches:
+ - applyTo: CLUSTER
+ match:
+ context: ANY
+ patch:
+ operation: MERGE
+ value:
+ typed_extension_protocol_options:
+ envoy.extensions.upstreams.http.v3.HttpProtocolOptions:
+ '@type': type.googleapis.com/envoy.extensions.upstreams.http.v3.HttpProtocolOptions
+ use_downstream_protocol_config:
+ http_protocol_options:
+ header_key_format:
+ stateful_formatter:
+ name: preserve_case
+ typed_config:
+ '@type': type.googleapis.com/envoy.extensions.http.header_formatters.preserve_case.v3.PreserveCaseFormatterConfig
+ - applyTo: NETWORK_FILTER
+ match:
+ listener:
+ filterChain:
+ filter:
+ name: envoy.filters.network.http_connection_manager
+ patch:
+ operation: MERGE
+ value:
+ typed_config:
+ '@type': type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager
+ http_protocol_options:
+ header_key_format:
+ stateful_formatter:
+ name: preserve_case
+ typed_config:
+ '@type': type.googleapis.com/envoy.extensions.http.header_formatters.preserve_case.v3.PreserveCaseFormatterConfig
diff --git a/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/cert-manager.yml b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/cert-manager.yml
new file mode 100644
index 000000000..5a14d93ce
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/cert-manager.yml
@@ -0,0 +1,17 @@
+---
+- name: Check if cert-manager manifest file is present
+ stat:
+ path: /tmp/cert-manager.yaml
+ register: cm_manifest
+
+- name: Download cert-manager
+ get_url:
+ url: "https://github.com/jetstack/cert-manager/releases/download/v{{ cert_manager_version }}/cert-manager.yaml"
+ dest: "/tmp"
+ mode: '0400'
+ when: not cm_manifest.stat.exists
+
+- name: Deploy cert-manager
+ kubernetes.core.k8s:
+ src: /tmp/cert-manager.yaml
+ state: present
diff --git a/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/istio.yml b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/istio.yml
new file mode 100644
index 000000000..89b848636
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/istio.yml
@@ -0,0 +1,55 @@
+---
+- name: Add Istio Helm repository
+ kubernetes.core.helm_repository:
+ name: istio
+ repo_url: https://istio-release.storage.googleapis.com/charts
+
+- name: Create Istio config namespace
+ kubernetes.core.k8s:
+ name: istio-config
+ api_version: v1
+ kind: Namespace
+ state: present
+
+- name: Deploy Istio base chart
+ kubernetes.core.helm:
+ name: istio-base
+ chart_version: "{{ istio_version }}"
+ chart_ref: istio/base
+ release_namespace: istio-system
+ create_namespace: true
+
+- name: Deploy Istio discovery chart
+ kubernetes.core.helm:
+ name: istiod
+ chart_version: "{{ istio_version }}"
+ chart_ref: istio/istiod
+ release_namespace: istio-system
+ wait: true
+ release_values:
+ meshConfig:
+ rootNamespace: istio-config
+
+- name: Apply workaround for SDC case sensitivity issue
+ kubernetes.core.k8s:
+ state: present
+ definition: "{{ lookup('file', 'envoyfilter-case.yml') | from_yaml }}"
+
+- name: Create Istio ingress gateway namespace
+ kubernetes.core.k8s:
+ state: present
+ definition:
+ apiVersion: v1
+ kind: Namespace
+ metadata:
+ name: istio-ingress
+ labels:
+ istio-injection: enabled
+
+- name: Deploy Istio ingress gateway chart
+ kubernetes.core.helm:
+ name: istio-ingress
+ chart_version: "{{ istio_version }}"
+ chart_ref: istio/gateway
+ release_namespace: istio-ingress
+ wait: true
diff --git a/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/main.yml b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/main.yml
new file mode 100644
index 000000000..32adc3310
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/main.yml
@@ -0,0 +1,19 @@
+---
+- name: Setup cert-manager
+ include_tasks: cert-manager.yml
+
+- name: Setup strimzi
+ include_tasks: strimzi.yml
+ when: strimzi_enabled
+
+- name: Setup MetalLB
+ include_tasks: metallb.yml
+ when: metallb_enabled
+
+- name: Setup Prometheus
+ include_tasks: prometheus.yml
+ when: prometheus_enabled
+
+- name: Setup Istio
+ include_tasks: istio.yml
+ when: istio_enabled
diff --git a/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/metallb.yml b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/metallb.yml
new file mode 100644
index 000000000..95547ec32
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/metallb.yml
@@ -0,0 +1,51 @@
+---
+- name: Add MetalLB Helm repository
+ kubernetes.core.helm_repository:
+ name: metallb
+ repo_url: https://metallb.github.io/metallb
+
+- name: Create MetalLB namespace
+ kubernetes.core.k8s:
+ state: present
+ definition:
+ apiVersion: v1
+ kind: Namespace
+ metadata:
+ name: metallb-system
+ labels:
+ pod-security.kubernetes.io/enforce: privileged
+ pod-security.kubernetes.io/audit: privileged
+ pod-security.kubernetes.io/warn: privileged
+- name: Deploy MetalLB charts
+ kubernetes.core.helm:
+ name: metallb
+ chart_version: "{{ metallb_version }}"
+ chart_ref: metallb/metallb
+ release_namespace: metallb-system
+ wait: true
+
+- name: Create MetalLB IP Address Pool Resource
+ kubernetes.core.k8s:
+ state: present
+ definition:
+ apiVersion: metallb.io/v1beta1
+ kind: IPAddressPool
+ metadata:
+ name: onap-pool
+ namespace: metallb-system
+ spec:
+ addresses:
+ - "{{ metallb_addresses }}"
+ register: result
+ retries: 1
+ until: result['failed'] == false
+
+- name: Create MetalLB L2 Advertisement Resource
+ kubernetes.core.k8s:
+ state: present
+ definition:
+ apiVersion: metallb.io/v1beta1
+ kind: L2Advertisement
+ metadata:
+ name: onap
+ namespace: metallb-system
diff --git a/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/prometheus.yml b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/prometheus.yml
new file mode 100644
index 000000000..e046cddb8
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/prometheus.yml
@@ -0,0 +1,13 @@
+---
+- name: Add prometheus Helm repository
+ kubernetes.core.helm_repository:
+ name: prometheus
+ repo_url: https://prometheus-community.github.io/helm-charts
+
+- name: Deploy Prometheus charts
+ kubernetes.core.helm:
+ name: prometheus
+ chart_version: "{{ prometheus_version }}"
+ chart_ref: prometheus/kube-prometheus-stack
+ release_namespace: prometheus
+ create_namespace: true
diff --git a/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/strimzi.yml b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/strimzi.yml
new file mode 100644
index 000000000..fd5828b19
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/strimzi.yml
@@ -0,0 +1,15 @@
+---
+- name: Add Strimzi Helm repository
+ kubernetes.core.helm_repository:
+ name: strimzi
+ repo_url: https://strimzi.io/charts
+
+- name: Deploy Strimzi chart
+ kubernetes.core.helm:
+ name: strimzi-kafka-operator
+ chart_version: "{{ strimzi_version }}"
+ chart_ref: strimzi/strimzi-kafka-operator
+ release_namespace: strimzi-system
+ create_namespace: true
+ values:
+ watchAnyNamespace: true
diff --git a/deployment/noheat/cluster-rke/ansible/roles/oom/tasks/main.yml b/deployment/noheat/cluster-rke/ansible/roles/oom/tasks/main.yml
new file mode 100644
index 000000000..035fb01f5
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/oom/tasks/main.yml
@@ -0,0 +1,66 @@
+---
+- name: Build OOM charts
+ make:
+ chdir: "{{ oom_dir }}/kubernetes"
+ target: all
+ params:
+ SKIP_LINT: "TRUE"
+
+- name: Build ONAP charts
+ make:
+ chdir: "{{ oom_dir }}/kubernetes"
+ target: onap
+ params:
+ SKIP_LINT: "TRUE"
+
+- name: Create ONAP namespace
+ kubernetes.core.k8s:
+ state: present
+ definition:
+ apiVersion: v1
+ kind: Namespace
+ metadata:
+ name: onap
+ labels:
+ istio-injection: enabled
+ when: istio_enabled
+
+- name: Create ONAP namespace
+ kubernetes.core.k8s:
+ name: onap
+ api_version: v1
+ kind: Namespace
+ state: present
+ when: not istio_enabled
+
+- name: Get encryption key
+ command: cat "{{ oom_dir }}/kubernetes/so/resources/config/mso/encryption.key"
+ register: encryption_key
+ when: encryption_key is undefined
+- name: Clone integration project
+ git:
+ repo: "https://git.onap.org/integration"
+ dest: "{{ integration_dir }}"
+ version: "{{ onap_branch }}"
+- name: Compile encryption tool
+ command:
+ cmd: javac Crypto.java
+ chdir: "{{ integration_dir }}/deployment/heat/onap-rke/scripts"
+ creates: "{{ integration_dir }}/deployment/heat/onap-rke/scripts/Crypto.class"
+- name: Encrypt password
+ command:
+ cmd: java Crypto "{{ openstack_passwd }}" "{{ encryption_key.stdout }}"
+ chdir: "{{ integration_dir }}/deployment/heat/onap-rke/scripts"
+ register: encrypted_password
+ when: encrypted_password is undefined
+
+- name: Deploy sm-onap
+ command:
+ cmd: "helm deploy onap local/onap --namespace onap --set global.masterPassword=scrtPasswd -f {{ override_file }}"
+ environment:
+ OPENSTACK_USER_NAME: "{{ openstack_username }}"
+ OPENSTACK_REGION: "{{ openstack_region }}"
+ OPENSTACK_KEYSTONE_URL: "http://{{ hostvars['openstack0']['ansible_default_ipv4']['address'] }}:5000/3.0"
+ OPENSTACK_TENANT_NAME: "{{ openstack_tenant }}"
+ OPENSTACK_ENCTYPTED_PASSWORD: "{{ encrypted_password.stdout }}"
+ changed_when: false
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_docker/defaults/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/defaults/main.yml
new file mode 100644
index 000000000..cafa274a1
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/defaults/main.yml
@@ -0,0 +1,3 @@
+---
+docker_version: "20.10.21"
+local_user: "ubuntu"
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_docker/handlers/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/handlers/main.yml
new file mode 100644
index 000000000..3627303e6
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/handlers/main.yml
@@ -0,0 +1,5 @@
+---
+- name: restart docker
+ service:
+ name: docker
+ state: restarted
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_docker/tasks/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/tasks/main.yml
new file mode 100644
index 000000000..12e13f47b
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/tasks/main.yml
@@ -0,0 +1,26 @@
+---
+- name: Setup Docker repo and packages
+ include_tasks: packages.yml
+
+- name: Add user to docker group
+ user:
+ name: "{{ local_user }}"
+ groups: docker
+ append: yes
+ when: local_user is defined
+
+- name: Make sure Docker is started and enabled
+ service:
+ name: docker
+ state: started
+ enabled: yes
+
+- name: Configure Docker
+ copy:
+ dest: /etc/docker/daemon.json
+ content: "{{ docker_config | to_nice_json }}"
+ mode: 0600
+ backup: true
+ when: docker_config is defined
+ notify:
+ - restart docker
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_docker/tasks/packages.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/tasks/packages.yml
new file mode 100644
index 000000000..814dd285a
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/tasks/packages.yml
@@ -0,0 +1,41 @@
+---
+- name: Install deps
+ apt:
+ name: "{{ item }}"
+ state: present
+ with_items:
+ - apt-transport-https
+ - ca-certificates
+ - curl
+ - software-properties-common
+
+- name: Add Docker repo key
+ apt_key:
+ url: "https://download.docker.com/linux/{{ ansible_distribution | lower }}/gpg"
+ state: present
+
+- name: Add Docker repo
+ apt_repository:
+ repo: "deb https://download.docker.com/linux/{{ ansible_distribution | lower }} {{ ansible_distribution_release | lower }} stable"
+ state: present
+ update_cache: yes
+
+- name: Find exact Docker version
+ shell: "set -o pipefail && apt-cache madison docker-ce | grep {{ docker_version }} | head -n 1 | cut -d ' ' -f 4"
+ args:
+ executable: "/bin/bash"
+ register: docker_pkg_version
+ changed_when: false
+
+- name: install Docker
+ apt:
+ name: "{{ item }}"
+ state: present
+ allow_downgrade: true
+ with_items:
+ - "docker-ce={{ docker_pkg_version.stdout }}"
+ - "docker-ce-cli={{ docker_pkg_version.stdout }}"
+
+- name: Lock docker version
+ command: apt-mark hold docker-ce docker-ce-cli
+ changed_when: false
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_docker/vars/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/vars/main.yml
new file mode 100644
index 000000000..6879cca7e
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/vars/main.yml
@@ -0,0 +1,8 @@
+---
+# docker_config will be converted to JSON and placed at /etc/docker/daemon.json
+#docker_config:
+# insecure-registries:
+# - "192.168.1.1:5000"
+# - "192.168.1.2:5000"
+# registry-mirrors:
+# - "http://192.168.1.1:5000"
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_helm/defaults/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/defaults/main.yml
new file mode 100644
index 000000000..f0416f9df
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/defaults/main.yml
@@ -0,0 +1,7 @@
+---
+helm_version: "3.8.2"
+helm_cm_push_version: "0.10.3"
+chartmuseum_version: "0.15.0"
+chartmuseum_port: "8879"
+chartmuseum_dir: "{{ ansible_user_dir }}/helm3-storage"
+local_user: "{{ ansible_user_id }}"
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_helm/handlers/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/handlers/main.yml
new file mode 100644
index 000000000..0847b8182
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/handlers/main.yml
@@ -0,0 +1,5 @@
+---
+- name: Reload systemd
+ become: yes
+ systemd:
+ daemon-reload: yes
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/cm.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/cm.yml
new file mode 100644
index 000000000..71f43ad0a
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/cm.yml
@@ -0,0 +1,45 @@
+---
+- name: Check if chartmuseum is installed
+ stat:
+ path: /usr/local/bin/chartmuseum
+ register: cm_bin
+
+- name: Check if chartmuseum is installed
+ stat:
+ path: /tmp/get-chartmuseum
+ register: cm_install
+
+- name: Download chartmuseum install script
+ get_url:
+ url: "https://raw.githubusercontent.com/helm/chartmuseum/v{{ chartmuseum_version }}/scripts/get-chartmuseum"
+ dest: "/tmp/"
+ mode: '700'
+ when: not cm_install.stat.exists
+
+- name: Install chartmuseum
+ become: yes
+ command:
+ cmd: "./get-chartmuseum -v v{{ chartmuseum_version }}"
+ chdir: "/tmp/"
+ when: not cm_bin.stat.exists
+
+- name: Create chartmuseum local storage
+ file:
+ name: "{{ chartmuseum_dir }}"
+ state: directory
+ mode: '0755'
+
+- name: Install chartmuseum service file
+ become: yes
+ template:
+ src: "chartmuseum.service.j2"
+ dest: "/etc/systemd/system/chartmuseum.service"
+ mode: '0444'
+ notify: Reload systemd
+
+- name: Start and enable chartmuseum
+ become: yes
+ service:
+ name: "chartmuseum"
+ state: started
+ enabled: yes
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/helm.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/helm.yml
new file mode 100644
index 000000000..88ba29f64
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/helm.yml
@@ -0,0 +1,35 @@
+---
+- name: Download helm
+ get_url:
+ url: "https://get.helm.sh/helm-v{{ helm_version }}-linux-amd64.tar.gz"
+ dest: "/tmp"
+
+- name: Unarchive helm
+ unarchive:
+ src: "/tmp/helm-v{{ helm_version }}-linux-amd64.tar.gz"
+ dest: "/tmp/"
+ remote_src: yes
+
+- name: Copy helm binary to $PATH
+ become: yes
+ copy:
+ src: "/tmp/linux-amd64/helm"
+ dest: "/usr/local/bin/"
+ remote_src: yes
+ mode: '0555'
+
+- name: Install Helm Push plugin
+ kubernetes.core.helm_plugin:
+ plugin_path: "https://github.com/chartmuseum/helm-push.git"
+ plugin_version: "{{ helm_cm_push_version }}"
+ state: present
+
+- name: Install Helm OOM Deploy plugin
+ kubernetes.core.helm_plugin:
+ plugin_path: "{{ oom_dir }}/kubernetes/helm/plugins/deploy"
+ state: present
+
+- name: Install Helm OOM Undeploy plugin
+ kubernetes.core.helm_plugin:
+ plugin_path: "{{ oom_dir }}/kubernetes/helm/plugins/undeploy"
+ state: present
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/main.yml
new file mode 100644
index 000000000..94abf6ea8
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/main.yml
@@ -0,0 +1,12 @@
+---
+- name: Setup helm
+ include_tasks: helm.yml
+
+- name: Setup chartmuseum
+ include_tasks: cm.yml
+
+- name: Add local Helm repository
+ kubernetes.core.helm_repository:
+ name: "local"
+ repo_url: "http://127.0.0.1:{{ chartmuseum_port }}"
+ state: present
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_helm/templates/chartmuseum.service.j2 b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/templates/chartmuseum.service.j2
new file mode 100644
index 000000000..78d7967f9
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/templates/chartmuseum.service.j2
@@ -0,0 +1,13 @@
+[Unit]
+Description=chartmuseum
+Requires=network-online.target
+After=network-online.target
+
+[Service]
+ExecStart=/usr/local/bin/chartmuseum --port "{{ chartmuseum_port }}" --storage local --storage-local-rootdir "{{ chartmuseum_dir }}"
+ExecStop=/usr/local/bin/chartmuseum step-down
+User={{ local_user }}
+Restart=always
+
+[Install]
+WantedBy=multi-user.target
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/defaults/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/defaults/main.yml
new file mode 100644
index 000000000..021aae0ee
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/defaults/main.yml
@@ -0,0 +1,3 @@
+---
+rke_version: "1.3.15"
+rke_k8s_version: "v{{ k8s_version }}-rancher1-1"
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/kubectl.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/kubectl.yml
new file mode 100644
index 000000000..f9912ebdf
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/kubectl.yml
@@ -0,0 +1,13 @@
+---
+- name: Check if kubectl is available
+ stat:
+ path: "/usr/local/bin/kubectl"
+ register: kubectl_bin
+
+- name: Get kubectl
+ become: yes
+ get_url:
+ url: "https://dl.k8s.io/release/v{{ k8s_version }}/bin/linux/amd64/kubectl"
+ dest: "/usr/local/bin/"
+ mode: '0555'
+ when: not kubectl_bin.stat.exists
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/main.yml
new file mode 100644
index 000000000..7d3ba0096
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/main.yml
@@ -0,0 +1,16 @@
+---
+- name: Deploy kubernetes with RKE
+ include_tasks: rke.yml
+
+- name: Create k8s directory
+ file:
+ name: "{{ ansible_user_dir }}/.kube"
+ state: directory
+ mode: '0700'
+
+- name: Set k8s config
+ command: "mv {{ ansible_user_dir }}/kube_config_cluster.yml {{ ansible_user_dir }}/.kube/config"
+ when: rke_run and rke_run.rc == 0
+
+- name: Install kubectl
+ include_tasks: kubectl.yml
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/rke.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/rke.yml
new file mode 100644
index 000000000..b253e711d
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/rke.yml
@@ -0,0 +1,25 @@
+---
+- name: Check if RKE is available
+ stat:
+ path: "{{ ansible_user_dir }}/rke"
+ register: rke_bin
+
+- name: Download RKE
+ get_url:
+ url: "https://github.com/rancher/rke/releases/download/v{{ rke_version }}/rke_linux-amd64"
+ dest: "{{ ansible_user_dir }}/rke"
+ mode: '0700'
+ when: not rke_bin.stat.exists
+
+- name: Prepare RKE configuration
+ template:
+ src: "cluster.yml.j2"
+ dest: "{{ ansible_user_dir }}/cluster.yml"
+ mode: '0400'
+
+- name: Run RKE
+ command:
+ cmd: "./rke up"
+ chdir: "{{ ansible_user_dir }}"
+ creates: "{{ ansible_user_dir }}/kube_config_cluster.yml"
+ register: rke_run
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/templates/cluster.yml.j2 b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/templates/cluster.yml.j2
new file mode 100644
index 000000000..3b83fd466
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/templates/cluster.yml.j2
@@ -0,0 +1,52 @@
+# An example of an HA Kubernetes cluster for ONAP
+nodes:
+{% for host in (groups['control'] | list() ) %}
+- address: "{{ hostvars[host]['ansible_host'] }}"
+ port: "22"
+ role:
+ - controlplane
+ - etcd
+ hostname_override: "onap-control-{{ loop.index }}"
+ user: {{ ansible_user_id }}
+ ssh_key_path: "{{ ansible_ssh_private_key_file }}"
+{% endfor %}
+{% for host in (groups['workers'] | list()) %}
+- address: "{{ hostvars[host]['ansible_host'] }}"
+ port: "22"
+ role:
+ - worker
+ hostname_override: "onap-k8s-{{ loop.index }}"
+ user: {{ ansible_user_id }}
+ ssh_key_path: "{{ ansible_ssh_private_key_file }}"
+{% endfor %}
+services:
+ kube-api:
+ service_cluster_ip_range: 10.43.0.0/16
+ pod_security_policy: false
+ always_pull_images: false
+ kube-controller:
+ cluster_cidr: 10.42.0.0/16
+ service_cluster_ip_range: 10.43.0.0/16
+ kubelet:
+ cluster_domain: cluster.local
+ cluster_dns_server: 10.43.0.10
+ fail_swap_on: false
+network:
+ plugin: canal
+authentication:
+ strategy: x509
+ssh_key_path: "{{ ansible_ssh_private_key_file }}"
+ssh_agent_auth: false
+authorization:
+ mode: rbac
+ignore_docker_version: false
+kubernetes_version: "{{ rke_k8s_version }}"
+private_registries:
+- url: nexus3.onap.org:10001
+ user: docker
+ password: docker
+ is_default: true
+cluster_name: "onap"
+restore:
+ restore: false
+ snapshot_name: ""
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/defaults/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/defaults/main.yml
new file mode 100644
index 000000000..da66bfb38
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/defaults/main.yml
@@ -0,0 +1,2 @@
+---
+nfs_mountpoint: "/dockerdata-nfs"
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/tasks/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/tasks/main.yml
new file mode 100644
index 000000000..2d8d0b006
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/tasks/main.yml
@@ -0,0 +1,59 @@
+- name: Install NFS common
+ apt:
+ name: nfs-common
+ state: present
+ when: nfs_role is defined
+
+- name: Install NFS server
+ apt:
+ name: nfs-kernel-server
+ state: present
+ when: nfs_role is defined and nfs_role == "server"
+
+- name: Umount
+ ansible.posix.mount:
+ path: "{{ nfs_mountpoint }}"
+ state: unmounted
+ ignore_errors: yes
+
+- name: Remove leftovers
+ file:
+ path: "{{ nfs_mountpoint }}"
+ state: absent
+ when: nfs_role is defined
+
+- name: Create dockerdata directory
+ file:
+ path: "{{ nfs_mountpoint }}"
+ state: directory
+ mode: '0777'
+ owner: nobody
+ group: nogroup
+ when: nfs_role is defined
+
+- name: Configure NFS server
+ template:
+ src: "exports.j2"
+ dest: "/etc/exports"
+ owner: root
+ group: root
+ mode: '0644'
+ when: nfs_role is defined and nfs_role == "server"
+
+- name: Restart NFS server
+ service:
+ name: nfs-kernel-server
+ state: restarted
+ enabled: yes
+ when: nfs_role is defined and nfs_role == "server"
+
+- name: Configure NFS clients
+ mount:
+ path: "{{ nfs_mountpoint }}"
+ src: "{{ hostvars[groups['nfs'][0]]['ansible_default_ipv4']['address'] }}:{{ nfs_mountpoint }}"
+ fstype: nfs
+ opts: auto,nofail,noatime,nolock,intr,tcp,actimeo=1800
+ dump: 0
+ passno: 0
+ state: mounted
+ when: nfs_role is defined and nfs_role == "client"
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/templates/exports.j2 b/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/templates/exports.j2
new file mode 100644
index 000000000..6a5a825c6
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/templates/exports.j2
@@ -0,0 +1 @@
+{{ nfs_mountpoint }} {% for host in (groups['control'] | union(groups['workers'])) %} {{ hostvars[host]['ansible_default_ipv4']['address'] }}(rw,sync,no_root_squash,no_subtree_check){% endfor %}
diff --git a/deployment/noheat/common-vars.yml b/deployment/noheat/common-vars.yml
new file mode 100644
index 000000000..f7265f4a6
--- /dev/null
+++ b/deployment/noheat/common-vars.yml
@@ -0,0 +1,2 @@
+---
+k8s_version: "1.23.10"
diff --git a/deployment/noheat/deploy-all.yml b/deployment/noheat/deploy-all.yml
new file mode 100644
index 000000000..2ea069525
--- /dev/null
+++ b/deployment/noheat/deploy-all.yml
@@ -0,0 +1,9 @@
+---
+- name: Create infrastructure
+ import_playbook: infra-openstack/ansible/create.yml
+- hosts: operator0
+ tasks:
+ - name: Deploy Devstack
+ ansible.builtin.command: ansible-playbook -i {{ ansible_user_dir }}/inventory.ini {{ ansible_user_dir }}/devstack/ansible/create.yml
+ - name: Deploy k8s & ONAP
+ ansible.builtin.command: ansible-playbook -i {{ ansible_user_dir }}/inventory.ini {{ ansible_user_dir }}/deploy/cluster-rke/ansible/create.yml
diff --git a/deployment/noheat/devstack/ansible/create.yml b/deployment/noheat/devstack/ansible/create.yml
new file mode 100644
index 000000000..f11fe1194
--- /dev/null
+++ b/deployment/noheat/devstack/ansible/create.yml
@@ -0,0 +1,43 @@
+---
+- name: Deploy Devstack
+ hosts: "openstack*"
+ tasks:
+ - name: Update Devstack hosts
+ become: true
+ ansible.builtin.apt:
+ upgrade: full
+ update_cache: true
+ autoremove: true
+ autoclean: true
+
+ - name: Reboot OS
+ become: true
+ ansible.builtin.reboot:
+
+ - name: Clone Devstack
+ ansible.builtin.git:
+ repo: "https://opendev.org/openstack/devstack"
+ dest: "{{ devstack_dir }}"
+ version: "{{ devstack_version }}"
+
+ - name: Copy local.conf
+ ansible.builtin.template:
+ src: "local.conf.j2"
+ dest: "{{ devstack_dir }}/local.conf"
+ mode: '0600'
+
+ - name: Run devstack setup script
+ ansible.builtin.command:
+ chdir: "{{ devstack_dir }}"
+ cmd: "./stack.sh"
+ creates: /opt/stack
+
+ - name: Run devstack setup script
+ ansible.builtin.file:
+ path: "{{ devstack_dir }}"
+ state: absent
+
+ handlers:
+ - name: Reboot OS
+ become: true
+ ansible.builtin.reboot:
diff --git a/deployment/noheat/devstack/ansible/group_vars/all/all.yml b/deployment/noheat/devstack/ansible/group_vars/all/all.yml
new file mode 100644
index 000000000..b2d63c672
--- /dev/null
+++ b/deployment/noheat/devstack/ansible/group_vars/all/all.yml
@@ -0,0 +1,3 @@
+---
+devstack_dir: "{{ ansible_user_dir }}/devstack"
+devstack_version: "stable/yoga"
diff --git a/deployment/noheat/devstack/ansible/templates/local.conf.j2 b/deployment/noheat/devstack/ansible/templates/local.conf.j2
new file mode 100644
index 000000000..0bfa3bba9
--- /dev/null
+++ b/deployment/noheat/devstack/ansible/templates/local.conf.j2
@@ -0,0 +1,5 @@
+[[local|localrc]]
+ADMIN_PASSWORD="{{ openstack_passwd }}"
+DATABASE_PASSWORD=$ADMIN_PASSWORD
+RABBIT_PASSWORD=$ADMIN_PASSWORD
+SERVICE_PASSWORD=$ADMIN_PASSWORD
diff --git a/deployment/noheat/infra-openstack/HACKING.rst b/deployment/noheat/infra-openstack/HACKING.rst
new file mode 100644
index 000000000..dcdc2062e
--- /dev/null
+++ b/deployment/noheat/infra-openstack/HACKING.rst
@@ -0,0 +1,30 @@
+=========================
+ Development environment
+=========================
+
+This environment focuses on interactions with an OpenStack (here: DevStack) instance. Changes can be
+made from the host machine, but an additional guest ("operator") is provided for developers' convenience.
+
+Environment on "operator" machine is already set up and can be accessed by:
+
+.. code-block:: shell
+
+ $ vagrant ssh operator
+
+The provided ``clouds.yaml`` file differs slightly from the one that can be obtained with the
+following steps:
+
+#. Open OpenStack dashboard (http://localhost:8080 forwarded from "devstack" machine)
+#. Navigate to ``Project``, then ``API Access`` on the left panel
+#. Select ``Download OpenStack RC File``, then ``OpenStack clouds.yaml File`` on the right side
+
+Summary of changes:
+
+- Added password from ``local.conf`` file (used in DevStack instance setup)
+- Removed ``project_id`` which might change on a new DevStack instance
+- Replaced ``auth_url`` based on machine's dynamic IP with the static private address
+- Added ``project_domain_name`` needed to run Ansible playbooks
+
+Installed Python package ``python-openstackclient`` includes key package ``openstacksdk`` as
+a dependency and provides additional CLI tools. Tool ``pip`` for Python 3 was used for installing
+these packages.
diff --git a/deployment/noheat/infra-openstack/README.rst b/deployment/noheat/infra-openstack/README.rst
new file mode 100644
index 000000000..c48dfa7f2
--- /dev/null
+++ b/deployment/noheat/infra-openstack/README.rst
@@ -0,0 +1,34 @@
+==================================================
+ Cloud infrastructure: OpenStack virtual machines
+==================================================
+
+Ansible roles and sample playbooks for creating virtual machines on OpenStack without Heat support.
+
+They will be used to create virtual machines hosting a Service Mesh lab cluster.
+
+Prerequisites
+-------------
+
+Infrastructure
+~~~~~~~~~~~~~~
+
+- OpenStack cloud (no Heat support required)
+
+Configuration
+~~~~~~~~~~~~~
+
+- OpenStack ``clouds.yaml`` file
+
+Dependencies
+~~~~~~~~~~~~
+
+Tested on Python 3.8.10. Required Python dependencies can be found in ``../requirements.txt``.
+Required Ansible roles and collections can be found in ``../requirements.yml``.
+
+.. _openstacksdk: https://pypi.org/project/openstacksdk
+
+
+Expected output
+---------------
+
+Ephemeral (disposable) OpenStack virtual machines for a Kubernetes cluster.
diff --git a/deployment/noheat/infra-openstack/ansible/create.yml b/deployment/noheat/infra-openstack/ansible/create.yml
new file mode 100644
index 000000000..73830663c
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/create.yml
@@ -0,0 +1,136 @@
+---
+- name: Prepare infrastructure and create operation instances
+ hosts: localhost
+ connection: local
+ gather_facts: False
+ roles:
+ - create_network
+ - create_securitygroup
+ - create_keypair
+ - role: create_hosts
+ hosts: "{{ operation.hosts }}"
+ operator_key: "dummy"
+ tasks:
+ - name: Get operator Openstack info
+ openstack.cloud.server_info:
+ server: "operator0"
+ register: operator_info
+ - name: Create directory for artifacts
+ ansible.builtin.file:
+ name: "artifacts"
+ state: directory
+ mode: '0755'
+ - name: Save operator access information
+ ansible.builtin.copy:
+ content: "{{ operator_info['openstack_servers'][0]['public_v4'] }},{{ image['user'] }},~/.ssh/{{ keypair['key']['name'] }}"
+ dest: "artifacts/operator.csv"
+ mode: "0644"
+- name: Create cluster operator access keypair
+ hosts: "operator0"
+ gather_facts: False
+ tasks:
+ - name: Wait for system to become reachable
+ wait_for_connection:
+ - name: Generate an OpenSSH keypair with the default values (4096 bits, rsa)
+ community.crypto.openssh_keypair:
+ path: "~/.ssh/{{ keypair.name }}"
+ register: key
+    - name: Add operator0 public key to its authorized keys
+ ansible.posix.authorized_key:
+ key: "{{ key['public_key'] }}"
+ state: present
+ user: "{{ ansible_user }}"
+- name: Create OpenStack instances
+ hosts: localhost
+ connection: local
+ gather_facts: False
+ roles:
+ - role: create_hosts
+ hosts: "{{ openstack.hosts }}"
+ operator_key: "{{ hostvars['operator0']['key']['public_key'] }}"
+- name: Create cluster instances
+ hosts: localhost
+ connection: local
+ gather_facts: False
+ roles:
+ - role: create_hosts
+ hosts: "{{ cluster.hosts }}"
+ operator_key: "{{ hostvars['operator0']['key']['public_key'] }}"
+- name: Create cluster operator access information
+ hosts: "operator0"
+ vars_files:
+ - ../../common-vars.yml
+ tasks:
+ - name: Add cluster hostnames to /etc/hosts file
+ lineinfile:
+ path: /etc/hosts
+ line: "{{ item.value + ' ' + item.key }}"
+ become: yes
+ loop: "{{ lookup('dict', hostvars['localhost']['hosts_dict']) }}"
+ - name: Create inventory for in-cluster deployment stage
+ template:
+ src: templates/inventory.ini.j2
+ dest: "{{ operation.inventory }}"
+ vars:
+ hosts: "{{ lookup('dict', hostvars['localhost']['hosts_dict']) }}"
+ - name: Push in-cluster deployment stage description to the next Ansible control host
+ copy:
+ src: ../../cluster-rke
+ dest: ~/deploy
+ - name: Push Devstack deployment stage description to the next Ansible control host
+ copy:
+ src: ../../devstack
+ dest: ~/
+ - name: Push common variables to the next Ansible control host
+ copy:
+ src: ../../common-vars.yml
+ dest: ~/
+ - name: Push Devstack vars to the next Ansible control host (for Devstack stage)
+ template:
+ src: "templates/openstack.yml.j2"
+ dest: ~/devstack/ansible/group_vars/all/openstack.yml
+ mode: '0644'
+ - name: Push Devstack vars to the next Ansible control host (for cluster-rke stage)
+ template:
+ src: "templates/openstack.yml.j2"
+ dest: ~/deploy/cluster-rke/ansible/group_vars/all/openstack.yml
+ mode: '0644'
+ - name: Create Devstack config directory
+ file:
+ path: ~/.config/openstack/
+ state: directory
+ mode: '0755'
+ - name: Generate Devstack clouds.yml file
+ template:
+ src: "templates/clouds.yaml.j2"
+ dest: ~/.config/openstack/clouds.yml
+ mode: '0644'
+ - block:
+ - name: Install python dependencies
+ become: yes
+ apt:
+ name:
+ - python3-pip
+ - python3-setuptools
+ - default-jdk-headless
+ state: present
+ update_cache: true
+ - name: Install community.kubernetes.k8s Ansible collection dependencies
+ pip:
+ name:
+ - ansible-core==2.13.5
+ - openshift==0.13.1
+ - pyyaml==6.0
+          # Major version of Python k8s library matches minor version of k8s.
+ - kubernetes~={{ k8s_version | regex_search("[^^.][0-9]+[^$]") ~ "0" }}
+ executable: pip3
+ become: yes
+ - name: Copy ansible-galaxy requirements file
+ copy:
+ src: operator-requirements.yml
+ dest: ~/requirements.yml
+ mode: '0444'
+ - name: Install ansible-galaxy collections
+ community.general.ansible_galaxy_install:
+ requirements_file: ~/requirements.yml
+ type: both
diff --git a/deployment/noheat/infra-openstack/ansible/destroy.yml b/deployment/noheat/infra-openstack/ansible/destroy.yml
new file mode 100644
index 000000000..1564e3088
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/destroy.yml
@@ -0,0 +1,15 @@
+---
+- name: Destroy infrastructure
+ hosts: localhost
+ connection: local
+ gather_facts: False
+ roles:
+ - role: destroy_hosts
+ hosts: "{{ cluster.hosts }}"
+ - role: destroy_hosts
+ hosts: "{{ operation.hosts }}"
+ - role: destroy_hosts
+ hosts: "{{ openstack.hosts }}"
+ - destroy_keypair
+ - destroy_network
+ - destroy_securitygroup
diff --git a/deployment/noheat/infra-openstack/ansible/group_vars/all.yml.sample b/deployment/noheat/infra-openstack/ansible/group_vars/all.yml.sample
new file mode 100644
index 000000000..541e15279
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/group_vars/all.yml.sample
@@ -0,0 +1,63 @@
+---
+network:
+ name: &network_name "onap_ci_lab"
+ cidr: "192.168.1.0/24"
+ dns_servers:
+ # - x.x.x.x
+ # - y.y.y.y
+
+keypair:
+ name: &keypair_name "onap_ci_lab"
+
+securitygroup:
+ name: &securitygroup_name "onap_ci_lab"
+ remote_ip_prefix:
+ - "172.24.4.0/24"
+ - "192.168.1.0/24"
+ local_ip_prefix:
+ - "192.168.1.0/24"
+
+image:
+ name: &image_name "Ubuntu_20.04"
+ user: "ubuntu"
+
+openstack:
+ name: "vnf0"
+ inventory: "~/inventory.ini"
+ hosts:
+ - name: "openstack0"
+ image: *image_name
+ flavor: "m1.large"
+ keypair: *keypair_name
+ network: *network_name
+ securitygroup: *securitygroup_name
+ boot_from_volume: true
+ terminate_volume: true
+ volume_size: 100
+
+operation:
+ name: "operation0"
+ inventory: "~/inventory.ini"
+ hosts:
+ - name: "operator0"
+ image: *image_name
+ flavor: "m1.tiny"
+ keypair: *keypair_name
+ network: *network_name
+ securitygroup: *securitygroup_name
+ boot_from_volume: true
+ terminate_volume: true
+ volume_size: 5
+
+cluster:
+ name: "cluster0"
+ hosts:
+ - name: "worker0"
+ image: *image_name
+ flavor: "m1.tiny"
+ keypair: *keypair_name
+ network: *network_name
+ securitygroup: *securitygroup_name
+ boot_from_volume: true
+ terminate_volume: true
+ volume_size: 5
diff --git a/deployment/noheat/infra-openstack/ansible/group_vars/all.yml.sm-onap b/deployment/noheat/infra-openstack/ansible/group_vars/all.yml.sm-onap
new file mode 100644
index 000000000..9223ea591
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/group_vars/all.yml.sm-onap
@@ -0,0 +1,86 @@
+---
+network:
+ name: &network_name "onap_ci_lab"
+ cidr: "192.168.1.0/24"
+
+keypair:
+ name: &keypair_name "onap_ci_lab"
+
+securitygroup:
+ name: &securitygroup_name "onap_ci_lab"
+ remote_ip_prefix:
+ - "0.0.0.0/0"
+ local_ip_prefix:
+ - "192.168.1.0/24"
+
+image:
+ name: &image_name "Ubuntu_20.04"
+ user: "ubuntu"
+
+openstack:
+ name: "vnf0"
+ inventory: "~/inventory.ini"
+ hosts:
+ - name: "openstack0"
+ image: *image_name
+ flavor: "m1.xlarge"
+ keypair: *keypair_name
+ network: *network_name
+ auto_ip: false
+ securitygroup: *securitygroup_name
+ volume_size: 140
+
+operation:
+ name: "operation0"
+ inventory: "~/inventory.ini"
+ hosts:
+ - name: "operator0"
+ image: *image_name
+ flavor: "m1.xlarge"
+ keypair: *keypair_name
+ network: *network_name
+ securitygroup: *securitygroup_name
+ volume_size: 20
+
+cluster:
+ name: "cluster0"
+ hosts:
+ - name: "control0"
+ image: *image_name
+ flavor: "m1.xlarge"
+ keypair: *keypair_name
+ network: *network_name
+ auto_ip: false
+ securitygroup: *securitygroup_name
+ volume_size: 50
+ - name: "worker0a"
+ image: *image_name
+ flavor: "m1.2xlarge"
+ keypair: *keypair_name
+ network: *network_name
+ auto_ip: false
+ securitygroup: *securitygroup_name
+ volume_size: 80
+ - name: "worker0b"
+ image: *image_name
+ flavor: "m1.2xlarge"
+ keypair: *keypair_name
+ network: *network_name
+ auto_ip: false
+ securitygroup: *securitygroup_name
+ volume_size: 80
+ - name: "worker0c"
+ image: *image_name
+ flavor: "m1.2xlarge"
+ keypair: *keypair_name
+ network: *network_name
+ auto_ip: false
+ securitygroup: *securitygroup_name
+ volume_size: 80
+ - name: "nfs0"
+ image: *image_name
+ flavor: "m1.large"
+ keypair: *keypair_name
+ network: *network_name
+ securitygroup: *securitygroup_name
+ volume_size: 150
diff --git a/deployment/noheat/infra-openstack/ansible/group_vars/all/all.yml b/deployment/noheat/infra-openstack/ansible/group_vars/all/all.yml
new file mode 120000
index 000000000..854839817
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/group_vars/all/all.yml
@@ -0,0 +1 @@
+../all.yml.sample \ No newline at end of file
diff --git a/deployment/noheat/infra-openstack/ansible/group_vars/all/openstack.yml b/deployment/noheat/infra-openstack/ansible/group_vars/all/openstack.yml
new file mode 100644
index 000000000..63ed1b081
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/group_vars/all/openstack.yml
@@ -0,0 +1,6 @@
+---
+openstack_username: "admin"
+openstack_domain: "Default"
+openstack_passwd: "secret"
+openstack_region: "RegionOne"
+openstack_tenant: "admin"
diff --git a/deployment/noheat/infra-openstack/ansible/operator-requirements.yml b/deployment/noheat/infra-openstack/ansible/operator-requirements.yml
new file mode 100644
index 000000000..0532eb473
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/operator-requirements.yml
@@ -0,0 +1,8 @@
+---
+collections:
+ - name: ansible.posix
+ version: 1.4.0
+ - name: kubernetes.core
+ version: 2.3.2
+ - name: community.general
+ version: 5.8.0
diff --git a/deployment/noheat/infra-openstack/ansible/roles/create_hosts/tasks/create_host.yml b/deployment/noheat/infra-openstack/ansible/roles/create_hosts/tasks/create_host.yml
new file mode 100644
index 000000000..8fa4d0709
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/create_hosts/tasks/create_host.yml
@@ -0,0 +1,33 @@
+---
+- name: Create host
+ os_server:
+ state: present
+ name: "{{ host.name }}"
+ image: "{{ host.image }}"
+ flavor: "{{ host.flavor }}"
+ key_name: "{{ host.keypair }}"
+ network: "{{ host.network }}"
+ security_groups:
+ - "{{ host.securitygroup }}"
+ auto_ip: "{{ host.auto_ip | default(true) }}"
+ boot_from_volume: "{{ host.boot_from_volume | default(true) }}"
+ terminate_volume: "{{ host.terminate_volume | default(true) }}"
+ volume_size: "{{ host.volume_size | default(10) }}"
+ userdata: |
+ #cloud-config
+ ssh_authorized_keys:
+ - "{{ operator_key }}"
+ register: new_host
+
+- name: Add host to inventory
+ add_host:
+ hostname: "{{ new_host.server.name }}"
+ groups: "{{ cluster.name }}"
+ ansible_ssh_host: "{{ new_host.server.public_v4 }}"
+ ansible_ssh_user: "{{ image.user }}"
+ ansible_ssh_extra_args: "-o StrictHostKeyChecking=no"
+ ansible_ssh_private_key_file: "~/.ssh/{{ keypair.key.name }}"
+
+- name: Add host to hosts dict
+ set_fact:
+ hosts_dict: "{{ hosts_dict|default({}) | combine( {new_host.server.name: new_host.server.private_v4} ) }}"
diff --git a/deployment/noheat/infra-openstack/ansible/roles/create_hosts/tasks/main.yml b/deployment/noheat/infra-openstack/ansible/roles/create_hosts/tasks/main.yml
new file mode 100644
index 000000000..933b2f526
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/create_hosts/tasks/main.yml
@@ -0,0 +1,5 @@
+---
+- include_tasks: create_host.yml
+ loop: "{{ hosts }}"
+ loop_control:
+ loop_var: host
diff --git a/deployment/noheat/infra-openstack/ansible/roles/create_keypair/tasks/main.yml b/deployment/noheat/infra-openstack/ansible/roles/create_keypair/tasks/main.yml
new file mode 100644
index 000000000..8a7c72092
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/create_keypair/tasks/main.yml
@@ -0,0 +1,25 @@
+- name: Create keypair
+ os_keypair:
+ state: present
+ name: "{{ keypair.name }}"
+ register: keypair
+
+- name: Create local public key
+ copy:
+ content: "{{ keypair.key.public_key }}"
+ dest: "~/.ssh/{{ keypair.key.name }}.pub"
+ mode: 0600
+ delegate_to: localhost
+
+- name: Check if local private key exists
+ stat:
+ path: "~/.ssh/{{ keypair.key.name }}"
+ register: local_private_key
+
+- name: Create local private key
+ copy:
+ content: "{{ keypair.key.private_key }}"
+ dest: "~/.ssh/{{ keypair.key.name }}"
+ mode: 0600
+ delegate_to: localhost
+ when: not local_private_key.stat.exists
diff --git a/deployment/noheat/infra-openstack/ansible/roles/create_network/tasks/create_network.yml b/deployment/noheat/infra-openstack/ansible/roles/create_network/tasks/create_network.yml
new file mode 100644
index 000000000..3e22ee6ce
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/create_network/tasks/create_network.yml
@@ -0,0 +1,28 @@
+---
+- name: "Create {{ net.name }} network"
+ os_network:
+ name: "{{ net.name }}"
+ state: present
+
+- name: Set nameservers list fact
+ set_fact:
+ dns_ips: "{{ network.dns_servers | list }}"
+ when: network.dns_servers[0] is defined
+
+- name: "Create {{ net.name }} subnet"
+ os_subnet:
+ name: "{{ net.name }}_subnet"
+ network_name: "{{ net.name }}"
+ cidr: "{{ net.cidr }}"
+ dns_nameservers: "{{ dns_ips if dns_ips is defined else omit }}"
+ allocation_pool_start: '{{ net.allocation_pool_start | default("") }}'
+ allocation_pool_end: '{{ net.allocation_pool_end | default("") }}'
+ state: present
+
+- name: "Create {{ net.name }} router"
+ os_router:
+ name: "{{ net.name }}_router"
+ network: public
+ interfaces:
+ - "{{ net.name }}_subnet"
+ state: present
diff --git a/deployment/noheat/infra-openstack/ansible/roles/create_network/tasks/main.yml b/deployment/noheat/infra-openstack/ansible/roles/create_network/tasks/main.yml
new file mode 100644
index 000000000..cce6f790b
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/create_network/tasks/main.yml
@@ -0,0 +1,6 @@
+---
+- include_tasks: create_network.yml
+ loop:
+ - "{{ network }}"
+ loop_control:
+ loop_var: net
diff --git a/deployment/noheat/infra-openstack/ansible/roles/create_securitygroup/tasks/create_securitygroup.yml b/deployment/noheat/infra-openstack/ansible/roles/create_securitygroup/tasks/create_securitygroup.yml
new file mode 100644
index 000000000..b9a3e2973
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/create_securitygroup/tasks/create_securitygroup.yml
@@ -0,0 +1,23 @@
+---
+- name: "Create {{ secgrp.name }} security group"
+ os_security_group:
+ state: present
+ name: "{{ secgrp.name }}"
+
+- name: "Create {{ secgrp.name }} security group rule for ping"
+ os_security_group_rule:
+ security_group: "{{ secgrp.name }}"
+ protocol: icmp
+ remote_ip_prefix: "{{ item }}"
+ loop: "{{ secgrp.remote_ip_prefix }}"
+
+- name: "Create {{ secgrp.name }} security group rule for all TCP"
+ os_security_group_rule:
+ security_group: "{{ secgrp.name }}"
+ protocol: tcp
+ remote_ip_prefix: "0.0.0.0/0"
+
+- name: "Create {{ secgrp.name }} security group rule for all UDP"
+ os_security_group_rule:
+ security_group: "{{ secgrp.name }}"
+ protocol: udp
diff --git a/deployment/noheat/infra-openstack/ansible/roles/create_securitygroup/tasks/main.yml b/deployment/noheat/infra-openstack/ansible/roles/create_securitygroup/tasks/main.yml
new file mode 100644
index 000000000..872988032
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/create_securitygroup/tasks/main.yml
@@ -0,0 +1,6 @@
+---
+- include_tasks: create_securitygroup.yml
+ loop:
+ - "{{ securitygroup }}"
+ loop_control:
+ loop_var: secgrp
diff --git a/deployment/noheat/infra-openstack/ansible/roles/destroy_hosts/tasks/destroy_host.yml b/deployment/noheat/infra-openstack/ansible/roles/destroy_hosts/tasks/destroy_host.yml
new file mode 100644
index 000000000..e9cedce7a
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/destroy_hosts/tasks/destroy_host.yml
@@ -0,0 +1,5 @@
+---
+- name: Destroy host
+ os_server:
+ name: "{{ host.name }}"
+ state: absent
diff --git a/deployment/noheat/infra-openstack/ansible/roles/destroy_hosts/tasks/main.yml b/deployment/noheat/infra-openstack/ansible/roles/destroy_hosts/tasks/main.yml
new file mode 100644
index 000000000..1dd5c7224
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/destroy_hosts/tasks/main.yml
@@ -0,0 +1,5 @@
+---
+- include_tasks: destroy_host.yml
+ loop: "{{ hosts }}"
+ loop_control:
+ loop_var: host
diff --git a/deployment/noheat/infra-openstack/ansible/roles/destroy_keypair/tasks/main.yml b/deployment/noheat/infra-openstack/ansible/roles/destroy_keypair/tasks/main.yml
new file mode 100644
index 000000000..6025b82b3
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/destroy_keypair/tasks/main.yml
@@ -0,0 +1,12 @@
+- name: Destroy keypair
+ os_keypair:
+ state: absent
+ name: "{{ keypair.name }}"
+
+- name: Destroy local keypair
+ file:
+ state: absent
+ path: "{{ item }}"
+ loop:
+ - "~/.ssh/{{ keypair.name }}.pub"
+ - "~/.ssh/{{ keypair.name }}"
diff --git a/deployment/noheat/infra-openstack/ansible/roles/destroy_network/tasks/destroy_network.yml b/deployment/noheat/infra-openstack/ansible/roles/destroy_network/tasks/destroy_network.yml
new file mode 100644
index 000000000..8f97d9507
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/destroy_network/tasks/destroy_network.yml
@@ -0,0 +1,10 @@
+---
+- name: "Destroy {{ net.name }} router"
+ os_router:
+ name: "{{ net.name }}_router"
+ state: absent
+
+- name: "Destroy {{ net.name }} network and its subnets"
+ os_network:
+ name: "{{ net.name }}"
+ state: absent
diff --git a/deployment/noheat/infra-openstack/ansible/roles/destroy_network/tasks/main.yml b/deployment/noheat/infra-openstack/ansible/roles/destroy_network/tasks/main.yml
new file mode 100644
index 000000000..1d84ab62a
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/destroy_network/tasks/main.yml
@@ -0,0 +1,6 @@
+---
+- include_tasks: destroy_network.yml
+ loop:
+ - "{{ network }}"
+ loop_control:
+ loop_var: net
diff --git a/deployment/noheat/infra-openstack/ansible/roles/destroy_securitygroup/tasks/destroy_securitygroup.yml b/deployment/noheat/infra-openstack/ansible/roles/destroy_securitygroup/tasks/destroy_securitygroup.yml
new file mode 100644
index 000000000..eb86f9bc2
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/destroy_securitygroup/tasks/destroy_securitygroup.yml
@@ -0,0 +1,5 @@
+---
+- name: "Destroy {{ secgrp.name }} security group"
+ os_security_group:
+ state: absent
+ name: "{{ secgrp.name }}"
diff --git a/deployment/noheat/infra-openstack/ansible/roles/destroy_securitygroup/tasks/main.yml b/deployment/noheat/infra-openstack/ansible/roles/destroy_securitygroup/tasks/main.yml
new file mode 100644
index 000000000..8142e8070
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/destroy_securitygroup/tasks/main.yml
@@ -0,0 +1,6 @@
+---
+- include_tasks: destroy_securitygroup.yml
+ loop:
+ - "{{ securitygroup }}"
+ loop_control:
+ loop_var: secgrp
diff --git a/deployment/noheat/infra-openstack/ansible/templates/clouds.yaml.j2 b/deployment/noheat/infra-openstack/ansible/templates/clouds.yaml.j2
new file mode 100644
index 000000000..afbbc8738
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/templates/clouds.yaml.j2
@@ -0,0 +1,11 @@
+clouds:
+ openstack:
+ auth:
+ auth_url: "https://{{ hostvars['localhost']['hosts_dict']['openstack0'] }}:5000/v3"
+ project_name: "{{ openstack_tenant }}"
+ username: "{{ openstack_username }}"
+ user_domain_name: "{{ openstack_domain }}"
+ password: "{{ openstack_passwd }}"
+ region_name: "{{ openstack_region }}"
+ interface: "public"
+ identity_api_version: 3
diff --git a/deployment/noheat/infra-openstack/ansible/templates/inventory.ini.j2 b/deployment/noheat/infra-openstack/ansible/templates/inventory.ini.j2
new file mode 100644
index 000000000..79da2c603
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/templates/inventory.ini.j2
@@ -0,0 +1,53 @@
+[all]
+{% for item in hosts %}
+{{ item.key }} ansible_host={{ item.value }}
+{% endfor %}
+
+[openstack]
+{% for item in hosts %}
+{% if "openstack" in item.key %}
+{{ item.key }}
+{% endif %}
+{% endfor %}
+
+[operator]
+{% for item in hosts %}
+{% if "operator" in item.key %}
+{{ item.key }}
+{% endif %}
+{% endfor %}
+
+[control]
+{% for item in hosts %}
+{% if "control" in item.key %}
+{{ item.key }}
+{% endif %}
+{% endfor %}
+
+[workers]
+{% for item in hosts %}
+{% if "worker" in item.key %}
+{{ item.key }}
+{% endif %}
+{% endfor %}
+
+[nfs]
+{% for item in hosts %}
+{% if "nfs" in item.key %}
+{{ item.key }}
+{% endif %}
+{% endfor %}
+
+[nfs:vars]
+nfs_role="server"
+
+[control:vars]
+nfs_role="client"
+
+[workers:vars]
+nfs_role="client"
+
+[all:vars]
+ansible_ssh_private_key_file="~/.ssh/{{ keypair.name }}"
+ansible_ssh_common_args='-o StrictHostKeyChecking=no'
+ansible_python_interpreter="/usr/bin/python3"
diff --git a/deployment/noheat/infra-openstack/ansible/templates/openstack.yml.j2 b/deployment/noheat/infra-openstack/ansible/templates/openstack.yml.j2
new file mode 100644
index 000000000..25233abca
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/templates/openstack.yml.j2
@@ -0,0 +1,5 @@
+---
+openstack_username: "{{ openstack_username }}"
+openstack_passwd: "{{ openstack_passwd }}"
+openstack_region: "{{ openstack_region }}"
+openstack_tenant: "{{ openstack_tenant }}"
diff --git a/deployment/noheat/infra-openstack/vagrant/Vagrantfile b/deployment/noheat/infra-openstack/vagrant/Vagrantfile
new file mode 100644
index 000000000..ed1a3d076
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/Vagrantfile
@@ -0,0 +1,167 @@
+# -*- mode: ruby -*-
+# -*- coding: utf-8 -*-
+
+host_folder_ansible = "../ansible"
+synced_folder_ansible = "/ansible"
+synced_folder_main = "/vagrant"
+synced_folder_config = "#{synced_folder_main}/config"
+os_config = "#{synced_folder_config}/local.conf"
+os_clouds_template = "#{synced_folder_config}/clouds.yaml"
+os_clouds_dir = "${HOME}/.config/openstack"
+os_clouds_config = "#{os_clouds_dir}/clouds.yaml"
+os_admin = "admin"
+os_user = "demo"
+image_url = "https://cloud-images.ubuntu.com/focal/current/focal-server-cloudimg-amd64.img"
+image_name = "Ubuntu_20.04"
+
+vm_cpu = 1
+vm_cpus = 4
+vm_memory = 1 * 1024
+vm_memory_os = 8 * 1024
+vm_disk = 32
+vm_box = "generic/ubuntu2004"
+
+operation = {
+ name: 'operator',
+ hostname: 'operator',
+ ip: '172.17.5.254',
+ ip_os: '172.24.4.254',
+ cpus: vm_cpu,
+ memory: vm_memory,
+ disk: vm_disk
+}
+devstack = {
+ name: 'devstack',
+ hostname: 'devstack',
+ ip: '172.17.5.200',
+ ip_os: '172.24.4.2',
+ cpus: vm_cpus,
+ memory: vm_memory_os,
+ disk: vm_disk
+}
+
+all = [] << operation << devstack
+
+operation_post_msg = "Run: \"vagrant provision #{operation[:name]} --provision-with=add_os_image,run_playbook_create\" to complete infrastructure deployment"
+
+$enable_ipv6 = <<-SCRIPT
+ sed -i'' 's/net.ipv6.conf.all.disable_ipv6.*$/net.ipv6.conf.all.disable_ipv6 = 0/' /etc/sysctl.conf
+ sysctl -p
+SCRIPT
+
+$setup_devstack = <<-SCRIPT
+ CONFIG="$1"
+ git clone https://opendev.org/openstack/devstack
+ cd devstack
+ cp "$CONFIG" .
+ ./stack.sh
+SCRIPT
+
+$setup_py = <<-SCRIPT
+ export DEBIAN_FRONTEND=noninteractive
+ apt-get update
+ apt-get install -yq python3-distutils
+
+ curl -fsSL https://bootstrap.pypa.io/get-pip.py -o get-pip.py
+ python3 get-pip.py
+SCRIPT
+
+$setup_openstackclient = <<-SCRIPT
+ pip install --ignore-installed python-openstackclient
+ mkdir -p #{os_clouds_dir}
+SCRIPT
+
+$setup_openstacksdk = <<-SCRIPT
+ pip install ansible openstacksdk
+ mkdir -p #{os_clouds_dir}
+SCRIPT
+
+$create_os_clouds = <<-SCRIPT
+ user="$1"
+ template="$2"
+ config="$3"
+ OS_USERNAME="$user" envsubst < "$template" > "$config"
+SCRIPT
+
+$add_os_image = <<-SCRIPT
+ url="$1"
+ name="$2"
+ image="/root/${name}.img"
+ wget --quiet --continue --output-document="$image" "$url"
+ export OS_CLOUD=openstack
+ openstack image create "$name" --public --disk-format qcow2 --container-format bare --file "$image"
+SCRIPT
+
+$run_playbook = <<-SCRIPT
+ PLAYBOOK="$1"
+ export OS_CLOUD=openstack
+ cd #{synced_folder_ansible}
+ ansible-playbook "$PLAYBOOK"
+SCRIPT
+
+Vagrant.configure("2") do |config|
+ all.each do |machine|
+ config.vm.define machine[:name] do |config|
+ config.vm.box = vm_box
+ config.vm.hostname = machine[:hostname]
+
+ config.vm.provider :virtualbox do |v|
+ v.name = machine[:name]
+ v.memory = machine[:memory]
+ v.cpus = machine[:cpus]
+ end
+
+ config.vm.provider :libvirt do |v|
+ v.memory = machine[:memory]
+ v.cpus = machine[:cpus]
+ v.machine_virtual_size = machine[:disk] # set at VM creation
+ end
+
+ config.vm.network :private_network, ip: machine[:ip]
+ config.vm.network :private_network, ip: machine[:ip_os]
+
+ if machine[:name] == 'devstack'
+ config.vm.network "forwarded_port", guest: 80, host: 8080
+
+ config.vm.synced_folder ".", synced_folder_main, type: "rsync", rsync__exclude: "Vagrantfile"
+
+ config.vm.provision "enable_ipv6", type: :shell, run: "always", inline: $enable_ipv6
+ config.vm.provision "setup_devstack", type: :shell, privileged: false, inline: $setup_devstack, args: os_config
+ end
+
+ if machine[:name] == 'operator'
+ config.vm.synced_folder ".", synced_folder_main, type: "rsync", rsync__exclude: "Vagrantfile"
+ config.vm.synced_folder host_folder_ansible, synced_folder_ansible, type: "rsync"
+
+ config.vm.provision "setup_py", type: :shell, inline: $setup_py
+ config.vm.provision "setup_openstackclient", type: :shell, inline: $setup_openstackclient
+ config.vm.provision "create_os_clouds_admin", type: :shell, run: "always" do |s|
+ s.inline = $create_os_clouds
+ s.args = [os_admin, os_clouds_template, os_clouds_config]
+ end
+ config.vm.provision "setup_openstacksdk", type: :shell, privileged: false, inline: $setup_openstacksdk
+ config.vm.provision "create_os_clouds", type: :shell, run: "always" do |s|
+ s.privileged = false
+ s.inline = $create_os_clouds
+ s.args = [os_user, os_clouds_template, os_clouds_config]
+ end
+
+ config.vm.post_up_message = operation_post_msg
+ config.vm.provision "add_os_image", type: :shell, run: "never" do |s|
+ s.inline = $add_os_image
+ s.args = [image_url, image_name]
+ end
+ config.vm.provision "run_playbook_create", type: :shell, run: "never" do |s|
+ s.privileged = false
+ s.inline = $run_playbook
+ s.args = "create.yml"
+ end
+ config.vm.provision "run_playbook_destroy", type: :shell, run: "never" do |s|
+ s.privileged = false
+ s.inline = $run_playbook
+ s.args = "destroy.yml"
+ end
+ end
+ end
+ end
+end
diff --git a/deployment/noheat/infra-openstack/vagrant/config/clouds.yaml b/deployment/noheat/infra-openstack/vagrant/config/clouds.yaml
new file mode 100644
index 000000000..f4a009302
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/config/clouds.yaml
@@ -0,0 +1,12 @@
+clouds:
+ openstack:
+ auth:
+ auth_url: http://172.17.5.200/identity
+ username: "${OS_USERNAME}"
+ password: "default123456!"
+ project_name: "demo"
+ project_domain_name: "Default"
+ user_domain_name: "Default"
+ region_name: "RegionOne"
+ interface: "public"
+ identity_api_version: 3
diff --git a/deployment/noheat/infra-openstack/vagrant/config/local.conf b/deployment/noheat/infra-openstack/vagrant/config/local.conf
new file mode 100644
index 000000000..c301d853c
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/config/local.conf
@@ -0,0 +1,6 @@
+[[local|localrc]]
+PUBLIC_INTERFACE=eth2
+ADMIN_PASSWORD=default123456!
+DATABASE_PASSWORD=$ADMIN_PASSWORD
+RABBIT_PASSWORD=$ADMIN_PASSWORD
+SERVICE_PASSWORD=$ADMIN_PASSWORD
diff --git a/deployment/noheat/infra-openstack/vagrant/test/Makefile b/deployment/noheat/infra-openstack/vagrant/test/Makefile
new file mode 100644
index 000000000..403263dfc
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/Makefile
@@ -0,0 +1,12 @@
+rwildcard = $(foreach d,$(wildcard $1*),$(call rwildcard,$d/,$2) $(filter $2,$d))
+
+.PHONY: test
+test: $(patsubst %.test,%.stdout,$(call rwildcard,,%.test))
+
+%.stdout: %.test
+ ./$< > $@ 2> $(patsubst %.stdout,%.stderr,$@) \
+ || (touch --date=@0 $@; false)
+ git diff --exit-code --src-prefix=expected/ --dst-prefix=actual/ \
+ $@ $(patsubst %.stdout,%.stderr,$@) \
+ || (touch --date=@0 $@; false)
+
diff --git a/deployment/noheat/infra-openstack/vagrant/test/README.rst b/deployment/noheat/infra-openstack/vagrant/test/README.rst
new file mode 100644
index 000000000..03d9ea101
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/README.rst
@@ -0,0 +1,31 @@
+==============================
+ Vagrant: simple test harness
+==============================
+
+Use ``make`` and ``git diff`` for a simple test harness for Vagrant-based environment.
+
+Prerequisites
+-------------
+
+Dependencies
+~~~~~~~~~~~~
+
+- make: tested on 4.1
+- git: tested on 2.17.1
+
+
+Running
+-------
+
+Command
+~~~~~~~
+
+.. code-block:: shell
+
+ $ make test
+
+
+Credit
+------
+
+This is based on https://chrismorgan.info/blog/make-and-git-diff-test-harness blog post.
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul_config.json b/deployment/noheat/infra-openstack/vagrant/test/create_host.stderr
index e69de29bb..e69de29bb 100644
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul_config.json
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_host.stderr
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_host.stdout b/deployment/noheat/infra-openstack/vagrant/test/create_host.stdout
new file mode 100644
index 000000000..25c23dda2
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_host.stdout
@@ -0,0 +1 @@
+"operator0"
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_host.test b/deployment/noheat/infra-openstack/vagrant/test/create_host.test
new file mode 100755
index 000000000..f2a1ab909
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_host.test
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+export HOST_NAME='operator0'
+
+export VAGRANT_CWD='..'
+
+set_up() {
+ vagrant up
+ vagrant provision --provision-with=run_playbook_destroy
+ vagrant provision --provision-with=run_playbook_create
+}
+
+tear_down() {
+ vagrant provision --provision-with=run_playbook_destroy
+}
+
+check() {
+ local host="$1"
+ vagrant ssh operator --no-tty -c \
+ "export OS_CLOUD=openstack; openstack server list -fcsv" \
+ | grep "$host" \
+ | cut -d',' -f2
+}
+
+set_up >/dev/null # drop provisioning output
+check "$HOST_NAME"
+tear_down >/dev/null # drop provisioning output
diff --git a/test/mocks/pnfsimulator/netconfsimulator/ftpes/files/ftpes-noone.txt b/deployment/noheat/infra-openstack/vagrant/test/create_keypair.stderr
index e69de29bb..e69de29bb 100644
--- a/test/mocks/pnfsimulator/netconfsimulator/ftpes/files/ftpes-noone.txt
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_keypair.stderr
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_keypair.stdout b/deployment/noheat/infra-openstack/vagrant/test/create_keypair.stdout
new file mode 100644
index 000000000..363825389
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_keypair.stdout
@@ -0,0 +1 @@
+"onap_ci_lab"
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_keypair.test b/deployment/noheat/infra-openstack/vagrant/test/create_keypair.test
new file mode 100755
index 000000000..e402fa69a
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_keypair.test
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+export KEYPAIR_NAME='onap_ci_lab'
+
+export VAGRANT_CWD='..'
+
+set_up() {
+ vagrant up
+ vagrant provision --provision-with=run_playbook_destroy
+ vagrant provision --provision-with=run_playbook_create
+}
+
+tear_down() {
+ vagrant provision --provision-with=run_playbook_destroy
+}
+
+check() {
+ local key="$1"
+ vagrant ssh operator --no-tty -c \
+ "export OS_CLOUD=openstack; openstack keypair list -fcsv" \
+ | grep "$key" \
+ | cut -d',' -f1
+}
+
+set_up >/dev/null # drop provisioning output
+check "$KEYPAIR_NAME"
+tear_down >/dev/null # drop provisioning output
diff --git a/test/mocks/pnfsimulator/netconfsimulator/ftpes/files/onap/ftpes-onap.txt b/deployment/noheat/infra-openstack/vagrant/test/create_network.stderr
index e69de29bb..e69de29bb 100644
--- a/test/mocks/pnfsimulator/netconfsimulator/ftpes/files/onap/ftpes-onap.txt
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_network.stderr
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_network.stdout b/deployment/noheat/infra-openstack/vagrant/test/create_network.stdout
new file mode 100644
index 000000000..363825389
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_network.stdout
@@ -0,0 +1 @@
+"onap_ci_lab"
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_network.test b/deployment/noheat/infra-openstack/vagrant/test/create_network.test
new file mode 100755
index 000000000..d81a12fa6
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_network.test
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+export NETWORK_NAME='onap_ci_lab'
+
+export VAGRANT_CWD='..'
+
+set_up() {
+ vagrant up
+ vagrant provision --provision-with=run_playbook_destroy
+ vagrant provision --provision-with=run_playbook_create
+}
+
+tear_down() {
+ vagrant provision --provision-with=run_playbook_destroy
+}
+
+check() {
+ local net="$1"
+ vagrant ssh operator --no-tty -c \
+ "export OS_CLOUD=openstack; openstack network list -fcsv" \
+ | grep "$net" \
+ | cut -d',' -f2
+}
+
+set_up >/dev/null # drop provisioning output
+check "$NETWORK_NAME"
+tear_down >/dev/null # drop provisioning output
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/application-it.properties b/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.stderr
index e69de29bb..e69de29bb 100644
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/application-it.properties
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.stderr
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.stdout b/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.stdout
new file mode 100644
index 000000000..363825389
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.stdout
@@ -0,0 +1 @@
+"onap_ci_lab"
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.test b/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.test
new file mode 100755
index 000000000..6ac7fdc85
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.test
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+export SECURITYGROUP_NAME='onap_ci_lab'
+
+export VAGRANT_CWD='..'
+
+set_up() {
+ vagrant up
+ vagrant provision --provision-with=run_playbook_destroy
+ vagrant provision --provision-with=run_playbook_create
+}
+
+tear_down() {
+ vagrant provision --provision-with=run_playbook_destroy
+}
+
+check() {
+ local secgrp="$1"
+ vagrant ssh operator --no-tty -c \
+ "export OS_CLOUD=openstack; openstack security group list -fcsv" \
+ | grep "$secgrp" \
+ | cut -d',' -f2
+}
+
+set_up >/dev/null # drop provisioning output
+check "$SECURITYGROUP_NAME"
+tear_down >/dev/null # drop provisioning output
diff --git a/test/vcpe/aaiutil.py b/deployment/noheat/infra-openstack/vagrant/test/destroy_host.stderr
index e69de29bb..e69de29bb 100644
--- a/test/vcpe/aaiutil.py
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_host.stderr
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_host.stdout b/deployment/noheat/infra-openstack/vagrant/test/destroy_host.stdout
new file mode 100644
index 000000000..30d7e153a
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_host.stdout
@@ -0,0 +1 @@
+Host operator0 not found.
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_host.test b/deployment/noheat/infra-openstack/vagrant/test/destroy_host.test
new file mode 100755
index 000000000..8217081b1
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_host.test
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+export HOST_NAME='operator0'
+
+export VAGRANT_CWD='..'
+
+set_up() {
+ vagrant up
+ vagrant provision --provision-with=run_playbook_create
+ vagrant provision --provision-with=run_playbook_destroy
+}
+
+check() {
+ local host="$1"
+ vagrant ssh operator --no-tty -c \
+ "export OS_CLOUD=openstack; openstack server list -fcsv" \
+ | grep "$host" \
+ || echo "Host ${host} not found."
+}
+
+set_up >/dev/null # drop provisioning output
+check "$HOST_NAME"
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.stderr b/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.stderr
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.stderr
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.stdout b/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.stdout
new file mode 100644
index 000000000..df6e49297
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.stdout
@@ -0,0 +1 @@
+Keypair onap_ci_lab not found.
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.test b/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.test
new file mode 100755
index 000000000..42132b347
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.test
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+export KEYPAIR_NAME='onap_ci_lab'
+
+export VAGRANT_CWD='..'
+
+set_up() {
+ vagrant up
+ vagrant provision --provision-with=run_playbook_create
+ vagrant provision --provision-with=run_playbook_destroy
+}
+
+check() {
+ local key="$1"
+ vagrant ssh operator --no-tty -c \
+ "export OS_CLOUD=openstack; openstack keypair list -fcsv" \
+ | grep "$key" \
+ || echo "Keypair ${key} not found."
+}
+
+set_up >/dev/null # drop provisioning output
+check "$KEYPAIR_NAME"
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_network.stderr b/deployment/noheat/infra-openstack/vagrant/test/destroy_network.stderr
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_network.stderr
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_network.stdout b/deployment/noheat/infra-openstack/vagrant/test/destroy_network.stdout
new file mode 100644
index 000000000..d48081495
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_network.stdout
@@ -0,0 +1 @@
+Network onap_ci_lab not found.
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_network.test b/deployment/noheat/infra-openstack/vagrant/test/destroy_network.test
new file mode 100755
index 000000000..182d7dcaf
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_network.test
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+export NETWORK_NAME='onap_ci_lab'
+
+export VAGRANT_CWD='..'
+
+set_up() {
+ vagrant up
+ vagrant provision --provision-with=run_playbook_create
+ vagrant provision --provision-with=run_playbook_destroy
+}
+
+check() {
+ local net="$1"
+ vagrant ssh operator --no-tty -c \
+ "export OS_CLOUD=openstack; openstack network list -fcsv" \
+ | grep "$net" \
+ || echo "Network ${net} not found."
+}
+
+set_up >/dev/null # drop provisioning output
+check "$NETWORK_NAME"
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.stderr b/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.stderr
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.stderr
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.stdout b/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.stdout
new file mode 100644
index 000000000..7adb2f89a
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.stdout
@@ -0,0 +1 @@
+Security group onap_ci_lab not found.
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.test b/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.test
new file mode 100755
index 000000000..ce65f1f08
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.test
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+export SECURITYGROUP_NAME='onap_ci_lab'
+
+export VAGRANT_CWD='..'
+
+set_up() {
+ vagrant up
+ vagrant provision --provision-with=run_playbook_create
+ vagrant provision --provision-with=run_playbook_destroy
+}
+
+check() {
+ local secgrp="$1"
+ vagrant ssh operator --no-tty -c \
+ "export OS_CLOUD=openstack; openstack security group list -fcsv" \
+ | grep "$secgrp" \
+ || echo "Security group ${secgrp} not found."
+}
+
+set_up >/dev/null # drop provisioning output
+check "$SECURITYGROUP_NAME"
diff --git a/deployment/noheat/requirements.txt b/deployment/noheat/requirements.txt
new file mode 100644
index 000000000..8ef36fc05
--- /dev/null
+++ b/deployment/noheat/requirements.txt
@@ -0,0 +1,3 @@
+wheel==0.37.1
+openstacksdk==0.61.0
+ansible-core==2.13.5
diff --git a/deployment/noheat/requirements.yml b/deployment/noheat/requirements.yml
new file mode 100644
index 000000000..2a185e6b9
--- /dev/null
+++ b/deployment/noheat/requirements.yml
@@ -0,0 +1,10 @@
+---
+collections:
+ - name: ansible.posix
+ version: 1.4.0
+ - name: community.general
+ version: 5.8.0
+ - name: community.crypto
+ version: 2.8.0
+ - name: openstack.cloud
+ version: 1.10.0
diff --git a/deployment/onap-lab-ci/README.md b/deployment/onap-lab-ci/README.md
deleted file mode 100644
index d7efc7b6a..000000000
--- a/deployment/onap-lab-ci/README.md
+++ /dev/null
@@ -1 +0,0 @@
-# onap-lab-ci \ No newline at end of file
diff --git a/deployment/onap-lab-ci/jjb/jobs.yaml b/deployment/onap-lab-ci/jjb/jobs.yaml
index 83166e062..5fc398397 100644
--- a/deployment/onap-lab-ci/jjb/jobs.yaml
+++ b/deployment/onap-lab-ci/jjb/jobs.yaml
@@ -1,143 +1,142 @@
- project:
name: staging
jobs:
- - '{env}-staging-{frequency}'
+ - '{env}-staging-{frequency}'
integration-branch: 'master'
oom-branch: 'staging'
env:
- - 'windriver':
- lab-name: 'windriver'
- tenant-name: 'Integration-Staging-Daily'
- stack-name: 'staging'
- frequency: 'daily'
- disabled_var: false
- triggers_var:
- - timed: 'H 4 * * *'
- - 'windriver-sb00':
- lab-name: 'windriver'
- tenant-name: 'Integration-SB-00'
- stack-name: 'sb00'
- frequency: 'manual'
- disabled_var: false
- - 'windriver-sb01':
- lab-name: 'windriver'
- tenant-name: 'Integration-SB-01'
- stack-name: 'sb01'
- frequency: 'manual'
- disabled_var: false
- - 'windriver-sb02':
- lab-name: 'windriver'
- tenant-name: 'Integration-SB-02'
- stack-name: 'sb02'
- frequency: 'manual'
- disabled_var: false
- - 'windriver-sb03':
- lab-name: 'windriver'
- tenant-name: 'Integration-SB-03'
- stack-name: 'sb03'
- frequency: 'manual'
- disabled_var: false
- - 'windriver-sb04':
- lab-name: 'windriver'
- tenant-name: 'Integration-SB-04'
- stack-name: 'sb04'
- frequency: 'manual'
- disabled_var: false
- - 'windriver-integration-design':
- lab-name: 'windriver'
- tenant-name: 'Integration-Design'
- stack-name: 'design'
- frequency: 'manual'
- disabled_var: false
- - 'windriver-integration-instantiation':
- lab-name: 'windriver'
- tenant-name: 'Integration-Instantiation'
- stack-name: 'instantiation'
- frequency: 'manual'
- disabled_var: false
- - 'windriver-integration-closedloop':
- lab-name: 'windriver'
- tenant-name: 'Integration-ClosedLoop'
- stack-name: 'closedloop'
- frequency: 'manual'
- disabled_var: false
+ - 'windriver':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-Staging-Daily'
+ stack-name: 'staging'
+ frequency: 'daily'
+ disabled_var: false
+ triggers_var:
+ - timed: 'H 4 * * *'
+ - 'windriver-sb00':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-SB-00'
+ stack-name: 'sb00'
+ frequency: 'manual'
+ disabled_var: false
+ - 'windriver-sb01':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-SB-01'
+ stack-name: 'sb01'
+ frequency: 'manual'
+ disabled_var: false
+ - 'windriver-sb02':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-SB-02'
+ stack-name: 'sb02'
+ frequency: 'manual'
+ disabled_var: false
+ - 'windriver-sb03':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-SB-03'
+ stack-name: 'sb03'
+ frequency: 'manual'
+ disabled_var: false
+ - 'windriver-sb04':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-SB-04'
+ stack-name: 'sb04'
+ frequency: 'manual'
+ disabled_var: false
+ - 'windriver-integration-design':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-Design'
+ stack-name: 'design'
+ frequency: 'manual'
+ disabled_var: false
+ - 'windriver-integration-instantiation':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-Instantiation'
+ stack-name: 'instantiation'
+ frequency: 'manual'
+ disabled_var: false
+ - 'windriver-integration-closedloop':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-ClosedLoop'
+ stack-name: 'closedloop'
+ frequency: 'manual'
+ disabled_var: false
- project:
name: release
jobs:
- - '{env}-release-{frequency}'
+ - '{env}-release-{frequency}'
integration-branch: 'master'
oom-branch: 'master'
env:
- - 'windriver':
- lab-name: 'windriver'
- tenant-name: 'Integration-Release-Daily'
- stack-name: 'release'
- frequency: 'daily'
- disabled_var: false
- triggers_var:
- - timed: 'H 6 * * *'
- - 'windriver-longevity':
- lab-name: 'windriver'
- tenant-name: 'Integration-Longevity'
- stack-name: 'long'
- frequency: 'manual'
- disabled_var: false
- - 'windriver-sb00':
- lab-name: 'windriver'
- tenant-name: 'Integration-SB-00'
- stack-name: 'sb00'
- frequency: 'manual'
- disabled_var: false
- - 'windriver-sb01':
- lab-name: 'windriver'
- tenant-name: 'Integration-SB-01'
- stack-name: 'sb01'
- frequency: 'manual'
- disabled_var: false
- - 'windriver-sb02':
- lab-name: 'windriver'
- tenant-name: 'Integration-SB-02'
- stack-name: 'sb02'
- frequency: 'manual'
- disabled_var: false
- - 'windriver-sb03':
- lab-name: 'windriver'
- tenant-name: 'Integration-SB-03'
- stack-name: 'sb03'
- frequency: 'manual'
- disabled_var: false
- - 'windriver-sb04':
- lab-name: 'windriver'
- tenant-name: 'Integration-SB-04'
- stack-name: 'sb04'
- frequency: 'manual'
- disabled_var: false
+ - 'windriver':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-Release-Daily'
+ stack-name: 'release'
+ frequency: 'daily'
+ disabled_var: false
+ triggers_var:
+ - timed: 'H 6 * * *'
+ - 'windriver-longevity':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-Longevity'
+ stack-name: 'long'
+ frequency: 'manual'
+ disabled_var: false
+ - 'windriver-sb00':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-SB-00'
+ stack-name: 'sb00'
+ frequency: 'manual'
+ disabled_var: false
+ - 'windriver-sb01':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-SB-01'
+ stack-name: 'sb01'
+ frequency: 'manual'
+ disabled_var: false
+ - 'windriver-sb02':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-SB-02'
+ stack-name: 'sb02'
+ frequency: 'manual'
+ disabled_var: false
+ - 'windriver-sb03':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-SB-03'
+ stack-name: 'sb03'
+ frequency: 'manual'
+ disabled_var: false
+ - 'windriver-sb04':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-SB-04'
+ stack-name: 'sb04'
+ frequency: 'manual'
+ disabled_var: false
- project:
name: dublin
jobs:
- - '{env}-release-{frequency}'
+ - '{env}-release-{frequency}'
integration-branch: 'master'
oom-branch: 'dublin'
env:
- - 'windriver-dublin-sb04':
- lab-name: 'windriver'
- tenant-name: 'Integration-SB-04'
- stack-name: 'dublin'
- frequency: 'manual'
- disabled_var: false
+ - 'windriver-dublin-sb04':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-SB-04'
+ stack-name: 'dublin'
+ frequency: 'manual'
+ disabled_var: false
- project:
name: stability
jobs:
- - '{env}-stability72hr'
- - '{env}-vfwclosedloop'
+ - '{env}-stability72hr'
+ - '{env}-vfwclosedloop'
integration-branch: 'master'
env:
- - 'windriver-longevity':
- lab-name: 'windriver'
- tenant-name: 'Integration-Longevity'
- stack-name: 'long'
- disabled_var: false
-
+ - 'windriver-longevity':
+ lab-name: 'windriver'
+ tenant-name: 'Integration-Longevity'
+ stack-name: 'long'
+ disabled_var: false
diff --git a/docs/.gitignore b/docs/.gitignore
new file mode 100644
index 000000000..43ca5b671
--- /dev/null
+++ b/docs/.gitignore
@@ -0,0 +1,3 @@
+/.tox
+/_build/*
+/__pycache__/*
diff --git a/docs/_static/css/ribbon.css b/docs/_static/css/ribbon.css
new file mode 100644
index 000000000..7949130b3
--- /dev/null
+++ b/docs/_static/css/ribbon.css
@@ -0,0 +1,63 @@
+.ribbon {
+ z-index: 1000;
+ background-color: #a00;
+ overflow: hidden;
+ white-space: nowrap;
+ position: fixed;
+ top: 25px;
+ right: -50px;
+ -webkit-transform: rotate(45deg);
+ -moz-transform: rotate(45deg);
+ -ms-transform: rotate(45deg);
+ -o-transform: rotate(45deg);
+ transform: rotate(45deg);
+ -webkit-box-shadow: 0 0 10px #888;
+ -moz-box-shadow: 0 0 10px #888;
+ box-shadow: 0 0 10px #888;
+
+}
+
+.ribbon a {
+ border: 1px solid #faa;
+ color: #fff;
+ display: block;
+ font: bold 81.25% 'Helvetica Neue', Helvetica, Arial, sans-serif;
+ margin: 1px 0;
+ padding: 10px 50px;
+ text-align: center;
+ text-decoration: none;
+ text-shadow: 0 0 5px #444;
+ transition: 0.5s;
+}
+
+.ribbon a:hover {
+ background: #c11;
+ color: #fff;
+}
+
+
+/* override table width restrictions */
+@media screen and (min-width: 767px) {
+
+ .wy-table-responsive table td, .wy-table-responsive table th {
+ /* !important prevents the common CSS stylesheets from overriding
+ this as on RTD they are loaded after this stylesheet */
+ white-space: normal !important;
+ }
+
+ .wy-table-responsive {
+ overflow: visible !important;
+ }
+}
+
+@media screen and (max-width: 767px) {
+ .wy-table-responsive table td {
+ white-space: nowrap;
+ }
+}
+
+/* fix width of the screen */
+
+.wy-nav-content {
+ max-width: 800px;
+}
diff --git a/docs/_static/favicon.ico b/docs/_static/favicon.ico
new file mode 100755
index 000000000..cb712ebd2
--- /dev/null
+++ b/docs/_static/favicon.ico
Binary files differ
diff --git a/docs/_static/logo_onap_2017.png b/docs/_static/logo_onap_2017.png
new file mode 100644
index 000000000..5d064f431
--- /dev/null
+++ b/docs/_static/logo_onap_2017.png
Binary files differ
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 000000000..14f37ca67
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,72 @@
+project = "onap"
+release = "master"
+version = "master"
+
+author = "Open Network Automation Platform"
+# yamllint disable-line rule:line-length
+copyright = "ONAP. Licensed under Creative Commons Attribution 4.0 International License"
+
+pygments_style = "sphinx"
+html_theme = "sphinx_rtd_theme"
+html_theme_options = {
+ "style_nav_header_background": "white",
+ "sticky_navigation": False }
+html_logo = "_static/logo_onap_2017.png"
+html_favicon = "_static/favicon.ico"
+html_static_path = ["_static"]
+html_show_sphinx = False
+
+extensions = [
+ 'sphinx.ext.intersphinx',
+ 'sphinx.ext.graphviz',
+ 'sphinxcontrib.blockdiag',
+ 'sphinxcontrib.seqdiag',
+ 'sphinxcontrib.swaggerdoc',
+ 'sphinxcontrib.plantuml'
+]
+
+#
+# Map to 'latest' if this file is used in 'latest' (master) 'doc' branch.
+# Change to {releasename} after you have created the new 'doc' branch.
+#
+
+branch = 'latest'
+
+intersphinx_mapping = {}
+doc_url = 'https://docs.onap.org/projects'
+master_doc = 'index'
+
+exclude_patterns = ['.tox']
+
+spelling_word_list_filename='spelling_wordlist.txt'
+spelling_lang = "en_GB"
+
+#
+# Example:
+# intersphinx_mapping['onap-aai-aai-common'] = ('{}/onap-aai-aai-common/en/%s'.format(doc_url) % branch, None)
+#
+intersphinx_mapping = {}
+intersphinx_mapping['onap-oom'] = ('{}/onap-oom/en/%s'.format(doc_url) % branch, None)
+intersphinx_mapping['onap-cli'] = ('{}/onap-cli/en/%s'.format(doc_url) % branch, None)
+
+html_last_updated_fmt = '%d-%b-%y %H:%M'
+
+def setup(app):
+ app.add_css_file("css/ribbon.css")
+
+linkcheck_ignore = [
+ r'http://localhost:\d+/',
+ r'http://localhost:.*',
+ r'http://CONSUL_SERVER_UI:30270/ui/#/dc1/services',
+ r'https://.*h=frankfurt',
+ r'http.*frankfurt.*',
+ r'http.*simpledemo.onap.org.*',
+ r'http://ANY_K8S_IP.*',
+ r'http://so-monitoring:30224',
+ r'http://SINK_IP_ADDRESS:667.*',
+ r'http.*K8S_HOST:30227.*',
+ r'http.*K8S_NODE_IP.*',
+ r'http.*REPO_IP.*',
+ r'http://team.onap.eu',
+ r'https://tools.ietf.org/html/rfc8345'
+]
diff --git a/docs/docs_5G_Bulk_PM.rst b/docs/docs_5G_Bulk_PM.rst
index 71d8778cd..7bdc06324 100644
--- a/docs/docs_5G_Bulk_PM.rst
+++ b/docs/docs_5G_Bulk_PM.rst
@@ -1,25 +1,27 @@
.. This work is licensed under a Creative Commons Attribution 4.0
International License. http://creativecommons.org/licenses/by/4.0
-
+
.. _docs_5g_bulk_pm:
+:orphan:
+
5G Bulk PM
----------
5G Bulk PM Package
-~~~~~~~~~~~~
-- 5G Bulk PM Package: https://wiki.onap.org/display/DW/5G+-+Bulk+PM+-+Integration+Test+Case
+~~~~~~~~~~~~~~~~~~
+- 5G Bulk PM Package: https://wiki.onap.org/pages/viewpage.action?pageId=38121543
Description
~~~~~~~~~~~
-The Bulk PM feature consists of an event-driven bulk transfer of monitoring data from an xNF to ONAP/DCAE. A micro-service will listen for 'FileReady' VES events sent from an xNF via the VES collector. Once files become available the collector micro-service will fetch them using protocol such as FTPES (committed) or SFTP. The collected data files are published internally on a DMaaP Data Router (DR) feed.
-The ONAP 5G Bulk PM Use Case Wiki Page can be found here:
+The Bulk PM feature consists of an event-driven bulk transfer of monitoring data from an xNF to ONAP/DCAE. A micro-service will listen for 'FileReady' VES events sent from an xNF via the VES collector. Once files become available the collector micro-service will fetch them using protocol such as FTPES (committed) or SFTP. The collected data files are published internally on a DMaaP Data Router (DR) feed.
+The ONAP 5G Bulk PM Use Case Wiki Page can be found here:
https://wiki.onap.org/display/DW/5G+-+Bulk+PM
How to Use
~~~~~~~~~~
-See the following instructions on how to manually test the feature. https://wiki.onap.org/display/DW/5G+Bulk+PM+Usecase+Testing+@+Ericsson+Lab+-+Casablanca
-The tests can also be executed using the Robot framework, information can be found https://wiki.onap.org/display/DW/5G+-+Bulk+PM+-+Integration+Test+Cases
+See the following instructions on how to manually test the feature. https://wiki.onap.org/display/DW/5G+Bulk+PM+Usecase+Testing+\@+Ericsson+Lab+-+Casablanca
+The tests can also be executed using the Robot framework, information can be found https://wiki.onap.org/pages/viewpage.action?pageId=38121543
Test Status and Plans
~~~~~~~~~~~~~~~~~~~~~
@@ -28,4 +30,3 @@ To see information on the status of the test see https://wiki.onap.org/display/D
Known Issues and Resolutions
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
none.
-
diff --git a/docs/docs_5G_Configuration_over_NETCONF.rst b/docs/docs_5G_Configuration_over_NETCONF.rst
index 9cf8643c5..d8701a655 100644
--- a/docs/docs_5G_Configuration_over_NETCONF.rst
+++ b/docs/docs_5G_Configuration_over_NETCONF.rst
@@ -1,10 +1,12 @@
.. This work is licensed under a Creative Commons Attribution 4.0
International License. http://creativecommons.org/licenses/by/4.0
-
+
.. _docs_5G_Configuration_over_NETCONF:
+:orphan:
+
5G - Configuration over NETCONF
-----------------------
+-------------------------------
Description
@@ -16,13 +18,13 @@ This use case is intended to be applicable for 5G base stations and other nodes
**Useful Links**
-- `5G - Configuration with NETCONF documentation <https://wiki.onap.org/display/DW/5G+-+Configuration+with+NETCONF>
-- `5G - Configuration with NETCONF - Integtion Test Cases <https://wiki.onap.org/pages/viewpage.action?pageId=58229781&src=contextnavipagetreemode>
+- `5G - Configuration with NETCONF documentation <https://wiki.onap.org/display/DW/5G+-+Configuration+with+NETCONF>`_
+- `5G - Configuration with NETCONF - Integration Test Cases <https://wiki.onap.org/pages/viewpage.action?pageId=58229781&src=contextnavipagetreemode>`_
How to Use
~~~~~~~~~~
-Set up certificate in SDNC using <https://docs.onap.org/en/dublin/submodules/sdnc/oam.git/docs/cert_installation.html?highlight=SDNC>
+Set up certificate in SDNC using <https://docs.onap.org/projects/onap-sdnc-oam/en/latest/cert_installation.html>
As this usecase is extention of PnP PNF flow so run PnP usecase following running this usecase follow link <https://wiki.onap.org/display/DW/5G+-+PNF+Plug+and+Play>
diff --git a/docs/docs_5G_NRM_Configuration.rst b/docs/docs_5G_NRM_Configuration.rst
new file mode 100644
index 000000000..1deeb97e8
--- /dev/null
+++ b/docs/docs_5G_NRM_Configuration.rst
@@ -0,0 +1,41 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+.. _docs_5G_NRM_Configuration:
+
+:orphan:
+
+5G NRM (Network Resource Model) Configuration
+---------------------------------------------
+
+Description
+~~~~~~~~~~~
+Network Resource Model (NRM) configuration management allows service providers to control and monitor the actual configuration on the Network Resources, which are the fundamental resources to the mobility networks. Considering the huge number of existing information object classes (IOC) and increasing IOCs in various domains, this use case is to handle the NRM configuration management in a dynamic manner. Moreover, it uses the http-based restful solution in R6 and other solutions may be possible.
+
+Useful Links
+============
+`5G NRM Configuration in R6 Wiki Page <https://wiki.onap.org/display/DW/5G+Network+Resource+Model+%28NRM%29+Configuration+in+R6+Frankfurt>`_
+
+Current Status in Frankfurt
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+* Provide a restful-executor in CDS blueprint processor.
+* Provide a simplified generic provisioning management service provider for simulating an external service (may be deployed in EMS or deployed standalone) for integration test.
+
+How to Use
+~~~~~~~~~~
+The pre-conditions are:
+* CDS containers are ready to use.
+* The external provisioning management service provider (could be a simulator) is ready to use.
+* At design time, CDS controller blueprint provided by xNF vendors is designed and ready for CDS.
+* Service instantiation is completed. It means users of ONAP could know the xNF instance. For this use case in R6, one PNF instance is selected.
+
+At run time, NRM configuration management is triggered when the operator provides the selected PNF instance, expected managed object instances. Then the procedure is executed in CDS:
+a. CDS sends request(s) with action-identifier{actionName, blueprintName, blueprintVersion} to the blueprint processor inside the controller using CDS self-service API.
+b. Controller/blueprint processor use the corresponding executor (and blueprint scripts) and send http requests to the external provisioning management service provider.
+c. The external provisioning management service provider is responsible of configuration management and sends responses to CDS.
+
+Test Status and Plans
+~~~~~~~~~~~~~~~~~~~~~
+To see information on the status of the test cases, please follow the link below:
+
+`5G NRM Configuration Test Status <https://wiki.onap.org/display/DW/5G+Network+Resource+Model+%28NRM%29+Configuration+in+R6+Frankfurt#id-5GNetworkResourceModel(NRM)ConfigurationinR6Frankfurt-TestStatus>`_
diff --git a/docs/docs_5G_PNF_Software_Upgrade.rst b/docs/docs_5G_PNF_Software_Upgrade.rst
index 0424a3116..a4d435b69 100644
--- a/docs/docs_5G_PNF_Software_Upgrade.rst
+++ b/docs/docs_5G_PNF_Software_Upgrade.rst
@@ -1,60 +1,57 @@
-.. This work is licensed under a Creative Commons Attribution 4.0
- International License. http://creativecommons.org/licenses/by/4.0
-
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
.. _docs_5g_pnf_software_upgrade:
+:orphan:
+
5G PNF Software Upgrade
-----------------------------
+-----------------------
Description
~~~~~~~~~~~
-The 5G PNF Software upgrade use case shows how users/network operators can modify the software of PNF instance during installation or regular maintaince. This use case is one aspect of Software Management. This could be used to update the PNF software to a newer or older version of software.
-**Useful Links**
-- `5G - PNF software upgrade use case documentation <https://wiki.onap.org/pages/viewpage.action?pageId=40206496>`_
-- `5G - PNF software upgrade Integration test case status for Dublin release <https://wiki.onap.org/display/DW/5G+-+PNF+SW+Upgrade+-+Integration+Test+Cases>`_
+The 5G PNF Software upgrade use case shows how users/network operators can modify the software of a PNF instance during installation or regular maintenance. This use case is one aspect of Software Management. This could be used to update the PNF software to a different version of software.
+
+Useful Link
+~~~~~~~~~~~
+
+`PNF Software Upgrade Wiki Page <https://wiki.onap.org/display/DW/PNF+software+upgrade+in+R6+Frankfurt>`_
+
+
+Current Status in Frankfurt
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+PNF Software Upgrade Scenarios
+------------------------------
+
+There are 3 PNF software upgrade scenarios supported in Frankfurt release:
-**Current status in Dublin**
-- with the support of an EM
-- LCM API (focus on controller only)
-- integration of basic 3GPP SwM interfaces (*)
-- ansible protocol only
-Note: In Dublin, Controller provided four related APIs (precheck, postcheck, upgrade and rollback), which were finally translated to invoke interfaces provided by EM. Rollback API is to call swFallback operation, and Upgrade API is to call downloadNESw, installNESw and activateNESw operations (Ref. 3GPP TS 32.532[1]).
+* `Using direct Netconf/Yang interface with PNF <docs_5G_PNF_Software_Upgrade_direct_netconf_yang>`
-**Future Plans**
-- E2E PNF Software upgrade both for design and runtime
-- Generic workflow for demonstration
+ - (https://wiki.onap.org/pages/viewpage.action?pageId=64007309)
-How to Use
-~~~~~~~~~~
-Upgrading PNF (instance) software requires the user/network operator to trigger the upgrade operation from the UI, e.g. VID or UUI. In Dublin, users need use ONAP Controllers GUI or publish DMaaP messages to trigger the LCM opeations, which are pre-check, post-check, upgrade and rollback. After receiving the API requests, the ONAP controllers will communicate to EMS through south-bound adaptors, which is Ansible protocol only in Dublin.
+* `Using Ansible protocol with EM <docs_5G_PNF_Software_Upgrade_ansible_with_EM>`
-Note that, both APPC and SDNC in R4 supported Ansible. Taking SDNC and Prechecking as an example, the steps are as follows:
+ - (https://wiki.onap.org/pages/viewpage.action?pageId=64007357)
-1) `In ansible server container, prepare the ssh connection conditions to the external controller, both ssh key file and ansible inventory configuration`_
+* `Using Netconf/Yang interface with EM <docs_5G_PNF_Software_Upgrade_netconf_with_EM>`
-2) `In sdnc controller container, update the dg configuration file: lcm-dg.properties.`_
+ - (https://wiki.onap.org/pages/viewpage.action?pageId=64008675)
-For example:
-::
-lcm.pnf.upgrade-pre-check.playbookname=ansible_huawei_precheck
-lcm.pnf.upgrade-post-check.playbookname=ansible_huawei_postcheck
-lcm.pnf.upgrade-software.playbookname=ansible_huawei_upgrade
-lcm.pnf.upgrade-rollback.playbookname=ansible_huawei_rollback
+Common tasks for all scenarios
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-3) `Login controller UI, access the pre-check LCM operation (or other operations) and send request, the detailed request parameters can be found in corresponding test case link.`_
+SO Workflows
+~~~~~~~~~~~~
-4) `The HTTP API response code 200 and LCM retured code 400 (See APPC return code design specification) indicate success, otherwise failed.`_
+Common SO workflows are used with generic SO building blocks which can be used for any PNF software upgrade scenarios. In Frankfurt release, a PNF software upgrade workflow and a PNF preparation workflow have been created.
-Test Status and Plans
-~~~~~~~~~~~~~~~~~~~~~
-To see information on the status of the test case: https://wiki.onap.org/display/DW/5G+-+PNF+SW+Upgrade+-+Integration+Test+Cases
+ .. image:: files/softwareUpgrade/SWUPWorkflow.png
-References
-==========
-[1] TS 32.532,Telecommunication management; Software management (SwM); Integration Reference Point (IRP); Information Service (IS)
+LCM evolution with API Decision Tree
+====================================
-Known Issues and Resolutions
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-None
+A decision point has been introduced in the Frankfurt release. The service designer needs to indicate which LCM API they would like to use for the LCM operations on the selected PNF source at design time (via SDC). The possible LCM APIs are: SO-REF-DATA (default), CDS, SDNC, or APPC.
+ .. image:: files/softwareUpgrade/APIDecisionTree.png
diff --git a/docs/docs_5G_PNF_Software_Upgrade_With_Schema_Update.rst b/docs/docs_5G_PNF_Software_Upgrade_With_Schema_Update.rst
new file mode 100644
index 000000000..c844f1f5d
--- /dev/null
+++ b/docs/docs_5G_PNF_Software_Upgrade_With_Schema_Update.rst
@@ -0,0 +1,114 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+.. _docs_5g_pnf_software_upgrade_with_schema_update:
+
+:orphan:
+
+Support xNF Software Upgrade in association to schema updates
+-------------------------------------------------------------
+
+Description
+~~~~~~~~~~~
+
+A schema update in relation to a xNF software upgrades is a routine for network upgrade to support new xNF features, improve efficiency or increase xNF capacity on the field, and to eliminate bugs. This use case provides to ONAP an advantage in orchestrating and managing the Life Cycle of a Network Services in-line with business and service objectives. Deployment and orchestration of new services over CNFs, VNFs and PNFs in a model and software driven way simplifies the network management. Enables operators and service providers to manage the Life Cycle of a Network Service. Assuring continuity of operation of services is crucial for production and carrier grade environments. The actualization or upgrades of software and in consequence required changes in the service model is a natural part of service instance life cycle. Without the support of ONAP service update with schema change, service life cycle management by ONAP can be very difficult which can impact the quality and continuity of services.
+
+
+Current Status in Guilin
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+xNF Software Upgrade with xNF artifacts updating in Release G
+-------------------------------------------------------------
+
+The following is the xNF software upgrade procedure with schema update.
+
+.. image:: files/softwareUpgrade/SchemaUpdate.png
+
+1. A vendor shall provide
+ a. a new VNF/PNF package with updated artifacts, and
+ b. the new VNF/ PNF software image to the operator.
+2. At receiving of the new package, the operator shall
+ a. onboard the new package and create a new resource template or update the existing resource template (PNF or VNF)
+ b. update the existing service template with the new or updated resource template
+ c. distribute the updated service template to run time.
+3. At run time, the operator shall, based on the updated service template,
+ a. upgrade a service instance and its resource instances, and
+ b. update the AAI entry accordingly
+
+The above procedure is based on the following conditions:
+
+* When updating a service template at design time, the resource instance name and network topology shall be unchanged.
+
+* A service template must be upgradable from any previous versions, including that any new resource template of a given resource instance (within the service template) must be upgradeable from any previous resource template versions.
+
+* At run time, resource upgrade sequence is not sensitive in service instance upgrading procedure.
+
+Function limitations in Release G
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+* The operator shall know the possible/feasible resource upgrade path based on vendor provided information.
+
+* When operator updating a service template, the updated service template must be upgradable from any previous versions:
+ - Within the service template, the resource instance name and network topology are unchanged.
+ - The new resource template of a given resource instance (within the service template) must be upgradeable from any previous resource template versions.
+
+.. note::
+ This is to avoid adding possible upgrade paths info and upgrade sequence info into SDC model
+
+Update a xNF resource template from a new onboarding package
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When updating a resource template from a new VSP csar, the new onboarded descriptor and the new onboarded artifacts will be transformed into the new version of the resource csar. The current resource name and invariantUUID will be retained.
+
+As an alternative, a resource csar can be updated manually using SDC GUI.
+
+.. image:: files/softwareUpgrade/OnboardingCsar.png
+
+The update path (green path in above picture) is supported in the current SDC implementation. However, there are bugs which need to be fixed.
+
+Service level LCM workflow in SO
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. image:: files/softwareUpgrade/ServiceLevelWorkflow.png
+
+A generic SO workflow is created which can be used to upgrade one service instance based on the updated service template. This service level workflow is network function type independent. When upgrading one resource instance, the subsequent resource level upgrade workflow is selected based on the network function type. It contains the following main steps:
+
+* Service Level Preparation
+ - Creating resource template instance upgrade list by comparing the service templates
+ - Select a resource level health check workflow based on the resource type
+ - Execute the selected resource level health check workflow on all resource instances within the service
+* Service Level Upgrade
+ - Select a resource level upgrade workflow based on the resource type
+  - Execute the selected resource level upgrade workflow on each upgrading resource instance
+ - Update the software version, model-invariant-id and model-version-id of the resource template in the A&AI entry at end of each Resource level upgrade workflow
+* Service Level Update
+ - Update the model-version-id of the service template in the A&AI entry
+* Service Level postCheck
+ - Select a resource level health check workflow based on the resource type
+ - Execute the selected resource level health check workflow on all resource instances within the service
+
+The following is an example of the service level workflow with PNF upgrade sub-workflow is called at Service Level Upgrade step:
+
+.. image:: files/softwareUpgrade/ServiceLevelUpgrade.png
+
+Workflow view
+~~~~~~~~~~~~~
+
+.. image:: files/softwareUpgrade/WorkflowView.png
+
+SO APIs
+~~~~~~~
+.. csv-table:: use case table
+ :file: schema-update-apis.csv
+ :widths: 60,20,20
+ :header-rows: 1
+
+Reference
+~~~~~~~~~~~
+
+`PNF Software Upgrade with Schema Update Wiki Page <https://wiki.onap.org/pages/viewpage.action?pageId=81400388#SupportxNFSoftwareUpgradeinassociationtoschemaupdates-DevelopmentStatus>`_
+
+Testing Procedure
+~~~~~~~~~~~~~~~~~~
+
+:ref:`Testing 5G PNF Software Upgrade with Schema Update <docs_Testing_5G_PNF_Software_Upgrade_With_Schema_Update>` \ No newline at end of file
diff --git a/docs/docs_5G_PNF_Software_Upgrade_ansible_with_EM.rst b/docs/docs_5G_PNF_Software_Upgrade_ansible_with_EM.rst
new file mode 100644
index 000000000..6426446eb
--- /dev/null
+++ b/docs/docs_5G_PNF_Software_Upgrade_ansible_with_EM.rst
@@ -0,0 +1,35 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+.. _docs_5g_pnf_software_upgrade_ansible_with_EM:
+
+:orphan:
+
+PNF Software Upgrade Scenario: Using Ansible protocol with EM
+-------------------------------------------------------------
+
+Software Upgrade Procedure
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+With this scenario, the pre-conditions are:
+
+* SO PNF software upgrade workflows are ready to use. For this scenario, the CONTROLLER_ACTOR is set for SDNC client for the API selection decision.
+* Service instantiation is completed, including PNF PnP. It means a PNF instance is in operation and is available for ONAP (maybe via EMS).
+* ONAP Controller (SDNC and ansible server) and DMaaP are ready to use. It means necessary ansible connection and DMaaP topics are ready.
+* EMS has direct ansible interface to the ansible server. The underlying protocol is SSH.
+
+At run time, the service provider in R6 can use CLI to trigger the PNF in-place software upgrade procedure by selecting the existing PNF software upgrade workflow or uploading a custom workflow, as well as an identifier of a PNF instance, the target software version and optional json-formatted payload.
+
+Then the software upgrade workflow is executed as follows:
+
+a. SO sends request(s) with input {action, action-identifiers, common header, and optional payload} to SDNC API handler using traditional LCM API.
+b. SDNC API handler executes corresponding DG and sends requests to the ansible server.
+c. The ansible server executes ansible playbook with the EMS. Then EMS is responsible for the software upgrade procedure of the selected PNF instance.
+d. Repeat above steps for each SO building block in the corresponding PNF software upgrade workflow.
+
+Test Status and Plans
+~~~~~~~~~~~~~~~~~~~~~
+
+To see information on the status of the test cases, please follow the link below:
+
+`Enhancement on PNF software upgrade using Ansible Test Status <https://wiki.onap.org/pages/viewpage.action?pageId=64007357#EnhancementonPNFS/WUpgradeusingAnsible-TestStatus>`_
diff --git a/docs/docs_5G_PNF_Software_Upgrade_direct_netconf_yang.rst b/docs/docs_5G_PNF_Software_Upgrade_direct_netconf_yang.rst
new file mode 100644
index 000000000..24098cdc4
--- /dev/null
+++ b/docs/docs_5G_PNF_Software_Upgrade_direct_netconf_yang.rst
@@ -0,0 +1,37 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+.. _docs_5g_pnf_software_upgrade_direct_netconf_yang:
+
+:orphan:
+
+PNF Software Upgrade Scenario: Using Direct Netconf/Yang interface with PNF
+===========================================================================
+
+Software Upgrade Procedure
+---------------------------
+
+With this scenario, the pre-conditions are:
+
+* SO PNF software upgrade workflows are ready to use
+* An SDC service template with one PNF resource has been designed (including CBA association) and has been distributed
+* Service instantiation is completed, including PNF PnP. meaning a PNF instance is in operation with connectivity between PNF-ONAP, PNF-SFTP
+* At design time, the CONTROLLER_ACTOR is set for CDS client for the API selection decision
+* PNF has direct NETCONF/YANG interface configured which can be reachable from ONAP controller.
+
+At run time, the PNF in-place software upgrade procedure is triggered when the operator provides the selected PNF software upgrade workflow, a PNF instance, and the target software version using VID GUI or CLI.
+Then the software upgrade workflow is executed in SO:
+
+a. SO sends CDS request(s) with action-identifier {actionName, blueprintName, blueprintVersion} to the blueprint processor inside the controller using CDS self-service API
+b. Controller/blueprint processor executes the blueprint scripts including sending NETCONF request(s) to the PNF instance via the direct NETCONF interface.
+c. Repeat above two steps for each SO building blocks.
+
+ .. image:: files/softwareUpgrade/DirectNetconfYangInterface.png
+
+
+Test Status and Plans
+---------------------
+
+To see information on the status of the test cases please follow the link below:
+
+`PNF Software Upgrade Test Status <https://wiki.onap.org/display/DW/PNF+software+upgrade+in+R6+Frankfurt#PNFsoftwareupgradeinR6Frankfurt-TestStatus>`_
diff --git a/docs/docs_5G_PNF_Software_Upgrade_netconf_with_EM.rst b/docs/docs_5G_PNF_Software_Upgrade_netconf_with_EM.rst
new file mode 100644
index 000000000..75eb244e1
--- /dev/null
+++ b/docs/docs_5G_PNF_Software_Upgrade_netconf_with_EM.rst
@@ -0,0 +1,35 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+.. _docs_5g_pnf_software_upgrade_netconf_with_EM:
+
+:orphan:
+
+PNF Software Upgrade Scenario: Using Netconf/Yang interface with EM
+-------------------------------------------------------------------
+
+Software Upgrade Procedure
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+With this scenario, the pre-conditions are:
+
+* SO PNF software upgrade workflows are ready to use.
+* An SDC service template with one PNF resource has been designed (including CBA association) and has been distributed.
+* Service instantiation is completed, including PNF PnP.
+* At design time, the CONTROLLER_ACTOR is set for CDS client for the API selection decision.
+* EMS (with netconf capability and suitable software management yang model) is ready to use. It has direct NETCONF/YANG interface configured which can be reachable from CDS.
+
+At run time, the service provider in R6 can use CLI to trigger the PNF in-place software upgrade procedure by selecting the existing PNF software upgrade workflow or uploading a custom workflow, as well as an identifier of a PNF instance, the target software version.
+
+Then the software upgrade workflow is executed as follows:
+
+a. SO sends CDS request(s) with action-identifier {actionName, blueprintName, blueprintVersion} to the blueprint processor inside the controller using CDS self-service API.
+b. Controller/blueprint processor executes the blueprint scripts including sending NETCONF request(s) to the EMS via the direct NETCONF interface. Then EMS is responsible for the software upgrade procedure of the selected PNF instance.
+c. Repeat above two steps for each SO building block in the corresponding PNF software upgrade workflow.
+
+Test Status and Plans
+~~~~~~~~~~~~~~~~~~~~~
+
+To see information on the status of the test cases, please follow the link below:
+
+`PNF Software Upgrade with netconf/yang interface with EM Test Status <https://wiki.onap.org/pages/viewpage.action?pageId=64008675>`_
diff --git a/docs/docs_5G_oof_pci.rst b/docs/docs_5G_oof_pci.rst
deleted file mode 100644
index 31cfecb0e..000000000
--- a/docs/docs_5G_oof_pci.rst
+++ /dev/null
@@ -1,43 +0,0 @@
-.. This work is licensed under a Creative Commons Attribution 4.0
- International License. http://creativecommons.org/licenses/by/4.0
-
-.. _docs_5G_oof_pci:
-
-OOF-PCI
---------
-
-Description
-~~~~~~~~~~~
-The 5G OOF-PCI use case is an implementation of a SON (Self-Organizing Networks) algorithm for Physical Cell ID (PCI) optimization and the centralized Automatic Neighbor Relations (ANR) function (blacklisting a neighbor for handovers) in a 4G/5G network using the ONAP Optimization Framework (OOF). This use case began with just PCI optimization use case in Casablanca. Further details of Casablanca scope and impacts are described in https://docs.onap.org/en/casablanca/submodules/integration.git/docs/docs_5G_oof_pci.html#docs-5g-oof-pci
-
-For Dublin release, the earlier PCI-Handler MS which was a standalone MS is renamed as SON-Handler MS and onboarded as a micro-service in DCAE. Enhancements were made to Policy and SDN-C. The Config DB functionality (containing configuration details of the RAN), and some of the additions/fixes done to SDN-C are not part of the official Dublin release functionality, but are part of the full use case are only a Proof Of Concept (POC). These code changes in SDN-C are targeted for submission in El Alto release.
-
-In addition, the POC also has a RAN Simulator providing a simulated Radio Access Network (RAN) with a number of netconf servers simulating PNF elements. The functionality of the RAN Simulator has also been enhanced from the Casablanca use case to (a) generate alarms for PCI collision/confusion and (b) generate handover metrics for the different neighbor pairs (for the ANR use case).
-
-All details regarding the use case for Dublin can be found here:
-https://wiki.onap.org/display/DW/OOF-PCI+Use+Case+-+Dublin+Release+-+ONAP+based+SON+for+PCI+and+ANR
-
-The main use case page is https://wiki.onap.org/display/DW/5G+-+OOF+%28ONAP+Optimization+Framework%29+and+PCI+%28Physical+Cell+ID%29+Optimization
-
-
-How to Use
-~~~~~~~~~~
-The OOF-PCI use case is implemented in the Rutgers University (Winlab) ONAP Wireless Lab (OWL). For details, please see: https://wiki.onap.org/pages/viewpage.action?pageId=45298557 .
-This page includes instructions for access to the lab. Since this is a POC at this stage, testing is done manually.
-
-For all instructions about installing the components and test plans, please see:
-
-https://wiki.onap.org/display/DW/Installation+Aspects
-Son-handler installation - https://onap.readthedocs.io/en/latest/submodules/dcaegen2.git/docs/sections/services/son-handler/installation.html
-
-
-
-Test Status and Plans
-~~~~~~~~~~~~~~~~~~~~~
-For Dublin release, the OOF-PCI use case is a Proof of Concept (POC). OOF was enhanced with joint PCI-ANR optimization, SON-Handler MS was functionally enhanced and also onboarded on to DCAE, and Policy was also enhanced with a new control loop for ANR and control loop extension to receive feedback of actions. The pairwise testing was done in Windriver lab (https://wiki.onap.org/display/DW/Integration+Testing). Other non-release functions are all tested as part of the PoC in the Rutgers University (Winlab) ONAP Wireless Lab (OWL). To see information about test plans, please see https://wiki.onap.org/display/DW/Functional+Testing and https://wiki.onap.org/display/DW/Use+case+testing.
-
-
-Known Issues and Resolutions
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-(a) 2 known issues (Medium): CCSDK-1399 and CCSDK-1400
-(b) It is intended to have the RAN Simulator support sufficient Honeycomb netconf server instances to simulate 2000 cells. However, this number may be lower if there are hardware limitatons.
diff --git a/docs/docs_5G_oof_son.rst b/docs/docs_5G_oof_son.rst
new file mode 100644
index 000000000..0ec539d76
--- /dev/null
+++ b/docs/docs_5G_oof_son.rst
@@ -0,0 +1,128 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+.. _docs_5G_oof_son:
+
+:orphan:
+
+5G-SON (earlier name was OOF-SON)
+---------------------------------
+
+Description
+~~~~~~~~~~~
+
+The 5G OOF-SON (earlier name was OOF-PCI) use case is an implementation of a **SON (Self-Organizing Networks)** algorithm for Physical Cell ID (PCI) optimization and the centralized Automatic Neighbor Relations (ANR) function (blacklisting a neighbor for handovers) in a 4G/5G network using the ONAP Optimization Framework (OOF).
+
+The use case is a multi-release effort. This use case began with the implementation of PCI optimization in the Casablanca release. In the Dublin release, the SON-Handler MS was onboarded as a micro-service in DCAE. Enhancements were made to Policy and SDN-C components.
+
+
+RAN Simulator
+~~~~~~~~~~~~~
+
+As part of this use case work, the SON Use Case team developed RAN-Sim, which is a RAN Simulator providing a simulated Radio Access Network (RAN) with a number of netconf servers simulating PNF elements representing gNodeBs. The functionality of the RAN Simulator includes:
+
+- Input of a sample topology of cells, with netconf servers (representing DUs) representing groups of cells
+- Representation of cell locations and cell neighbor relations
+- Generation of neighbor-list-update messages
+- Generation of alarms for PCI collision/confusion and
+- Generation of handover metrics for different neighbor pairs (for the ANR use case).
+- Implementation of an O1 interface termination for CU/DU NFs
+- Implementation of an A1 interface termination with A1-Termination and RAN-App (new for Kohn release)
+
+All above functionality are enabled using a simple UI.
+
+
+Frankfurt Release
+~~~~~~~~~~~~~~~~~
+
+In Frankfurt release, the following were the main enhancements:
+
+- Introduction of Control Loop Coordination functionality, wherein a second control loop execution is denied by Policy component when another control loop is in progress.
+- Introduction of adaptive SON, wherein a set of cells whose PCI values are fixed (i.e., cannot be changed during PCI optimization) are considered during the PCI optimization.
+- In addition, the first step towards O-RAN alignment is being taken with SDN-C (R) being able to receive a DMaaP message containing configuration updates (which would be triggered when a neighbor-list-change occurs in the RAN and is communicated to ONAP over VES). `Details of this implementation <https://wiki.onap.org/display/DW/CM+Notification+Support+in+ONAP>`_
+
+
+Istanbul Release
+~~~~~~~~~~~~~~~~~
+
+In the Istanbul release, the following are the main enhancements:
+
+- Updates in FM reporting and fault handling to be in line with VES 7.2, 3GPP and smoother future alignment with O-RAN O1
+- Alignment with 3GPP NRM/O-RAN yang models for SON use case
+- Use CPS for storing/retrieving RAN config data for this use case (was stretch goal, partially addressed)
+- Configuration Management (CM) notifications over VES based on VES 7.2 (was stretch goal, partially addressed)
+
+The end-to-end setup for the use case requires a database which stores the cell related details of the RAN. This database is ConfigDB till we complete the transition to using CPS DB/TBDMT. The database is updated by SDN-C (R), and is accessed by SON-Handler MS and OOF for fetching (e.g., neighbor list, PNF id, etc):
+
+- `The Config DB implementation <https://github.com/onap-oof-pci-poc/sdnc/tree/master/ConfigDB/Dublin>`_
+- `Swagger JSON API documentation <https://github.com/onap-oof-pci-poc/sdnc/blob/master/ConfigDB/Dublin/SDNC_ConfigDB_API_v3.0.0.json>`_
+
+As part of Istanbul release work, progress was made towards the goal of transitioning from ConfigDB to CPS DB. CPS DB is fully based on yang models, and we have developed a modeling approach using two yang models:
+
+- Primary model: (e.g., ran-network). This is a modular sub-set of, and fully aligned with, ORAN/3GPP 28.541 NRM yang model. This aligns with device models and vendor models (base and extensions)
+
+- Secondary model: (e.g, cps-ran-schema-model) This model captures information which is not present in ORAN model, e.g., region-to-cell (CU) mapping, latitude/longitude of DU. This also has derived information for API/query efficiency, e.g., list of neighbor cells. This aligns with operator network model for use cases and applications.
+
+
+Jakarta Release
+~~~~~~~~~~~~~~~
+
+The following are the enhancements in the Jakarta release:
+
+- Update of SDN-R netconf code to use the new O1 yang models
+- Update of RAN-Sim to use the new O1 yang models
+
+In the Jakarta release, the SON Use Case work was impacted by the fact RAN-Sim needed enhancements to implement new features. We have made progress in the following areas in planning for future releases.
+
+- Convergence on the VES message formats to be used for FM/PM/CM
+- Inclusion of A1 based actions for the end-to-end SON Use Case
+- Enhancement of RAN-Sim to include abstraction of RAN App and A1 Termination which would process an A1 message and update of a CU/DU
+- Planning for replacement of Honeycomb netconf engine (project is archived)
+
+Kohn Release
+~~~~~~~~~~~~
+
+We have introduced a new paradigm in the Kohn release and taken steps to harmonize with O-RAN SC and new approaches for ONAP Control Loops. The following are the enhancements in the Kohn release:
+
+- We introduced a new paradigm of marking the RAN action SON control flows as being O1-based or A1-based. The PCI control flow is now an O1-based flow which goes to SDN-R for netconf-based configurations over O1 interface to the CU/DU (simulated in RAN-Sim). The ANR control flow is now an A1-based flow which goes to SDN-R/A1-PMS to generate A1 Policy messages over the A1 interface to the xApp/Near-RT RIC (simulated in RAN-Sim).
+- The formats of the Control Loop Message between SON Handler MS, Policy, and SDN-R have been updated. Policies in Policy Function have been updated. The PCI flow remains as an O1-based netconf action from SDN-R, while major changes were made for the ANR flow
+- We have introduced a new A1-based SON action flow leveraging the use of A1-PMS in SDN-R and A1-Termination in RAN-Sim. We have harmonized ONAP and O-RAN SC work, and cross-linked ONAP JIRAs to use O-RAN SC projects.
+- We have major changes for RAN-Sim. There is a new A1-Termination module as well as a new RAN-App module. The RAN-App module abstracts the function of an xApp in the Near-RT RIC. RAN-App processes the A1 policy message payload and sends a message to the RAN-Sim controller to make configuration changes in the RAN NF (CU or DU) in the RAN-Sim.
+
+
+For more information, please see:
+
+- `5G-SON Kohn release wiki page <https://wiki.onap.org/pages/viewpage.action?pageId=149029149>`_.
+
+- `5G-SON Jakarta release wiki page <https://wiki.onap.org/display/DW/R10+5G+SON+use+case>`_.
+
+- `5G-OOF-SON Base wiki page <https://wiki.onap.org/display/DW/5G+-+OOF+%28ONAP+Optimization+Framework%29+and+PCI+%28Physical+Cell+ID%29+Optimization>`_.
+
+- `OOF-SON El Alto & Frankfurt OOF (SON) wiki page <https://wiki.onap.org/display/DW/OOF+%28SON%29+in+R5+El+Alto%2C+OOF+%28SON%29+in+R6+Frankfurt>`_.
+
+
+How to Use
+~~~~~~~~~~
+
+The 5G-SON use case is implemented in the Rutgers University (Winlab) ONAP Wireless Lab (OWL).
+For details, please see
+`lab details <https://wiki.onap.org/pages/viewpage.action?pageId=45298557>`_.
+
+This page includes instructions for access to the lab. Setup and testing is done manually up to now.
+
+For all instructions about installing the components, please see:
+
+- `Wiki Installation page <https://wiki.onap.org/display/DW/Demo+setup+steps+for+Frankfurt>`_
+
+
+Test Status and Plans
+~~~~~~~~~~~~~~~~~~~~~
+
+See `test plans <https://wiki.onap.org/display/DW/R11+5G+SON+Integration+Tests>`_ for details.
+
+Known Issues and Resolutions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+(a) It is intended to have the RAN Simulator support sufficient Honeycomb netconf server instances to simulate 2000 cells. However, this number may be lower if there are hardware limitations.
+(b) For Control Loop Co-ordination, the denial of a second Control Loop based on Target Lock (i.e., when a second Control Loop tries to operate on the same target (in this case, a PNF) is successfully tested. The CLC is also applied at Control Loop level only. However, some code updates are required in Policy to properly update the Operations History DB entry, and to check the existence of active Control Loops by Policy. This will be addressed in Jakarta release, and tracked via https://jira.onap.org/browse/POLICY-2484
+(c) Honeycomb netconf server project has been archived. The plan is to migrate to netopeer. As an interim step, we have a new ran-app module which interacts with the ran-sim controller.
diff --git a/docs/docs_5g_pnf_pnp.rst b/docs/docs_5g_pnf_pnp.rst
index 09ec6d302..7807062d8 100644
--- a/docs/docs_5g_pnf_pnp.rst
+++ b/docs/docs_5g_pnf_pnp.rst
@@ -1,16 +1,13 @@
.. This work is licensed under a Creative Commons Attribution 4.0
International License. http://creativecommons.org/licenses/by/4.0
-
+
.. _docs_5g_pnf_pnp:
+:orphan:
+
5G - PNF Plug and Play
----------------------
-Source files
-~~~~~~~~~~~~
-
-- Base PnP PNF Simulator heat template file: https://gerrit.onap.org/r/gitweb?p=integration.git;a=tree;f=test/mocks/pnfsimulator/deployment/src
-
Description
~~~~~~~~~~~
@@ -18,17 +15,66 @@ The PNF Plug and Play is a procedure that is executed between a PNF and ONAP. In
**Useful Links**
-- `5G - PNF Plug and Play use case documentation <https://wiki.onap.org/display/DW/5G+-+PNF+Plug+and+Play>`_
-- `5G - PNF Plug and Play - Integration Test Cases <https://wiki.onap.org/display/DW/5G+-+PNF+PnP+-+Integration+Test+Cases>`_
-- `Instruction how to setup PnP PNF Simulator <https://wikgit i.onap.org/display/DW/PnP+PNF+Simulator>`_
-- `Instruction how to use PnP PNF Simulator <https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/pnfsimulator/README.md>`_
+1. `5G - PNF Plug and Play use case documentation <https://wiki.onap.org/display/DW/5G+-+PNF+Plug+and+Play>`_
+2. `5G - PNF Plug and Play - Integration Test Cases <https://wiki.onap.org/display/DW/5G+-+PNF+PnP+-+Integration+Test+Cases>`_
+3. Instruction how to set up and use the VES Client from :ref:`NF Simulator <nf_simulator>`.
How to Use
~~~~~~~~~~
-1) `Create and distribute service model which contains PNF <https://wiki.onap.org/display/DW/5G+-+PNF+PnP+-+Integration+Test+Cases#id-5G-PNFPnP-IntegrationTestCases-CreateanddistributeservicewhichcontainsPNF>`_
-2) `Create service for PNF and wait for PNF Ready message in DmaaP topic <https://wiki.onap.org/display/DW/5G+-+PNF+PnP+-+Integration+Test+Cases#id-5G-PNFPnP-IntegrationTestCases-PNFReady>`_
-3) `Send PNF Registartion request from PnP PNF Simualtor and finish registration <https://wiki.onap.org/display/DW/5G+-+PNF+PnP+-+Integration+Test+Cases#id-5G-PNFPnP-IntegrationTestCases-PNFregistrationacceptingwhenAAIentrycreatedinadvance>`_
+1. Create and distribute service model which contains PNF
+2. Create service for PNF and wait for PNF Ready message in DmaaP topic
+3. Send PNF Registration request from NF Simulator (VES Client) and finish registration
+
+Below is an example of an event that needs to be sent to the VES Client in order to trigger a registration event from the VES Client to the ONAP VES Collector.
+The following values in the example json need to be filled in with proper values:
+
+1. dcae-ves-collector-host-name
+2. dcae-ves-collector-port
+3. sourceName - Identifier of this Pnf information element. It is the first three letters of the Vendor and the PNF serial number.
+ This is a unique identifier for the PNF instance. It is also referred to as the Correlation ID.
+4. oamV4IpAddress - This is the IP address (IPv4) for the PNF itself. This is the IPv4 address that the PNF itself can be accessed at.
+5. oamV6IpAddress - This is the IP address (IPv6) for the PNF itself. This is the IPv6 address that the PNF itself can be accessed at.
+
+::
+ {
+ "vesServerUrl": "https://<dcae-ves-collector-host-name>:<dcae-ves-collector-port>/eventListener/v7",
+ "event": {
+ "event": {
+ "commonEventHeader": {
+ "startEpochMicrosec": 1538407540940,
+ "sourceId": "val13",
+ "eventId": "registration_38407540",
+ "nfcNamingCode": "oam",
+ "internalHeaderFields": {},
+ "eventType": "pnfRegistration",
+ "priority": "Normal",
+ "version": "4.0.1",
+ "reportingEntityName": "VEN6061ZW3",
+ "sequence": 0,
+ "domain": "pnfRegistration",
+ "lastEpochMicrosec": 1538407540940,
+ "eventName": "pnfRegistration",
+ "vesEventListenerVersion": "7.0.1",
+ "sourceName": "<sourceName>",
+ "nfNamingCode": "gNB"
+ },
+ "pnfRegistrationFields": {
+ "unitType": "val8",
+ "serialNumber": "6061ZW3",
+ "pnfRegistrationFieldsVersion": "2.0",
+ "manufactureDate": "1538407540942",
+ "modelNumber": "val6",
+ "lastServiceDate": "1538407540942",
+ "unitFamily": "BBU",
+ "vendorName": "VENDOR",
+ "oamV4IpAddress": "<oamV4IpAddress>,
+ "oamV6IpAddress": "<oamV6IpAddress>",
+ "softwareVersion": "val7"
+ }
+ }
+ }
+ }
diff --git a/docs/docs_5g_rtpm.rst b/docs/docs_5g_rtpm.rst
index 2de85cdb6..45f1103f2 100644
--- a/docs/docs_5g_rtpm.rst
+++ b/docs/docs_5g_rtpm.rst
@@ -1,8 +1,10 @@
.. This work is licensed under a Creative Commons Attribution 4.0
International License. http://creativecommons.org/licenses/by/4.0
-
+
.. _docs_realtime_pm:
+:orphan:
+
5G - Real Time PM and High Stream Data Collection
-------------------------------------------------
@@ -18,16 +20,16 @@ The Real-Time Performance Measurements support allows for a PNF to send streamin
Component and API descriptions can be found under:
-- `High Volume VNF Event Streaming (HV-VES) Collector <https://onap.readthedocs.io/en/latest/submodules/dcaegen2.git/docs/sections/services/ves-hv/index.html>`_
-- `HV-VES (High Volume VES) <https://onap.readthedocs.io/en/latest/submodules/dcaegen2.git/docs/sections/apis/ves-hv/index.html#hv-ves-high-volume-ves>`_
+- `High Volume VNF Event Streaming (HV-VES) Collector <https://docs.onap.org/projects/onap-dcaegen2/en/frankfurt/sections/services/ves-hv/index.html>`_
+- `HV-VES (High Volume VES) <https://docs.onap.org/projects/onap-dcaegen2/en/frankfurt/sections/apis/ves-hv/index.html#hv-ves-high-volume-ves>`_
How to verify
~~~~~~~~~~~~~
Follow instructions in the links below to send data to HV-VES collector and verify messages published on Kafka topic:
-- `HV-VES xNF simulator integration to ONAP <https://wiki.onap.org/display/DW/HV-VES+simulator#HV-VESsimulator-HV-VESxNFsimulatorintegrationtoONAP>`_
-- `HV-VES xNF message simulation from shell <https://wiki.onap.org/display/DW/HV-VES+simulator#HV-VESsimulator-HV-VESxNFmessagesimulationfromshell>`_
+- `HV-VES xNF simulator integration to ONAP <https://wiki.onap.org/display/DW/HV-VES+simulator>`_ (HVVESsimulator-HV-VESxNFmessagesimulationfromshell)
+- `HV-VES xNF message simulation from shell <https://wiki.onap.org/display/DW/HV-VES+simulator>`_ (HV-VESsimulator-HV-VESxNFsimulatorintegrationtoONAP)
Useful links
~~~~~~~~~~~~
diff --git a/docs/docs_BBS.rst b/docs/docs_BBS.rst
index 81af17294..1047ae3d7 100644
--- a/docs/docs_BBS.rst
+++ b/docs/docs_BBS.rst
@@ -1,5 +1,7 @@
.. _docs_bbs:
+:orphan:
+
BBS (Broadband Service)
-----------------------
@@ -10,12 +12,12 @@ management and assurance of broadband services. BBS focuses on multi-Gigabit
Internet Connectivity services based on PON (Passive Optical Network) access
technology.
-In Dublin release, BBS enables ONAP to
+In Frankfurt release, BBS enables ONAP to
1. Establish a subscriber's HSIA (High Speed Internet Access) service from an ONT (Optical Network Termination unit) to the Internet drain
- The HSIA service is designed and deployed using ONAP's design and deployment capabilities
- - The HSIA service activation is initiated via ONAP's External APIs and orchestrated and controlled using ONAP orchestration and control capabilities. The control capabilities leverage a 3rd party controller to implement the requested action within the technology domain/location represented by the domain specific SDN management and control function.
+ - The HSIA service activation is initiated via ONAP's External APIs and orchestrated and controlled using ONAP orchestration and control capabilities. The control capabilities leverage a 3rd party controller to implement the requested actions within the technology domain/location represented by the domain specific SDN management and control function.
2. Detect the change of location for ONT devices (Nomadic ONT devices)
@@ -27,7 +29,8 @@ In Dublin release, BBS enables ONAP to
- Service location modification that is detected by ONAP's analytic and initiated via the closed loop capabilities
- - The closed loop capabilities invoke a HSIA location change service that is orchestrated and controlled using ONAP capabilities and 3rd party controllers
+ - The closed loop capabilities invoke a HSIA location change service that
+ is orchestrated and controlled using ONAP capabilities and 3rd party controllers
|image1|
@@ -51,16 +54,10 @@ SO: Custom Workflow Configuration
::
- ~/oom/kubernetes# kubectl edit cm dev-so-so-bpmn-infra-app-configmap
+ ~/oom/kubernetes# kubectl edit cm dev-so-bpmn-infra-app-configmap
- mso:
- ...
- workflow:
- custom:
- BBS_E2E_Service:
- sdnc:
- need: true
- ...
+ ## replace "workflow:\n CreateGenericVNFV1:\n"
+ ## with "workflow:\n custom:\n BBS_E2E_Service:\n sdnc:\n need: true\n CreateGenericVNFV1:\n"
## Restart the pod
~/oom/kubernetes# kubectl delete po dev-so-so-bpmn-infra-7556d7f6bc-8fthk
@@ -72,68 +69,35 @@ IMPORTANT: make sure vnf_recipe.NF_ROLE matches vnf_resource.MODEL_NAME, and vnf
::
- root@onap-rancher-daily:/home/ubuntu# kubectl exec -ti dev-mariadb-galera-mariadb-galera-0 sh
+ root@onap-rancher-daily:/home/ubuntu# kubectl exec -ti dev-mariadb-galera-0 sh
sh-4.2$ mysql -u root -p
MariaDB [(none)]> use catalogdb;
- MariaDB [catalogdb]> select * from vnf_recipe;
- ...
- +-------+---------------------+-----------------------+--------------+-------------+--------------------------------------------------------------------------------+-----------------------------------------------+---------------+----------------+---------------------+--------------------------------------+
- | id | NF_ROLE | ACTION | SERVICE_TYPE | VERSION_STR | DESCRIPTION | ORCHESTRATION_URI | VNF_PARAM_XSD | RECIPE_TIMEOUT | CREATION_TIMESTAMP | VF_MODULE_ID |
- +-------+---------------------+-----------------------+--------------+-------------+--------------------------------------------------------------------------------+-----------------------------------------------+---------------+----------------+---------------------+--------------------------------------+
- | 10043 | InternetProfile | createInstance | NF | 1.0 | create InternetProfile | /mso/async/services/CreateSDNCNetworkResource | NULL | 180000 | 2019-02-18 08:34:39 | NULL |
- | 10044 | AccessConnectivity | createInstance | NF | 1.0 | create AccessConnectivity | /mso/async/services/CreateSDNCNetworkResource | NULL | 180000 | 2019-02-18 08:34:39 | NULL |
- | 10045 | CPE | createInstance | NF | 1.0 | create CPE | /mso/async/services/HandlePNF | NULL | 180000 | 2019-02-18 08:34:39 | NULL |
- +-------+---------------------+-----------------------+--------------+-------------+--------------------------------------------------------------------------------+-----------------------------------------------+---------------+----------------+---------------------+--------------------------------------+
- ...
- MariaDB [catalogdb]> select * from vnf_resource;
- +--------------------+-----------------------+---------------------+--------------------------------------+-----------------+-----------------+--------------------------------------+---------------+--------------------+----------------------------------------------+-----------------------------+-------------------+-----------------------+
- | ORCHESTRATION_MODE | DESCRIPTION | CREATION_TIMESTAMP | MODEL_UUID | AIC_VERSION_MIN | AIC_VERSION_MAX | MODEL_INVARIANT_UUID | MODEL_VERSION | MODEL_NAME | TOSCA_NODE_TYPE | HEAT_TEMPLATE_ARTIFACT_UUID | RESOURCE_CATEGORY | RESOURCE_SUB_CATEGORY |
- +--------------------+-----------------------+---------------------+--------------------------------------+-----------------+-----------------+--------------------------------------+---------------+--------------------+----------------------------------------------+-----------------------------+-------------------+-----------------------+
- | HEAT | CPE VF | 2019-05-15 22:11:07 | 8f5fe623-c5e3-4ab3-90f9-3a28daea6601 | NULL | NULL | 0ee07fe6-a156-4e59-9dee-09a775d02bca | 1.0 | CPE | org.openecomp.resource.vf.Cpe | NULL | Generic | Infrastructure |
- | HEAT | InternetProfile VF | 2019-05-15 22:11:11 | a8de16d8-0d1a-4a19-80ac-2bcb2790e9a6 | NULL | NULL | acbe6358-6ce4-43a9-9385-111fe5cadad3 | 1.0 | InternetProfile | org.openecomp.resource.vf.Internetprofile | NULL | Generic | Infrastructure |
- | HEAT | AccessConnectivity VF | 2019-05-15 22:11:13 | b464fd87-3663-46c9-adc5-6f7d9e98ff26 | NULL | NULL | 53018dba-c934-415d-b4b1-0b1cae9553b8 | 1.0 | AccessConnectivity | org.openecomp.resource.vf.Accessconnectivity | NULL | Generic | Infrastructure |
- +--------------------+-----------------------+---------------------+--------------------------------------+-----------------+-----------------+--------------------------------------+---------------+--------------------+----------------------------------------------+-----------------------------+-------------------+-----------------------+
-
-Modify the MODEL_UUID and MODEL_INVARIANT_UUID for each resource in the SQL query below accordingly to your environment.
-
-::
-
- INSERT INTO `vnf_resource` (`ORCHESTRATION_MODE`, `DESCRIPTION`, `CREATION_TIMESTAMP`, `MODEL_UUID`, `AIC_VERSION_MIN`, `AIC_VERSION_MAX`, `MODEL_INVARIANT_UUID`, `MODEL_VERSION`, `MODEL_NAME`, `TOSCA_NODE_TYPE`, `HEAT_TEMPLATE_ARTIFACT_UUID`, `RESOURCE_CATEGORY`, `RESOURCE_SUB_CATEGORY`)
- VALUES
- ('HEAT', 'CPE VF', '2019-05-15 22:11:07', '8f5fe623-c5e3-4ab3-90f9-3a28daea6601', NULL, NULL, '0ee07fe6-a156-4e59-9dee-09a775d02bca', '1.0', 'CPE', 'org.openecomp.resource.vf.Cpe', NULL, 'Generic', 'Infrastructure'),
- ('HEAT', 'InternetProfile VF', '2019-05-15 22:11:11', 'a8de16d8-0d1a-4a19-80ac-2bcb2790e9a6', NULL, NULL, 'acbe6358-6ce4-43a9-9385-111fe5cadad3', '1.0', 'InternetProfile', 'org.openecomp.resource.vf.Internetprofile', NULL, 'Generic', 'Infrastructure'),
- ('HEAT', 'AccessConnectivity VF', '2019-05-15 22:11:13', 'b464fd87-3663-46c9-adc5-6f7d9e98ff26', NULL, NULL, '53018dba-c934-415d-b4b1-0b1cae9553b8', '1.0', 'AccessConnectivity', 'org.openecomp.resource.vf.Accessconnectivity', NULL, 'Generic', 'Infrastructure');
-
-Adding is_pnf flag to CPE resource input in catalogdb database. Needed in DoCreateResource BPMN for pausing the flow until a PNF is ready
-
-::
-
- INSERT INTO `vnf_resource_customization` (`ID`, `MODEL_CUSTOMIZATION_UUID`, `MODEL_INSTANCE_NAME`, `MIN_INSTANCES`, `MAX_INSTANCES`, `AVAILABILITY_ZONE_MAX_COUNT`, `NF_TYPE`, `NF_ROLE`, `NF_FUNCTION`, `NF_NAMING_CODE`, `MULTI_STAGE_DESIGN`, `CREATION_TIMESTAMP`, `VNF_RESOURCE_MODEL_UUID`, `SERVICE_MODEL_UUID`, `RESOURCE_INPUT`, `CDS_BLUEPRINT_NAME`, `CDS_BLUEPRINT_VERSION`, `SKIP_POST_INSTANTIATION_CONFIGURATION`)
+ MariaDB [catalogdb]> INSERT INTO vnf_recipe (NF_ROLE, ACTION, SERVICE_TYPE, VERSION_STR, DESCRIPTION, ORCHESTRATION_URI, VNF_PARAM_XSD, RECIPE_TIMEOUT)
VALUES
- (16, '0cea1cea-e4e4-4c91-be41-675e183a8983', 'CPE 0', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'false', '2019-05-21 11:15:42', '8f5fe623-c5e3-4ab3-90f9-3a28daea6601', '0187be8c-8efb-4531-97fa-dbe984ed9cdb', '{\\\"nf_naming\\\":\\\"true\\\",\\\"skip_post_instantiation_configuration\\\":\\\"true\\\",\\\"multi_stage_design\\\":\\\"false\\\",\\\"availability_zone_max_count\\\":\\\"1\\\",\\\"is_pnf\\\":\\\"ont_0_is_pnf|true\\\"}', NULL, NULL, 1);
-
-We need to ensure that the order in which the resources are processed by SO engine is correct. In BBS case, the PNF resource should go right after VnfVirtualLink (NOTE: the BPMN flow waits until PNF is ready in order to create AccessConnectivity and InternetProfile resources)
+ ("InternetProfile", "createInstance", "NF", "1.0", "create InternetProfile", "/mso/async/services/CreateSDNCNetworkResource", '{"operationType":"AccessConnectivity"}', 180000),
+ ("AccessConnectivity", "createInstance", "NF", "1.0", "create AccessConnectivity", "/mso/async/services/CreateSDNCNetworkResource", '{"operationType":"InternetProfile"}', 180000),
+ ("CPE", "createInstance", "NF", "1.0", "create CPE", "/mso/async/services/HandlePNF", NULL, 180000);
-::
+ MariaDB [catalogdb]> select * from vnf_recipe where NF_ROLE IN ('AccessConnectivity','InternetProfile', 'CPE');
+ +-------+--------------------+----------------+--------------+-------------+---------------------------+-----------------------------------------------+----------------------------------------+----------------+---------------------+--------------+
+ | id | NF_ROLE | ACTION | SERVICE_TYPE | VERSION_STR | DESCRIPTION | ORCHESTRATION_URI | VNF_PARAM_XSD | RECIPE_TIMEOUT | CREATION_TIMESTAMP | VF_MODULE_ID |
+ +-------+--------------------+----------------+--------------+-------------+---------------------------+-----------------------------------------------+----------------------------------------+----------------+---------------------+--------------+
+ | 10048 | InternetProfile | createInstance | NF | 1.0 | create InternetProfile | /mso/async/services/CreateSDNCNetworkResource | {"operationType":"InternetProfile"} | 1800000 | 2020-01-20 17:43:07 | NULL |
+ | 10051 | AccessConnectivity | createInstance | NF | 1.0 | create AccessConnectivity | /mso/async/services/CreateSDNCNetworkResource | {"operationType":"AccessConnectivity"} | 1800000 | 2020-01-20 17:43:07 | NULL |
+ | 10054 | CPE | createInstance | NF | 1.0 | create CPE | /mso/async/services/HandlePNF | NULL | 1800000 | 2020-01-20 17:43:07 | NULL |
+ +-------+--------------------+----------------+--------------+-------------+---------------------------+-----------------------------------------------+----------------------------------------+----------------+---------------------+--------------+
+ 3 rows in set (0.00 sec)
- MariaDB [catalogdb]> select RESOURCE_ORDER from service where MODEL_NAME="BBS_E2E_Service";
- +----------------------------------------------------------------------------+
- | RESOURCE_ORDER |
- +----------------------------------------------------------------------------+
- | VnfVirtualLink,CPE,AccessConnectivity,InternetProfile,PonUni,OltNni,OntNni |
- | VnfVirtualLink,CPE,AccessConnectivity,InternetProfile,PonUni,OltNni,OntNni |
- +----------------------------------------------------------------------------+
- 2 rows in set (0.00 sec)
DMaaP Message Router
====================
-Create required topics
+Create the required topics in DMaaP
::
- curl -X POST \
- http://mr.api.simpledemo.openecomp.org:30227/topics/create \
+ curl -kX POST \
+ https://mr.api.simpledemo.openecomp.org:30226/topics/create \
-H 'Accept: application/json' \
-H 'Content-Type: application/json' \
-H 'cache-control: no-cache' \
@@ -144,8 +108,8 @@ Create required topics
"replicationCount": "3"
}'
- curl -X POST \
- http://mr.api.simpledemo.openecomp.org:30227/topics/create \
+ curl -kX POST \
+ https://mr.api.simpledemo.openecomp.org:30226/topics/create \
-H 'Accept: application/json' \
-H 'Content-Type: application/json' \
-H 'cache-control: no-cache' \
@@ -156,8 +120,8 @@ Create required topics
"replicationCount": "3"
}'
- curl -X POST \
- http://mr.api.simpledemo.openecomp.org:30227/topics/create \
+ curl -kX POST \
+ https://mr.api.simpledemo.openecomp.org:30226/topics/create \
-H 'Accept: application/json' \
-H 'Content-Type: application/json' \
-H 'cache-control: no-cache' \
@@ -168,8 +132,8 @@ Create required topics
"replicationCount": "3"
}'
- curl -X POST \
- http://mr.api.simpledemo.openecomp.org:30227/topics/create \
+ curl -kX POST \
+ https://mr.api.simpledemo.openecomp.org:30226/topics/create \
-H 'Accept: application/json' \
-H 'Content-Type: application/json' \
-H 'cache-control: no-cache' \
@@ -180,81 +144,211 @@ Create required topics
"replicationCount": "3"
}'
+ curl -k 'https://mr.api.simpledemo.openecomp.org:30226/topics'
+
+ {
+ "topics": [
+ "org.onap.dmaap.mr.PNF_REGISTRATION",
+ "unauthenticated.DCAE_CL_OUTPUT",
+ "AAI-EVENT",
+ "SDC-DISTR-STATUS-TOPIC-AUTO",
+ "SDC-DISTR-NOTIF-TOPIC-AUTO",
+ "org.onap.dmaap.mr.PNF_READY",
+ "unauthenticated.PNF_READY",
+ "POLICY-PDP-PAP",
+ "unauthenticated.CPE_AUTHENTICATION",
+ "unauthenticated.VES_MEASUREMENT_OUTPUT",
+ "unauthenticated.PNF_UPDATE",
+ "org.onap.dmaap.mr.mirrormakeragent",
+ "__consumer_offsets"
+ ]
+ }
+
+
DCAE: BBS Event Processor (BBS-ep)
==================================
-Installation instructions: `BBS-ep <https://wiki.onap.org/pages/viewpage.action?pageId=60891185>`_
+Description: `BBS-ep <https://docs.onap.org/projects/onap-dcaegen2/en/frankfurt/sections/services/bbs-event-processor/index.html?highlight=BBS>`_
+
+The following BBS event processor blueprint will be used:
+
+- `k8s-bbs-event-processor.yaml <https://git.onap.org/dcaegen2/platform/blueprints/plain/blueprints/k8s-bbs-event-processor.yaml?h=frankfurt>`_
-Update the configuration of BBS-ep in Consul with the following version for close loop (see screenshot below) in order to match the version expected by BBS APEX policy:
+
+The BBS-ep deployment procedure:
::
- "application.clVersion": "1.0.0"
+ root@onap-nfs:/home/ubuntu# kubectl exec -ti dev-dcae-bootstrap-7599b45c77-czxsx -n onap bash
+ bash-4.2$ cfy install -b bbs-ep -d bbs-ep /blueprints/k8s-bbs-event-processor.yaml
+
+
+IMPORTANT: Make sure that the configuration of BBS-ep in Consul contains the following version for the close loop policy in order to match the version expected by BBS APEX policy:
+
+::
+
+ "application.clVersion": "1.0.2"
+
DCAE: RESTCONF Collector
========================
-Installation instructions: `RESTCONF Collector <https://wiki.onap.org/pages/viewpage.action?pageId=60891182>`_
+Description: `RESTCONF Collector <https://docs.onap.org/projects/onap-dcaegen2/en/frankfurt/sections/services/restconf/index.html>`_
+
+The following RESTCONF collector blueprint will be used:
+
+- `k8s-restconf.yaml <https://git.onap.org/dcaegen2/platform/blueprints/plain/blueprints/k8s-restconf.yaml?h=frankfurt>`_
+
+
+RESTCONF Collector deployment procedure:
+
+::
+
+ root@onap-nfs:/home/ubuntu# kubectl exec -ti dev-dcae-bootstrap-7599b45c77-czxsx -n onap bash
+ bash-4.2$ cfy install -b restconf -d restconf /blueprints/k8s-restconf.yaml
+
DCAE: VES mapper
================
-Installation instructions: `VES Mapper <https://wiki.onap.org/pages/viewpage.action?pageId=60891188>`_
+Description: `VES Mapper <https://docs.onap.org/projects/onap-dcaegen2/en/frankfurt/sections/services/mapper/index.html>`_
+
+The following VES mapper blueprint will be used:
+
+- `k8s-ves-mapper.yaml <https://git.onap.org/dcaegen2/platform/blueprints/tree/blueprints/k8s-ves-mapper.yaml?h=frankfurt>`_
+
+
+VES Mapper deployment procedure:
+
+::
+
+ root@onap-nfs:/home/ubuntu# kubectl exec -ti dev-dcae-bootstrap-7599b45c77-czxsx -n onap bash
+ bash-4.2$ cfy install -b ves-mapper -d ves-mapper /blueprints/k8s-ves-mapper.yaml
+
DCAE: VES collector
===================
-Configure mapping VES event domain to DMaaP topic: ves-statechange --> unauthenticated.CPE_AUTHENTICATION
+Configure the mapping of the VES event domain to the correct DMaaP topic in Consul: ves-statechange --> unauthenticated.CPE_AUTHENTICATION
-Access Consul UI: http://<consul_server_ui>:30270/ui/#/dc1/services
+1. Access Consul UI <http://CONSUL_SERVER_UI:30270/ui/#/dc1/services>
-Modify dcae-ves-collector configuration by adding a new VES domain to DMaaP topic mapping
+2. Modify the dcae-ves-collector configuration by adding a new VES domain to DMaaP topic mapping
::
"ves-statechange": {"type": "message_router", "dmaap_info": {"topic_url": "http://message-router:3904/events/unauthenticated.CPE_AUTHENTICATION"}}
+|image3|
+
+3. Click on UPDATE in order to apply the new configuration
+
+
SDNC: BBS DGs (Directed Graphs)
===============================
-Make sure that BBS DGs in SDNC DGBuilder are in Active state
+Make sure that the following BBS DGs in the SDNC DGBuilder are in Active state
+
+::
+
+ bbs-access-connectivity-vnf-topology-operation-create-huawei
+ bbs-access-connectivity-vnf-topology-operation-delete-huawei
+ bbs-internet-profile-vnf-topology-operation-change-huawei
+ bbs-internet-profile-vnf-topology-operation-common-huawei
+ bbs-internet-profile-vnf-topology-operation-create-huawei
+ bbs-internet-profile-vnf-topology-operation-delete-huawei
+ validate-bbs-vnf-input-parameters
+
+DGBuilder URL: https://sdnc.api.simpledemo.onap.org:30203
+
+
+Access SDN M&C DG
+=================
+Configure Access SDN M&C IP address in SDNC DG using dgbuilder. For instance:
+
+> GENERIC-RESOURCE-API: bbs-access-connectivity-vnf-topology-operation-create-huawei.json
+> GENERIC-RESOURCE-API: bbs-access-connectivity-vnf-topology-operation-delete-huawei.json
+
+1. Export the relevant DG
+
+2. Modify the IP address
+
+3. Import back the DG and Activate it
+
+DGBuilder URL: https://sdnc.api.simpledemo.onap.org:30203
+
+
+Edge SDN M&C DG
+===============
+Configure Edge SDN M&C IP address in SDNC DG using dgbuilder. For instance:
+
+> GENERIC-RESOURCE-API: bbs-access-connectivity-vnf-topology-operation-common-huawei.json
+
+1. Export the relevant DG
+
+2. Modify the IP address
+
+3. Import back the DG and Activate it
+
+DGBuilder URL: https://sdnc.api.simpledemo.onap.org:30203
-http://dguser:test123@{{sdnc-dgbuilder_Node-IP}}:30203/#
+
+Add SSL certificate of the 3rd party controller into the SDNC trust store
+=========================================================================
::
- bbs-access-connectivity-network-topology-operation-create-huawei
- bbs-access-connectivity-network-topology-operation-delete-huawei
- bbs-internet-profile-network-topology-operation-change-huawei
- bbs-internet-profile-network-topology-operation-common-huawei
- bbs-internet-profile-network-topology-operation-create-huawei
- bbs-internet-profile-network-topology-operation-delete-huawei
- validate-bbs-network-input-parameters
+ kubectl exec -ti dev-sdnc-0 -n onap -- bash
+
+ openssl s_client -connect <IP_ADDRESS_EXT_CTRL>:<PORT>
+ # copy server certificate and paste in /tmp/<CA_CERT_NAME>.crt
+ sudo keytool -importcert -file /tmp/<CA_CERT_NAME>.crt -alias <CA_CERT_NAME>_key -keystore truststore.onap.client.jks -storepass adminadmin
+ keytool -list -keystore truststore.onap.client.jks -storepass adminadmin | grep <CA_CERT_NAME>
+
Policy: BBS APEX policy
=======================
-Inside APEX container,
+Deployment procedure of BBS APEX Policy (master, apex-pdp image v2.3+)
+
+1. Make Sure APEX PDP is running and in Active state
+
+::
+
+ API: GET
+ URL: {{POLICY-PAP-URL}}/policy/pap/v1/pdps
+
+2. Create the operational control loop APEX policy type
+
+::
-1) Edit DCAEConsumer URL in `examples/config/ONAPBBS/NomadicONTPolicyModel_config.json`
+ API: POST
+ URL: {{POLICY-API-URL}}/policy/api/v1/policytypes
+ JSON Payload: https://git.onap.org/integration/usecases/bbs/tree/policy/apex/json/bbs_policytypes.json
-2) Edit AAI and SDNC URLs in `examples/config/ONAPBBS/config.txt`
+3. Create BBS APEX policy
::
- AAI_URL=aai:8443
- AAI_USERNAME=AAI
- AAI_PASSWORD=AAI
- SDNC_URL=sdnc:8282
- SDNC_USERNAME=admin
- SDNC_PASSWORD=Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U
- SVC_NOTIFICATION_URL=http://c1.vm1.mso.simpledemo.openecomp.org:8080
+ API: POST
+ URL: {{POLICY-API-URL}}/policy/api/v1/policytypes/onap.policies.controlloop.operational.Apex/versions/1.0.0/policies
+ JSON Payload: https://git.onap.org/integration/usecases/bbs/tree/policy/apex/json/bbs_create_policy.json
-3) Launch APEX BBS policy as a background process
+4. Deploy BBS policy
::
- nohup /opt/app/policy/apex-pdp/bin/apexApps.sh engine -c examples/config/ONAPBBS/NomadicONTPolicyModel_config.json &
+ API: POST
+ URL: {{POLICY-PAP-URL}}/policy/pap/v1/pdps/policies
+ JSON Payload: https://git.onap.org/integration/usecases/bbs/tree/policy/apex/json/bbs_simple_deploy.json
+
+5. Verify the deployment
+
+::
+
+ API: GET
+ URL: {{POLICY-API-URL}}/policy/api/v1/policytypes/onap.policies.controlloop.operational.Apex/versions/1.0.0/policies/
+
Edge Services: vBNG+AAA+DHCP, Edge SDN M&C
==========================================
@@ -264,22 +358,18 @@ Installation and setup instructions: `Swisscom Edge SDN M&C and virtual BNG <htt
References
==========
-Please refer to the following wiki page for additional set up and configuration
-instructions:
+Please refer to the following wiki page for further steps related to the BBS service design and instantiation:
-- `BBS Documentation <https://wiki.onap.org/display/DW/BBS+Documentation>`_
+- `BBS Documentation <https://wiki.onap.org/pages/viewpage.action?pageId=75303137#BBSDocumentation(Frankfurt)-BBSServiceConfiguration>`_
Known Issues
------------
-- PNF registration timeout is limited to 60s due HTTP timeout in inter-BPMN workflow calls (`SO-1938 <https://jira.onap.org/browse/SO-1938>`_)
-
-- E2E Service deletion workflow does not delete the PNF resource in AAI (`SO-1994 <https://jira.onap.org/browse/SO-1994>`_)
-
-- Under certain circumstances, multiple attachment points (logical links) are associated to a single PNF (`DCAEGEN2-1611 <https://jira.onap.org/browse/DCAEGEN2-1611>`_)
-
+- E2E Service deletion workflow does not delete the PNF resource in AAI (`SO-2609 <https://jira.onap.org/browse/SO-2609>`_)
.. |image1| image:: files/bbs/BBS_arch_overview.png
:width: 6.5in
.. |image2| image:: files/bbs/BBS_system_view.png
:width: 6.5in
+.. |image3| image:: files/bbs/BBS_dcae-ves-collector_config.png
+ :width: 6.5in
diff --git a/docs/docs_CCVPN.rst b/docs/docs_CCVPN.rst
index b0ec57239..d24862a68 100644
--- a/docs/docs_CCVPN.rst
+++ b/docs/docs_CCVPN.rst
@@ -1,10 +1,413 @@
.. This work is licensed under a Creative Commons Attribution 4.0
International License. http://creativecommons.org/licenses/by/4.0
-
+
.. _docs_ccvpn:
+:orphan:
+
CCVPN (Cross Domain and Cross Layer VPN)
----------------------------------------
+Update for London Release
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The London release enhances the CCVPN use-case by introducing the Cloud-Network Convergence support (REQ-1413).
+CCVPN London release will add transport domain support for the Intent-based cloud-network convergence.
+
+London Scope and Impacted modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The standardized cloud resource management APIs are still under our investigation.
+In London, we will only support the registration of the Cloud Orchestrator to SNDC,
+whose mechanism is similar to the network controller registration.
+
+The impacted ONAP modules are CCSDK and SDN-C.
+
+Installation Procedure
+~~~~~~~~~~~~~~~~~~~~~~
+
+For London's new features, the integration test environment is similar to that of
+the Kohn release: an ONAP instance with London release interfacing with 3rd party
+cloud orchestrators should be established.
+
+Functional/Integration Test Cases
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The testing procedure is described in the following few test cases:
+- Register a 3rd party cloud orchestrator to SDNC through ESR APIs
+- Create and delete a single CLL instance that accesses a single cloud, and monitor if the closed-loop call flow is getting triggered.
+- Modify the bandwidth of a connection link of an existing CLL instance, and monitor if the closed-loop call flow is getting triggered.
+
+Update for Kohn Release
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The Kohn release enhances the CCVPN use-case by introducing the following three features (REQ-1268):
+1. E-LINE (P2P connection) support for the Cloud Leased Line (CLL) service delivery
+2. Enhancing the Closed-Loop Automation of CCVPN services by using DCAE SDK dmaap-client lib in slice analysis MS
+3. Enhancing TN NSSMF NBI to align with the latest IETF specification (SO changes)
+
+Kohn Scope and Impacted modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The Kohn release is an enhancement release. E-LINE service model (P2P connection) is added to the Cloud Leased Line (CLL) service.
+Also, slice analysis MS is enhanced to use DCAE SDK dmaap-client lib.
+And lastly, TN NSSMF northbound is aligned with the latest IETF transport slice definition model (SO changes).
+
+The impacted ONAP modules are: CCSDK, SDN-C, DCAE, and SO.
+
+Installation Procedure
+~~~~~~~~~~~~~~~~~~~~~~
+
+For Kohn new features, the integration test environment is similar to that of
+the Jakarta release: an ONAP instance with Kohn release interfacing with 3rd party
+transport domain controllers should be established.
+
+Functional/Integration Test Cases
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The testing procedure is described in the following few test cases:
+- Create and delete single CLL instance which accesses single cloud, and monitor if the closed-loop call flow is getting triggered.
+- Modify the bandwidth of a connection link of an existing CLL instance, and monitor if the closed-loop call flow is getting triggered.
+
+
+Update for Jakarta Release
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The Jakarta release enhances the CCVPN use-case by introducing the following three features (REQ-1076):
+1. Support for IBN service discovery by registering Cloud Leased Line (CLL) and Transport Slicing services to MSB
+2. Support for 1+1 protection of Cloud Leased Line (CLL)
+3. Support for closed-loop and user-triggered intent update
+
+Jakarta Scope and Impacted modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The "CCVPN closed-loop" feature and the "user-triggered intent update" feature require both a front-end and a back-end system.
+The front-end would be different for IBN and CCVPN, but the two features can share a common back-end.
+As a first step, current bandwidth usage of a CLL should be collected from the physical network. Then VES collector API
+should be called to send this information to DCAE. DCAE would then publish a new DMaaP topic to be consumed by DCAE slice
+analysis micro-service. This module will then send this notification to Policy.
+
+In Jakarta, the goal of both user-triggered intent update and CCVPN closed-loop is to ensure the max-bandwidth of the CLL service
+can satisfy user's intent throughout the intent life cycle. Thus, the modify-CLL operation triggered by DCAE and Policy is
+common to IBN and CCVPN. So a common back-end mechanism is implemented to support both use-cases.
+
+The impacted ONAP modules are: CCSDK, SDN-C, A&AI, DCAE, POLICY, and SO.
+
+Installation Procedure
+~~~~~~~~~~~~~~~~~~~~~~
+
+For Jakarta new features, the integration test environment is similar to that of
+the Istanbul release: an ONAP instance with Istanbul release interfacing with 3rd party
+transport domain controllers should be established.
+
+Functional/Integration Test Cases
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The testing procedure is described in the following few test cases:
+- Create and delete single CLL instance which accesses single cloud, and monitor if the closed-loop call flow is getting triggered.
+- Create and delete single CLL instance which access multiple clouds, and monitor if the closed-loop call flow is getting triggered.
+- Create and delete multiple CLL instances which access single cloud, and monitor if the closed-loop call flow is getting triggered.
+- Create and delete multiple CLL instances which access multiple clouds, and monitor if the closed-loop call flow is getting triggered.
+- Create a CLL instance which have connection links with different bandwidth, and monitor if the closed-loop call flow is getting triggered.
+- Modify the bandwidth of a connection link of an existing CLL instance, and monitor if the closed-loop call flow is getting triggered.
+- Modify an existing CLL instance by adding a new connection link, and monitor if the closed-loop call flow is getting triggered.
+
+
+Update for Istanbul Release
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The Istanbul release introduces a new functionality for the CCVPN use-case:
+Cloud Leased Line (CLL) service support. The following three main operations were
+added in Istanbul release (REQ-719):
+
+1. The support for creating an E-Tree service, which has one ROOT (Cloud POP) and may have
+ one or more LEAFs (i.e. ONUs) as its branches.
+2. The support for modifying the maximum bandwidth supported by a given E-Tree.
+3. The support for deleting an E-Tree service.
+
+Istanbul Scope and Impacted modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+For operation #1 mentioned above, the user should be able to "create" an E-Tree service.
+The modification operation is able to support the following scenarios:
+
+a. An E-Tree can have one or more branches (LEAFs) located in one or multiple (different)
+ domains.
+b. When multiple LEAFs are physically located in a single OLT node, those LEAFs
+ should re-use or share the same OTN tunnels, therefore the path computation
+ mechanism should only be called once.
+
+By operation #2 mentioned above, a user can change/modify the maximum bandwidth supported
+by a given E-Tree.
+
+And by operation #3 mentioned above, a user can delete a given E-Tree.
+
+The impacted ONAP modules are: SO, SDN-C, and A&AI.
+
+For A&AI, additional edge-rules were introduced between two connectivity nodes as well as
+between a connectivity and a uni node.
+
+In SDN-C, additional Directed Graphs (DGs) were implemented to support the above-mentioned
+features. These new DGs are placed under the generic-resource-api folder in SDNC.
+
+Installation Procedure
+~~~~~~~~~~~~~~~~~~~~~~
+
+For Istanbul new features, the integration test environment is similar to that of
+the Honolulu release: an ONAP instance with Istanbul release interfacing with 3rd party
+transport domain controllers should be established.
+
+For E-Tree support, the installation procedure is similar to that of the E2E
+Network Slicing use case. In other words, we need to bring up the required modules
+including SO, ADNS, A&AI, and UUI. We also need to configure these modules along
+with the mandatory common modules such as DMaaP.
+
+Functional/Integration Test Cases
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The testing procedure is described in the following few test cases:
+
+- create an E-Tree with one ROOT and one or multiple LEAF(s) in a multi-domain topology
+- modify the maximum bw of a given E-Tree or add a new connection link to a given E-Tree
+- delete a given E-Tree
+
+To run such test cases, the user must first add (register) the domain controllers as the ESR
+3rd party controllers. As a result of this registration, a round of topology discovery gets
+triggered. After that, network-routes or UNI Endpoints have to be created in A&AI. This step
+is similar to that of Guilin release, and is described in the following link:
+https://wiki.onap.org/display/DW/Transport+Slicing+Configuration+and+Operation+Guidance
+
+Then an E-Tree creation, modification and deletion can be triggered from SO APIs.
+
+
+
+Update for Honolulu Release
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The Honolulu release continued to support and extend the Transport Slicing functionality
+developed in Guilin release. Two main features were added in Honolulu release (REQ-456):
+
+1. The support for reuse and modification of an existing TN NSSI has been developed.
+2. In addition, the Honolulu release also continued to support and extend the CCVPN
+   use-case and in particular, the support for inter-domain connections of three or
+   more network domains has been introduced in Honolulu release. (CCVPN in previous
+   releases was only able to connect two domains).
+
+Honolulu Scope and Impacted modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+For feature #1 mentioned above, the user should be able to "modify" a Transport Slice.
+The modification operation is able to support the following three scenarios:
+
+a. A user may "Add" one or more new service(s)/connections link(s) to a given slice
+ (TN NSSI) that is already created.
+b. A user may need to change or modify the maximum bandwidth attribute (i.e. the SLA
+ agreement) using which a given slice is created.
+c. Both of the above operations.
+
+For feature #2 mentioned above, now in H release, we can have and support an arbitrary
+number of domains inter-connected to each other and we can support a cross-layer
+cross-domain VPN connectivity and transport slicing for these kinds of scenarios as well.
+
+Impacted ONAP modules include: SO, SDN-C, CCSDK, A&AI.
+
+In CCSDK, a path computation engine (PCE) mechanism is introduced to support a
+graph-based path computation in multi-domain network topologies. This PCE system is
+implemented as an SLI plugin to be called and used by Directed Graphs (DGs).
+
+For A&AI, additional attributes were introduced to the connectivity node and vpn-binding node.
+
+In SDN-C, additional Directed Graphs (DGs) were implemented to support the above-mentioned
+two features.
+
+Installation Procedure
+~~~~~~~~~~~~~~~~~~~~~~
+
+For Honolulu new features, the integration test environment is similar to that of the Guilin
+release: an ONAP instance with Honolulu release interfacing to 3rd party transport domain
+controllers should be established.
+
+For Transport Slicing, the installation procedure is similar to that of the E2E
+Network Slicing use case. In other words, we need to bring up the required modules
+including SDC, SO, A&AI, UUI and OOF. We also need to configure these modules along
+with the mandatory common modules such as DMaaP.
+
+Functional/Integration Test Cases
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The testing procedure is described in the following few test cases:
+
+- service/template design: Successful design of TN NSST and Slice Profile
+- modify max-bandwidth of existing TN NSSI: Modify the maximum bandwidth of an existing TN NSSI
+- modify connection links existing TN NSSI: Add new connection links to existing TN NSSI
+- modify both max-bandwidth and connection links of TN NSSI: Modify both the maximum bandwidth and add new connection links to an existing TN NSSI
+- three-domain network: Test create TN NSSI (or other NSI life cycle operations) on a three-domain network (i.e., need 3 ACTN PNC simulators)
+
+
+
+Update for Guilin Release
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In Guilin Release, **MDONS** Extension feature is introduced.
+
+In addition to the MDONS extension, CCVPN has also developed an
+IETF/ACTN-based Transport Slicing solution (REQ-347). This development
+enabled ONAP to offer the TN NSSMF functionality, which was used by
+the E2E Network Slicing use case (REQ-342).  The solution was built
+upon the existing IETF/ACTN E-LINE over OTN NNI feature developed in Frankfurt release.
+
+Guilin Scope and Impacted modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+MDONS Extension implementation for the Guilin release will incorporate the following:
+
+- Support Asynchronous OpenRoadM OTN service activation notification handling
+- Add OOF support for inter domain link/path selection
+- Support Closed Loop sub-use case
+
+Impacted ONAP modules include: OOF, SDN-C, SO and Holmes.
+
+`Wiki link reference <https://wiki.onap.org/display/DW/MDONS+Extension+in+R7>`_
+
+Transport Slicing in Guilin release has implemented the following TN NSSMF functionality:
+
+- Allocate TN NSSI
+- Deallocate TN NSSI
+- Activate TN NSSI
+- Deactivate TN NSSI
+
+The Transport Slicing implementation has made code changes in the following modules:
+
+- AAI (Schema changes only)
+- UUI
+- SO
+- OOF
+- SDN-C
+- CCSDK
+- Modelling
+
+Functional/Integration Test Cases
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+For integration test case and description of MDONS extension, refer to this
+`following wiki-page <https://wiki.onap.org/display/DW/Integration+Test+Cases+-+MDONS+Extension>`_.
+
+For integration test case and description of Transport Slicing:
+
+- `Guilin Test plan <https://wiki.onap.org/display/DW/CCVPN+-+Transport+Slicing+integration+test+plan+for+Guilin+release>`_
+- `Guilin E2E Network Slicing <https://wiki.onap.org/display/DW/E2E+Network+Slicing+Use+Case+in+R7+Guilin>`_
+
+Installation Procedure
+~~~~~~~~~~~~~~~~~~~~~~
+
+For MDONS extension, the integration test environment is established to have ONAP instance with Guilin
+release interfacing to 3rd party transport domain controllers. One controller
+instance manages OpenROADM OTN topology and the other 2 instances manage TAPI
+OTN topology. L0 infrastructure and WDM services are pre-provisioned to support
+L1 topology discovery and OTN service orchestration from ONAP.
+
+For Transport Slicing, the installation procedure is similar to that of the E2E
+Network Slicing use case. In other words, we need to bring up the required modules
+including SDC, SO, A&AI, UUI and OOF. We also need to configure these modules along
+with the mandatory common modules such as DMaaP.
+
+Testing Procedures
+~~~~~~~~~~~~~~~~~~
+
+The testing procedure is described in:
+
+- `Testing procedure for MDONS extension <https://wiki.onap.org/display/DW/Integration+Test+Cases+-+MDONS+Extension>`_
+- `Testing procedure for Transport Slicing <https://wiki.onap.org/display/DW/CCVPN+-+Transport+Slicing+integration+test+plan+for+Guilin+release>`_
+
+Update for Frankfurt release
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In Frankfurt, we introduced two extensions in CCVPN use case. One is E-LINE service over OTN NNI handover, another is the
+multi domain optical service which aims to provide end to end layer 1 service.
+
+E-LINE over OTN NNI
+~~~~~~~~~~~~~~~~~~~
+
+Description
+~~~~~~~~~~~
+
+It is considered a typical scenario for operators to use OTN to interconnect its multiple transport network domains. Hence
+the capabilities of orchestrating end-to-end E-LINE services across the domains over OTN are important for ONAP. When operating
+with multiple domains with multi vendor solutions, it is also important to define and use standard and open
+interfaces, such as the IETF ACTN-based transport `YANG models <https://tools.ietf.org/html/rfc8345>`_, as the southbound interface
+of ONAP, in order to ensure interoperability. The SOTN NNI use-case aims to automate the design, service provision by independent
+operational entities within a service provider network by delivering E-Line over OTN orchestration capabilities into ONAP. SOTN NNI
+extends upon the CCVPN use-case by incorporating support for L1/L2 network management capabilities leveraging open standards & common
+data models.
+
+Frankfurt Scope and Impacted modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The Frankfurt demonstration includes L1(OTN) and L2(ETH) Topology discovery from multiple domain controllers within an operator
+and provide VPN service provision in OTN and ETH network.
+
+The ONAP components involved in this use case are: SDC, A&AI, UUI, SO, SDNC, OOF, MSB.
+
+Functional Test Cases
+~~~~~~~~~~~~~~~~~~~~~
+
+Use-case specific developments have been realized in SO, OOF, AAI, SDNC and UUI ONAP components.
+
+Testing Procedure
+~~~~~~~~~~~~~~~~~
+Design time
+SOTNVPNInfraService service design in SDC and distribute to AAI and SO.
+
+Run Time:
+All operation will be triggered by UUI, including service creation and termination,
+link management and topology network display:
+
+- `E-LINE over OTN Inter Domain Test Cases <https://wiki.onap.org/display/DW/E-LINE+over+OTN+Inter+Domain+Test+Cases>`_
+- `Testing status <https://wiki.onap.org/display/DW/2%3A+Frankfurt+Release+Integration+Testing+Status>`_
+
+MDONS (Multi-Domain Optical Network Services)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Overall Description
+~~~~~~~~~~~~~~~~~~~
+
+The MDONS use-case aims to automate the design, activation & operations resulting
+from an optical transport (L0/L1) service request exchange between service providers and/or independent operational entities within a service provider network by delivering E2E optical orchestration capabilities into ONAP. MDONS extends upon the CCVPN use-case by incorporating support for L0/L1 network management capabilities leveraging open standards & common data models defined by OpenROADM, Transport-API & MEF.
+
+Frankfurt Scope and Impacted modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+MDONS implementation for the Frankfurt release will incorporate the following:
+- Design & modelling of optical services based on MEF L1 subscriber & operator properties
+- E2E optical service workflow definitions for service instantiation & deletion
+- UI portal with L1 service instantiation templates
+- Optical Transport domain management (topology, resource onboarding) through standard models / APIs - OpenROADM, T-API
+Impacted ONAP modules include: A&AI, SDC, SDN-C, SO, UUI
+
+References:
+
+- `OpenROADM reference <https://github.com/OpenROADM/OpenROADM_MSA_Public>`_
+- `ONF Transport-API (TAPI) <https://github.com/OpenNetworkingFoundation/TAPI>`_
+- `MEF <https://wiki.mef.net/display/CESG/MEF+63+-+Subscriber+Layer+1+Service+Attributes>`_
+
+Functional/Integration Test Cases
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+For integration test case and description, refer to this following
+`wiki-page <https://wiki.onap.org/display/DW/MDONS+Integration+Test+Case>`_.
+
+Installation Procedure
+~~~~~~~~~~~~~~~~~~~~~~
+
+The integration test environment is established to have ONAP instance with
+Frankfurt release interfacing to 3rd party transport domain controllers.
+One controller instance manages OpenROADM OTN topology and the other 2 instances
+manage TAPI OTN topology. L0 infrastructure and WDM services are pre-provisioned
+to support L1 topology discovery and OTN service orchestration from ONAP.
+
+Testing Procedure
+~~~~~~~~~~~~~~~~~
+
+Test environment is described in
+`Installation and Test Procedure <https://wiki.onap.org/display/DW/MDONS+Integration+Test+Case>`_.
Update for Dublin release
~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -12,11 +415,14 @@ Update for Dublin release
1. Service model optimization
In Dublin release,the design of CCVPN was optimized by having support of List type of Input in SDC.
-During onboarding and design phase, one end to end service is created using SDC. This service is
-composed of these two kinds of resources:
-• VPN resource
-• Site resource
-You can see the details from here https://wiki.onap.org/display/DW/Details+of+Targeted+Service+Template
+During onboarding and design phase, one end to end service is created using SDC.
+This service is composed of these two kinds of resources:
+
+- VPN resource
+- Site resource
+
+See the `Details of Targeted Service Template wiki page <https://wiki.onap.org/display/DW/Details+of+Targeted+Service+Template>`_
+for details.
2. Closed Loop in bandwidth adjustment
Simulate alarm at the edge site branch and ONAP will execute close-loop automatically and trigger bandwidth to change higher.
@@ -24,53 +430,70 @@ Simulate alarm at the edge site branch and ONAP will execute close-loop automati
3. Site Change
Site can be add or delete according to the requirements
+More information about:
-More information about CCVPN in Dublin release:https://wiki.onap.org/pages/viewpage.action?pageId=45296665
-and the test case in Dublin can be found:https://wiki.onap.org/display/DW/CCVPN+Test+Cases+for+Dublin+Release
-And test status:https://wiki.onap.org/display/DW/CCVPN+Test+Status
+- `CCVPN in Dublin release <https://wiki.onap.org/pages/viewpage.action?pageId=45296665>`_
+- `Dublin test cases <https://wiki.onap.org/display/DW/CCVPN+Test+Cases+for+Dublin+Release>`_
+- `CCVPN Test Status wiki page <https://wiki.onap.org/display/DW/CCVPN+Test+Status>`_
-Note: CCVPN integration testing coversed service design, service creation and closed-loop bandwidth adjustments in Dublin release.
-The service termination and service change will continue to be tested in E release.
-During the integration testing, SDC, SO, SDC master branch are used which include the enhanced features for CCVPN use case.
+.. note::
+ CCVPN integration testing covered service design, service creation and
+ closed-loop bandwidth adjustments in Dublin release.
+ The service termination and service change will continue to be tested in E release.
+ During the integration testing, SDC, SO, SDC master branch are used which
+ includes the enhanced features for CCVPN use case.
-Sevice used for CCVPN
-~~~~~~~~~~~~~~~~~~~~~
+Service used for CCVPN
+~~~~~~~~~~~~~~~~~~~~~~
-- SOTNVPNInfraService, SDWANVPNInfraService and SIteService: https://wiki.onap.org/display/DW/CCVPN+Service+Design
-- WanConnectionService ( Another way to describe CCVPN in a single service form which based on ONF CIM ): https://wiki.onap.org/display/DW/CCVPN+Wan+Connection+Service+Design
+- `SOTNVPNInfraService, SDWANVPNInfraService and SIteService <https://wiki.onap.org/display/DW/CCVPN+Service+Design>`_
+- `WanConnectionService (Another way to describe CCVPN in a single service form which is based on ONF CIM) <https://wiki.onap.org/display/DW/CCVPN+Wan+Connection+Service+Design>`_
Description
~~~~~~~~~~~
-Cross-domain, cross-layer VPN (CCVPN) is one of the use cases of the ONAP Casablanca release. This release demonstrates cross-operator ONAP orchestration and interoperability with third party SDN controllers and enables cross-domain, cross-layer and cross-operator service creation and assurance.
-The demonstration includes two ONAP instances, one deployed by Vodafone and one by China Mobile, both of which orchestrate the respective operator underlay OTN networks and overlay SD-WAN networks and peer to each other for cross-operator VPN service delivery.
+Cross-domain, cross-layer VPN (CCVPN) is one of the use cases of the ONAP
+Casablanca release. This release demonstrates cross-operator ONAP orchestration
+and interoperability with third party SDN controllers and enables cross-domain,
+cross-layer and cross-operator service creation and assurance.
-The CCVPN Use Case Wiki Page can be found here: https://wiki.onap.org/display/DW/CCVPN%28Cross+Domain+and+Cross+Layer+VPN%29+USE+CASE.
+The demonstration includes two ONAP instances, one deployed by Vodafone and one
+by China Mobile, both of which orchestrate the respective operator underlay OTN
+networks and overlay SD-WAN networks and peer to each other for cross-operator
+VPN service delivery.
+
+`CCVPN Use Case Wiki Page <https://wiki.onap.org/display/DW/CCVPN%28Cross+Domain+and+Cross+Layer+VPN%29+USE+CASE>`_
The projects covered by this use case include: SDC, A&AI, UUI, SO, SDNC, OOF, Policy, DCAE(Holmes), External API, MSB
How to Use
~~~~~~~~~~
-Design time
-SOTNVPNInfraService, SDWANVPNInfraService and SIteService service Design steps can be found here: https://wiki.onap.org/display/DW/CCVPN+Service+Design
-WanConnectionService ( Another way to describe CCVPN in a single service form which based on ONF CIM ): https://wiki.onap.org/display/DW/CCVPN+Wan+Connection+Service+Design
+
+Design time:
+
+- `SOTNVPNInfraService, SDWANVPNInfraService and SIteService service Design steps <https://wiki.onap.org/display/DW/CCVPN+Service+Design>`_
+- `WanConnectionService ( Another way to describe CCVPN in a single service form which based on ONF CIM ) <https://wiki.onap.org/display/DW/CCVPN+Wan+Connection+Service+Design>`_
Run Time:
-All opertion will be triggerd by UUI, inlcuding service creation and termination, link management and topology network display.
+- All operations will be triggered by UUI, including service creation and termination,
+ link management and topology network display.
-More details can be fonud here: https://wiki.onap.org/display/DW/CCVPN+Test+Guide
+
+See the `CCVPN Test Guide wiki page <https://wiki.onap.org/display/DW/CCVPN+Test+Guide>`_
+for details.
Test Status and Plans
~~~~~~~~~~~~~~~~~~~~~
-All test case covered by this use case: https://wiki.onap.org/display/DW/CCVPN+Integration+Test+Case
-And the test status can be found: https://wiki.onap.org/display/DW/CCVPN++-Test+Status
+- `All test case covered by this use case <https://wiki.onap.org/display/DW/CCVPN+Integration+Test+Case>`_
+- `Test status <https://wiki.onap.org/display/DW/CCVPN++-Test+Status>`_
Known Issues and Resolutions
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-1) AAI-1923. Link Management, UUI can't delete the link to external onap otn domain.
+
+1) AAI-1923. Link Management, UUI can't delete the link to external onap otn domain.
For the manual steps provided by A&AI team, we should follow the steps as follow
the only way to delete is using the forceDeleteTool shell script in the graphadmin container.
@@ -78,19 +501,19 @@ First we will need to find the vertex id, you should be able to get the id by ma
GET /aai/v14/network/ext-aai-networks/ext-aai-network/createAndDelete/esr-system-info/test-esr-system-info-id-val-0?format=raw
-::
+.. code-block:: JSON
+
+ {
-{
-"results": [
-{
-"id": "20624",
-"node-type": "pserver",
-"url": "/aai/v13/cloud-infrastructure/pservers/pserver/pserverid14503-as988q",
-"properties": {
-}
-}
-]
-}
+ "results": [
+ {
+ "id": "20624",
+ "node-type": "pserver",
+ "url": "/aai/v13/cloud-infrastructure/pservers/pserver/pserverid14503-as988q",
+ "properties": {}
+ }
+ ]
+ }
Same goes for the ext-aai-network:
@@ -103,59 +526,52 @@ Run the following command multiple times for both the esr-system-info and ext-aa
::
-kubectl exec -it $(kubectl get pods -lapp=aai-graphadmin -n onap --template 'range .items.metadata.name"\n"end' | head -1) -n onap gosu aaiadmin /opt/app/aai-graphadmin/scripts/forceDeleteTool.sh -action DELETE_NODE -userId YOUR_ID_ANY_VALUE -vertexId VERTEX_ID
+ kubectl exec -it $(kubectl get pods -lapp=aai-graphadmin -n onap --template 'range .items.metadata.name"\n"end' | head -1) -n onap gosu aaiadmin /opt/app/aai-graphadmin/scripts/forceDeleteTool.sh -action DELETE_NODE -userId YOUR_ID_ANY_VALUE -vertexId VERTEX_ID
From the above, remove the YOUR_ID_ANY_VALUE and VERTEX_ID with your info.
2) SDC-1955. Site service Distribution
To overcome the Service distribution, the SO catalog has to be populated with the model information of the services and resources.
-a) Refering to the Csar that is generated in the SDC designed as per the detailes mentioned in the below link: https://wiki.onap.org/display/DW/CCVPN+Service+Design
+a) Referring to the Csar that is generated in the SDC designed as per the details mentioned in the below link: https://wiki.onap.org/display/DW/CCVPN+Service+Design
b) Download the Csar from SDC thus generated.
c) copy the csar to SO sdc controller pod and bpmn pod
+
+.. code-block:: bash
+
kubectl -n onap get pod|grep so
kubectl -n onap exec -it dev-so-so-sdc-controller-c949f5fbd-qhfbl /bin/sh
-
mkdir null/ASDC
mkdir null/ASDC/1
kubectl -n onap cp service-Sdwanvpninfraservice-csar.csar dev-so-so-bpmn-infra-58796498cf-6pzmz:null/ASDC/1/service-Sdwanvpninfraservice-csar.csar
kubectl -n onap cp service-Sdwanvpninfraservice-csar.csar dev-so-so-bpmn-infra-58796498cf-6pzmz:ASDC/1/service-Sdwanvpninfraservice-csar.csar
-d) populate model information to SO db
- the db script example can be seen in https://wiki.onap.org/display/DW/Manual+steps+for+CCVPN+Integration+Testing
+d) populate model information to SO db: the db script example can be seen in
+ https://wiki.onap.org/display/DW/Manual+steps+for+CCVPN+Integration+Testing
The same would also be applicable for the integration of the client to create the service and get the details.
Currently the testing has been performed using the postman calls to the corresponding APIs.
-3) SDC-1955 & SDC-1958. Site serivce parsing Error
+3) SDC-1955 & SDC-1958. Site service parsing Error
-UUI: stored the csar which created based on beijing release under a fixed directory, If site serive can't parsed by SDC tosca parser, UUI will parse this default csar and get the input parameter
+UUI: stored the csar which was created based on the Beijing release under a fixed directory. If the site service can't be parsed by the SDC tosca parser, UUI will parse this default csar and get the input parameter
a) Make an available csar file for CCVPN use case.
b) Replace uuid of available files with what existing in SDC.
c) Put available csar files in UUI local path (/home/uui).
-4) SO docker branch 1.3.5 has fixes for the issues 1SO-1248.
+4) SO docker branch 1.3.5 has fixes for the issues 1SO-1248
After SDC distribution success, copy all csar files from so-sdc-controller:
- connect to so-sdc-controller( eg: kubectl.exe exec -it -n onap dev-so-so-sdc-controller-77df99bbc9-stqdz /bin/sh )
- find out all csar files ( eg: find / -name '*.csar' )
- the csar files should be in this path: /app/null/ASDC/ ( eg: /app/null/ASDC/1/service-Sotnvpninfraservice-csar.csar )
- exit from the so-sdc-controller ( eg: exit )
- copy all csar files to local derectory ( eg: kubectl.exe cp onap/dev-so-so-sdc-controller-6dfdbff76c-64nf9:/app/null/ASDC/tmp/service-DemoService-csar.csar service-DemoService-csar.csar -c so-sdc-controller )
-
-Copy csar files, which got from so-sdc-controller, to so-bpmn-infra
- connect to so-bpmn-infra ( eg: kubectl.exe -n onap exec -it dev-so-so-bpmn-infra-54db5cd955-h7f5s -c so-bpmn-infra /bin/sh )
- check the /app/ASDC deretory, if doesn't exist, create it ( eg: mkdir /app/ASDC -p )
- exit from the so-bpmn-infra ( eg: exit )
- copy all csar files to so-bpmn-infra ( eg: kubectl.exe cp service-Siteservice-csar.csar onap/dev-so-so-bpmn-infra-54db5cd955-h7f5s:/app/ASDC/1/service-Siteservice-csar.csar )
-
-5) Manual steps in closed loop Scenario:
-Following steps were undertaken for the closed loop testing.
-a. Give controller ip, username and password, trust store and key store file in restconf collector collector.properties
-b. Updated DMAAP ip in cambria.hosts in DmaapConfig.json in restconf collector and run restconf collector
-c. Followed the steps provided in this link(https://wiki.onap.org/display/DW/Holmes+User+Guide+-+Casablanca#HolmesUserGuide-Casablanca-Configurations) to push CCVPN rules to holmes
-d. Followed the steps provided in this link(https://wiki.onap.org/display/DW/ONAP+Policy+Framework%3A+Installation+of+Amsterdam+Controller+and+vCPE+Policy) as reference to push CCVPN policies to policy module and updated sdnc.url, username and password in environment(/opt/app/policy/config/controlloop.properties.environment)
-As per wiki (Policy on OOM), push-policied.sh script is used to install policies. but I observed that CCVPN policy is not added in this script. So merged CCVPN policy using POLICY-1356 JIRA ticket. but policy is pushed by using push-policy_casablanca.sh script during integration test.
-It is found that the changes made were overwritten and hence had to patch the DG manually. This will be tracked by the JIRA SDNC-540.
-
-all above manual steps can be found https://wiki.onap.org/display/DW/Manual+steps+for+CCVPN+Integration+Testing \ No newline at end of file
+
+- connect to so-sdc-controller ( eg: kubectl.exe exec -it -n onap dev-so-so-sdc-controller-77df99bbc9-stqdz /bin/sh )
+- find out all csar files ( eg: find / -name "\*.csar" ), the csar files should
+ be in this path: /app/null/ASDC/ ( eg: /app/null/ASDC/1/service-Sotnvpninfraservice-csar.csar )
+- exit from the so-sdc-controller ( eg: exit )
+- copy all csar files to local directory ( eg: kubectl.exe cp onap/dev-so-so-sdc-controller-6dfdbff76c-64nf9:/app/null/ASDC/tmp/service-DemoService-csar.csar service-DemoService-csar.csar -c so-sdc-controller )
+
+Copy csar files, which got from so-sdc-controller, to so-bpmn-infra:
+
+- connect to so-bpmn-infra ( eg: kubectl.exe -n onap exec -it dev-so-so-bpmn-infra-54db5cd955-h7f5s -c so-bpmn-infra /bin/sh )
+- check the /app/ASDC directory, if doesn't exist, create it ( eg: mkdir /app/ASDC -p )
+- exit from the so-bpmn-infra ( eg: exit )
+- copy all csar files to so-bpmn-infra ( eg: kubectl.exe cp service-Siteservice-csar.csar onap/dev-so-so-bpmn-infra-54db5cd955-h7f5s:/app/ASDC/1/service-Siteservice-csar.csar )
diff --git a/docs/docs_CM_flexible_designer_orchestrator.rst b/docs/docs_CM_flexible_designer_orchestrator.rst
index 3a9dd7bfe..c919ec6f8 100644
--- a/docs/docs_CM_flexible_designer_orchestrator.rst
+++ b/docs/docs_CM_flexible_designer_orchestrator.rst
@@ -3,8 +3,10 @@
.. _docs_CM_flexible_designer_orchestrator:
+:orphan:
+
Dublin Workflow Designer Release Notes
--------------------------------------------------------------
+--------------------------------------
The Workflow Editor was developed in the Beijing release by Amdocs and
is available in SDC for users to create workflows.
@@ -287,4 +289,4 @@ part of the Dublin release. The others were not part of the release but
are available to test with your vNF. Please refer to the Scale out
release notes for further information.
-https://onap.readthedocs.io/en/latest/submodules/integration.git/docs/docs_scaleout.html#docs-scaleout
+https://docs.onap.org/projects/onap-integration/en/frankfurt/docs_scaleout.html
diff --git a/docs/docs_CM_schedule_optimizer.rst b/docs/docs_CM_schedule_optimizer.rst
index 9da2e5337..2ff8cfca1 100644
--- a/docs/docs_CM_schedule_optimizer.rst
+++ b/docs/docs_CM_schedule_optimizer.rst
@@ -1,15 +1,22 @@
.. This work is licensed under a Creative Commons Attribution 4.0
International License. http://creativecommons.org/licenses/by/4.0
-
-.. _docs_CM_schedule_optimizer:
-Change Management Schedule Optimization
--------------------------------------------------------------
+.. _docs_CM_schedule_optimizer:
-Description
+:orphan:
+
+Change Management Schedule Optimization
+---------------------------------------
+
+Description
~~~~~~~~~~~~~~
-The change management schedule optimizer automatically identifies a conflict-free schedule for executing changes across multiple network function instances. It takes into account constraints such as concurrency limits (how many instances can be executed simultaneously), time preferences (e.g., night time maintenance windows with low traffic volumes) and applies optimization techniques to generate schedules.
+The change management schedule optimizer automatically identifies a conflict-free
+schedule for executing changes across multiple network function instances.
+It takes into account constraints such as concurrency limits (how many instances
+can be executed simultaneously), time preferences (e.g., night time maintenance
+windows with low traffic volumes) and applies optimization techniques to
+generate schedules.
-More details can be found here:
-https://onap.readthedocs.io/en/latest/submodules/optf/cmso.git/docs/index.html \ No newline at end of file
+More details can be found here:
+https://docs.onap.org/projects/onap-optf-cmso/en/latest/index.html#master-index
diff --git a/docs/docs_E2E_network_slicing.rst b/docs/docs_E2E_network_slicing.rst
new file mode 100644
index 000000000..3686b2d0c
--- /dev/null
+++ b/docs/docs_E2E_network_slicing.rst
@@ -0,0 +1,638 @@
+.. This file is licensed under the CREATIVE COMMONS ATTRIBUTION 4.0 INTERNATIONAL LICENSE
+.. Full license text at https://creativecommons.org/licenses/by/4.0/legalcode
+
+:orphan:
+
+.. contents::
+ :depth: 3
+..
+.. _docs_E2E_network_slicing:
+
+
+E2E Network Slicing Use Case
+============================
+
+Overall Blueprint
+-----------------
+The objective of this use case is to realize **End-to-End 5G Network
+Slicing** using ONAP. An End-to-End Network Slice consists of RAN (Radio
+Access Network), Transport Network (TN) and Core Network (CN) slice
+sub-nets. This use case intends to demonstrate the modeling,
+orchestration (life cycle and resources) and assurance of a network
+slice which are implemented in alignment with relevant standards. The
+key highlights of this use case include:
+
+- Modular architecture providing building blocks and flexibility under
+ various deployment scenarios
+
+- Functionality aligned with 3GPP and other relevant standards such as
+ ETSI and IETF
+
+- Interfaces and APIs aligned with relevant standards (3GPP, IETF,
+ ETSI, TM Forum, etc.) while enabling easy customization through use
+ of appropriate plug-ins. This would enable easier interoperability of
+ slice management functions realized within ONAP with 3\ :sup:`rd`
+ party slice management functions, as well as northbound and
+ southbound systems.
+
+- Taking a step-by-step approach to realizing different architectural
+ options in an extendable manner.
+
+- Providing flexibility in network slice selection by providing an
+ option of manual intervention, as well as abstracting the network
+ internals as needed.
+
+- The use case implementation team is composed of service providers,
+ software and hardware vendors, solution providers and system
+ integrators thereby taking into consideration different perspectives
+ and requirements.
+
+This use case is a multi-release effort in ONAP with the first steps
+taken in Frankfurt release. It will continue to expand in scope both in
+breadth and depth, and along the journey it shall also align with
+updates to the relevant standards which are also currently evolving.
+This use case shall also collaborate with SDOs such as
+O-RAN and ETSI to enable wider adoption and use.
+
+Architecture Choice
+-------------------
+3GPP(TS 28.801) defines three layer slice management functions which include:
+
+CSMF(Communication Service Management Function):
+
+• Responsible for translating the communication service related requirement to network slice related requirements.
+
+• Communicate with Network Slice Management Function (NSMF).
+
+NSMF(Network Slice Management Function):
+
+• Responsible for management and orchestration of NSI.
+• Derive network slice subnet related requirements from network slice related requirements.
+• Communicate with the Network Slice Subnet Management Function (NSSMF) and Communication Service Management Function.
+
+NSSMF(Network Slice Subnet Management Function):
+
+• Responsible for management and orchestration of NSSI.
+• Communicate with the NSMF.
+
+To realize the three layers of the slice management function, we need to decide whether to implement CSMF, NSMF or NSSMF within ONAP, or use the external CSMF, NSMF or NSSMF. This implies that for ONAP-based network slice management, we have different choices from an architectural perspective:
+
+1) Implement CSMF, NSMF, NSSMF all within ONAP;
+
+2) Connect an external CSMF from the Northbound, Implement NSMF and NSSMF within ONAP;
+
+3) Connect an external CSMF from the Northbound, Implement NSMF within ONAP, Connect a 3rd party NSSMF from the Southbound;
+
+4) Implement CSMF, NSMF within ONAP, Connect a 3rd party NSSMF from the Southbound.
+
+5) Use external CSMF and NSMF, only implement NSSMF within ONAP.
+
+External Interfaces
+-------------------
+The guiding principle is when a Slice Management function is outside ONAP, standard interfaces/APIs (3GPP, IETF, ETSI, TM Forum, etc.) can be supported by default, while any customization of such interfaces shall also be supported by ONAP using suitable plug-ins/adaptors. This would enable easier interoperability of slice management functions realized within ONAP with 3rd party slice management functions, as well as northbound and southbound systems.
+
+Another key point would be that both internal and external interface mechanisms should be supported by the corresponding ONAP modules. To be more specific, communication between Slice Management Functions within ONAP (e.g., CSMF and NSMF) shall use ONAP internal mechanisms such as workflow calls, DMaaP messages, etc. or standard APIs as appropriate. For example, SO acting as NSMF should support API call directly from CSMF in ONAP, as well as API trigger from an external CSMF via EXT-API.
+
+Network Slice Instance (NSI) Life Cycle View
+--------------------------------------------
+3GPP Specification (3GPP TS 28.530) describes management aspects of a Network Slice Instance, which can be described by the four phases:
+
+- Preparation: The preparation phase includes network slice design, network slice capacity planning, on-boarding and evaluation of the network functions, preparing the network environment and other necessary preparations required to be done before the creation of an NSI.
+- Commissioning: NSI provisioning in the commissioning phase includes creation of the NSI. During NSI creation all needed resources are allocated and configured to satisfy the network slice requirements. The creation of an NSI can include creation and/or modification of the NSI constituents.
+- Operation: The Operation phase includes the activation, supervision, performance reporting (e.g. for KPI monitoring), resource capacity planning, modification and de-activation of an NSI.
+- Decommissioning: Network slice instance provisioning in the decommissioning phase includes decommissioning of non-shared constituents if required and removing the NSI specific configuration from the shared constituents. After the decommissioning phase, the NSI is terminated and does not exist anymore.
+
+The ONAP-based NSI lifecycle management will finally provide the demonstration of all these phases.
+
+Abbreviations
+-------------
+
++---------------+--------------------------------------------+
+| Abbreviation | Meaning |
++===============+============================================+
+| CSMF | Communication Service Management Function |
++---------------+--------------------------------------------+
+| CSI | Communication Service Instance |
++---------------+--------------------------------------------+
+| CST | Communication Service Template |
++---------------+--------------------------------------------+
+| NSI | Network Slice Instance |
++---------------+--------------------------------------------+
+| NSMF | Network Slice Management Function |
++---------------+--------------------------------------------+
+| NSSI | Network Slice Sub-net Instance |
++---------------+--------------------------------------------+
+| NSSMF | Network Slice Sub-net Management Function |
++---------------+--------------------------------------------+
+| NST | Network Slice Template |
++---------------+--------------------------------------------+
+| NSST | Network Slice Sub-net Template |
++---------------+--------------------------------------------+
+
+
+Recap of Frankfurt functionality
+--------------------------------
+In Frankfurt release, CSMF and NSMF within ONAP were implemented, while connecting to an external Core NSSMF.
+From the NSI Life Cycle perspective, the scope for Frankfurt included NSI design and pre-provision, NSI instantiation
+and configuration, and NSI activation and deactivation. In particular:
+
+- CSMF: Functions of slice service creation, slice service activation and deactivation were implemented.
+
+- NSMF: Functions of NSI instantiation, NSI activation and deactivation were implemented. In addition, manual
+ intervention is also provided in NSMF slice task management portal to ensure the selected NSI/NSSI as well as
+ Service Profile and Slice Profile are OK or need adjustment.
+
+- Design of CST, NST and onboarding NSST that are required to support the run-time orchestration functions
+
+- To connect to the external (core) NSSMF, an adaptor was implemented to provide interface between ONAP and 3rd party
+ core NSSMF.
+
+To support the above functions, code impacts in U-UI, SO, OOF and ExtAPI components, and schema change in A&AI
+were implemented. See the `Proposed Functions for R6 and Impacted Modules wiki page <https://wiki.onap.org/display/DW/Proposed+Functions+for+R6+and+Impacted+Modules>`_ for details.
+
+As part of Frankfurt release work, we supported the minimum-scope installation of ONAP to reduce the resource requirements.
+From the module perspective, 5G E2E Slicing use case involves SDC, SO, A&AI, UUI, EXT-API, OOF and Policy modules of ONAP.
+So we will configure these required modules along with the mandatory common modules such as DMaaP. Further, for each module,
+the use case also does not use all of the charts, so we removed the unneeded charts under those modules to optimize the
+resources required for setting up the use case. This approach will help to install a minimum-scope version ONAP for the
+E2E Slicing use case.
+
+Further details of the installation steps are available at: `Install Minimum Scope ONAP for 5G Network Slicing wiki page
+<https://wiki.onap.org/display/DW/Install+Minimum+Scope+ONAP+for+5G+Network+Slicing>`_
+
+Recap of Guilin functionality
+-----------------------------
+From the architecture point of view, in Guilin release, besides the continuation of NSMF which was implemented in
+Frankfurt release, the RAN NSSMF, TN NSSMF, CORE NSSMF have been implemented within ONAP, apart from interacting with
+external RAN NSSMF and external CORE NSSMF.
+
+The following provides an overview of the enhancements done in Guilin release:
+
+- **Enhancements in NSMF**: Service Profile decomposition into Slice Profiles for 3 domains, NSI selection enhancement,
+ E2E slice instance creation including RAN, TN and CN slice sub-net instance creation/reuse, activation/deactivation
+ of E2E slice, and deciding whether to terminate E2E slice or not.
+
+- **RAN NSSMF, TN NSSMF, CN NSSMF within ONAP**: Basic logic for all 3 NSSMFs to support NSSI allocation, activation,
+ deactivation, deletion and modification (in case of reuse of NSSI).
+
+- **Enable NSMF interaction with RAN NSSMF, TN NSSMF, CN NSSMF**: Implement generic NSSMF adaptor for three domain NSSMFs,
+ alignment with standard interfaces (3GPP, IETF), enable the connection to external RAN NSSMF.
+
+- **Design of RAN NSST, TN NSST, CN NSST and Slice Profiles, TN information models**: Basic E2E Slicing model was provided
+ all the related templates designed from SDC, TN related information models.
+
+- **TMF 641 support**: Extension of the TMF 641 based interface from NB of ExtAPI to support service activation,
+ deactivation and termination.
+
+- **RAN and CN NFs set up and initial configurations**: CN NF simulators were developed: AMF, SMF, UPF and configure the
+ S-NSSAI on CN NFs; RAN NF Simulator was enhanced for PM data reporting, CU and Near-RT RIC configuration.
+
+- **KPI monitoring**: Implementation to request details of a KPI via UUI to ONAP DCAE. Providing the requested data to UUI
+ by DCAE using a new microservice (Data Exposure Service - DES). Enhancements in PM-Mapper to do KPI computation is
+ in progress, and will be completed in Honolulu release.
+
+- **Closed Loop**: First steps to realizing a simple Closed Loop in the RAN using PM data collected from the RAN was
+ implemented - update the allowed throughput for a S-NSSAI per Near-RT RIC coverage area based on DL/UL PRB for data
+ traffic that was reported from the RAN. The analysis of the PM data was done using a new Slice Analysis MS in DCAE,
+ and the Policy-based Control Loop framework was applied to trigger the updates in the RAN.
+
+- **Intelligent Slicing**: First steps to realizing a simple ML-based Closed Loop action in the RAN using PM data collected
+ from the RAN was implemented - update the maxNumberofConns for a S-NSSAI in each cell based on PDU session related
+ PM data that was reported from the RAN (PDU sessions requested, successfully setup and failed to be set up). The
+ training was done offline, and the ML model is onboarded as a micro-service to ONAP for demo purpose alone (it is
+ not part of ONAP code/repos). The ML model provides updates to the Slice Analysis MS, which then uses the
+ Policy-based Control Loop framework to trigger the updates in the RAN.
+
+- **Modeling enhancements**: Necessary modeling enhancements to support all the above functionalities.
+
+The base use case page for Guilin release is `E2E Network Slicing Use Case in R7 Guilin <https://wiki.onap.org/display/DW/E2E+Network+Slicing+Use+Case+in+R7+Guilin>`_.
+
+The child wiki pages of the above page contains details of the assumptions, flows and other relevant details.
+
+Honolulu release updates
+------------------------
+In Honolulu release, the following aspects were realized:
+
+- **Modeling Enhancements** were made, details can be found at:
+ `Modeling enhancements in Honolulu <https://wiki.onap.org/display/DW/Modeling+enhancements+in+Honolulu>`_.
+
+- **Functional Enhancements**
+
+ (a) Minor enhancements in NSMF and NSSMFs including NST Selection, Shared slices, coverageArea to
+ coverageAreaTAList mapping, etc.
+ (b) Enhancements related to endpoints for stitching together an end-to-end network slice
+ (c) Use of CPS (instead of Config DB) to determine the list of Tracking Areas corresponding to a given
+ Coverage Area (input by user). For the remaining RAN configuration data, we continue to use Config DB.
+ (d) RRM Policy update by SDN-R to RAN NFs during RAN NSSI creation/reuse
+
+- **Integration Testing**
+ Continuing with integration tests deferred in Guilin release, and associated bug-fixing
+
+Important Remarks
+~~~~~~~~~~~~~~~~~~~
+(a) 2 deployment scenarios for RAN NSSI are supported. In the first scenario, the RAN NSSI comprises also of
+ TN Fronthaul (FH) and TN Midhaul (FH) NSSIs, and RAN NSSMF shall trigger TN NSSMF for TN FH and MH NSSI
+ related actions. In the second scenario, the RAN NSSI comprises only of RAN NFs. TN NSSMF shall be triggered by
+ NSMF for TN FH and MH NSSI related actions. This part is not yet implemented in NSMF within ONAP.
+
+(b) Details of the modeling aspects, flows and other relevant info about the use case are available in:
+ `R8 E2E Network Slicing Use Case <https://wiki.onap.org/display/DW/R8+E2E+Network+Slicing+use+case>`_ and its child wiki pages.
+
+
+Impacted Modules for Honolulu
+-----------------------------
+The code-impacted modules of E2E Network Slicing in Honolulu release are:
+
+- **UUI**: The enhancements done include:
+
+    (a) The coverageArea number param is added in CSMF creation UI. Users could input
+ the grid numbers to specify the area where they want the slicing service to cover.
+ (b) The relation link image of AN/TN/CN has been added. Users can see the links and related params
+ of the three domains.
+ (c) The TN’s connection link with AN/CN has been added in NS Task management GUI.
+
+- **AAI**: Schema changes were introduced. We added some new parameters in 2 nodes:
+
+ (a) ‘Connectivity’ is used to store IETF/ACTN ETH service parameters. New attributes added in order
+ to support the CCVPN network configuration operations on multi-domain (2+) interconnections.
+    (b) ‘Vpn-binding’ is used to store ACTN OTN Tunnel model’s parameters.
+
+- **OOF**: Updates include:
+
+ (a) NST selection is enhanced by fetching the templates from SDC directly.
+ (b) coverageArea to coverageAreaTAList mapping is done by OOF (as part of Slice Profile generation)
+ by accessing CPS.
+ (c) Bug-fixes
+
+- **SO**: Main updates include support of NSI shared scenarios by enhancing the interaction with OOF, AAI and
+ UUI. Apart from this some updates/fixes have been made in NSMF, RAN/Core/TN NSSMF functionality in SO, for
+ example:
+
+ (a) *NSMF*: Update NSI selection process support shared NSI and add sst parameter
+ (b) *AN NSSMF*: Activation flow for SDN-R interactions, allocate flow & added timeDelay in QueryJobStatus,
+ support of Option 1 for topmost RAN NSSI
+ (c) *CN NSSMF*: Non-shared allocate flow
+ (d) *TN NSSMF*: Modify TN NSSI operation
+
+- **CPS**: 2 APIs required for the use case are supported. The remaining yang models are also onboarded,
+ however, the API work as well as further enhancements to CPS Core, NF Proxy and Template-Based Data
+ Model Transformer Service shall continue beyond Honolulu.
+
+- **SDN-R**: RRMP Policy updates, enhancements for updating the RAN configuration during slice reuse,
+ closed loop and intelligent slicing.
+
+- **DCAE**:
+
+ (a) *KPI Computation MS*: This MS was introduced newly for computation of slice related KPIs. In this release,
+ it supports basic KPI computation based on formula specified via Policy. Further details about this MS is
+ available at `KPI Computation MS <https://wiki.onap.org/display/DW/DCAE+R8+KPI-Computation+ms>`_
+ (b) *Slice Analysis MS*: Minor updates were done.
+
+Apart from the above, Policy and SDC had test-only impact for this use case.
+
+In addition:
+
+- **Config DB** was updated to handle bugs and gaps found during testing. This is not an official ONAP component, and
+ its functionality is expected to be performed fully by the Configuration Persistence Service (CPS) in future ONAP
+ release (beyond Honolulu).
+
+- **Core NF simulator** and *ACTN simulator* were also updated and checked into ONAP simulator repo.
+
+- **RAN-Sim** has been updated to fix bugs found during testing, and also checked into ONAP simulator repo.
+
+
+Functional Test Cases
+---------------------
+The functional testing of this use case shall cover CSMF/NSMF, the 3 NSSMFs and Closed Loop functionality. We classify the
+test cases into 5 tracks: CSMF/NSMF, RAN NSSMF, Core NSSMF, TN NSSMF and Closed Loop.
+Details of the test cases can be found at:
+`Integration Test details for Honolulu <https://wiki.onap.org/display/DW/Integration+Test+details+for+Honolulu>`_ and its child wiki pages.
+
+
+Operation Guidance
+------------------
+The Honolulu release setup details for the E2E Network Slicing use case will be available at the following page and its
+sub-pages:
+`User Operation Guide for Honolulu release <https://wiki.onap.org/display/DW/User+Operation+Guide+for+Honolulu+release>`_
+
+
+Known Issues and Resolutions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Details of manual configurations, work-arounds and known issues will be documented in the child wiki pages of:
+`User Operation Guide for Honolulu release <https://wiki.onap.org/display/DW/User+Operation+Guide+for+Honolulu+release>`_
+
+The foll. integration tests are carried over to Istanbul release: see `REQ-721 <https://jira.onap.org/browse/REQ-721>`_
+- NSMF: Option 2 testing, remaining regression testing and service termination testing for NSMF
+- RAN NSSMF: RAN NSSI termination, interactions with TN NSSMF for FH/BH NSSI reuse and some minor aspects related to SDN-R <-> RAN interaction
+- TN NSSMF: Checking some minor aspects in SO for modifying TN NSSI.
+- Core NSSMF: Modifying and deallocating a Core NSSI, reusing an existing Core NSSI
+- KPI Computation, Closed Loop & Intelligent Slicing: Some minor aspects on SDN-R <-> RAN-Sim interface needs to be addressed.
+
+Further details of these test cases can be found in REQ jiras for integration testing for Honolulu, and in the
+use case wiki. This means that the functionality associated with these test cases may require updated versions
+of the relevant components - the User Operation Guide will also be updated with details of any bug fixes
+beyond Honolulu as the testing is anyhow continuing as part of Istanbul release.
+
+Istanbul release updates
+------------------------
+Below aspects are covered in Istanbul release:
+
+1. **CPS-TBDMT Enhancements** - This service shall be used to map the erstwhile Config-DB-like REST APIs to appropriate CPS API calls. The purpose of this service is to abstract the details of (possibly multiple, and complex) XPath queries from the users of CPS. It enables CPS-users to continue using simple REST API calls that are intuitive and easy-to-understand and implement. The mapping to appropriate queries to CPS (including mapping of one API call to many Xpath queries) shall be done in a generic way by the CPS-TBDMT service. In Istanbul release, following are the main enhancements done:
+
+ - Support edit query ie. post, put and patch requests to CPS
+
+ - Support Output Transformation
+
+ (a) Extract desired output from the data returned from CPS.
+ (b) If 'transformParam' is not defined in the template no transformation takes place.
+ - Support Multiple query
+
+ (a) Make multiple queries to CPS in single request.
+ (b) If 'multipleQueryTemplateId' is mentioned in the template, it will execute this template first and insert the result to the current template to make multiple queries to CPS.
+ - Support Delete data requests to CPS
+
+ (a) Process delete request type.
+ - Support for dynamic anchor - Accept anchors at run time and execute query
+
+2. **CPS Integration**
+
+ - Config DB is replaced with the CPS component to read, write, update and delete the RAN Slice details. CPS APIs are accessed via CPS-TBDMT component. CPS integration with DCAE - Slice Analysis MS and OOF are completed. SDN-R integration with CPS is completed for the shared RAN Slice flow, activateRANslice and terminateRANSlice implementations are in progress.
+ - A new SDN-C karaf feature is introduced to register the cm-handle (anchor) with CPS. The integration with CPS-DMI plugin will be done in Jakarta release.
+
+3. **NSMF based TN Slices** - Support for interacting with TN NSSMF directly from NSMF for front haul and mid haul slice subnets. There will be separate SDC template for this scenario. NST will have 5 NSSTs - CN NSST, AN NSST, TN FH NSST, TN MH NSST, TN BH NSST.
+
+4. **KPI Monitoring** - Implementation is done in KPI Computation MS to configure the required KPIs and the KPI computation formula based on policies.
+
+5. **Closed Loop** - Closed Loop updates are sent over A1 interface to Near-RT RIC. This is done at the POC level. This will be further enhanced in Jakarta release to make use of the A1-Policy Management Service in CCSDK.
+
+6. **Intelligent Slicing** - End to end intelligent slicing - closed loop flow is tested with the initial version of Machine Learning MS.
+
+7. **Carry-over Testing from Honolulu Release**
+
+ - RAN NSSMF Testing
+
+ (a) Testing completed for the allocation, modification, activation and deactivation of the RAN slice to support option1
+ (b) Integration Testing of AN NSSMF with SDNR interactions for allocate and modify flow is completed
+ - E2E Testing
+
+ (a) Service instantiation for non-shared and shared scenario and fixes to support option 1 are done
+ (b) NSI selection process support for shared NSI is tested
+
+Impacted Modules for Istanbul Release
+-------------------------------------
+- **SO**
+ (a) Support of NSI termination by enhancing the interaction with OOF, AAI and UUI
+ (b) RAN NSSI Termination support with OOF & SDN-R interactions
+ (c) Bug fixes in Option 1 (CSMF, NSMF and NSSMFs are within ONAP & TN-FH, TN-MH are created by RAN NSSMF)
+ - **CSMF**: Fixed sNSSAI format and updated authentication for NSMF invocation
+ - **NSMF**: Fixes in NSI termination issues to support OOF interaction for NSI termination query and added subnet Type support for respective TN Domain
+ - **AN NSSMF**: Fixes for different termination scenarios in Option 1
+ - **CN NSSMF**: Bug fixes in shared allocate flow, modify flow and terminate flow
+ - Slice Profile alignment with NSSMF
+ (d) NSMF based TN Slices (TN-FH, TN-MH are created by NSMF) - Work flow changes to support this approach
+
+- **OOF**
+ (a) Integration with CPS for coverage area to coverage area TA list
+ (b) Bug fixes in NxI termination
+
+- **DCAE**
+ (a) Minor changes in Slice Analysis MS to support CPS integration
+  (b) KPI Computation MS is enhanced to support policy based KPIs and formula
+
+- **SDN-R**
+ (a) Bug fixes in instantiateRANSliceAllocate, instantiateRANSliceAllocateModify, activateRANSlice, terminateRANSlice Directed Graphs
+ (b) CPS integration for the instantiateRANSliceAllocateModify, activateRANSlice, terminateRANSlice Directed Graphs
+ (c) A new karaf feature is introduced to register the cm-handle with CPS
+
+- **CPS-TBDMT**
+ (a) This component is enhanced to support different type of queries based on templates
+
+- **CPS**
+ (a) Bug fixes and support for GET, POST, PATCH and DELETE type of queries.
+
+Istanbul Release - Functional Test cases
+----------------------------------------
+**Honolulu release carry-over test cases**
+ (a) Different possible scenarios of E2E Slice (eMBB) creation are tested in I-release
+ (b) RAN slice Termination testing completed
+ (c) Test cases to validate slice reuse and terminate using Option 2 (Core NSSMF and RAN NSSMF external) are completed
+
+**R9 Integration Testing**
+ (a) RAN NSSMF integration with CPS is covered for RANSlice modification, activation, deactivation and termination
+ (b) NSMF driven TN-FH and TN-MH slices creation is tested
+ (c) CPS impacts in closed loop scenario is validated and few test cases are deferred to Jakarta release
+
+ Integration test plan is available at `Integration Testing in Istanbul Release <https://wiki.onap.org/display/DW/R9+Integration+Test+for+E2E+Network+Slicing>`_
+
+Istanbul Release - Operation Guidance
+-------------------------------------
+The steps for E2E network slicing use case will be available at `User Operation Guidance - Istanbul Release <https://wiki.onap.org/pages/viewpage.action?pageId=111118867>`_. It is an update to the user manual created in Honolulu release.
+
+Istanbul Release - Known issues and Solutions
+---------------------------------------------
+
+**REGISTER 3RD PARTY CONTROLLERS**
+
+The ONAP TSC approved on July 9th, 2020 to change the status of ESR GUI Module
+to an 'unmaintained' project. Further information about 'Unmaintained Projects'
+can be found in the `ONAP Developer Wiki. <https://wiki.onap.org/x/Pw_LBQ>`__
+
+But excluding the ESR GUI module from ONAP does not mean that the "external
+system registration" mechanism is excluded; i.e. only the GUI is not available
+anymore.
+
+Nevertheless, in order to register the 3rd party controllers (like it is done
+in E2E network slicing use case and recently in Cloud Leased Line "CLL" use
+case as part of Intent-Based Networking), AAI's API are invoked manually.
+
+To do so, please send the following CURL command (PUT) to your AAI, with the
+attached xml payload. In the payload, please adjust the controller name (in
+this case sdnc1) and the controller ip address accordingly based on your
+environment:
+
+CURL COMMAND:
+
+.. code-block:: bash
+
+ curl -k -X PUT https://{{your-onap-ip-address}}:30233/aai/v16/external-system/esr-thirdparty-sdnc-list/esr-thirdparty-sdnc/sdnc1 -u "AAI:AAI" -H "X-FromAppId:postman" -H "Content-Type:application/xml" -H "Accept: application/xml" -H "X-TransactionId:9999" -d @/home/onap/esr-registration-controller-1.xml
+
+
+PAYLOAD (esr-registration-controller-1.xml):
+
+.. code-block:: xml
+
+ <?xml version="1.0" encoding="UTF-8"?>
+ <esr-thirdparty-sdnc xmlns="http://org.onap.aai.inventory/v16">
+ <thirdparty-sdnc-id>sdnc1</thirdparty-sdnc-id>
+ <location>Core</location>
+ <product-name>TSDN</product-name>
+ <esr-system-info-list>
+ <esr-system-info>
+ <esr-system-info-id>sdnc1</esr-system-info-id>
+ <system-name>sdnc1</system-name>
+ <type>WAN</type>
+ <vendor>Huawei</vendor>
+ <version>V3R1</version>
+ <service-url>http://192.168.198.10:18181</service-url>
+ <user-name>onos</user-name>
+ <password>rocks</password>
+ <system-type>nce-t-controller</system-type>
+ <protocol>RESTCONF</protocol>
+ <ssl-cacert>example-ssl-cacert-val-20589</ssl-cacert>
+ <ssl-insecure>true</ssl-insecure>
+ <ip-address>192.168.198.10</ip-address>
+ <port>26335</port>
+ <cloud-domain>example-cloud-domain-val-76077</cloud-domain>
+ <default-tenant>example-default-tenant-val-71148</default-tenant>
+ <passive>true</passive>
+ <remote-path>example-remotepath-val-5833</remote-path>
+ <system-status>example-system-status-val-23435</system-status>
+ </esr-system-info>
+ </esr-system-info-list>
+ </esr-thirdparty-sdnc>
+
+
+Additional issues occurred during the deployment and integration testing will be
+listed in the ONAP Developer Wiki at `Network Slicing - Issues and Solutions <https://wiki.onap.org/display/DW/Network+Slicing+-+Issues+and+Solutions>`_
+
+Jakarta Release Updates
+-----------------------
+In Jakarta release, the following aspects are covered:
+
+1. **E2E Network Slicing Solution**
+ - Slice selection based on resource occupancy level. With this enhancement, NSMF/NSSMF is able to monitor and update resource levels at NSI/NSSI level. OOF returns the solution for NSI/NSSI selection based on the criteria. In case of shared scenario, NSI/NSSI can be shareable only if sufficient resources are available in the network. RAN NSSMF’s resource occupancy is considered for this release. Resource occupancy of Core and TN NSSMFs will be considered in future releases.
+2. **RAN Slicing**
+ - Optimization of cm-handle registration with CPS-DMI Plugin for RAN NF instances to upload yang model.
+ - CPS integration with SDN-R for RAN slice allocation and reconfiguration scenarios
+ - CPS integration stabilization for RAN slice activate/deactivate and terminate scenarios. Validation and bug fix for CPS integration of RAN slice lifecycle.
+3. **Transport Slicing**
+ - OOF involvement in TN slice reuse and terminate scenarios
+ - Implementation of the call to OOF for allocateNSSI to enable TN NSSI reuse in TN NSSMF
+ - Implementation of the call to OOF for terminateNxi API to deallocate TN NSSI (which may not be terminated even when NSI is terminated) in TN NSSMF
+ - Closed-loop enhancement in CCVPN to support Transport Slicing’s closed-loop (Covered in CCVPN use case).
+4. **Closed Loop**
+ - IBN based Closed loop for Network Slicing. This enhancement makes use of intents and Machine Learning models for closed loop. ML prediction microservice enhancement is done as a POC work in Jakarta release.
+ - CPS integration stabilization, which validates and enhances CPS integration for closed loop.
+5. **Carryover tests from Istanbul release**
+ - Option-1 (internal NSMF, NSMF and NSSMF)
+ - Pending test cases for E2E Slice termination
+ - Bug fixes and testing for Core slicing
+ - NF instantiation issue with same NSST
+ - Multiple non-share Core slice creation issue
+
+Impacted Modules for Jakarta Release
+------------------------------------
+- **SO**: Requirements below are identified for Jakarta release and have impacts in SO component:
+ (1) Use of Optimization solution (OOF) in allocateNSSI, deallocateNSSI in TN NSSMF
+ (2) Bug fixes/enhancements of carryover test cases from Istanbul release
+
+- **OOF**: OOF component has an impact for the requirement below:
+ (1) NSI/NSSI Selection enhancements based on resource occupancy levels
+
+- **DCAE**: The requirements below are identified for Jakarta release and have impacts in DCAE component:
+ (1) Slice selection taking into consideration of resource occupancy levels
+ (2) CPS integration in closed loop – This was done in I-release. Expecting minor enhancements in Slice Analysis MS once after the other components impacts w.r.t CPS integration and E2E testing are completed.
+ (3) IBN based Closed loop for Network Slicing - This will have impact in E2E Slicing closed loop and TN Slicing closed loop.
+
+- **CCSDK**: The requirements below are identified for network slicing use case in Jakarta release and have impacts in CCSDK component. Most of these requirements fall under the category of CPS integration.
+ (1) Optimizing cm-handle registration with CPS-DMI Plugin to upload yang model
+ (2) CPS Integration with SDN-R for RAN Slice allocate and reconfigure scenarios
+ (3) CPS Integration Stabilization - RAN Slice activate/deactivate and terminate scenarios
+
+Jakarta Release - Functional Test cases
+---------------------------------------
+The functional testing of this use case covers CSMF/NSMF, RAN/CN/TN NSSMFs and Closed Loop functionality. Test cases are classified into 5 tracks: E2E network slicing, RAN NSSMF, TN NSSMF, Closed Loop and carryover testing. Details of the test cases can be found at: `E2E Network Slicing Tests for Jakarta Release <https://wiki.onap.org/display/DW/E2E+Network+Slicing+Integration+Tests+for+Jakarta+Release>`_ and its child wiki pages.
+
+Jakarta Release - Operation Guidance
+------------------------------------
+The setup and operation details for E2E network slicing use case are available at `User Operation Guidance - Jakarta Release <https://wiki.onap.org/display/DW/User+Operation+Guidance+-+Jakarta+Release>`_.
+
+Jakarta Release - Automate Network Slicing Option2 preparation step
+-------------------------------------------------------------------
+
+Automation of the Network Slicing Option2 use case is under development. At this moment automation of the preparation step is completed, with the source code under `SMO package repo <https://github.com/sebdet/oran-deployment>`_. The detailed introduction of the framework can be found at `SMO package introduction <https://wiki.o-ran-sc.org/display/IAT/Automated+deployment+and+testing+-+using+SMO+package+and+ONAP+Python+SDK>`_.
+
+The preparation scripts are python scripts, based on the ONAP pythonsdk framework. More libraries are added under SMO package in order to run the preparation scripts.
+
+The preparation scripts are located in the folder **test/pythonsdk/src/orantests/network_slicing**. Before running the script, please open **settings.py** under the folder **test/pythonsdk/src/orantests/configuration**. Make sure the URL settings for all the components are correct.
+
+If the settings are good, go to folder **test/pythonsdk/src/orantests/network-slicing** and run the following command to trigger the preparation script:
+
+
+.. code-block:: bash
+
+ cd ./test/pythonsdk/src/orantests/network-slicing
+ tox -e ns-tests
+
+The command will trigger the main script **test_network_slicing.py**, which in turn triggers the preparation script of each component.
+
+The whole preparation process will configure the components and also verifies a bit whether the configuration was done successfully at the end of each step.
+
+The whole process may take about 1 hour to complete. You can monitor the progress using the log file **pythonsdk.debug.log** located in the folder **network_slicing/preparation**.
+
+If everything goes fine, you will see similar logs as shown below in the end.
+
+.. image:: files/ns_automation/ns_automation_suc.png
+
+If things go wrong, please read the logs to identify which part has gone wrong and try to fix that step manually.
+
+Then you can update the **test_network_slicing.py**, disable steps that are already complete, and replay the tox command to complete the rest of the configuration.
+
+
+Please note, when checking **test_network_slicing.py** in details, you will find some of the preparation steps might require extra input parameters, such as **cst_id**, **cst_invariant_id** and **sp_id**. These values could be found in both logs and SDC UI.
+
+.. image:: files/ns_automation/ns_automation_test_class.png
+
+In case it failed in the middle of the SDC template creation, please update the **sdc_template_suffix** variable inside the **test_network_slicing.py** and then rerun the script with tox command.
+
+Since SDC supports neither creating a template with the same name nor deleting existing templates, you have to add a suffix to the original name to create a template with a new name.
+
+.. image:: files/ns_automation/ns_automation_sdc_suffix.png
+
+
+Jakarta Release - Known issues and Solutions
+--------------------------------------------
+Details of up to date manual configurations, known issues, solutions and work-arounds can be found in the following wiki page: `Jakarta Release - Issues and Solutions <https://wiki.onap.org/display/DW/Jakarta+Release+-+Issues+and+Solutions>`_.
+
+Kohn Release Updates
+-----------------------
+In Kohn release, the following enhancements are implemented:
+
+- IBN driven E2E Network Slicing support including enhancement to Slice Analysis MS to listen to real-time user intents posted by AAI using DCAE SDK dmaap-client lib and report slice KPI to UUI.
+- KPI computation enhancements including new KPI added and new UUI display design, KPI spanning multiple resources, error handling for missing counters.
+- DCAE R11 global requirements contribution `(See the wikipage here) <https://wiki.onap.org/display/DW/R11+Global+requirements+Contribution>`_.
+
+Kohn release also fixed a few critical bugs in Jakarta release.
+
+Impacted Modules for Kohn Release
+------------------------------------
+- **DCAE**: Requirements below for Kohn release have impacts on DCAE component:
+ (1) Enhancement to Slice Analysis MS
+ (2) KPI computation enhancements
+ (3) DCAE R11 global requirements and bug fixes
+
+- **UUI**: Requirements below for Kohn release have impacts on UUI component
+ (1) Slicing KPI monitoring and display for IBN driven network slicing
+
+Kohn Release Functional Test Cases
+---------------------------------------
+Details of the test cases can be found at: `E2E Network Slicing Tests for Kohn Release <https://wiki.onap.org/display/DW/E2E+Network+Slicing+Integration+Tests+for+Kohn+Release>`_ and its child wiki pages.
+
+London Release Updates
+-----------------------
+The following enhancements are added to the London release:
+
+- Support of 3GPP 28.532 based APIs for network slicing
+
+Impacted Modules for London Release
+------------------------------------
+- **SO**: Requirements below for London release have impacts on SO component:
+ (1) Enhancement to SO macro flow to support 3GPP 28.532 based APIs for network slicing
+ (2) NSST selection APIs for integration with OOF
+
+- **OOF**: Requirements below for London release have impacts on OOF component:
+ (1) NSST selection APIs for integration with SO
+
+London Release Test Cases
+--------------------------
+Details of the test cases can be found at: `E2E Network Slicing Tests for London Release <https://wiki.onap.org/display/DW/E2E+Network+Slicing+Integration+Testing+for+London+Release>`_.
diff --git a/docs/docs_NFV_Testing_Automatic_Platform_Requirements_User_Guide.rst b/docs/docs_NFV_Testing_Automatic_Platform_Requirements_User_Guide.rst
new file mode 100644
index 000000000..65d0e41a3
--- /dev/null
+++ b/docs/docs_NFV_Testing_Automatic_Platform_Requirements_User_Guide.rst
@@ -0,0 +1,130 @@
+.. nfv_testing_automation_platform_requirements:
+
+:orphan:
+
+=======================================================
+NFV Testing Automatic Platform Requirements- User Guide
+=======================================================
+
+.. Overview: this page used to explain how to use NFV testing automatic platform,
+ the relevant requirements include REQ-335(Support for Test Topology
+ Auto Design), REQ-336(Support for Test Environment Auto Deploy),
+ REQ-337(Support for Test Task Auto Execution), REQ-338(Support for
+ Test Result Auto Analysis & Certification).
+
+Description
+===========
+
+There are a large number of cross-department and cross-organization communications
+during the traditional network element, system or equipment network access test.
+And the manual errors are inevitable, the knowledge in test field cannot be
+solidified. The cost of each test is high and the test cycle is always long.
+After introducing NFV, because network element software and hardware equipment are
+layered decoupled, the introduction of a large number of open source components as
+well as the frequent upgrade of the software itself, make network access test
+become more complicated and frequent.
+
+Testing has become a bottleneck during the introduction and iteration of new
+technologies. Therefore, it is urgent to introduce automated test tools.
+By introducing testing automatic capabilities including topology auto design,
+test environment auto deploy, test task auto execution and test result auto
+analysis & certification, it can solidify domain knowledge, and help reduce labor
+costs, shorten test cycle, improve test efficiency, and optimize test accuracy.
+
+Requirement Details
+===================
+
+Test Topology Auto Design (enhancement in SDC)
+----------------------------------------------
+
+1.Quickly design a test service (topology) composed with tested VNF and test
+ environment (One way is to define abstract testing service (topology) template
+ for each type of VNF);
+
+2.For the service designed, can be imported into SDC for modification or enhancement,
+ or the test template can be reused for different test environments (the SDC needs
+ to support service import).
+
+Test Environment Auto Deploy (enhancement in VF-C)
+--------------------------------------------------
+
+By getting VM/VL/Port/VNF/NS instance information from Openstack via Multi-cloud
+to VF-C for instance information storage, enable VTP obtaining all the real-time
+instance information.
+
+Test Task Auto Execution(enhancement in VNFSDK, CLI)
+----------------------------------------------------
+1. Test instruments integration:
+
+* Test Case execution;
+* Test Case discovering and auto registration;
+* Robot profile integration
+
+2. VTP capability expansion:
+
+* Loading different test scripts and cases - Scenario Active Management;
+* Flexible test process definition(Middle);
+* Test report customization
+* Profile HTTP API support
+
+3. Execution-Standard / Open source test case support
+
+* Enable ETSI NFV APIs conformance test cases in VTP;
+* Enable CNCF CNF conformance test case in VTP.
+
+4. Test Result Auto Analysis & Certification
+
+* The test objects that passed test certification are put into marketplace
+* OVP integrates with VTP to automatically receive VTP test results:
+
+ * Enable OVP with HTTP API for submit the result
+ * Enable VTP for result submission into OVP.
+
+New Features and Guide (Guilin Release)
+=======================================
+
+SDC New features
+----------------
+
+Service import
+>>>>>>>>>>>>>>
+
+1. Add a button “IMPORT SERVICE CSAR" to perform service CSAR import.
+2. When clicking the “IMPORT SERVICE CSAR” button on the portal, a window will
+ pop up to select the service CSAR file to be imported.
+3. After selecting the service CSAR file to be imported, it will switch to the
+ general information input page for creating the service.
+4. After filling in all the required fields, you can click the "create" button
+ to create a new service.
+5. Add a new API for the request of importing service CSAR.
+
+Abstract service template
+>>>>>>>>>>>>>>>>>>>>>>>>>
+
+1. On the general page of VF, add a IS_ABSTRACT_RESOURCE selection box, which is
+ false by default. If it is an abstract VNF, select true manually.
+2. Add three APIs to handle the corresponding requests of abstract service template:
+ 2.1 Return whether the service is an abstract service: GET /v1/catalog/abstract/service/serviceUUID/{uuid}/status
+ 2.2 Copy a new service based on the existing service: POST /v1/catalog/abstract/service/copy
+ 2.3 Replace the abstract VNF in the abstract service template with the actual VNF: PUT /v1/catalog/abstract/service/replaceVNF
+
+VTP New features
+----------------
+1. Added active scenario and profile management support
+2. Added integration with Robot CSIT tests
+3. Enabled auto discovery of test cases from 3rd party tool integration
+4. Added support for cnf-conformance tests (in order to enable the CNF
+ conformance tool in VTP, please refer `the guide <https://gerrit.onap.org/r/gitweb?p=vnfsdk/validation.git;a=blob;f=cnf-conformance/README.md;h=cda3dee762f4dd2873613341f60f6662880f006a;hb=refs/heads/master>`_
+5. New VTP API has been updated: see the `VTP API wiki page <https://wiki.onap.org/display/DW/VTP+REST+API+v1>`_
+
+CLI New features
+----------------
+
+1. Enabled auto discover and registration of products functionalities as commands
+2. Profile management commands are added
+3. For the VTP Command line usage, please refer :ref:`CLI User Guide <onap-cli:cli_user_guide>`
+
+Test Status and Plans
+=====================
+
+See `the status of the test wiki page <https://wiki.onap.org/display/DW/Automatic+Testing+Requirements>`_
diff --git a/docs/docs_StndDefined_Events_Collection_Mechanism.rst b/docs/docs_StndDefined_Events_Collection_Mechanism.rst
new file mode 100644
index 000000000..89c6481c4
--- /dev/null
+++ b/docs/docs_StndDefined_Events_Collection_Mechanism.rst
@@ -0,0 +1,97 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+:orphan:
+
+.. _docs_StndDefined_Events_Collection_Mechanism:
+
+VES Collector - Standard Defined Events Collection Mechanism
+------------------------------------------------------------
+
+Description
+~~~~~~~~~~~
+
+The target of standard defined events collection mechanism development was to allow collection of events defined by standards organizations using VES Collector,
+and providing them for consumption by analytics applications running on top of DCAE platform. The following features have been implemented:
+
+1. Event routing, based on a new CommonHeader field “stndDefinedNamespace”
+2. Standards-organization defined events can be included using a dedicated stndDefinedFields.data property
+3. Standards-defined events can be validated using openAPI descriptions provided by standards organizations, and indicated in stndDefinedFields.schemaReference
+
+`Standard Defined Events Collection Mechanism description <https://docs.onap.org/projects/onap-dcaegen2/en/jakarta/sections/services/ves-http/stnd-defined-validation.html>`_
+
+.. note::
+
+ VES Collector orchestrated using Helm or Cloudify uses standard defined domain schema files bundled within VES collector image during image build.
+ Also, the new Helm-based installation mechanism for collectors doesn't yet support certain features available with the traditional Cloudify orchestration based mechanisms:
+ - Obtaining X.509 certificates from external CMP v2 server for secure xNF connections
+ - Exposing the Collector port in Dual Stack IPv4/IPv6 networks.
+
+
+How to Configure VES Collector
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+By default config maps containing schema files are defined in the `OOM <https://github.com/onap/oom/tree/jakarta/kubernetes/dcaegen2-services/resources/external>`_ repository and installed with dcaegen2 module.
+In the Istanbul release, OOM uses schema files from https://forge.3gpp.org/rep/sa5/MnS/blob/SA88-Rel16/OpenAPI/.
+The newest schema files can be found at https://forge.3gpp.org/rep/sa5/MnS/tree/Rel-16-SA-91/OpenAPI
+If different or newer schema files are required for production/test purposes, please follow the procedure for `config map update <https://docs.onap.org/projects/onap-dcaegen2/en/latest/sections/configuration.html#config-maps>`_.
+
+In order to prepare second instance of VES Collector please follow below procedure:
+
+1. (Optional step) If VES Collector should obtain X.509 certificates from a CMPv2 server for secure xNF connections, please follow the steps below:
+
+ - Install `Cert Manager <https://docs.onap.org/projects/onap-oom/en/latest/sections/guides/infra_guides/oom_infra_base_config_setup.html#install-cert-manager>`_
+ - Configure `Cert Service <https://docs.onap.org/projects/onap-oom-platform-cert-service/en/jakarta/sections/configuration.html>`_ if external CMP v2 server is in use.
+
+2. If usage of config maps from OOM containing schema files is required please follow procedure for
+ `external repo schema files from OOM connection to VES collector <https://docs.onap.org/projects/onap-dcaegen2/en/jakarta/sections/services/ves-http/installation.html#external-repo-schema-files-from-oom-connection-to-ves-collector>`_
+ with changes described below.
+
+ As a new instance of VES Collector will be introduced to the ONAP namespace, there is a need to modify parameters from ``/inputs/k8s-ves-inputs-tls.yaml`` in the Bootstrap POD
+
+ - external_port - set here ``node port`` from range ``30000-32767`` not used in ONAP instance for example ``30519``
+ - ``service_component_type``, ``service_id``, ``service_component_name_override`` - set here custom service name e.g. ``dcae-ves-collector-std-def-evnents``
+
+ (Optional step) If VES Collector should also obtain X.509 certificates from CMP v2 and its clients should successfully validate its hostname, then the following parameters need to be modified in the ``/inputs/k8s-ves-inputs-tls.yaml`` file.
+
+ - ``external_cert_use_external_tls`` - change from ``false`` to ``true``
+ - ``external_cert_common_name`` - set same value as used in ``service_component_name_override parameter``
+ - ``service_component_name_override`` - add following values:
+ - all IPv4 addresses of ONAP worker hosts
+ - all IPv6 addresses of ONAP worker hosts
+ - all FQDN names of ONAP worker hosts
+ - ``service_component_name_override`` parameter value.
+
+ Deploy new instance of VES collector using ``/inputs/k8s-ves-inputs-tls.yaml``
+
+3. (Optional step) If ONAP is installed in Dual Stack and VES Collector should listen in IPv6 network
+
+ - on RKE node prepare file ``ves-ipv6.yaml`` with following content (below is an example of file for ``dcae-ves-collector-std-def-evnents`` service name created in section 2, in ``node port`` set once again value from range ``30000-32767`` not used in ONAP instance for example ``30619`` )
+ .. code-block:: bash
+
+ apiVersion: v1
+ kind: Service
+ metadata:
+ name: xdcae-ves-collector-std-def-evnents
+ namespace: onap
+ spec:
+ externalTrafficPolicy: Cluster
+ ipFamily: IPv6
+ ports:
+ - name: xdcae-ves-collector-std-def-evnents
+ nodePort: 30619
+ port: 8443
+ protocol: TCP
+ targetPort: 8443
+ selector:
+ app: dcae-ves-collector-std-def-evnents
+ sessionAffinity: None
+ type: NodePort
+
+ - apply the prepared service and check if it is working
+ .. code-block:: bash
+
+ kubectl -n onap apply -f ves-ipv6.yaml
+
+ kubectl -n onap get svc | grep collector-std-def-evnents
+ xdcae-ves-collector-std-def-evnents NodePort fd00:101::6ad <none> 8443:30619/TCP
diff --git a/docs/docs_Testing_5G_PNF_Software_Upgrade_With_Schema_Update.rst b/docs/docs_Testing_5G_PNF_Software_Upgrade_With_Schema_Update.rst
new file mode 100644
index 000000000..eb549bb99
--- /dev/null
+++ b/docs/docs_Testing_5G_PNF_Software_Upgrade_With_Schema_Update.rst
@@ -0,0 +1,189 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+.. _docs_testing_5g_pnf_software_upgrade_with_schema_update:
+
+
+:orphan:
+
+Testing xNF Software Upgrade in association to schema updates
+-------------------------------------------------------------
+
+Description
+~~~~~~~~~~~
+This procedure only describes the test instruction to upgrade schema of a service instance with at least one PNF resource based on a new onboarding package.
+
+This procedure can be used to upgrade a service instance with more than one PNF resource.
+
+A. Pre-conditions
+~~~~~~~~~~~~~~~~~
+* A service template with at least one PNF resource has been created in SDC and distributed to run time
+
+* At least one service instance has been instantiated, including PNF registration and configuration, in run time
+
+* This service instance is in health condition
+
+* A new PNF onboarding package, which contains a new software version and new artifacts, is ready for onboarding
+
+* This procedure does not support addition of new PNF resource or deletion of existing PNF resource in the service template.
+
+
+B. Update and re-distribute the service template:
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ The service template must be updated with updated schema information for the PNF resources, and then redistributed to run time.
+
+ 1. Update an existing PNF resource artifact and attach the same to an existing service template.
+
+ - url to portal: https://portal.api.simpledemo.onap.org:30225/ONAPPORTAL/login.htm
+
+ - password for users: demo123456!
+
+ - Login as cs0008, go to "ONBOARD", where all the available VSPs and Services are listed.
+
+
+ 2. Follow below mentioned procedure to update VSP and Service.
+
+ - `Update VF/PNF <https://docs.onap.org/en/kohn/guides/onap-user/design/resource-onboarding/index.html#update-vfcs-in-a-vsp-optional>`_
+
+ - `Update Service <https://docs.onap.org/en/kohn/guides/onap-user/design/service-design/index.html#update-service-optional>`_
+
+
+C. Trigger PNF service level software upgrade with schema update:
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Schema update procedure can be triggered manually by invoking appropriate rest end points through the postman client.
+
+ 3. Get the service level workflow uuid by fetching all the available workflows from SO:
+
+ - GET http://REPO_IP:SO_PORT/onap/so/infra/workflowSpecifications/v1/workflows
+
+ - From the response, fetch the workflow uuid against the workflow name “ServiceLevelUpgrade”.
+
+ .. image:: files/softwareUpgrade/workflowList.png
+
+
+ 4. Select one service instance which needs to be upgraded
+
+ - Retrieve all services instance from AAI using:
+
+ - GET https://REPO_IP:AAI_PORT/business/customers/customer/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances
+
+ - Select one service instance from the service instance list received from above query.
+
+
+ 5. Get all Service-Model-Version from AAI Using Service-Model-InVariant-UUId:
+
+ - Use the Service-Model-InVariant-UUId from the selected service instance (previous step) as model-invariant-id in this query.
+
+ - GET https://REPO_IP:AAI_PORT/aai/v21/service-design-and-creation/models/model/${model-invariant-id}/model-vers
+
+ - Select one model version Id from the model version list received from the above query. The selected model version Id will be used as the target service model version in the upgrade procedure.
+
+ .. image:: files/softwareUpgrade/serviceModelVersions.png
+
+
+ 6. Invoke the service level upgrade workflow to update the schema of xNF resources.
+
+ - Invoke the service level workflow by passing the older version service model id and the service level workflow uuid for “Service Level workflow” fetched in the previous steps.
+
+ - In the body of the POST request, json input needs to be supplied that contains info on the model version to which we are going to trigger the update. (2.0)
+
+ - POST http://REPO_IP:SO_PORT/onap/so/infra/instanceManagement/v1/serviceInstances/${serviceInstanceId}/workflows/${serviceLevel_workflow_uuid}
+
+ - Attaching below a sample request json :
+
+{
+
+ "requestDetails": {
+
+ "subscriberInfo": {
+
+ "globalSubscriberId": "807c7a02-249c-4db8-9fa9-bee973fe08ce"
+
+ },
+
+ "modelInfo": {
+
+ "modelVersion": "2.0",
+
+ "modelVersionId": "8351245d-50da-4695-8756-3a22618377f7",
+
+ "modelInvariantId": "fe41489e-1563-46a3-b90a-1db629e4375b",
+
+ "modelName": "Service_with_pnfs",
+
+ "modelType": "service"
+
+ },
+
+ "requestInfo": {
+
+ "suppressRollback": false,
+
+ "requestorId": "demo",
+
+ "instanceName": "PNF 2",
+
+ "source": "VID"
+
+ },
+
+ "requestParameters": {
+
+ "subscriptionServiceType": "pNF",
+
+ "userParams": [
+
+ {
+
+ "name": "targetSoftwareVersion",
+
+ "value": "pnf_sw_version-4.0.0"
+
+ }
+
+ ],
+
+ "aLaCarte": false,
+
+ "payload": "{\"k1\": \"v1\"}"
+
+ },
+
+ "project": {
+
+ "projectName": "ServiceLevelUpgrade"
+
+ },
+
+ "owningEntity": {
+
+ "owningEntityId": "67f2e84c-734d-4e90-a1e4-d2ffa2e75849",
+
+ "owningEntityName": "OE-Test"
+
+ }
+
+ }
+
+}
+
+Note down the request id for the schema update request that can be used in the subsequent steps to track the progress.
+
+
+ 7. Verify the service level upgrade workflow status
+
+ - GET http://REPO_IP:SO_PORT/onap/so/infra/orchestrationRequests/v7/${requestID}
+
+ - Verify the response status code and message for the request id fetched in the previous step.
+
+ - For successful upgrade completion, the response code must be “200” with appropriate success message.
+
+
+ 8. Verify PNF Configuration for Service Level Upgrade from AAI
+
+ - GET https://REPO_IP:AAI_PORT/aai/v16/network/pnfs/pnf/{PNF_NAME}
+
+ - Verify the software version of the pnf resource updated in AAI.
+
+ .. image:: files/softwareUpgrade/verifyPNF.png
diff --git a/docs/docs_pnf_onboarding_preonboarding.rst b/docs/docs_pnf_onboarding_preonboarding.rst
new file mode 100644
index 000000000..fb33ec370
--- /dev/null
+++ b/docs/docs_pnf_onboarding_preonboarding.rst
@@ -0,0 +1,29 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+.. _docs_pnf_onboarding_preonboarding:
+
+:orphan:
+
+5G - PNF Pre-Onboarding & Onboarding
+------------------------------------
+
+Description
+~~~~~~~~~~~
+
+Use case introduces possibility of pre-onboarding and onboarding a vendor PNF onboarding package in ONAP for 5G and other use cases.
+`Detailed 5G - PNF Pre-Onboarding & Onboarding use case documentation <https://wiki.onap.org/pages/viewpage.action?pageId=45303641>`_
+PNF CSAR Package that is onboarded as Vendor Software Package to SDC must meet the following requirements: `VNF or PNF CSAR Package Requirements <https://docs.onap.org/en/frankfurt/submodules/vnfrqts/requirements.git/docs/Chapter5/Tosca/ONAP%20VNF%20or%20PNF%20CSAR%20Package.html>`_
+Before SDC Onboarding, PNF onboarding package/archive can be verified using VNF SDK tools.
+
+
+How to Use
+~~~~~~~~~~
+- PNF pre-onboarding (VNF SDK verification)
+ The pre-onboarding step is optional and it can be used to verify a vendor PNF onboarding package/archive format by VNF SDK tools
+ `VNF SDK Tools Documentation <https://docs.onap.org/en/frankfurt/submodules/vnfsdk/model.git/docs/index.html>`_
+ `VNF SDK Test Cases <https://wiki.onap.org/pages/viewpage.action?pageId=58231094>`_
+
+- PNF onboarding (SDC Resource Onboarding)
+ The onboarding step is mandatory in ONAP.
+ A vendor-provided PNF onboarding package must be onboarded according to procedure: `SDC Resource Onboarding <https://docs.onap.org/en/frankfurt/guides/onap-user/design/resource-onboarding/index.html>`_
diff --git a/docs/docs_postman.rst b/docs/docs_postman.rst
deleted file mode 100644
index 30cf83a79..000000000
--- a/docs/docs_postman.rst
+++ /dev/null
@@ -1,190 +0,0 @@
-.. This work is licensed under a Creative Commons Attribution 4.0
-.. International License. http://creativecommons.org/licenses/by/4.0
-.. Copyright 2017 AT&T Intellectual Property. All rights reserved.
-
-.. _postman-guides:
-
-Test ONAP API with Postman
-==========================
-
-Postman
--------
-
-Postman is a tool that allows a user to run REST API.
-
-The user defines the API requests and has the possibility to group them
-in files called a "Collections".
-
-The user can then run each individual API request or run a complete collection.
-
-Postman includes the possibility to define "environment" variables.
-
-An API request can then get or set a value from/to that "environment" variable.
-
-Get and install Postman tool on your own machine (Linux/windows).
-
-Postman is available here: https://www.getpostman.com/
-
-
-ONAP Postman collections
-------------------------
-
-ONAP Integration project provides several Postman collections with two
-environment files.
-
-Those Postman Collections will allow a Developer to experiment various ONAP API
-on various ONAP components (SDC, NBI, SO, AAI, SDNC)
-
-- declare a vendor
-- declare a VSP
-- upload a package
-- declare a VF based on the VSP
-- declare a Service composed of the VF and a Virtual Link
-- distribute all those informations
-- declare a customer, a service subscription
-- declare OwningEntity, Platform...
-- declare a Complex, Cloud Region, Tenant
-- associate customer/service/tenant
-- declare a service instance via a serviceOrder
-- declare a vnf
-- declare a vf-module
-- declare a network
-
-A collection is also provided to delete objects
-(reminder: it is not possible to delete object in SDC)
-
-They have been tested with Onap ElAlto (they are not all compatible with
-Dublin, and there is not guaranty about ONAP "master" as API definition
-can change)
-
-
-Download ONAP Postman collections
----------------------------------
-
-From your machine, git clone the ONAP Integration project.
-
-::
-
- git clone "https://gerrit.onap.org/r/integration"
-
-
-Import ONAP Postman collections
--------------------------------
-
-ONAP Postman collection are in the repository integration/test/postman
-
-Launch Postman tool
-
-import all ONAP Collections into Postman
-
-.. figure:: files/postman/import.png
- :align: center
-
-And you should see all the collections into Postman
-
-.. figure:: files/postman/collections.png
- :align: center
-
-Each collection is made of several API operations
-
-.. figure:: files/postman/collection-detail.png
- :align: center
-
-
-Running ONAP Postman collections
---------------------------------
-
-Running all those collections, in the order, from 1 to 10 will create a lot of
-objects in ONAP components :
-
-- SDC : vendor, VSP, zip file upload, VF from VSP, Service, add VF to Service
-- VID : OwningEntity, LineOfBusiness, Project, Platform
-- AAI : customer, subscription, cloud region, tenant
-- NBI : serviceOrder to add a service instance, serviceOrder to delete
- a service instance
-
-The order is very important because a lot of API requests will need the API
-response from the previous operation to get and set some variable values.
-
-.. figure:: files/postman/collection-detail-test.png
- :align: center
-
-It is possible to run the complete collection using Postman
-
-.. figure:: files/postman/run.png
- :align: center
-
-You need, a zip file that contains Heat files for a VNF.
-
-Collection 3 is about uploading that file into ONAP SDC.
-
-.. figure:: files/postman/zipfile.png
- :align: center
-
-Before running those collections, once in Postman, you need to have a look
-at "globals" environment parameters.
-
-.. figure:: files/postman/globals.png
- :align: center
-
-All variables that begin by "auto" must NOT be changed (they will be modified
-using API response).
-
-All other variables must be adapted to your needs.
-
-In particular, you need to put your own values for cloud_region_id, tenant_name
-and tenant_id to fit with the place where you will instantiate the VNF.
-
-
-::
-
- service:freeradius
- vf_name:integration_test_VF_freeradius
- vsp_name:integration_test_VSP
- vendor_name:onap_integration_vendor
- owning_entity:integration_test_OE
- platform:integration_test_platform
- project:integration_test_project
- lineofbusiness:integration_test_LOB
- customer_name:generic
- cloud_owner_name:OPNFV
- cloud_region_id:RegionOne
- tenant_name:openlab-vnfs
- tenant_id:234a9a2dc4b643be9812915b214cdbbb
- externalId:integration_test_BSS-order-001
- service_instance_name:integration_test_freeradius_instance_001
-
-
-Using Newman
-------------
-
-Newman is a tool that allow to run postman collections via command-line
-
-On a linux server, with Docker installed on it, run those lines:
-
-::
-
- git clone https://gitlab.com/Orange-OpenSource/lfn/onap/onap-tests.git
- cd onap-tests/postman
- sudo apt-get -y install zip
- USECASE=$'ubuntu16'
- zip -j $USECASE.zip ../onap_tests/templates/heat_files/$USECASE/*
- TAB=$'\t\t\t\t\t\t\t'
- sed -i -e "s/.*src.*/$TAB\"src\": \"$USECASE.zip\"/" 03_Onboard_VSP_part2.postman_collection.json
- docker pull postman/newman:alpine
- docker run --network="host" --volume="/home/debian/rene/onap-tests/postman:/etc/newman" postman/newman:alpine run 01_Onboard_Vendor.postman_collection.json --environment integration_test_urls.postman_environment.json --globals globals.postman_globals.json --export-environment integration_test_urls.postman_environment.json --reporters cli,json --reporter-cli-no-assertions --reporter-cli-no-console
- docker run --network="host" --volume="/home/debian/rene/onap-tests/postman:/etc/newman" postman/newman:alpine run 02_Onboard_VSP_part1.postman_collection.json --environment integration_test_urls.postman_environment.json --globals globals.postman_globals.json --export-environment integration_test_urls.postman_environment.json
- docker run --network="host" --volume="/home/debian/rene/onap-tests/postman:/etc/newman" postman/newman:alpine run 03_Onboard_VSP_part2.postman_collection.json --environment integration_test_urls.postman_environment.json --globals globals.postman_globals.json --export-environment integration_test_urls.postman_environment.json
- docker run --network="host" --volume="/home/debian/rene/onap-tests/postman:/etc/newman" postman/newman:alpine run 04_Onboard_VSP_part3.postman_collection.json --environment integration_test_urls.postman_environment.json --globals globals.postman_globals.json --export-environment integration_test_urls.postman_environment.json
- docker run --network="host" --volume="/home/debian/rene/onap-tests/postman:/etc/newman" postman/newman:alpine run 05_Onboard_VF.postman_collection.json --environment integration_test_urls.postman_environment.json --globals globals.postman_globals.json --export-environment integration_test_urls.postman_environment.json
- docker run --network="host" --volume="/home/debian/rene/onap-tests/postman:/etc/newman" postman/newman:alpine run 06_Onboard_Service.postman_collection.json --environment integration_test_urls.postman_environment.json --globals globals.postman_globals.json --export-environment integration_test_urls.postman_environment.json
- docker run --network="host" --volume="/home/debian/rene/onap-tests/postman:/etc/newman" postman/newman:alpine run 07_Declare_owningEntity_LineOfBusiness_project_platform.postman_collection.json --environment integration_test_urls.postman_environment.json --globals globals.postman_globals.json --export-environment integration_test_urls.postman_environment.json
- docker run --network="host" --volume="/home/debian/rene/onap-tests/postman:/etc/newman" postman/newman:alpine run 08_Declare_Customer_Service_Subscription_Cloud.postman_collection.json --insecure --environment integration_test_urls.postman_environment.json --globals globals.postman_globals.json --export-environment integration_test_urls.postman_environment.json
- docker run --network="host" --volume="/home/debian/rene/onap-tests/postman:/etc/newman" postman/newman:alpine run 10_instantiate_service_vnf_vfmodule.postman_collection.json --environment integration_test_urls.postman_environment.json --globals globals.postman_globals.json --export-environment integration_test_urls.postman_environment.json --reporters cli,json --reporter-cli-no-assertions --reporter-cli-no-console
-
-All collections are processed, then you can see results and you will
-also obtain result json files in the onap-tests/postman/newman directory
-
-Of course you can adapt globals variables in globals.postman_globals.json
-or change the USECASE=$'ubuntu16' value to onboard any heat template located
-in onap_tests/templates/heat_files directory
diff --git a/docs/docs_robot.rst b/docs/docs_robot.rst
index 96b4b7c65..f572f2799 100644
--- a/docs/docs_robot.rst
+++ b/docs/docs_robot.rst
@@ -3,6 +3,8 @@
.. _docs_robot:
+:orphan:
+
Robot
-----
diff --git a/docs/docs_scaleout.rst b/docs/docs_scaleout.rst
index b47c0693c..80ee6bf95 100644
--- a/docs/docs_scaleout.rst
+++ b/docs/docs_scaleout.rst
@@ -1,28 +1,191 @@
.. _docs_scaleout:
+:orphan:
+
VF Module Scale Out Use Case
----------------------------
Source files
~~~~~~~~~~~~
-- Heat templates directory: https://git.onap.org/demo/tree/heat/vLB_CDS?h=elalto
+- Heat templates directory: https://git.onap.org/demo/tree/heat?h=guilin
+- Heat templates directory (vLB_CDS use case): https://git.onap.org/demo/tree/heat/vLB_CDS?h=guilin
Additional files
~~~~~~~~~~~~~~~~
-- DCAE blueprint: https://git.onap.org/integration/tree/docs/files/scaleout/k8s-tca-clamp-policy-05162019.yaml
- TOSCA model template: https://git.onap.org/integration/tree/docs/files/scaleout/service-Vloadbalancercds-template.yml
-- Naming policy script: https://git.onap.org/integration/tree/docs/files/scaleout/push_naming_policy.sh
+- Naming policy script: :download:`push_naming_policy.sh <files/scaleout/push_naming_policy.sh>`
+- Controller Blueprint Archive (to use with CDS) : https://git.onap.org/ccsdk/cds/tree/components/model-catalog/blueprint-model/service-blueprint/vLB_CDS_Kotlin?h=guilin
+- TCA blueprint: :download:`guilin-tca.yaml <files/scaleout/latest-tca-guilin.yaml>`
+
+Useful tool
+~~~~~~~~~~~
+POSTMAN collection that can be used to simulate all inter process queries : https://www.getpostman.com/collections/878061d291f9efe55463
+To be able to use this postman collection, you may need to expose some ports that are not exposed in OOM by default.
+These commands may help for exposing the ports:
+
+::
+
+ kubectl port-forward service/cds-blueprints-processor-http --address 0.0.0.0 32749:8080 -n onap &
+ kubectl port-forward service/so-catalog-db-adapter --address 0.0.0.0 30845:8082 -n onap &
+ kubectl port-forward service/so-request-db-adapter --address 0.0.0.0 32223:8083 -n onap &
+
+OOM Installation
+~~~~~~~~~~~~~~~~
+Before doing the OOM installation, pay attention to the following steps:
+
+Set the right Openstack values for Robot and SO
+===============================================
+
+The config for robot must be set in an OOM override file before the OOM installation, this will initialize the robot framework & SO with all the required openstack info.
+A section like that is required in that override file
+
+::
+
+ robot:
+ enabled: true
+ flavor: small
+ appcUsername: "appc@appc.onap.org"
+ appcPassword: "demo123456!"
+ openStackKeyStoneUrl: "http://10.12.25.2:5000"
+ openStackKeystoneAPIVersion: "v3"
+ openStackPublicNetId: "5771462c-9582-421c-b2dc-ee6a04ec9bde"
+ openStackTenantId: "c9ef9a6345b440b7a96d906a0f48c6b1"
+ openStackUserName: "openstack_user"
+ openStackUserDomain: "default"
+ openStackProjectName: "CLAMP"
+ ubuntu14Image: "trusty-server-cloudimg-amd64-disk1"
+ ubuntu16Image: "xenial-server-cloudimg-amd64-disk1"
+ openStackPrivateNetCidr: "10.0.0.0/16"
+ openStackPrivateNetId: "fd05c1ab-3f43-4f6f-8a8c-76aee04ef293"
+ openStackPrivateSubnetId: "fd05c1ab-3f43-4f6f-8a8c-76aee04ef293"
+ openStackSecurityGroup: "f05e9cbf-d40f-4d1f-9f91-d673ba591a3a"
+ openStackOamNetworkCidrPrefix: "10.0"
+ dcaeCollectorIp: "10.12.6.10"
+ vnfPubKey: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDKXDgoo3+WOqcUG8/5uUbk81+yczgwC4Y8ywTmuQqbNxlY1oQ0YxdMUqUnhitSXs5S/yRuAVOYHwGg2mCs20oAINrP+mxBI544AMIb9itPjCtgqtE2EWo6MmnFGbHB4Sx3XioE7F4VPsh7japsIwzOjbrQe+Mua1TGQ5d4nfEOQaaglXLLPFfuc7WbhbJbK6Q7rHqZfRcOwAMXgDoBqlyqKeiKwnumddo2RyNT8ljYmvB6buz7KnMinzo7qB0uktVT05FH9Rg0CTWH5norlG5qXgP2aukL0gk1ph8iAt7uYLf1ktp+LJI2gaF6L0/qli9EmVCSLr1uJ38Q8CBflhkh"
+ demoArtifactsVersion: "1.6.0"
+ demoArtifactsRepoUrl: "https://nexus.onap.org/content/repositories/releases"
+ scriptVersion: "1.6.0"
+ nfsIpAddress: "10.12.6.10"
+ config:
+ openStackEncryptedPasswordHere: "e10c86aa13e692020233d18f0ef6d527"
+ openStackSoEncryptedPassword: "1DD1B3B4477FBAFAFEA617C575639C6F09E95446B5AE1F46C72B8FD960219ABB0DBA997790FCBB12"
+ so:
+ enabled: true
+ so-catalog-db-adapter:
+ config:
+      openStackUserName: "openstack_user"
+ openStackKeyStoneUrl: "http://10.12.25.2:5000/v3"
+ openStackEncryptedPasswordHere: "1DD1B3B4477FBAFAFEA617C575639C6F09E95446B5AE1F46C72B8FD960219ABB0DBA997790FCBB12"
+ openStackKeystoneVersion: "KEYSTONE_V3"
+
+The values that must be changed according to your lab are all "openStack******" parameters + dcaeCollectorIp + nfsIpAddress
+
+**Generating SO Encrypted Password:**
+
+The SO Encrypted Password uses a java based encryption utility since the
+Java encryption library is not easy to integrate with openssl/python that
+Robot uses in Dublin and upper versions.
+
+.. note::
+ To generate SO ``openStackEncryptedPasswordHere`` and ``openStackSoEncryptedPassword``
+ ensure `default-jdk` is installed::
+
+ apt-get update; apt-get install default-jdk
+
+ Then execute (on oom repository)::
+
+ SO_ENCRYPTION_KEY=`cat ~/oom/kubernetes/so/resources/config/mso/encryption.key`
+  OS_PASSWORD=XXXX_OS_CLEARTEXTPASSWORD_XXXX
+
+ git clone http://gerrit.onap.org/r/integration
+ cd integration/deployment/heat/onap-rke/scripts
+
+ javac Crypto.java
+ java Crypto "$OS_PASSWORD" "$SO_ENCRYPTION_KEY"
+
+**Update the OpenStack parameters:**
+
+There are assumptions in the demonstration VNF Heat templates about the
+networking available in the environment. To get the most value out of these
+templates and the automation that can help confirm the setup is correct, please
+observe the following constraints.
+
+
+``openStackPublicNetId:``
+ This network should allow Heat templates to add interfaces.
+ This need not be an external network, floating IPs can be assigned to the
+ ports on the VMs that are created by the heat template but its important that
+ neutron allow ports to be created on them.
+
+``openStackPrivateNetCidr: "10.0.0.0/16"``
+ This ip address block is used to assign OA&M addresses on VNFs to allow ONAP
+ connectivity. The demonstration Heat templates assume that 10.0 prefix can be
+ used by the VNFs and the demonstration ip addressing plan embodied in the
+ preload template prevent conflicts when instantiating the various VNFs. If
+ you need to change this, you will need to modify the preload data in the
+ Robot Helm chart like integration_preload_parameters.py and the
+ demo/heat/preload_data in the Robot container. The size of the CIDR should
+ be sufficient for ONAP and the VMs you expect to create.
+
+``openStackOamNetworkCidrPrefix: "10.0"``
+  This ip prefix must match the openStackPrivateNetCidr and is a helper
+ variable to some of the Robot scripts for demonstration. A production
+ deployment need not worry about this setting but for the demonstration VNFs
+  the ip assignment strategy assumes 10.0 ip prefix.
+
+**Generating ROBOT Encrypted Password:**
+
+The Robot encrypted Password uses the same encryption.key as SO but an
+openssl algorithm that works with the python based Robot Framework.
+
+.. note::
+  To generate Robot ``openStackEncryptedPasswordHere`` call in the oom repository::
+
+ cd so/resources/config/mso/
+ /oom/kubernetes/so/resources/config/mso# echo -n "<openstack tenant password>" | openssl aes-128-ecb -e -K `cat encryption.key` -nosalt | xxd -c 256 -p``
+
+Initialize the Customer and Owning entities
+===========================================
+
+The robot script can be helpful to initialize the customer and owning entity that
+will be used later to instantiate the VNF (PART 2 - Scale Out Use Case Instantiation)
+
+::
+
+ In the oom_folder/kubernetes/robot/ execute the following command:
+ ./demo-k8s.sh onap init_customer
+
+If this command is unsuccessful it means that the parameters provided to the OOM installation were not correct.
+
+- Verify and Get the tenant/owning entity/cloud-regions defined in AAI by Robot script:
+ These values will be required by the POSTMAN collection when instantiating the Service/vnf ...
+
+To get them some POSTMAN collection queries are useful to use:
+
+- GET "AAI Owning Entities"
+- GET "AAI Cloud-regions"
+- GET "AAI Cloud-regions/tenant"
Description
~~~~~~~~~~~
-The scale out use case uses a VNF composed of three virtual functions. A traffic generator (vPacketGen), a load balancer (vLB), and a DNS (vDNS). Communication between the vPacketGen and the vLB, and the vLB and the vDNS occurs via two separate private networks. In addition, all virtual functions have an interface to the ONAP OAM private network, as shown in the topology below.
+
+The scale out use case uses a VNF composed of three virtual functions. A traffic
+generator (vPacketGen), a load balancer (vLB), and a DNS (vDNS). Communication
+between the vPacketGen and the vLB, and the vLB and the vDNS occurs via two
+separate private networks. In addition, all virtual functions have an interface
+to the ONAP OAM private network, as shown in the topology below.
.. figure:: files/scaleout/topology.png
:align: center
-The vPacketGen issues DNS lookup queries that reach the DNS server via the vLB. vDNS replies reach the packet generator via the vLB as well. The vLB reports the average amount of traffic per vDNS instances over a given time interval (e.g. 10 seconds) to the DCAE collector via the ONAP OAM private network.
+The vPacketGen issues DNS lookup queries that reach the DNS server via the vLB.
+vDNS replies reach the packet generator via the vLB as well. The vLB reports the
+average amount of traffic per vDNS instances over a given time interval (e.g. 10
+seconds) to the DCAE collector via the ONAP OAM private network.
-To run the use case, make sure that the security group in OpenStack has ingress/egress entries for protocol 47 (GRE). Users can test the VNF by running DNS queries from the vPakcketGen:
+To run the use case, make sure that the security group in OpenStack has
+ingress/egress entries for protocol 47 (GRE). Users can test the VNF by running
+DNS queries from the vPacketGen:
::
@@ -62,7 +225,14 @@ The output below means that the vLB has been set up correctly, has forwarded the
The Scale Out Use Case
~~~~~~~~~~~~~~~~~~~~~~
-The Scale Out use case shows how users/network operators can add Virtual Network Function Components (VNFCs) as part of a VF Module that has been instantiated in the Service model, in order to increase capacity of the network. ONAP El Alto release supports scale out with manual trigger by directly calling SO APIs and closed-loop-enabled automation from Policy. For El Alto, the APPC controller is used to demonstrate post-scaling VNF reconfiguration operations. APPC can handle different VNF types, not only the VNF described in this document.
+
+The Scale Out use case shows how users/network operators can add Virtual Network
+Function Components (VNFCs) as part of a VF Module that has been instantiated in
+the Service model, in order to increase capacity of the network. ONAP Frankfurt
+release supports scale out with manual trigger by directly calling SO APIs and
+closed-loop-enabled automation from Policy. For Frankfurt, the APPC controller is
+used to demonstrate post-scaling VNF reconfiguration operations. APPC can handle
+different VNF types, not only the VNF described in this document.
The figure below shows all the interactions that take place during scale out operations.
@@ -75,43 +245,93 @@ There are four different message flows:
- Red: Closed-loop enabled scale out.
- Black: Orchestration and VNF lifecycle management (LCM) operations.
-The numbers in the figure represent the sequence of steps within a given flow. Note that interactions between the components in the picture and AAI, SDNC, and DMaaP are not shown for clarity's sake.
-
-Scale out with manual trigger (green flow) and closed-loop-enabled scale out (red flow) are mutually exclusive. When the manual trigger is used, VID directly triggers the appropriate workflow in SO (step 1 of the green flow in the figure above). See Section 4 for more details.
-
-When closed-loop enabled scale out is used, Policy triggers the SO workflow. The closed loop starts with the vLB periodically reporting telemetry about traffic patterns to the VES collector in DCAE (step 1 of the red flow). When the amount of traffic exceeds a given threshold (which the user defines during closed loop creation in CLAMP - see Section 1-4), DCAE notifies Policy (step 2), which in turn triggers the appropriate action. For this use case, the action is contacting SO to augment resource capacity in the network (step 3).
-
-At high level, once SO receives a call for scale out actions, it first creates a new VF module (step 1 of the black flow), then calls APPC to trigger some LCM actions (step 2). APPC runs VNF health check and configuration scale out as part of LCM actions (step 3). At this time, the VNF health check only reports the health status of the vLB, while the configuration scale out operation adds a new vDNS instance to the vLB internal state. As a result of configuration scale out, the vLB opens a connection towards the new vDNS instance.
+The numbers in the figure represent the sequence of steps within a given flow.
+Note that interactions between the components in the picture and AAI, SDNC, and
+DMaaP are not shown for clarity's sake.
+
+Scale out with manual trigger (green flow) and closed-loop-enabled scale out
+(red flow) are mutually exclusive. When the manual trigger is used, VID directly
+triggers the appropriate workflow in SO (step 1 of the green flow in the figure
+above). See Section 4 for more details.
+
+When closed-loop enabled scale out is used, Policy triggers the SO workflow.
+The closed loop starts with the vLB periodically reporting telemetry about traffic
+patterns to the VES collector in DCAE (step 1 of the red flow). When the amount
+of traffic exceeds a given threshold (which the user defines during closed loop
+creation in CLAMP - see Section 1-4), DCAE notifies Policy (step 2), which in turn
+triggers the appropriate action. For this use case, the action is contacting SO to
+augment resource capacity in the network (step 3).
+
+At high level, once SO receives a call for scale out actions, it first creates a
+new VF module (step 1 of the black flow), then calls APPC to trigger some LCM
+actions (step 2). APPC runs VNF health check and configuration scale out as part
+of LCM actions (step 3). At this time, the VNF health check only reports the
+health status of the vLB, while the configuration scale out operation adds a new
+vDNS instance to the vLB internal state. As a result of configuration scale out,
+the vLB opens a connection towards the new vDNS instance.
At deeper level, the SO workflow works as depicted below:
.. figure:: files/scaleout/so-blocks.png
:align: center
-SO first contacts APPC to run VNF health check and proceeds on to the next block of the workflow only if the vLB is healthy (not shown in the previous figure for simplicity's sake). Then, SO assigns resources, instantiates, and activates the new VF module. Finally, SO calls APPC again for configuration scale out and VNF health check. The VNF health check at the end of the workflow validates that the vLB health status hasn't been negatively affected by the scale out operation.
+SO first contacts APPC to run VNF health check and proceeds on to the next block
+of the workflow only if the vLB is healthy (not shown in the previous figure for
+simplicity's sake). Then, SO assigns resources, instantiates, and activates the
+new VF module. Finally, SO calls APPC again for configuration scale out and VNF
+health check. The VNF health check at the end of the workflow validates that the
+vLB health status hasn't been negatively affected by the scale out operation.
PART 1 - Service Definition and Onboarding
------------------------------------------
+
This use-case requires operations on several ONAP components to perform service definition and onboarding.
+1-1 VNF Configuration Modeling and Upload with CDS (Recommended way)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-1-1 VNF Configuration Modeling and Upload with CDS
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Since Dublin, the scale out use case integrates with the Controller Design Studio (CDS) ONAP component to automate the generation of cloud configuration at VNF instantiation time. The user interested in running the use case only with manual preload can skip this section and start from Section 1-2. The description of the use case with manual preload is provided in Section5.
+Since Dublin, the scale out use case integrates with the Controller Design Studio (CDS) ONAP component to automate the generation of cloud configuration at VNF instantiation time. The user interested in running the use case only with manual preload can skip this section and start from Section 1-2. The description of the use case with manual preload is provided in Section 5.
Users can model this configuration at VNF design time and onboard the blueprint to CDS via the CDS GUI. The blueprint includes naming policies and network configuration details (e.g. IP address families, network names, etc.) that CDS will use during VNF instantiation to generate resource names and assign network configuration to VMs through the cloud orchestrator.
Please look at the CDS documentation for details about how to create configuration models, blueprints, and use the CDS tool: https://wiki.onap.org/display/DW/Modeling+Concepts. For running the use case, users can use the standard model package that CDS provides out of the box, which can be found here: https://wiki.onap.org/pages/viewpage.action?pageId=64007442
+::
+
+ For the current use case you can also follow these steps (Do not use the SDC flow to deploy the CBA when importing a VSP, this is not going to work anymore since Guilin):
+ 1. You must first bootstrap CDS by using the query in the POSTMAN collection query named POST "CDS Bootstrap"
+ 2. You must upload the attached CBA by using the POSTMAN collection named POST "CDS Save without Validation", the CBA zip file can be attached in the POSTMAN query
+ Controller Blueprint Archive (to use with CDS) : https://git.onap.org/ccsdk/cds/tree/components/model-catalog/blueprint-model/service-blueprint/vLB_CDS_Kotlin?h=guilin
+ 3. Create a zip file with the HEAT files located here: https://git.onap.org/demo/tree/heat/vLB_CDS?h=guilin
+ 4. Create the VSP & Service in the SDC onboarding and SDC Catalog + Distribute the service
+ To know the right values that must be set in the SDC Service properties assignment you must open the CBA zip and look at the TOSCA-Metadata/TOSCA.meta file
+ This file looks like that:
+ TOSCA-Meta-File-Version: 1.0.0
+ CSAR-Version: 1.0
+ Created-By: Seaudi, Abdelmuhaimen <abdelmuhaimen.seaudi@orange.com>
+ Entry-Definitions: Definitions/vLB_CDS.json
+ Template-Tags: vLB_CDS
+ Template-Name: vLB_CDS
+ Template-Version: 1.0.0
+ Template-Type: DEFAULT
+
+ - The sdnc_model_version is the Template-Version
+ - The sdnc_model_name is the Template-Name
+ - The sdnc_artifact_name is the prefix of the file you want to use in the Templates folder, in our CBA example it's vnf (that is supposed to reference the /Templates/vnf-mapping.json file)
+
+ Follow this guide for the VSP onboarding + service creation + properties assignment + distribution part (just skip the CBA attachment part as the CBA should have been pushed manually with the REST command): https://wiki.onap.org/pages/viewpage.action?pageId=64007442
+
+ Note that in case of issues with the AAI distribution, this may help : https://jira.onap.org/browse/AAI-1759
1-2 VNF Onboarding and Service Creation with SDC
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
Once the configuration blueprint is uploaded to CDS, users can define and onboard a service using SDC. SDC requires users to onboard a VNF descriptor that contains the definition of all the resources (private networks, compute nodes, keys, etc.) with their parameters that compose a VNF. The VNF used to demonstrate the scale out use case supports Heat templates as VNF descriptor, and hence requires OpenStack as cloud layer. Users can use the Heat templates linked at the top of the page to create a zip file that can be uploaded to SDC during service creation. To create a zip file, the user must be in the same folder that contains the Heat templates and the Manifest file that describes the content of the package. To create a zip file from command line, type:
::
zip ../vLB.zip *
-For a complete description of service design and creation, please refer to the SDC wiki page: https://wiki.onap.org/display/DW/Design
+For a complete description of service design and creation, please refer to the SDC documentation.
During the creation of the service in SDC, there are a few extra steps that need to be executed to make the VNF ready for scale out. These require users to login to the SDC Portal as service designer user (username: cs0008, password: demo123456!).
@@ -133,14 +353,14 @@ For CDS parameters, users can search for names starting with "sdnc". These param
:align: center
-After importing the VSP, users need to onboard the DCAE blueprint and the Policy Model used to design closed loops in CLAMP. This step is only required for users that want to run closed loop; users interested in manual scale out only can skip the remainder of the section.
+After importing the VSP, users need to onboard the DCAE blueprint used to design closed loops in CLAMP. This step is only required for users that want to run closed loop; users interested in manual scale out only can skip the remainder of the section. Note that since Frankfurt users are not required to upload a Policy model from SDC, as Policy models are now managed by the Policy Engine.
-From the "Composition" tab in the service menu, select the artifact icon on the right, as shown below:
+To upload a DCAE blueprint, from the "Composition" tab in the service menu, select the artifact icon on the right, as shown below:
.. figure:: files/scaleout/1.png
:align: center
-Upload the DCAE blueprint linked at the top of the page using the pop-up window.
+Upload the DCAE blueprint (choose the one depending on your ONAP release; as the original TCA was deprecated in Guilin, a new one is available to use) linked at the top of the page using the pop-up window.
.. figure:: files/scaleout/2.png
:align: center
@@ -150,21 +370,6 @@ The blueprint will appear in the artifacts section on the right.
.. figure:: files/scaleout/3.png
:align: center
-To attach a Policy Model to the service, open the Policy drop-down list on left.
-
-.. figure:: files/scaleout/4.png
- :align: center
-
-Then, add the TCA Policy.
-
-.. figure:: files/scaleout/5.png
- :align: center
-
-The Policy will be attached to the service defined in SDC
-
-.. figure:: files/scaleout/6.png
- :align: center
-
Finally, users need to provide the maximum number of VNF instances that ONAP is allowed to create as part of the scale out use case by setting up deployment properties.
.. figure:: files/scaleout/7.png
@@ -178,10 +383,11 @@ This VNF only supports scaling the vDNS, so users should select the vDNS module
At this point, users can complete the service creation in SDC by testing, accepting, and distributing the Service Models as described in the SDC user manual.
-
1-3 Deploy Naming Policy
~~~~~~~~~~~~~~~~~~~~~~~~
+
This step is only required if CDS is used.
+Note that in Guilin, the default naming policy is already deployed in policy so this step is optional
In order to instantiate the VNF using CDS features, users need to deploy the naming policy that CDS uses for resource name generation to the Policy Engine. User can copy and run the script at the top of the page from any ONAP pod, for example Robot or Drools. The script uses the Policy endpoint defined in the Kubernetes domain, so the execution has to be triggered from some pod in the Kubernetes space.
@@ -191,52 +397,161 @@ In order to instantiate the VNF using CDS features, users need to deploy the nam
./push_naming_policy.sh
+
1-4 Closed Loop Design with CLAMP
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-This step is only required if closed loop is used.
-Once the service model is distributed, users can design the closed loop from CLAMP, using the GUI at https://clamp.api.simpledemo.onap.org:30258/designer/index.html In El Alto, CLAMP doesn't authenticate with AAF, so users have to login using "admin" and "password" as username and password, respectively.
+This step is only required if closed loop is used, for manual scaleout this section can be skipped.
+
+Here are Json examples that can be copy pasted in each policy configuration by clicking on the button EDIT JSON, just replace the value "LOOP_test_vLB_CDS" by your loop ID:
+For TCA config:
+::
+
+ {
+ "tca.policy": {
+ "domain": "measurementsForVfScaling",
+ "metricsPerEventName": [
+ {
+ "policyScope": "DCAE",
+ "thresholds": [
+ {
+ "version": "1.0.2",
+ "severity": "MAJOR",
+ "thresholdValue": 200,
+ "closedLoopEventStatus": "ONSET",
+ "closedLoopControlName": "LOOP_test_vLB_CDS",
+ "direction": "LESS_OR_EQUAL",
+ "fieldPath": "$.event.measurementsForVfScalingFields.vNicPerformanceArray[*].receivedTotalPacketsDelta"
+ }
+ ],
+ "eventName": "vLoadBalancer",
+ "policyVersion": "v0.0.1",
+ "controlLoopSchemaType": "VM",
+ "policyName": "DCAE.Config_tca-hi-lo"
+ }
+ ]
+ }
+ }
+
+For Drools config:
+
+::
+
+ {
+ "abatement": false,
+ "operations": [
+ {
+ "failure_retries": "final_failure_retries",
+ "id": "policy-1-vfmodule-create",
+ "failure_timeout": "final_failure_timeout",
+ "failure": "final_failure",
+ "operation": {
+ "payload": {
+ "requestParameters": "{\"usePreload\":false,\"userParams\":[]}",
+ "configurationParameters": "[{\"ip-addr\":\"$.vf-module-topology.vf-module-parameters.param[16].value\",\"oam-ip-addr\":\"$.vf-module-topology.vf-module-parameters.param[30].value\"}]"
+ },
+ "target": {
+ "entityIds": {
+ "resourceID": "Vlbcds..vdns..module-3",
+ "modelInvariantId": "e95a2949-8ba5-433d-a88f-587a6244b4ea",
+ "modelVersionId": "4a6ceddc-147e-471c-ae6f-907a0df76040",
+ "modelName": "Vlbcds..vdns..module-3",
+ "modelVersion": "1",
+ "modelCustomizationId": "7806ed67-a826-4b0e-b474-9ca4fa052a10"
+ },
+ "targetType": "VFMODULE"
+ },
+ "actor": "SO",
+ "operation": "VF Module Create"
+ },
+ "failure_guard": "final_failure_guard",
+ "retries": 1,
+ "timeout": 300,
+ "failure_exception": "final_failure_exception",
+ "description": "test",
+ "success": "final_success"
+ }
+ ],
+ "trigger": "policy-1-vfmodule-create",
+ "timeout": 650,
+ "id": "LOOP_test_vLB_CDS"
+ }
+
+For Frequency Limiter config:
+
+::
+
+ {
+ "id": "LOOP_test_vLB_CDS",
+ "actor": "SO",
+ "operation": "VF Module Create",
+ "limit": 1,
+ "timeWindow": 10,
+ "timeUnits": "minute"
+ }
-Use the "Closed Loop" link to open a distributed model.
+Once the service model is distributed, users can design the closed loop from CLAMP, using the GUI at https://clamp.api.simpledemo.onap.org:30258
-.. figure:: files/scaleout/12.png
+Use the "Loop Instance" link to create a closed loop using a distributed model.
+
+.. figure:: files/scaleout/clamp/1.png
:align: center
-Select the closed loop associated to the distributed service model.
+Select the distributed service model.
-.. figure:: files/scaleout/13.png
+.. figure:: files/scaleout/clamp/2.png
:align: center
The closed loop main page for TCA microservices is shown below.
-.. figure:: files/scaleout/14.png
+.. figure:: files/scaleout/clamp/3.png
:align: center
-Click on the TCA box to create a configuration policy. From the pop-up window, users need to click "Add item" to create a new policy and fill it in with specific information, as shown below.
+Click on the TCA box to create a configuration policy. From the pop-up window, users need to click "Add" to create a new policy and fill it in with specific information, as shown below.
-.. figure:: files/scaleout/15.png
+.. figure:: files/scaleout/clamp/4.png
:align: center
For this use case, the control loop schema type is "VM", while the event name has to match the event name reported in the VNF telemetry, which is "vLoadBalancer".
Once the policy item has been created, users can define a threshold that will be used at runtime to evaluate telemetry reported by the vLB. When the specified threshold is crossed, DCAE generates an ONSET event that will tell Policy Engine which closed loop to activate.
-.. figure:: files/scaleout/16.png
+.. figure:: files/scaleout/clamp/5.png
+ :align: center
+
+Since Frankfurt, users are required to define the PDP group for the configuration policy, as shown in the figure below.
+
+.. figure:: files/scaleout/clamp/6.png
+ :align: center
+
+After the configuration policy is created, users need to create the operational policy, which the Policy Engine uses to determine which actions and parameters should be used during closed loop. From the "Loop Instance" tab, users can select "Modify" to add a new Policy Model of type Drools:
+
+.. figure:: files/scaleout/clamp/7.png
:align: center
-After the configuration policy is created, users need to create the operational policy, which the Policy Engine uses to determine which actions and parameters should be used during closed loop.
+Users are required to provide basic closed loop information, like ID, timeout, and trigger, as shown in the example below. The trigger name, in particular, must match the name of the root operational policy created during the next step.
-.. figure:: files/scaleout/17.png
+.. figure:: files/scaleout/clamp/8.png
:align: center
-Since El Alto, CLAMP adds the "Policy Decision Entry" parameter, which has to contain the name of the root operational policy in the decision tree. For this use case, there is only one operational policy, called "vlbpolicy2" in the example above ("Policy ID" parameter). As such, "Policy Decision Entry" has to be set to "vlbpolicy2" as well. During creation of the operational policy, the user should select "VF Module Create" recipe and "SO" actor. The payload section is:
+To create a new operational policy, users can use the "Add" button below, and fill up the fields in the CLAMP GUI as shown in the example below, making sure that the "id" matches the "trigger" field defined before:
+
+.. figure:: files/scaleout/clamp/9.png
+ :align: center
+
+During creation of the operational policy, the user should select "VF Module Create" recipe and "SO" actor. The payload section is a JSON object like below:
::
- requestParameters: '{"usePreload":false,"userParams":[]}'
- configurationParameters: '[{"ip-addr":"$.vf-module-topology.vf-module-parameters.param[16].value","oam-ip-addr":"$.vf-module-topology.vf-module-parameters.param[30].value"}]'
+ {"requestParameters":"{\"usePreload\":true,\"userParams\":[]}",
+ "configurationParameters":"[{\"ip-addr\":\"$.vf-module-topology.vf-module-parameters.param[16]\",\"oam-ip-addr\":\"$.vf-module-topology.vf-module-parameters.param[30]\"}]"}
+
+Users can use the "Edit JSON" button to upload the payload.
+
+.. figure:: files/scaleout/clamp/10.png
+ :align: center
-Policy Engine passes the payload to SO, which will then use it during VF module instantiation to resolve configuration parameters. The JSON path
+The Policy Engine passes the payload to SO, which will then use it during VF module instantiation to resolve configuration parameters. The JSON path
::
@@ -244,19 +559,34 @@ Policy Engine passes the payload to SO, which will then use it during VF module
indicates that resolution for parameter "ip-addr" is available at "$.vf-module-topology.vf-module-parameters.param[16].value" in the JSON object linked by the VF module self-link in AAI. See section 1-7 for an example of how to determine the right path to configuration parameters.
-The target tab allows users to select the target type for the closed loop. For this use case, the user should select VF module as target type, as we are scaling a VF module. Please select the vDNS module as target resource ID.
+The "targetType" tab allows users to select the target type for the closed loop. For this use case, the user should select VF module as target type, as we are scaling a VF module. Please select the vDNS module as target resource ID.
+
+.. figure:: files/scaleout/clamp/11.png
+ :align: center
+
+As with configuration policy, users need to assign the PDP group to the operational policy.
+
+.. figure:: files/scaleout/clamp/12.png
+ :align: center
+
+Regarding guard policies, either "Frequency Limiter", "MinMax", or both can be used for the scale out use case. They can be added using the "Modify" item in the "Loop Instance" tab.
-.. figure:: files/scaleout/18.png
+.. figure:: files/scaleout/clamp/13.png
:align: center
-For what regards guard policies, either "Frequency Limiter", or "MinMax", or both can be used for the scale out use case. The example below shows the definition of a "Frequency Limiter" guard policy. The policy name should be guard.frequency.<policy ID> for Frequency Limiter and guard.minmax.<policy ID> for MinMax, where <policy ID> is vlbpolicy2 in the example above.
+The example below shows the definition of a "Frequency Limiter" guard policy. Note that some optional fields, such as id and time interval, should be added to the policy using the "Object Properties" button:
-.. figure:: files/scaleout/19.png
+.. figure:: files/scaleout/clamp/14.png
:align: center
-Once the operational policy design is completed, users can submit and then deploy the closed loop clicking the "Submit" and "Deploy" buttons, respectively, as shown below.
+The user needs to manually insert id, actor, and operation so as to match the same fields defined in the operational policy.
-.. figure:: files/scaleout/20.png
+.. figure:: files/scaleout/clamp/15.png
+ :align: center
+
+Once the operational policy design is completed, users can submit and then deploy the closed loop clicking the "Submit" and "Deploy" buttons from the "Loop Operations" tab, as shown below.
+
+.. figure:: files/scaleout/clamp/16.png
:align: center
At this point, the closed loop is deployed to Policy Engine and DCAE, and a new microservice will be deployed to the DCAE platform.
@@ -264,6 +594,7 @@ At this point, the closed loop is deployed to Policy Engine and DCAE, and a new
1-5 Creating a VNF Template with CDT
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
Before running scale out use case, the users need to create a VNF template using the Controller Design Tool (CDT), a design-time tool that allows users to create and on-board VNF templates into APPC. The template describes which control operation can be executed against the VNF (e.g. scale out, health check, modify configuration, etc.), the protocols that the VNF supports, port numbers, VNF APIs, and credentials for authentication. Being VNF agnostic, APPC uses these templates to "learn" about specific VNFs and the supported operations.
CDT requires two inputs:
@@ -319,7 +650,7 @@ Here is an example of API for the vLB VNF used for this use case. We name the fi
To create the VNF template in CDT, the following steps are required:
-- Connect to the CDT GUI: http://ANY-K8S-IP:30289
+- Connect to the CDT GUI: http://ANY_K8S_IP:30289
- Click "My VNF" Tab. Create your user ID, if necessary
- Click "Create new VNF" entering the VNF type as reported in VID or AAI, e.g. vLoadBalancerMS/vLoadBalancerMS 0
- Select "ConfigScaleOut" action
@@ -330,6 +661,8 @@ To create the VNF template in CDT, the following steps are required:
- Click "Reference Data" Tab
- Click "Save All to APPC"
+Note: if a user gets an error when saving to APPC (cannot connect to the APPC network), they should open a browser to http://ANY_K8S_IP:30211 to accept the APPC proxy certificate.
+
For health check operation, we just need to specify the protocol, the port number and username of the VNF (REST, 8183, and "admin" respectively, in the case of vLB/vDNS) and the API. For the vLB/vDNS, the API is:
::
@@ -347,6 +680,7 @@ At this time, CDT doesn't allow users to provide VNF password from the GUI. To u
1-6 Setting the Controller Type in SO Database
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
Users need to specify which controller to use for the scale out use case. For Dublin, the supported controller is APPC. Users need to create an association between the controller and the VNF type in the SO database.
To do so:
@@ -358,7 +692,7 @@ To do so:
mysql -ucataloguser -pcatalog123
-- Use catalogdb databalse
+- Use catalogdb database
::
@@ -376,6 +710,7 @@ SO has a default entry for VNF type "vLoadBalancerMS/vLoadBalancerMS 0"
1-7 Determining VNF reconfiguration parameters
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
The post scale out VNF reconfiguration is VNF-independent but the parameters used for VNF reconfiguration depend on the specific use case. For example, the vLB-vDNS-vPacketGenerator VNF described in this documentation uses the vLB as "anchor" point. The vLB maintains the state of the VNF, which, for this use case, is the list of active vDNS instances. After creating a new vDNS instance, the vLB needs to know the IP addresses (of the internal private network and management network) of the new vDNS. The reconfiguration action is executed by APPC, which receives those IP addresses from SO during the scale out workflow execution. Note that different VNFs may have different reconfiguration actions. A parameter resolution is expressed as a JSON path to the SDNC VF module topology parameter array. For each reconfiguration parameter, the user has to specify the array location that contains the corresponding value (IP address in the specific case). For example, the "configurationParameters" section of the input request to SO during scale out with manual trigger (see Section 4) contains the resolution path to the "ip-addr" and "oam-ip-addr" parameters used by the VNF.
::
@@ -877,7 +1212,30 @@ In future releases, we plan to leverage CDS to model post scaling VNF reconfigur
PART 2 - Scale Out Use Case Instantiation
-----------------------------------------
-This step is only required if CDS is used.
+
+Manual queries with POSTMAN
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This step is only required if CDS is used, otherwise you can use VID to instantiate the service and the VNF.
+Note that the POSTMAN collection linked at the top of this page provides some level of automatic scripting that automatically passes values between requests and provisions the following queries.
+
+You must enter in the postman config different variables:
+- "k8s" -> The k8s loadBalancer cluster node
+- "cds-service-model" -> The SDC service name distributed
+- "cds-instance-name" -> A name of your choice for the vnf instance (This must be changed each time you launch the instantiation)
+
+These useful requests are:
+CDS#1 - SDC Catalog Service -> This gets the Sdc service and provision some variables
+CDS#2 - SO Catalog DB Service VNFs - CDS -> This gets info in SO and provision some variables for the instantiation
+CDS#3 - SO Self-Serve Service Assign & Activate -> This starts the Service/vnf instantiation
+Open the body and replace the values like tenantId, Owning entity, region, and all the openstack values everywhere in the payload
+
+Note that you may have to add "onap_private_net_cidr":"10.0.0.0/16" in the "instanceParams" array depending on your OpenStack network configuration.
+
+CDS#4 - SO infra Active Request -> Used to get the status of the previous query
+
+Manual queries without POSTMAN
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
GET information from SDC catalogdb
@@ -1113,18 +1471,15 @@ that will instantiate Service, VNF, VF modules and Heat stacks:
"projectName":"Project-Demonstration"
},
"owningEntity":{
- "owningEntityId":"6f6c49d0-8a8c-4704-9174-321bcc526cc0",
- "owningEntityName":"OE-Demonstration"
+ "owningEntityId":"6f6c49d0-8a8c-4704-9174-321bcc526cc0",
+ "owningEntityName":"OE-Demonstration"
},
"modelInfo":{
- "modelVersion":"1.0",
- "modelVersionId":"{{service-uuid}}",
- "modelInvariantId":"{{service-invariantUUID}}",
- "modelName":"{{service-name}}",
- "modelType":"service"
- }
- }
-}'
+ "modelVersion":"1.0",
+ "modelVersionId":"{{service-uuid}}",
+ "modelInvariantId":"{{service-invariantUUID}}",
+ "modelName":"{{service-name}}",
+ "modelType":"service"}}}'
Note that the "dcae_collector_ip" parameter has to contain the IP address of one of the Kubernetes cluster nodes, 10.12.5.214 in the example above. In the response to the Macro request, the user will obtain a requestId that will be useful to follow the instantiation request status in the ONAP SO:
@@ -1143,7 +1498,8 @@ PART 3 - Post Instantiation Operations
3-1 Post Instantiation VNF configuration
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-CDS executes post-instantiation VNF configuration if the "skip-post-instantiation" flag in the SDC service model is set to false, which is the default behavior. Manual post-instantiation configuration is necessary if the "skip-post-instantiation" flag in the service model is set to true or if the VNF is instantiated using the preload approach, which doesn't include CDS. Regardless, this step is NOT required during scale out operations, as VNF reconfiguration will be triggered by SO and executed by APPC.
+
+CDS executes post-instantiation VNF configuration if the "skip-post-instantiation" flag in the SDC service model is set to false, which is the default behavior. Manual post-instantiation configuration is necessary if the "skip-post-instantiation" flag in the service model is set to true or if the VNF is instantiated using the preload approach, which doesn't include CDS. Regardless, this step is NOT required during scale out operations, as VNF reconfiguration will be triggered by SO and executed by APPC.
If VNF post instantiation is executed manually, in order to change the state of the vLB the users should run the following REST call, replacing the IP addresses in the VNF endpoint and JSON object to match the private IP addresses of their vDNS instance:
@@ -1170,6 +1526,7 @@ At this point, the VNF is fully set up.
3-2 Updating AAI with VNF resources
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
To allow automated scale out via closed loop, the users need to inventory the VNF resources in AAI. This is done by running the heatbridge python script in /root/oom/kubernetes/robot in the Rancher VM in the Kubernetes cluster:
::
@@ -1182,7 +1539,25 @@ Note that "vlb_onap_private_ip_0" used in the heatbridge call is the actual para
PART 4 - Triggering Scale Out Manually
--------------------------------------
-For scale out with manual trigger, VID is not supported at this time. Users can run the use case by directly calling SO APIs:
+For scale out with manual trigger, VID is not supported at this time.
+
+Manual queries with POSTMAN
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Note that the POSTMAN collection linked at the top of this page provides some level of automatic scripting that automatically passes values between requests and provisions the following queries.
+
+You must enter in the postman config different variables:
+- "k8s" -> The k8s loadBalancer cluster node
+- "cds-service-model" -> The SDC service name distributed
+- "cds-instance-name" -> A name of your choice for the vnf instance (This must be changed each time you launch the instantiation)
+
+CDS#5 - SO ScaleOut -> This will initiate a Scaleout manually
+CDS#7 - SO ScaleIn -> This will initiate a ScaleIn manually
+
+Manual queries without POSTMAN
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Users can run the use case by directly calling SO APIs:
::
@@ -1398,7 +1773,7 @@ These IDs are also used in the URL request to SO:
::
- http://<Any_K8S_Node_IP_Address>:30277/onap/so/infra/serviceInstantiation/v7/serviceInstances/7d3ca782-c486-44b3-9fe5-39f322d8ee80/vnfs/9d33cf2d-d6aa-4b9e-a311-460a6be5a7de/vfModules/scaleOut
+ http://<Any_K8S_Node_IP_Address>:30277/onap/so/infra/serviceInstantiation/v7/serviceInstances/7d3ca782-c486-44b3-9fe5-39f322d8ee80/vnfs/9d33cf2d-d6aa-4b9e-a311-460a6be5a7de/vfModules/scaleOut
Finally, the "configurationParameters" section in the JSON request to SO contains the parameters that will be used to reconfigure the VNF after scaling. Please see Section 1-7 for an in-depth description of how to set the parameters correctly.
@@ -1428,7 +1803,7 @@ The procedure is similar to one described above, with some minor changes:
4) **Controller type selection** in SO works as described in Section 1-6.
-5) **VNF instantiation from VID**: users can use VID to create the service, the VNF, and instantiate the VF modules. In the VID main page, users should select GR API (this should be the default option).
+5) **VNF instantiation from VID**: users can use VID to create the service, the VNF, and instantiate the VF modules. In the VID main page, users should select GR API (this should be the default option).
.. figure:: files/scaleout/vid.png
:align: center
@@ -1736,6 +2111,7 @@ Module-1 Preload
Module-2 Preload
~~~~~~~~~~~~~~~~
+
::
@@ -2052,11 +2428,14 @@ To instantiate VF modules, please refer to this wiki page: https://wiki.onap.org
PART 6 - Known Issues and Resolutions
-------------------------------------
-1) When running closed loop-enabled scale out, the closed loop designed in CLAMP conflicts with the default closed loop defined for the old vLB/vDNS use case
-Resolution: Change TCA configuration for the old vLB/vDNS use case
+ 1) When running closed loop-enabled scale out, the closed loop designed in CLAMP conflicts with the default closed loop defined for the old vLB/vDNS use case
+
+Resolution: Change TCA configuration for the old vLB/vDNS use case
-- Connect to Consul: http://<ANY K8S VM IP ADDRESS>:30270 and click on "Key/Value" → "dcae-tca-analytics"
+- Connect to Consul: http://ANY_K8S_IP:30270 and click on "Key/Value" → "dcae-tca-analytics"
- Change "eventName" in the vLB default policy to something different, for example "vLB" instead of the default value "vLoadBalancer"
-- Change "subscriberConsumerGroup" in the TCA configuration to something different, for example "OpenDCAE-c13" instead of the default value "OpenDCAE-c12"
+- Change "subscriberConsumerGroup" in the TCA configuration to something different, for example "OpenDCAE-c13" instead of the default value "OpenDCAE-c12"
- Click "UPDATE" to upload the new TCA configuration
+
+2) During Guilin testing, it has been noticed that there is an issue between SO and APPC for Healthcheck queries; this does not prevent the use case from proceeding but limits APPC capabilities.
diff --git a/docs/docs_usecases.rst b/docs/docs_usecases.rst
index 7eff6f485..a8efb0d63 100644
--- a/docs/docs_usecases.rst
+++ b/docs/docs_usecases.rst
@@ -1,55 +1,11 @@
.. This work is licensed under a Creative Commons Attribution 4.0
International License. http://creativecommons.org/licenses/by/4.0
- Copyright 2018 Huawei Technologies Co., Ltd. All rights reserved.
.. _docs_usecases:
-Verified Use Cases and Functional Requirements
-----------------------------------------------
+:orphan:
-Description
-~~~~~~~~~~~
-This session includes use cases and functional requirements which have been verified in Dublin release by the Integration team:
- 1. What has been implemented
- 2. Step by step instruction on how to deploy them, including the links to download the related assets and resources
- 3. Known issues and workaround
+.. toctree::
+ :maxdepth: 1
-The final testing status can be found at `Dublin Release Integration Testing Status Summary <https://wiki.onap.org/display/DW/Dublin+Release+Integration+Testing+Status>`_
-
-Use Cases
-~~~~~~~~~
-:ref:`vFirewall Use Case <docs_vfw>`
-
-:ref:`VF Module Scale Out Use Case (vLoadBalancer/vDNS example) <docs_scaleout>`
-
-:ref:`vCPE Use Case <docs_vcpe>`
-
-:ref:`CCVPN (Cross Domain and Cross Layer VPN) Use Case <docs_ccvpn>`
-
-:ref:`vFirewall/vDNS with HPA Use Case <docs_vfw_hpa>`
-
-:ref:`vFirewall Traffic Distribution Use Case <docs_vfw_traffic>`
-
-:ref:`BBS (Broadband Service) Use Case <docs_bbs>`
-
-:ref:`vIPsec with HPA Use Case <docs_vipsec_hpa>`
-
-:ref:`vFirewall/edgex with multicloud kubernetes plugin <docs_vfw_edgex_multicloud_k8s>`
-
-Functional Requirements
-~~~~~~~~~~~~~~~~~~~~~~~
-:ref:`5G - Real Time PM and High Volume Stream Data Collection <docs_realtime_pm>`
-
-:ref:`5G - Bulk PM <docs_5g_bulk_pm>`
-
-:ref:`5G - Configuration over NETCONF <docs_5G_Configuration_over_NETCONF>`
-
-:ref:`5G - OOF and PCI <docs_5G_oof_pci>`
-
-:ref:`PNF Support - PNF Plug and Play <docs_5g_pnf_pnp>`
-
-:ref:`PNF Support - PNF Software Upgrade <docs_5g_pnf_software_upgrade>`
-
-:ref:`Change Management Flexible Designer and Orchestrator <docs_CM_flexible_designer_orchestrator>`
-
-:ref:`Change Management Schedule Optimization <docs_CM_schedule_optimizer>`
+ usecases/deprecated_usecases.rst
diff --git a/docs/docs_usecases_release.rst b/docs/docs_usecases_release.rst
new file mode 100644
index 000000000..e8f3c401f
--- /dev/null
+++ b/docs/docs_usecases_release.rst
@@ -0,0 +1,34 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+.. _docs_usecases_release:
+
+:orphan:
+
+Kohn Use Cases and Requirements
+===============================
+
+Description
+-----------
+
+This session includes use cases and functional requirements which have been
+officially verified in Kohn release by the ONAP community.
+
+For each use case or functional requirement, you can find contact names and a
+link to the associated documentation.
+
+This documentation deals with
+
+ 1. What has been implemented
+ 2. Step by step instructions to deploy and execute the tests, including the
+ links to download the related assets and resources
+ 3. Known issues and workarounds
+
+.. toctree::
+ :maxdepth: 1
+
+ usecases/release_usecases.rst
+ usecases/release_automated_usecases.rst
+ usecases/release_requirements.rst
+ usecases/release_non_functional_requirements.rst
+ usecases/deprecated_usecases.rst
diff --git a/docs/docs_vCPE with Tosca VNF.rst b/docs/docs_vCPE with Tosca VNF.rst
deleted file mode 100644
index 4a5b6fc69..000000000
--- a/docs/docs_vCPE with Tosca VNF.rst
+++ /dev/null
@@ -1,159 +0,0 @@
-.. _docs_vcpe_tosca:
-
-vCPE with Tosca VNF
-----------------------------
-
-VNF Packages and NS Packages
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-vCPE tosca file url: https://git.onap.org/demo/tree/tosca/vCPE
-
-5 VNFs are here for the ONAP vCPE use case. This VNFD is transformed manually from vCPE heat template.
-Please run "./generate_csar.sh" to create the CSAR package files for these 5 VNFS. CSAR package file is just a zip formatted file. If you want to use SRIOV SRIOV-NIC", please run "./generate_csar.sh sriov" to create the CSAR package files for SRIOV.
-All the generated VNF packges can be found in the following link:
-- VNF packages: https://wiki.onap.org/display/DW/vCPE+with+Tosca+VNF+Test+Guide
-- NS packages: https://wiki.onap.org/display/DW/vCPE+with+Tosca+VNF+Test+Guide
-
-Description
-~~~~~~~~~~~
-The vCPE with Tosca VNF shows how to use ONAP to deploy tosca based vCPE. ONAP Casablanca release supports deployment,termination and manual heal Tosca based vCPE. User can trigger the above operation via UUI. and User can first chose Network serivce type and conrresponding service template in UUI and then UUI will directly invoke VF-C Northbound interfaces to do the life cycle management. In Casablanca release, we bypass SO, in the following release, we can add SO to the workflow. The main projects involved in this use case include: SDC, A&AI, UUI,VF-C, Multicloud,MSB, Policy,OOF.
-The use case is composed of five virtual functions (VFs): Infrastructure including vDNS, vDHCP, vAAA(Authorization, Authentication, Accounting) and vWEB, vBNG(Virtual Broadband Network Gateway), vGMUX(Virtual Gateway Multiplexer), vBRGEMU(Bridged Residential Gateway) and vGW(Virtual Gateway). Infrastructure VF run in one VM. the other VFs run in separate four VMs. We will send much data from vBRGEMU to vGW. we need to accelarate it using SRIOV-NIC.
-The original vCPE Use Case Wiki Page can be found here: https://wiki.onap.org/pages/viewpage.action?pageId=3246168
-
-How to Use
-~~~~~~~~~~
-
-
-Configuration:
-~~~~~~~~~~~~~~
-1) VIM Configuration
-If you want to use SRIOV-NIC, you need first config SRIOV NIC to refer to [1].
-[1] https://docs.openstack.org/ocata/networking-guide/config-sriov.html
-
-ONAP managing 1 cloud-region which have three flavors.
-Flavor 11:
-2 vcpus, 1 Gbytes of memory, 20Gb disk
-Numa page size: 2Mbytes and number pages 512
-::
-
- openstack flavor create onap.hpa.flavor11 -id auto --ram 1024 --disk 20 --vcpus 2
-
-Flavor 12:
-2 vcpus, 2 Gbytes of memory, 20Gb disk
-Numa page size: 2Mbytes and number pages 1024
-::
-
- openstack flavor create onap.hpa.flavor12 -id auto --ram 2048 --disk 20 --vcpus 2
-
-Flavor 13:
-2 vcpus, 4 Gbytes of memory, 20Gb disk
-Huge page size: 2Mbytes and number pages 2048
-1 SRIOV-NIC VF
-::
-
- openstack flavor create onap.hpa.flavor13 -id auto --ram 4096 --disk 20 -vcpus 2
- openstack flavor set onap.hpa.flavor11 --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-1234-5678-physnet1:1
- openstack aggregate create --property sriov_nic=sriov-nic-intel-1234-5678-physnet1:1 hpa_aggr11
-
-comments: you must change 1234 and 5678 to real vendor id and product id. you also need change physnet1 to the provider network.
-
-2)Policy Configuration
-After the patch https://gerrit.onap.org/r/#/c/73502/ is merged. With the generated policy and do some manually update as follows, the service could be distributed successfully and the Policy/VFC/OOF could work as excepted.
-
-- Need manually modify policy item because the “vendor id” and “PCI device id” and “architecture” must be changed in different VIMs since we have different PCI devices in different VIMs
-- The value of mandatory in CSAR is “true”, OOF is case intensive, it needs to use “True”. Have to update it. suggest OOF to use ignoreCase in R4.
-- The attribute key in CSAR is pciNumDevices, but the responding one in OOF/Mutlicloud is pciCount. Suggest keeping alignment in R4.
-- The policy scope has to add a value “us” into it which is a configuration issue in OOF side. Policy side also need do improvement to deal with policy scope automatically append instead of replacement so such policy could be used by several services at the same time.
-
-Design Time:
-~~~~~~~~~~~
-1) Because SDC doesn't export ETSI aigned VNF package and NS package, so in this release, we put the real ETSI aligned package as package artifact.
-2) When design Network service in SDC, should assign "gvnfmdriver" as the value of nf_type in Properties Assignment. so that VF-C can know will use gvnfm to manage VNF life cycle.
-
-Run Time:
-~~~~~~~~
-1) First onboard VNF/NS package from SDC to VF-C catalog in sequence.
-2) Trigger the NS operation via UUI
-
-More details can be fonud here: https://wiki.onap.org/display/DW/vCPE+with+Tosca+VNF+Test+Guide
-
-Test Status and Plans
-~~~~~~~~~~~~~~~~~~~~~
-The test plan 3 in https://wiki.onap.org/pages/viewpage.action?pageId=41421112.
-Test Plan 3: VF-C HPA testing
-This test plan covers the tests related to testing
-Support for the vCPE use case in VF-C
-Use vCPE (Infra, vGW, vBNG, vBRGEMU and vGMUX)
-
-Infra part of policy asking for:
-::
-
- 2 vcpus
- >= 2Gbytes of memory
- > 40Gbytes of disk
-
-vGW part of policy asking for:
-::
-
- 2 vcpus
- >=4Gbytes of memory
- >= 40Gbytes of disk
- Numa page size: 2Mbytes and pages 1024
- with one SRIOV-NIC
-
-vBNG part of policy asking for:
-::
-
- 2 vcpus
- >= 2Gbytes of memory
- > 40Gbytes of disk
- Numa page size: 2Mbytes and pages 1024
- with one SRIOV-NIC
-
-vBGREMU part of policy asking for:
-::
-
- 2 vcpus
- >= 2Gbytes of memory
- >= 40Gbytes of disk
- Numa page size: 2Mbytes and pages 1024
- with one SRIOV-NIC
-
-vGMUX part of policy asking for:
-::
-
- 2 vcpus
- >= 2Gbytes of memory
- > 40Gbytes of disk
- Numa page size: 2Mbytes and pages 1024
- with one SRIOV-NIC
-
-Instantiate the VNF
-Check for results:
-It would have selected flavor13 for vGW, vBNG, vBRGEMU and vGMUX VMs. It would have selected flavor13 and flavor12 for Infrastructure.
-
-This case completed all tests as found here: https://wiki.onap.org/display/DW/vCPE+with+TOSCA+VNF+Integration+Test++-+Test+status
-
-Known Issues and Resolutions
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-- Some SDC NS data model is not aligned to VFC NS data model, VFC NS also according to ETSI SOL0001. we also can refer to https://jira.onap.org/browse/SDC-1897. we have a workaround for this issue, we put the service as artifact file and distribute to VFC.
-- NFV Tosca parser bug https://jira.opnfv.org/browse/PARSER-187. we also filed a bug in VFC https://jira.onap.org/browse/VFC-1196.
-- 'artifacts' definition is missing in the exported csar's VDU node, we also can refer to https://jira.onap.org/browse/SDC-1900. It’s a very hacky workaround in VFC’s GVFNM. Because currently the only use case will use GVFNM is vCPE, which only uses the ubuntu16.04 image, so GVFNM just makes the ubuntu16.04 image as the default if the "sw_image" artifact is missing in the SDC’s exported CSAR.
-- OOF patch https://gerrit.onap.org/r/#/c/73332/ is not accepted by 1.2.4 image. 1.2.5 is available in nexus3 repo. But not available in Casablanca release. If you want to use it, you can use 1.2.5-SNAPSHOT-latest. If you use 1.2.4 image, you also need to modify code according to the patch.
-- vnflcm notification error patch https://gerrit.onap.org/r/#/c/73852/
-- grant error patch not merged into VF-C 1.2.2 image: https://gerrit.onap.org/r/#/c/73833/ and https://gerrit.onap.org/r/#/c/73770/
-- VF-C catalog config should be updated with the right SDC URL and user/pwd
-Resolution: Disable VFC catalog livenessprobe and update configuration
-
-a) edit dev-vfc-catalog deployment
-b) remove livenessprobe section
-c) enter into catalog pod and update configuration
-::
-
-kubectl -n onap exec -it dev-vfc-catalog-6978b76c86-87722 /bin/bash
-config file location: service/vfc/nfvo/catalog/catalog/pub/config/config.py
-Update the SDC configuration as follows:
-SDC_BASE_URL = "http://msb-iag:80/api"
-SDC_USER = "aai"
-SDC_PASSWD = "Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U"
-
diff --git a/docs/docs_vCPE.rst b/docs/docs_vCPE.rst
index cff5f3f27..6a8e2c536 100644
--- a/docs/docs_vCPE.rst
+++ b/docs/docs_vCPE.rst
@@ -4,12 +4,14 @@
.. _docs_vcpe:
+:orphan:
+
vCPE Use Case
-----------------------------
+-------------
Description
~~~~~~~~~~~
-vCPE use case is based on Network Enhanced Residential Gateway architecture specified in Technical Report 317 (TR-317), which defines how service providers deploy residential broadband services like High Speed Internet Access. The use case implementation has infrastructure services and customer service. The common infrastructure services are deployed first and shared by all customers. The use case demonstrates ONAP capabilities to design, deploy, configure and control sophisticated services.
+vCPE use case is based on Network Enhanced Residential Gateway architecture specified in Technical Report 317 (TR-317), which defines how service providers deploy residential broadband services like High Speed Internet Access. The use case implementation has infrastructure services and customer service. The common infrastructure services are deployed first and shared by all customers. The use case demonstrates ONAP capabilities to design, deploy, configure and control sophisticated services.
More details on the vCPE Use Case can be found on wiki page https://wiki.onap.org/pages/viewpage.action?pageId=3246168
@@ -25,135 +27,88 @@ Here are the main steps to run the use case in Integration lab environment, wher
1. Run Robot script from Rancher node to onboard VNFs, create and distribute models for vCPE four infrastructure services, i.e. infrastructure, brg, bng and gmux
-::
+::
demo-k8s.sh onap init
-
-2. Add customer SDN-ETHERNET-INTERNET (see the use case tutorial wiki page for detail)
-
-3. Add identity-url to RegionOne data in A&AI. First use POSTMAN to GET cloud-region RegionOne data, then add identity-url and PUT back to A&AI
-::
-
- GET https://{{aai}}:{{port}}/aai/v14/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/RegionOne
+2. Add route on sdnc cluster VM node, which is the cluster VM node where pod sdnc-sdnc-0 is running on. This will allow ONAP SDNC to configure BRG later on.
::
- PUT https://{{aai}}:{{port}}/aai/v14/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/RegionOne
- {
- "cloud-owner": "CloudOwner",
- "cloud-region-id": "RegionOne",
- "cloud-type": "SharedNode",
- "owner-defined-type": "OwnerType",
- "cloud-region-version": "v1",
- "identity-url": "http://10.12.25.2:5000/v2.0",
- "cloud-zone": "CloudZone",
- "resource-version": "1559336510793",
- "relationship-list": {
- ... ...
-
-4. Add route on sdnc cluster VM node, which is the cluster VM node where pod sdnc-sdnc-0 is running on. This will allow ONAP SDNC to configure BRG later on.
-
-::
-
ip route add 10.3.0.0/24 via 10.0.101.10 dev ens3
-5. Install Python and other Python libraries
+3. Install Python and other Python libraries
::
-
+
integration/test/vcpe/bin/setup.sh
-6. Change the Openstack env parameters and one customer service related parameter in vcpecommon.py
+4. Setup vcpe scripts by adjusting relevant parts of provided vcpeconfig.yaml config file. Most importantly adjust the Openstack env parameters shown below. Please issue 'vcpe.py --help' for detailed usage info.
-::
+::
- cloud = {
- '--os-auth-url': 'http://10.12.25.2:5000',
- '--os-username': 'xxxxxxxxxx',
- '--os-user-domain-id': 'default',
- '--os-project-domain-id': 'default',
- '--os-tenant-id': 'xxxxxxxxxxxxxxxx' if oom_mode else '1e097c6713e74fd7ac8e4295e605ee1e',
- '--os-region-name': 'RegionOne',
- '--os-password': 'xxxxxxxxxxx',
- '--os-project-domain-name': 'xxxxxxxxx' if oom_mode else 'Integration-SB-07',
- '--os-identity-api-version': '3'
- }
+ cloud_name: 'xxxxxxxx'
- common_preload_config = {
- 'oam_onap_net': 'xxxxxxxx' if oom_mode else 'oam_onap_lAky',
- 'oam_onap_subnet': 'xxxxxxxxxx' if oom_mode else 'oam_onap_lAky',
- 'public_net': 'xxxxxxxxx',
+ common_preload_config:
+ 'oam_onap_net': 'xxxxxxxx'
+ 'oam_onap_subnet': 'xxxxxxxxxx'
+ 'public_net': 'xxxxxxxxx'
'public_net_id': 'xxxxxxxxxxxxx'
- }
-::
+"cloud_name" should be set to the OpenStack cloud name from clouds.yaml. By default this file is in the ~/.config/openstack directory; if it's located in the scripts directory it will take precedence over the aforementioned one. An example clouds.yaml.example file is provided.
- # CHANGEME: vgw_VfModuleModelInvariantUuid is in rescust service csar, open service template with filename like service-VcpesvcRescust1118-template.yml and look for vfModuleModelInvariantUUID under groups vgw module metadata.
- self.vgw_VfModuleModelInvariantUuid = 'xxxxxxxxxxxxxxx'
-
-7. Initialize vcpe
+5. Run Robot to create and distribute for vCPE customer service. This step assumes step 1 has successfully distributed all vcpe models except customer service model
::
-
- vcpe.py init
-8. Run a command from Rancher node to insert vcpe customer service workflow entry in SO catalogdb. You should be able to see a sql command printed out from the above step output at the end, and use that sql command to replace the sample sql command below (inside the double quote) and run it from Rancher node:
+ ete-k8s.sh onap distributevCPEResCust
+
+6. If running with oom_mode=False initialize SDNC ip pool by running below command from k8s control node. It will be done automatically otherwise.
::
- kubectl exec dev-mariadb-galera-mariadb-galera-0 -- mysql -uroot -psecretpassword catalogdb -e "INSERT INTO service_recipe (ACTION, VERSION_STR, DESCRIPTION, ORCHESTRATION_URI, SERVICE_PARAM_XSD, RECIPE_TIMEOUT, SERVICE_TIMEOUT_INTERIM, CREATION_TIMESTAMP, SERVICE_MODEL_UUID) VALUES ('createInstance','1','vCPEResCust 2019-06-03 _04ba','/mso/async/services/CreateVcpeResCustService',NULL,181,NULL, NOW(),'6c4a469d-ca2c-4b02-8cf1-bd02e9c5a7ce')"
+ kubectl -n onap exec -it dev-sdnc-sdnc-0 -- /opt/sdnc/bin/addIpAddresses.sh VGW 10.5.0 22 250
-9. Run Robot to create and distribute for vCPE customer service. This step assumes step 1 has successfully distributed all vcpe models except customer service model
+7. Initialize vcpe
::
- ete-k8s.sh onap distributevCPEResCust
+ vcpe.py init
-10. Manually copy vCPE customer service csar (starting with service-Vcperescust) under Robot container /tmp/csar directory to Rancher vcpe/csar directory, now you should have these files:
+8. If running with oom_mode=False run a command printed at the end of the above step from k8s control node to insert vcpe customer service workflow entry in SO catalogdb. It will be done automatically otherwise.
-::
- root@sb00-nfs:~/integration/test/vcpe/csar# ls -l
- total 528
- -rw-r--r-- 1 root root 126545 Jun 26 11:28 service-Demovcpeinfra-csar.csar
- -rw-r--r-- 1 root root 82053 Jun 26 11:28 service-Demovcpevbng-csar.csar
- -rw-r--r-- 1 root root 74179 Jun 26 11:28 service-Demovcpevbrgemu-csar.csar
- -rw-r--r-- 1 root root 79626 Jun 26 11:28 service-Demovcpevgmux-csar.csar
- -rw-r--r-- 1 root root 78156 Jun 26 11:28 service-Demovcpevgw-csar.csar
- -rw-r--r-- 1 root root 83892 Jun 26 11:28 service-Vcperescust20190625D996-csar.csar
-
-11. Instantiate vCPE infra services
+9. Instantiate vCPE infra services
::
vcpe.py infra
-12. From Rancher node run vcpe healthcheck command to check connectivity from sdnc to brg and gmux, and vpp configuration of brg and gmux. Write down BRG MAC address printed out at the last line
+10. From Rancher node run vcpe healthcheck command to check connectivity from sdnc to brg and gmux, and vpp configuration of brg and gmux.
::
healthcheck-k8s.py --namespace <namespace name> --environment <env name>
-13. Instantiate vCPE customer service. Input the BRG MAC when prompt
+11. Instantiate vCPE customer service.
::
vcpe.py customer
-14. Update libevel.so in vGMUX VM and restart the VM. This allows vGMUX to send events to VES collector in close loop test. See tutorial wiki for details
+12. Update libevel.so in vGMUX VM and restart the VM. This allows vGMUX to send events to VES collector in close loop test. See tutorial wiki for details
-15. Run heatbridge. The heatbridge command usage: demo-k8s.sh <namespace> heatbridge <stack_name> <service_instance_id> <service> <oam-ip-address>, please refer to vCPE tutorial page on how to fill in those paraemters. See an example as following:
+13. Run heatbridge. The heatbridge command usage: demo-k8s.sh <namespace> heatbridge <stack_name> <service_instance_id> <service> <oam-ip-address>, please refer to the vCPE tutorial page on how to fill in those parameters. See an example as follows:
::
~/integration/test/vcpe# ~/oom/kubernetes/robot/demo-k8s.sh onap heatbridge vcpe_vfmodule_e2744f48729e4072b20b_201811262136 d8914ef3-3fdb-4401-adfe-823ee75dc604 vCPEvGMUX 10.0.101.21
-16. Start closed loop test by triggering packet drop VES event, and monitor if vGMUX is restarting. You may need to run the command twice if the first run fails
+14. Start closed loop test by triggering packet drop VES event, and monitor if vGMUX is restarting. You may need to run the command twice if the first run fails
-::
+::
vcpe.py loop
diff --git a/docs/docs_vCPE_tosca_local.rst b/docs/docs_vCPE_tosca_local.rst
new file mode 100644
index 000000000..8b903adb7
--- /dev/null
+++ b/docs/docs_vCPE_tosca_local.rst
@@ -0,0 +1,210 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+ Copyright 2020 CMCC Technologies Co., Ltd. All rights reserved.
+
+.. _docs_vcpe_tosca_local:
+
+:orphan:
+
+vCPE Tosca Local Mode Use Case
+------------------------------
+
+Description
+~~~~~~~~~~~
+vCPE tosca use case is based on Network Enhanced Residential Gateway architecture specified in Technical Report 317 (TR-317), which defines how service providers deploy residential broadband services like High Speed Internet Access. The use case implementation has infrastructure services and customer service. The common infrastructure services are deployed first and shared by all customers. The use case demonstrates ONAP capabilities to design, deploy, configure and control sophisticated services.
+
+More details on the vCPE Use Case can be found on wiki page https://wiki.onap.org/pages/viewpage.action?pageId=3246168
+
+"Local" refers to the way the network elements are distributed. Here "local" means that we upload the csar files ourselves in order to distribute the VNF and NS configurations.
+
+Source Code
+~~~~~~~~~~~
+vcpe tosca local test scripts: https://git.onap.org/integration/tree/test/vcpe_tosca/local/vcpe_tosca_test.py
+
+How to Use
+~~~~~~~~~~
+The use case has been automated by the vcpe_tosca_test scripts. The following are the main steps to run the use case in the Integration lab environment:
+
+1) Install ONAP CLI environment, open_cli_product is onap-dublin.
+ Use https://git.onap.org/integration/tree/test/vcpe_tosca/local/scripts/install-alpine.sh to install ONAP CLI.
+
+2) Prepare openstack test environment.
+
+ * Create project(tenant) and user on openstack
+
+ Openstack Horizon--Identity--Projects page
+
+ .. image:: files/vcpe_tosca/create_project.png
+
+ Openstack Horizon--Identity--Users page
+
+ .. image:: files/vcpe_tosca/create_user.png
+
+ Manage Project Members
+
+ .. image:: files/vcpe_tosca/manage_project_user.png
+
+ * Create and upload image for VNF
+
+ Identify the version of the lab server, my lab server is Ubuntu 16.04.3 LTS.
+
+ ::
+
+ root@onap-dengyuanhong-master:~# cat /etc/lsb-release
+ DISTRIB_ID=Ubuntu
+ DISTRIB_RELEASE=16.04
+ DISTRIB_CODENAME=xenial
+ DISTRIB_DESCRIPTION="Ubuntu 16.04.3 LTS"
+
+
+ Download the related ubuntu image from https://cloud-images.ubuntu.com/
+
+ .. image:: files/vcpe_tosca/image.png
+
+   Openstack Horizon--Project--Compute--Images page, create an image named image; the name must be the same as the image name defined in the vnf csar file.
+
+ .. image:: files/vcpe_tosca/create_image.png
+
+3) Update the configuration file vcpe_config.json under https://git.onap.org/integration/tree/test/vcpe_tosca/local/config
+
+ You should update the values if you want to run in your environment.
+
+ Firstly, identify the Region name you used on your openstack environment, our Region name is RegionOne, it will be used by the configuration file.
+
+ ::
+
+ [wrsroot@controller-0 ~(keystone_admin)]$ openstack region list
+ +-----------+---------------+-------------+
+ | Region | Parent Region | Description |
+ +-----------+---------------+-------------+
+ | RegionOne | None | |
+ +-----------+---------------+-------------+
+
+
+ Secondly, update the values according to your environment.
+
+ ::
+
+ "open_cli_product": set to CLI product you installed, onap-dublin is OK for this test.
+ "open_cli_home": set to the oclip home path,
+ "aai_url": set to msb ip and port you used,
+ "msb_url": set to msb ip and port you used,
+ "multicloud_url": set to msb ip and port you used,
+
+ "complex_name": set to any complex name you want to use, the name must be unique
+
+ "cloud_region_data": {
+ "RegionOne":(update to your Region name) {
+ "cloud-region-version": the cloud region version of your Cloud region, you can keep "titanium_cloud"
+ "esr-system-info-id": "1111ce1f-aa78-4ebf-8d6f-4b62773e9b01",
+ "service-url": the ip change to your openstack ip address,
+ "user-name": the user name you created on openstack,
+ "password": the user password you created on openstack,
+ "system-type": "VIM",
+ "ssl-insecure": true,
+ "cloud-domain": "Default",
+ "default-tenant": the project name you created on openstack,
+ "tenant-id": the project id you created on openstack,
+ "cloud-type": "openstack",
+ "identity-url": the ip change to your openstack ip address,
+ "system-status": "active"
+ }
+ }
+ "cloud-owner": set to any cloud name you want to use, , the name must be unique
+ "service_name": set to any service name you want to use, the name must be unique
+ "customer_name": set to any customer name you want to use, the name must be unique
+ "subscriber_name": set to any subscriber name you want to use, the name must be unique
+
+ "vfc-url": set to msb ip and port you used,
+ "vnfs": {
+ "vgw": {
+ "path": "/csar/vgw.csar", set to you vgw csar file path
+ "key": "key2",
+ "value": "value2"
+ },
+ "infra": {
+ "path": "/csar/infra.csar", set to you infra csar file path
+ "key": "key2",
+ "value": "value2"
+ },
+ "vbng": {
+ "path": "/csar/vbng.csar", set to you vbng csar file path
+ "key": "key2",
+ "value": "value2"
+ },
+ "vbrgemu": {
+ "path": "/csar/vbrgemu.csar", set to you vbrgemu csar file path
+ "key": "key2",
+ "value": "value2"
+ },
+ "vgmux": {
+ "path": "/csar/vgmux.csar", set to you vgmux csar file path
+ "key": "key2",
+ "value": "value2"
+ }
+ },
+ "ns": {
+ "key": "key1",
+ "value": "value1",
+ "path": "ns_vgw.csar", set to you ns csar file path
+ "name": "vcpe11"
+ },
+ "location": "VCPE22_RegionOne", set to CloudOwner_CloudRegion
+ "vnfm_params": {
+ "GVNFMDRIVER": {
+ "type": "gvnfmdriver",
+ "vendor": "vfc",
+ "version": "v1.0",
+ "url": set to msb ip and port you used,
+ "vim-id": "VCPE22_RegionOne", set to CloudOwner_CloudRegion
+ "user-name": "admin",
+ "user-password": "admin",
+ "vnfm-version": "v1.0"
+ }
+ }
+
+
+4) The vnf csar file include Infra, vGW, vBNG, vBRGEMU and vGMUX, and the ns csar file is ns. https://git.onap.org/integration/tree/test/vcpe_tosca/local/csar
+
+
+5) The key test script is vcpe_tosca_test.py which is under https://git.onap.org/integration/tree/test/vcpe_tosca/local
+
+ Run command is
+
+ ::
+
+ python3 -m unittest vcpe_tosca_test.py
+
+   Before running the command, you should install requests (pip install requests) and update the path of the configuration file vcpe_config.json.
+
+6) Release of our environment
+
+ ::
+
+ vfc-nslcm: 1.3.8
+ vfc-vnflcm: 1.3.8
+ vfc-gvnfm: 1.3.8
+ modeling-etsicatalog: 1.0.5
+ multicloud-framework: 1.5.1
+ multicloud-windriver: 1.5.5
+ cli: onap-dublin
+
+
+Note
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+1) You must authorize admin to vcpe_case when managing project members in openstack.
+
+2) You should create an image named image before running the test script; the name must be the same as the image name defined in the vnf csar file.
+
+3) You should install ONAP CLI before running the script.
+
+
+Known Issues and Workaround
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+1) There is time out issue when terminating vnf, the solution is refer to
+
+ https://gerrit.onap.org/r/c/vfc/nfvo/driver/vnfm/gvnfm/+/105192
+
+2) The process of terminating job is chaotic, the solution is refer to
+
+ https://gerrit.onap.org/r/c/vfc/nfvo/lcm/+/105449
diff --git a/docs/docs_vCPE_with_Tosca_VNF.rst b/docs/docs_vCPE_with_Tosca_VNF.rst
new file mode 100644
index 000000000..3343bdf9f
--- /dev/null
+++ b/docs/docs_vCPE_with_Tosca_VNF.rst
@@ -0,0 +1,190 @@
+.. _docs_vcpe_tosca:
+
+:orphan:
+
+vCPE with Tosca VNF
+-------------------
+
+VNF Packages and NS Packages
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+vCPE tosca file url: https://git.onap.org/demo/tree/tosca/vCPE_F
+
+5 VNFs are here for the ONAP vCPE use case. The vnf csar file includes Infra, vGW, vBNG, vBRGEMU and vGMUX, and the ns csar file is ns.
+
+Description
+~~~~~~~~~~~
+The vCPE with Tosca VNF shows how to use ONAP to deploy a tosca based vCPE. The ONAP Casablanca release supports deployment, termination and manual healing of Tosca based vCPE. Users can trigger the above operations via UUI. Users first choose the Network Service type and corresponding service template in UUI, and then UUI directly invokes the VF-C Northbound interfaces to do the life cycle management. In the Casablanca release we bypass SO; in a following release we can add SO to the workflow. The main projects involved in this use case include: SDC, A&AI, UUI, VF-C, Multicloud, MSB, Policy and OOF.
+The use case is composed of five virtual functions (VFs): Infrastructure including vDNS, vDHCP, vAAA (Authorization, Authentication, Accounting) and vWEB, vBNG (Virtual Broadband Network Gateway), vGMUX (Virtual Gateway Multiplexer), vBRGEMU (Bridged Residential Gateway) and vGW (Virtual Gateway). The Infrastructure VF runs in one VM; the other VFs run in four separate VMs. We will send a lot of data from vBRGEMU to vGW, so we need to accelerate it using an SR-IOV NIC.
+The original vCPE Use Case Wiki Page can be found here: https://wiki.onap.org/pages/viewpage.action?pageId=3246168
+
+How to Use
+~~~~~~~~~~
+
+
+Configuration
+~~~~~~~~~~~~~
+1) VIM Configuration
+
+ Prepare openstack test environment.
+
+ * Create project(tenant) and user on openstack
+
+ Openstack Horizon--Identity--Projects page
+
+ .. image:: files/vcpe_tosca/create_project.png
+
+ Openstack Horizon--Identity--Users page
+
+ .. image:: files/vcpe_tosca/create_user.png
+
+ Manage Project Members
+
+ .. image:: files/vcpe_tosca/manage_project_user.png
+
+ * Create and upload image for VNF
+
+ * Register VIM using CLI command or ESR GUI
+
+ .. image:: files/vcpe_tosca/vim.png
+
+2) VNFM Configuration
+
+ Register vnfm using CLI command or ESR GUI.
+
+ .. image:: files/vcpe_tosca/vnfm.png
+
+Design Time
+~~~~~~~~~~~
+1) We put the real ETSI aligned package as package artifact.
+2) When designing a Network Service in SDC, you should assign "gvnfmdriver" as the value of nf_type in Properties Assignment, so that VF-C knows it will use gvnfm to manage the VNF life cycle.
+
+ .. image:: files/vcpe_tosca/sdc.png
+
+Run Time
+~~~~~~~~
+1) First onboard VNF/NS package from SDC to modeling etsicatalog in sequence.
+2) Trigger the NS operation via UUI guide
+
+a) VNF/NS csar package on-boarded guide
+
+ Note:
+
+ * VNF/NS csar package can be distributed from SDC.
+ * VNF csar package should be distributed first, then NS csar package can be distributed.
+ * The csar package list page shows both the on-boarded/distributed csar package and the package from SDC.
+ * When the package from SDC is on-boarded, it will be dropped from the list, and the on-boarded package will be displayed in the list.
+
+   The following shows the guide for on-boarding an NS csar package via UUI:
+
+   Step 1. Select the SDC NS csar package vcpe_test_001 in the csar package list page, and click the onboard button; the SDC NS csar package will be on-boarded to Modeling:
+
+ .. image:: files/vcpe_tosca/ns_package_list.png
+
+ Step 2. When the onboard is completed, the SDC csar vcpe_test_001 is dropped from the list, and the on-boarded csar info(vcpe) will be displayed in the csar file list:
+
+ .. image:: files/vcpe_tosca/ns_package_onboard.png
+
+   You can also onboard a VNF csar package by clicking the VNF tab in the csar package list page and then following the two steps above. You should onboard vnfs before ns.
+
+b) NS Instantiate guide
+
+ Note:
+
+ * When an NS package is on-boarded or distributed, you can start NS Instantiating.
+
+ The following steps show the guide of Instantiating NS:
+
+ Step 1. Open the service list page, first select Customer and Service Type, then click Create button.
+
+ .. image:: files/vcpe_tosca/customer_service.png
+
+ Step 2. First select the Service with Network Service, then select the TEMPLATE, then click OK button:
+
+ .. image:: files/vcpe_tosca/ns_create.png
+
+ Step 3. First input the NS Name and Description, then select the vf_location of each vnf, then click Create button:
+
+ .. image:: files/vcpe_tosca/ns_create_input.png
+
+   Step 4. A new record will be added to the list page, and the Status column will show the Instantiating progress.
+
+ .. image:: files/vcpe_tosca/ns_instance.png
+
+   Step 5. When NS Instantiating is completed, the Status will be updated to completed, and when you refresh the page the Status will be updated to Active.
+
+ .. image:: files/vcpe_tosca/ns_active.png
+
+c) NS heal guide
+
+ Note:
+
+   * VF-C R3 healing only supports restarting a vm of a VNF.
+
+   The following shows the guide of healing a VNF of an Instantiated NS:
+
+   Step 1. Click the + button of an Instantiated NS, and the VNF list of the NS will be displayed:
+
+ .. image:: files/vcpe_tosca/ns_vnf_list.png
+
+ Step 2. Click the heal button of a VNF, select the vm of the VNF, and click OK button:
+
+ .. image:: files/vcpe_tosca/ns_vnf_heal.png
+
+   Step 3. When VNF healing is started, the Status of the VNF will show the progress of healing.
+
+ .. image:: files/vcpe_tosca/ns_vnf_healing.png
+
+ Step 4. When VNF healing is completed, the Status will be updated to completed, you can refresh the page, the Status will be updated to Active again.
+
+ .. image:: files/vcpe_tosca/ns_vnf_healed.png
+
+d) NS delete guide
+
+   The following shows the guide of deleting an Instantiated NS:
+
+ Step 1. Select an Instantiated NS record in the list page, then click the delete button:
+
+ .. image:: files/vcpe_tosca/ns_active.png
+
+ Step 2. Select the termination Type and the graceful Termination Timeout, then click OK button:
+
+ .. image:: files/vcpe_tosca/ns_delete.png
+
+ Step 3. When the deleting is started, the Status will be updated to the progress of deleting.
+
+ .. image:: files/vcpe_tosca/ns_deleting.png
+
+   When deleting is completed, the Status will be updated to completed, and soon it will be dropped from the list.
+
+ .. image:: files/vcpe_tosca/ns_deleted.png
+
+Test Status and Plans
+~~~~~~~~~~~~~~~~~~~~~
+This case completed all tests as found here: https://wiki.onap.org/display/DW/vCPE%28tosca%29+-++Integration+test+cases
+
+Known Issues and Resolutions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+1) There is time out issue when terminating vnf, the solution is refer to
+
+ https://gerrit.onap.org/r/c/vfc/nfvo/driver/vnfm/gvnfm/+/105192
+
+2) The process of terminating job is chaotic, the solution is refer to
+
+ https://gerrit.onap.org/r/c/vfc/nfvo/lcm/+/105449
+
+3) Failed to fetch NS package from SDC when having VL resource, the solution is refer to
+
+ https://gerrit.onap.org/r/c/modeling/etsicatalog/+/106074
+
+4) The model msg is error when deleting the vnf package via UUI, the solution is refer to
+
+ https://gerrit.onap.org/r/c/usecase-ui/+/106729
+
+5) Wrong number of services displayed for services-list via UUI, the solution is refer to
+
+ https://gerrit.onap.org/r/c/usecase-ui/+/106719
+
+6) The picture cannot be displayed of ns create model page via UUI, the solution is refer to
+
+ https://gerrit.onap.org/r/c/usecase-ui/+/106715
diff --git a/docs/docs_vFWDT.rst b/docs/docs_vFWDT.rst
index 4eeba7b84..0c13886d2 100755..100644
--- a/docs/docs_vFWDT.rst
+++ b/docs/docs_vFWDT.rst
@@ -1,94 +1,179 @@
.. This work is licensed under a Creative Commons Attribution 4.0
International License. http://creativecommons.org/licenses/by/4.0
-
+
.. _docs_vfw_traffic:
-.. contents::
- :depth: 3
-..
+:orphan:
+
+vFW In-Place Software Upgrade with Traffic Distribution Use Case
+----------------------------------------------------------------
-vFW Traffic Distribution Use Case
----------------------------------
Description
~~~~~~~~~~~
-The purpose of this work is to show Traffic Distribiution functionality implemented in Casablanca and Dublin releases for vFW Use Case.
-The orchstration workflow triggers a change to traffic distribution (redistribution) done by a traffic balancing/distribution entity (aka anchor point).
-The DistributeTraffic action targets the traffic balancing/distribution entity, in some cases DNS, other cases a load balancer external to the VNF instance, as examples.
-Traffic distribution (weight) changes intended to take a VNF instance out of service are completed only when all in-flight traffic/transactions have been completed.
-DistributeTrafficCheck command may be used to verify initial conditions of redistribution or can be used to verify the state of VNFs and redistribution itself.
-To complete the traffic redistribution process, gracefully taking a VNF instance out-of-service/into-service, without dropping in-flight calls or sessions,
-QuiesceTraffic/ResumeTraffic command may need to follow traffic distribution changes. The VNF application remains in an active state.
+The purpose of this work is to show In-Place Software Upgrade Traffic Distribution functionality implemented in Frankfurt release for vFW Use Case.
+The use case is an evolution of vFW Traffic Distribution Use Case which was developed for Casablanca and Dublin releases.
+The orchestration workflow triggers a change of the software on selected instance of the firewall. The change is proceeded with minimization of disruption of the
+service since the firewall being upgraded must have all the traffic migrated out before the upgrade can be started. The traffic migration (redistribution) is done by
+a traffic balancing/distribution entity (aka anchor point). The DistributeTraffic action targets the traffic balancing/distribution entity, in some cases DNS, other cases a load balancer external to the VNF instance, as examples.
+Traffic distribution (weight) changes intended to take a VNF instance out of service are completed only when all in-flight traffic/transactions have been completed.
+DistributeTrafficCheck command may be used to verify initial conditions of redistribution or can be used to verify the state of VNFs and redistribution itself.
+To complete the traffic redistribution process, gracefully taking a VNF instance out-of-service/into-service, without dropping in-flight calls or sessions,
+QuiesceTraffic/ResumeTraffic command may need to follow traffic distribution changes. The upgrade operation consists of the UpgradePreCheck operation, which can be used to verify
+initial conditions for the operation like difference of the software version to the one requested, SoftwareUpgrade operation is responsible for modification of the software on
+selected vFW instance, and the UpgradePostCheck LCM action is used to verify if the software was properly installed on the vFW. After the completion of the software upgrade the traffic is migrated back to the
+instance of the vFW which was before being upgraded. The workflow can be configured also in such a way to perform only singular migration of the traffic without upgrade of the software
+what allows to experiment with the version of the workflow implemented in the previous releases. All the LCM operations are executed by APPC controller and they are implemented with Ansible protocol. In order to avoid the inconsistency in the VNFs state the Lock/Unlocks
+mechanism is used to prevent parallel execution of LCM actions on VNFs that are under maintenance because of the workflow that is currently executed on them.
+The VNF application remains in an active state.
+Traffic Distribution and In-Place Software Upgrade functionality is an outcome of Change Management project. Further details can be found on the following pages
-Traffic Distribution functionality is an outcome of Change Management project. Further details can be found on following pages
+- Frankfurt: https://wiki.onap.org/display/DW/Change+Management+Frankfurt+Extensions (Traffic Distribution workflow enhancements)
-https://wiki.onap.org/display/DW/Change+Management+Extensions (DistributeTraffic LCM and Use Case)
+- Dublin: https://wiki.onap.org/display/DW/Change+Management+Extensions (DistributeTraffic LCM and Use Case)
-https://wiki.onap.org/display/DW/Change+Management+Dublin+Extensions (Distribute Traffic Workflow with Optimization Framework)
+- Casablanca https://wiki.onap.org/display/DW/Change+Management+Dublin+Extensions (Distribute Traffic Workflow with Optimization Framework)
-Test Scenario
-~~~~~~~~~~~~~
+Test Scenarios
+~~~~~~~~~~~~~~
.. figure:: files/dt-use-case.png
:scale: 40 %
:align: center
- Figure 1 The idea of Traffic Distribution Use Case
+ Figure 1 The overview of interaction of components in vFW In-Place Software Upgrade with Traffic Distribution Use Case
-The idea of the simplified scenario presented in the Casablanca release is shown on Figure 1. In a result of the DistributeTraffic LCM action traffic flow originated from vPKG to vFW 1 and vSINK 1 is redirected to vFW 2 and vSINK 2 (as it is seen on Figure 2).
-Result of the change can be observed also on the vSINKs' dashboards which show a current incoming traffic. Observation of the dashboard from vSINK 1 and vSINK 2 proves workflow works properly.
+The main idea of the use case and prepared workflow is to show the interaction of different components of ONAP, including AAI, Policy, OOF, APPC for realization of scenario of software upgrade
+of vFW instance with migration of the traffic in time of its upgrade. vFW instance was modified to have two instances of vFW with dedicated vSINKs. The general idea of interaction of ONAP components
+is shown on Figure 1. Software Upgrade is performed on selected vFW instance. vPKG and the other vFW taking action while migration of the traffic out of vFW being upgraded. In a result of the DistributeTraffic
+LCM action traffic flow originated from vPKG to vFW 1 and vSINK 1 is redirected to vFW 2 and vSINK 2 (as it is seen on Figure 2). Result of the change can be observed also on the vSINKs' dashboards which show
+a current incoming traffic. After migration software is upgraded on the vFW and afterwards the traffic can be migrated back to this vFW instance. Observation of the dashboard from vSINK 1 and vSINK 2 proves workflow works properly.
.. figure:: files/dt-result.png
:scale: 60 %
:align: center
- Figure 2 The result of traffic distribution
+ Figure 2 The result of traffic distribution in time of the upgrade
-The purpose of the work in the Dublin release was to built a Traffic Distribution Workflow that takes as an input configuration parameters delivered by Optimization Framework and on their basis several traffic distribution LCM actions are executed by APPC in the specific workflow.
+The traffic distribution sub-workflow takes as an input configuration parameters delivered by Optimization Framework and on their basis several traffic distribution LCM actions are executed by APPC in the specific workflow.
+Further LCM actions are executed in order to present the idea of vFW In-Place Software Upgrade with Traffic Distribution. In this use case also APPC locking mechanisms is demonstrated, changes in APPC for VNFC level Ansible
+actions support and changes for APPC Ansible automation are also used in the use case. The APPC Ansible automation scripts allow configuring LCM actions without the need to enter the CDT portal; however, there is
+a possibility to do it manually, and the documentation also describes how to do it. In the same sense, the upload of policy types and policy instances is automated, but the documentation describes how to do it manually.
-.. figure:: files/dt-workflow.png
- :scale: 60 %
+The demonstration scripts can be used to execute two different scenarios:
+
+1. Simple distribution of traffic from selected vFW instance to the other one
+
+2. Upgrade of the software on selected vFW instance. Both are preceded with shared phase of identification of VF-modules for reconfiguration what is done with help of Optimization Framework.
+
+Workflows
+~~~~~~~~~
+
+Whole vFW In-Place Software Upgrade with Traffic Distribution use case can be decomposed into following workflows:
+
+1. High level workflow (simplified workflow on Figure 3 and more detailed on Figure 4)
+
+.. figure:: files/vfwdt-workflow-general.png
+ :scale: 100 %
+ :align: center
+
+ Figure 3 The In-Place Software Upgrade with Traffic Distribution general workflow
+
+* Identification of vFW instances (**I**) for migration of the traffic (source and destination) and identification of the vPKG instance (anchor point) which would be responsible for reconfiguration of the traffic distribution. This operation is performed by the Optimization Framework, the HAS algorithm in particular
+
+* Before any operation is started the workflow Locks (**II-IV**) with APPC all the VNFs involved in the procedure: vFW 1, vFW 2 and vPKG. In fact this is the vFW being upgraded, the vFW which will be used to migrate traffic to, and the vPKG which performs the traffic distribution procedure. The VNFs need to be locked in order to prevent the execution of other LCM actions in time of the whole workflow execution. The workflow checks the state of the Lock on each VNF (**II**)(**1-6**); if the Locks are free (**III**)(**7**) the Locks are acquired (**IV**)(**8-14**). If any Lock Check or Lock fails (**7, 14**), the workflow is stopped.
+
+* Depending on the workflow type different (Traffic Distribution or In-Place Software Upgrade with Traffic Distribution) LCM action are executed by APPC (**V**). All with Ansible protocol and with VNF and VF-modules identified before by Optimization Framework or the input parameters like selected vFW VNF instance. Workflows are conditional and will not be performed if the preconditions were not satisfied. In case of failure of LCM operation any other actions are canceled.
+
+* At the end the workflow Unlocks with APPC the previously Locked VNFs (**VI**)(**15-21**). This operation is always performed, even when some earlier steps were not completed. The purpose is to not leave VNFs in a locked state (in maintenance status), as this would prevent future execution of LCM actions or workflows on them. The locks are automatically released after a longer time.
+
+.. figure:: files/vfwdt-general-workflow-sd.png
+ :scale: 80 %
:align: center
- Figure 3 The Traffic Distribution Workflow
+ Figure 4 The In-Place Software Upgrade with Traffic Distribution detailed workflow
+
+2. Identification of VF-modules candidates for migration of traffic (detailed workflow is shown on Figure 5)
-The prepared Traffic Distribution Workflow has following steps:
+.. figure:: files/vfwdt-identification-workflow-sd.png
+ :scale: 80 %
+ :align: center
+
+ Figure 5 Identification of VF-Module candidates for migration of traffic
-- Workflow sends placement request to Optimization Framework (**1**) specific information about the vPKG and vFW-SINK models and VNF-ID of vFW that we want to migrate traffic out from.
- Optimization Framework role is to find the vFW-SINK VNF/VF-module instance where traffic should be migrated to and vPKG which will be associated with this vFW.
+- Workflow sends placement request to Optimization Framework (**1**) specific information about the vPKG and vFW-SINK models and VNF-ID of vFW that we want to upgrade.
+ Optimization Framework role is to find the vFW-SINK VNF/VF-module instance where traffic should be migrated to in time of the upgrade and vPKG which will be associated with this vFW.
Although in our case the calculation is very simple, the mechanism is ready to work for instances of services with VNF having houndreds of VF-modules spread accross different cloud regions.
- Optimization Framework takes from the Policy Framework policies (**2-3**) for VNFs and for relations between each other (in our case there is checked ACTIVE status of vFW-SINK and vPKG VF-modules and the Region to which they belong)
-- Optimization Framework, base on the information from the polcies and service topology information taken from A&AI (**4-11**), offers traffic distribution anchor and destination canidates' pairs (**12-13**) (pairs of VF-modules data with information about their V-Servers and their network interfaces). This information is returned to the workflow script (**14**).
+- Optimization Framework, base on the information from the policies and service topology information taken from A&AI (**4-11**), offers traffic distribution anchor and destination candidates' pairs (**12-13**) (pairs of VF-modules data with information about their V-Servers and their network interfaces). This information is returned to the workflow script (**14**).
+
+- Information from Optimization Framework can be used to construct APPC LCM requests for DistributeTrafficCheck, DistributeTraffic, UpgradePreCheck, SoftwareUpgrade and UpgradePostCheck commands. This information is used to fill CDT templates with proper data for further Ansible playbooks execution. Script generates also here CDT templates for LCM actions which can be uploaded automatically to APPC DB.
+
+3. The Traffic Distribution sub-workflow (simplified workflow on Figure 6 and more detailed on Figure 7)
+
+.. figure:: files/vfwdt-workflow-traffic.png
+ :scale: 100 %
+ :align: center
+
+ Figure 6 The Traffic Distribution general workflow
+
+- In the first DistributeTrafficCheck LCM request on vPGN VNF/VF-Module APPC, over Ansible, checks if the requested destination of vPKG packages is different than the already configured one (**I-III**)(**1-8**). If not, the workflow is stopped (**9**).
+
+- Next, APPC performs the DistributeTraffic action (**IV**)(**10-17**). If operation is completed properly traffic should be redirected to vFW 2 and vSINK 2 instance. If not, workflow is stopped (**18**).
+
+- Finally, APPC executes the DistributeTrafficCheck action (**V**) on vFW 1 in order to verify that it does not receive any traffic anymore (**19-26**) and on vFW 2 in order to verify that it receives traffic forwarded from vFW 2 (**28-35**). Workflow is stopped with failed state (**37**) if one of those conditions was not satisfied (**27, 36**)
+
+.. figure:: files/vfwdt-td-workflow-sd.png
+ :scale: 80 %
+ :align: center
+
+ Figure 7 The Traffic Distribution detailed workflow
+
+4. The In-Place Software Upgrade with Traffic Distribution sub-workflow (simplified workflow on Figure 8 and more detailed on Figure 9)
+
+.. figure:: files/vfwdt-workflow-upgrade.png
+ :scale: 100 %
+ :align: center
+
+ Figure 8 The In-Place Software Upgrade general workflow
+
+- Firstly there is performed the UpgradePreCheck LCM operation on selected vFW instance (**I**)(**1-8**). The Ansible script executed by the APPC checks if the software version is different than the one indicated in workflow's input. If it is the same the workflow is stopped (**9**).
-- Information from Optimization Framework can be used to construct APPC LCM requests for DistributeTrafficCheck and DistributeTraffic commands (**15, 24, 33, 42**). This information is used to fill CDT templates with proper data for further Ansible playbooks execution (**17, 26, 35, 44**)
+- When software of selected vFW instance needs to be upgraded (**II**) then the traffic migration procedure needs to be performed (**III** - see sub-workflow 3). If migration of traffic fails workflow is stopped.
-- In the first DistributeTrafficCheck LCM request on vPGN VNF/VF-Module APPC, over Ansible, checks if already configured destinatrion of vPKG packages is different than already configured. If not workflow is stopped (**23**).
+- Next, APPC performs, over Ansible, the procedure of in-place software upgrade. In our case this is simple refresh of the software packages on VM in order to simulate some upgrade process. Successful completion of the script should set the version of the software to the one from the upgrade request. If action fails workflow is stopped without further rollback (**18**).
-- Next, APPC performs the DistributeTraffic action like it is shown on Figure 1 and Figure 2 (**25-31**). If operation is completed properly traffic should be redirected to vFW 2 and vSINK 2 instance. If not, workflow is stopped (**32**).
+- Afterwards, APPC performs the UpgradePostCheck LCM action (**IV**)(**19-26**). The script verifies if the version of software is the same as requested before in the upgrade. If not, workflow is stopped without further rollback (**27**).
-- Finally, APPC executes the DistributeTrafficCheck action on vFW 1 in order to verify that it does not receives any traffic anymore (**34-40**) and on vFW 2 in order to verify that it receives traffic forwarded from vFW 2 (**43-49**)
+- Finally, when software upgrade is completed traffic migration procedure needs to be performed again (**VI**) to migrate traffic back to the previously upgraded vFW instance (see sub-workflow 3). If migration of traffic fails workflow is stopped and rollback is not performed.
+
+.. figure:: files/vfwdt-upgrade-workflow-sd.png
+ :scale: 80 %
+ :align: center
+
+ Figure 9 The In-Place Software Upgrade detailed workflow
Scenario Setup
--------------
-In order to setup the scenario and to test the DistributeTraffic LCM API in action you need to perform the following steps:
+In order to setup the scenario and to test workflows with APPC LCM APIs in action you need to perform the following steps:
-1. Create an instance of vFWDT (vPKG , 2 x vFW, 2 x vSINK) – dedicated for the DistributeTraffic LCM API tests
+1. Create an instance of vFWDT (vPKG , 2 x vFW, 2 x vSINK) – dedicated for the traffic migration tests
-#. Gather A&AI facts for Traffic Distribution use case configuration
+#. Gather A&AI facts for use case configuration
-#. Install Traffic Distribution workflow packages
+#. Install Software Upgrade and Traffic Distribution workflow packages
-#. Configure Optimization Framework for Traffic Distribution workflow
+#. Configure Optimization Framework for Traffic Distribution candidates gathering
#. Configure vPKG and vFW VNFs in APPC CDT tool
#. Configure Ansible Server to work with vPKG and vFW VMs
-#. Execute Traffic Distribution Workflow
+#. Execute Traffic Distribution or In-Place Upgrade Workflows
You will use the following ONAP K8s VMs or containers:
@@ -98,12 +183,12 @@ You will use the following ONAP K8s VMs or containers:
- APPC Ansible Server container – setup of Ansible Server, configuration of playbook and input parameters for LCM actions
-.. note:: In all occurences <K8S-NODE-IP> constant is the IP address of any K8s Node of ONAP OOM installation which hosts ONAP pods i.e. k8s-node-1 and <K8S-RANCHER-IP> constant is the IP address of K8S Rancher Server
+.. note:: In all occurrences *K8S_NODE_IP* constant is the IP address of any K8s Node of ONAP OOM installation which hosts ONAP pods i.e. k8s-node-1 and *K8S_RANCHER_IP* constant is the IP address of K8S Rancher Server
vFWDT Service Instantiation
~~~~~~~~~~~~~~~~~~~~~~~~~~~
-In order to test a DistributeTraffic LCM API functionality a dedicated vFW instance must be prepared. It differs from a standard vFW instance by having an additional VF-module with a second instance of vFW and a second instance of vSINK. Thanks to that when a service instance is deployed there are already available two instances of vFW and vSINK that can be used for verification of DistributeTraffic LCM API – there is no need to use the ScaleOut function to test DistributeTraffic functionality what simplifies preparations for tests.
+In order to test workflows a dedicated vFW instance must be prepared. It differs from a standard vFW instance by having an additional VF-module with a second instance of vFW and a second instance of vSINK. Thanks to that when a service instance is deployed there are already available two instances of vFW and vSINK that can be used for migration of traffic from one vFW instance to the other one – there is no need to use the ScaleOut function to test workflows what simplifies preparations for tests.
In order to instantiate vFWDT service please follow the procedure for standard vFW with following changes. You can create such service manually or you can use robot framework. For manual instantiation:
@@ -111,13 +196,13 @@ In order to instantiate vFWDT service please follow the procedure for standard v
https://github.com/onap/demo/tree/master/heat/vFWDT
-2. Create Virtual Service in SDC with composition like it is shown on Figure 3
+2. Create Virtual Service in SDC with composition like it is shown on Figure 10
.. figure:: files/vfwdt-service.png
:scale: 60 %
:align: center
- Figure 3 Composition of vFWDT Service
+ Figure 10 Composition of vFWDT Service
3. Use the following payload files in the SDNC-Preload phase during the VF-Module instantiation
@@ -127,15 +212,15 @@ https://github.com/onap/demo/tree/master/heat/vFWDT
- :download:`vFW/SNK 2 preload example <files/vfw-2-preload.json>`
-.. note:: Use publikc-key that is a pair for private key files used to log into ONAP OOM Rancher server. It will simplify further configuration
+.. note:: Use public-key that is a pair for private key files used to log into ONAP OOM Rancher server. It will simplify further configuration
-.. note:: vFWDT has a specific configuration of the networks – different than the one in original vFW use case (see Figure 4). Two networks must be created before the heat stack creation: *onap-private* network (10.0.0.0/16 typically) and *onap-external-private* (e.g. "10.100.0.0/16"). The latter one should be connected over a router to the external network that gives an access to VMs. Thanks to that VMs can have a floating IP from the external network assigned automatically in a time of stacks' creation. Moreover, the vPKG heat stack must be created before the vFW/vSINK stacks (it means that the VF-module for vPKG must be created as a first one). The vPKG stack creates two networks for the vFWDT use case: *protected* and *unprotected*; so these networks must be present before the stacks for vFW/vSINK are created.
+.. note:: vFWDT has a specific configuration of the networks – different than the one in original vFW use case (see Figure 11). Two networks must be created before the heat stack creation: *onap-private* network (10.0.0.0/16 typically) and *onap-external-private* (e.g. "10.100.0.0/16"). The latter one should be connected over a router to the external network that gives an access to VMs. Thanks to that VMs can have a floating IP from the external network assigned automatically in a time of stacks' creation. Moreover, the vPKG heat stack must be created before the vFW/vSINK stacks (it means that the VF-module for vPKG must be created as a first one). The vPKG stack creates two networks for the vFWDT use case: *protected* and *unprotected*; so these networks must be present before the stacks for vFW/vSINK are created.
.. figure:: files/vfwdt-networks.png
:scale: 15 %
:align: center
- Figure 4 Configuration of networks for vFWDT service
+ Figure 11 Configuration of networks for vFWDT service
4. Go to *robot* folder in Rancher server (being *root* user)
@@ -165,12 +250,12 @@ Go to the Rancher node and locate *demo-k8s.sh* script in *oom/kubernetes/robot*
::
./demo-k8s.sh onap init
- ./ete-k8s.sh onap instantiateVFWDT
+ ./ete-k8s.sh onap instantiateVFWDTGRA
-.. note:: You can verify the status of robot's service instantiation process by going to http://<K8S-NODE-IP>:30209/logs/ (login/password: test/test)
+.. note:: You can verify the status of robot's service instantiation process by going to https://K8S_NODE_IP:30209/logs/ (login/password: test/test)
-After successful instantiation of vFWDT service go to the OpenStack dashboard and project which is configured for VNFs deployment and locate vFWDT VMs. Choose one and try to ssh into one them to proove that further ansible configuration action will be possible
+After successful instantiation of vFWDT service go to the OpenStack dashboard and project which is configured for VNFs deployment and locate vFWDT VMs. Choose one and try to ssh into one of them to prove that further ansible configuration action will be possible
::
@@ -192,7 +277,7 @@ Preparation of Workflow Script Environment
::
- git clone --single-branch --branch dublin "https://gerrit.onap.org/r/demo"
+ git clone --single-branch --branch frankfurt "https://gerrit.onap.org/r/demo"
3. Enter vFWDT tutorial directory
@@ -206,7 +291,7 @@ what should show following folders
::
root@sb01-rancher:~/demo/tutorials/vFWDT# ls
- playbooks preloads workflow
+ get_secret.sh playbooks policies preloads workflow
.. note:: Remember vFWDT tutorial directory `~/demo/tutorials/vFWDT` for the further use
@@ -220,20 +305,20 @@ what should show following folders
Gathering Scenario Facts
------------------------
-In order to configure CDT tool for execution of Ansible playbooks and for execution of Traffic distribution workflow we need following A&AI facts for vFWDT service
+In order to configure CDT tool for execution of Ansible playbooks and for execution of workflows we need following A&AI facts for vFWDT service
- **vnf-id** of generic-vnf vFW instance that we want to migrate traffic out from
- **vnf-type** of vPKG VNF - required to configure CDT for Distribute Traffic LCMs
-- **vnf-type** of vFW-SINK VNFs - required to configure CDT for Distribute Traffic LCMs
+- **vnf-type** of vFW-SINK VNFs - required to configure CDT for Distribute Traffic and Software Upgrade LCMs
Gathering facts from VID Portal
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1. Enter the VID portal
-::
-
- https://<K8S-NODE-IP>:30200/vid/welcome.htm
+::
+
+ https://K8S_NODE_IP:30200/vid/welcome.htm
2. In the left hand menu enter **Search for Existing Service Instances**
@@ -247,24 +332,24 @@ Gathering facts from VID Portal
:scale: 60 %
:align: center
- Figure 5 vnf-type and vnf-id for vPKG VNF
+ Figure 12 vnf-type and vnf-id for vPKG VNF
.. figure:: files/vfwdt-vid-vnf-1.png
:scale: 60 %
:align: center
- Figure 6 vnf-type and vnf-id for vFW-SINK 1 VNF
+ Figure 13 vnf-type and vnf-id for vFW-SINK 1 VNF
.. figure:: files/vfwdt-vid-vnf-2.png
:scale: 60 %
:align: center
- Figure 7 vnf-type and vnf-id for vFW-SINK 2 VNF
+ Figure 14 vnf-type and vnf-id for vFW-SINK 2 VNF
Gathering facts directly from A&AI
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-1. Enter OpenStack dashboard on whicvh vFWDT instance was created and got to **Project->Compute->Instances** and read VM names of vPKG VM and 2 vFW VMs created in vFWDT service instance
+1. Enter OpenStack dashboard on which vFWDT instance was created and go to **Project->Compute->Instances** and read VM names of vPKG VM and 2 vFW VMs created in vFWDT service instance
2. Open Postman or any other REST client
@@ -278,7 +363,7 @@ Gathering facts directly from A&AI
::
- https://<K8S-NODE-IP>:30233/aai/v14/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/RegionOne/tenants/
+ https://K8S_NODE_IP:30233/aai/v14/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/RegionOne/tenants/
.. note:: *CloudOwner* and *Region* names are fixed for default setup of ONAP
@@ -286,17 +371,17 @@ Gathering facts directly from A&AI
::
- https://<K8S-NODE-IP>:30233/aai/v14/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/RegionOne/tenants/tenant/<tenant-id>/vservers/?vserver-name=<vm-name>
+ https://K8S_NODE_IP:30233/aai/v14/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/RegionOne/tenants/tenant/<tenant-id>/vservers/?vserver-name=<vm-name>
-Read from the response (realtionship with *generic-vnf* type) vnf-id of vPKG VNF
+Read from the response (relationship with *generic-vnf* type) vnf-id of vPKG VNF
-.. note:: If you do not receive any vserver candidate it means that heatbridge procedure was not performed or was not completed successfuly. It is mandatory to continue this tutorial
+.. note:: If you do not receive any vserver candidate it means that heatbridge procedure was not performed or was not completed successfully. It is mandatory to continue this tutorial
8. Create new GET query for *generic-vnf* type with following link replacing <vnf-id> with value read from previous GET response
::
- https://<K8S-NODE-IP>:30233/aai/v14/network/generic-vnfs/generic-vnf/<vnf-id>
+ https://K8S_NODE_IP:30233/aai/v14/network/generic-vnfs/generic-vnf/<vnf-id>
9. Repeat this procedure also for 2 vFW VMs and note their *vnf-type* and *vnf-id*
@@ -306,110 +391,54 @@ This sections show the steps necessary to configure Policies, CDT and Ansible se
Configuration of Policies for Optimization Framework
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-We need to enter the Policy editor in order to upload policy types and then the policy rules for the demo. The polcies are required for the Optimization Framework and they guide OOF how to determine
+We need to upload necessary optimization policy rules required for the demo. The policies are required for the Optimization Framework and they guide OOF how to determine
vFW and vPGN instances used in the Traffic Distribution workflow.
-1. Enter the Policy portal
-
-Specify *demo*:*demo* as a login and password
-
-::
-
- https://<K8S-NODE-IP>:30219/onap/login.htm
-
-From the left side menu enter *Dictionary* section and from the combo boxes select *MicroService Policy* and *MicroService Models* respectively. Below you can see the result.
-
-.. figure:: files/vfwdt-policy-type-list.png
- :scale: 70 %
- :align: center
-
- Figure 8 List of MicroService policy types in the Policy portal
-
-2. Upload the policy types
-
-Before policy rules for Traffic Distribution can be uploaded we need to create policy types to store these rules. For that we need to create following three types:
-
-- VNF Policy - it used to filter vf-module instances i.e. base on their attributes from the AAI like *provStatus*, *cloudRegionId* etc.
-- Query Policy - it is used to declare extra inpt parameters for OOF placement request - in our case we need to specify cloud region name
-- Affinity Policy - it is used to specify the placement rule used for selection vf-module candiate pairs of vFW vf-module instance (traffic destination) and vPGN vf-module instance (anchor point). In this case the match is done by belonging to the same cloud region
-
-Enter vFWDT tutorial directory on Rancher server (already created in `Preparation of Workflow Script Environment`_) and create policy types from the following files
-
-::
-
- root@sb01-rancher:~/demo/tutorials/vFWDT# ls policies/types/
- affinityPolicy-v20181031.yml queryPolicy-v20181031.yml vnfPolicy-v20181031.yml
-
-For each file press *Create* button, choose the policy type file, select the *Micro Service Option* (always one available) and enter the *Version* which must be the same like the one specified for policy instances. In this case pass value *OpenSource.version.1*
-
-.. figure:: files/vfwdt-add-micro-service-policy.png
- :scale: 70 %
- :align: center
-
- Figure 9 Creation of new MicroService policy type for OOF
-
-In a result you should see in the dictionary all three new types of policies declared
-
-.. figure:: files/vfwdt-completed-policy-type-list.png
- :scale: 70 %
- :align: center
-
- Figure 10 Completed list of MicroService policy types in the Policy portal
-
-3. Push the policies into the PDP
+1. Push the policies into the PDP
-In order to push policies into the PDP it is required to execute already prepared *uploadPolicies.sh* script that builds policy creation/update requests and automatically sends them to the Policy PDP pod
+In order to push policies into the PDP it is required to execute already prepared *uploadPolicies.sh* script that prepares policy upload requests and automatically sends them to the Policy PDP pod
::
root@sb01-rancher:~/demo/tutorials/vFWDT# ls policies/rules/
- QueryPolicy_vFW_TD.json affinity_vFW_TD.json uploadPolicies.sh vnfPolicy_vFW_TD.json vnfPolicy_vPGN_TD.json
+ QueryPolicy_vFW_TD.json affinity_vFW_TD.json uploadPolicies.sh dt-policies.sh vnfPolicy_vFW_TD.json vnfPolicy_vPGN_TD.json
-When necessary, you can modify policy json files. Script will read these files and will build new PDP requests based on them. To create new policies execute script in the following way
+When necessary, you can modify policy json files. Script will read these files and will build new PDP requests based on them. To create or update policies execute the script in the following way
::
./policies/rules/uploadPolicies.sh
-To update existing policies execute script with an extra argument
-
-::
-
- ./policies/rules/uploadPolicies.sh U
-
-The result can be verified in the Policy portal, in the *Editor* section, after entering *OSDF_DUBLIN* directory
-
-.. figure:: files/vfwdt-policy-editor-osdf-dublin.png
- :scale: 70 %
- :align: center
-
- Figure 11 List of policies for OOF and vFW traffic distribution
-
Testing Gathered Facts on Workflow Script
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Having collected *vnf-id* and *vnf-type* parameters we can execute Traffic Distribution Workflow Python script. It works in two modes. First one executes ony initial phase where AAI and OOF
+Having collected *vnf-id* and *vnf-type* parameters we can execute Traffic Distribution Workflow Python script. It works in two modes. First one executes only the initial phase where AAI and OOF
is used to collect necessary information for configuration of APPC and for further execution phase. The second mode performs also second phase which executes APPC LCM actions.
At this stage we will execute script in the initial mode to generate some configuration helpful in CDT and Ansible configuration.
-1. Enter vFWDT tutorial directory on Rancher server (already created in `Preparation of Workflow Script Environment`_) and execute there workflow script with follwoing parameters
+1. Enter vFWDT tutorial directory on Rancher server (already created in `Preparation of Workflow Script Environment`_). In the *workflow* folder you can find workflow script used to gather necessary configuration and responsible for execution of the LCM actions. It has the following syntax
::
- python3 workflow.py <VNF-ID> <K8S-NODE-IP> True False True True
+ python3 workflow.py <VNF-ID> <RANCHER_NODE_IP> <K8S_NODE_IP> <IF-CACHE> <IF-VFWCL> <INITIAL-ONLY> <CHECK-STATUS> <VERSION>
+
+- <VNF-ID> - vnf-id of vFW VNF instance that traffic should be migrated out from
+- <RANCHER_NODE_IP> - External IP of ONAP Rancher Node i.e. 10.12.5.160 (If Rancher Node is missing this is NFS node)
+- <K8S_NODE_IP> - External IP of ONAP K8s Worker Node i.e. 10.12.5.212
+- <IF-CACHE> - If script should use and build OOF response cache (the cache speeds up further executions of the script)
+- <IF-VFWCL> - If instead of vFWDT service instance vFW or vFWCL one is used (should be False always)
+- <INITIAL-ONLY> - If only configuration information will be collected (True for initial phase and False for full execution of workflow)
+- <CHECK-STATUS> - If APPC LCM action status should be verified and FAILURE should stop workflow (when False FAILED status of LCM action does not stop execution of further LCM actions)
+- <VERSION> - New version of vFW - for tests '1.0' or '2.0'. Ignore when you want to test traffic distribution workflow
-For now and for further use workflow script has following input parameters:
+2. Execute there workflow script with following parameters
-- vnf-id of vFW VNF instance that traffic should be migrated out from
-- External IP of ONAP Rancher Node i.e. 10.12.5.160 (If Rancher Node is missing this is NFS node)
-- External IP of ONAP K8s Worker Node i.e. 10.12.5.212
-- if script should use and build OOF response cache (cache it speed-ups further executions of script)
-- if instead of vFWDT service instance vFW or vFWCL one is used (should be False always)
-- if only configuration information will be collected (True for initial phase and False for full execution of workflow)
-- if APPC LCM action status should be verified and FAILURE should stop workflow (when False FAILED status of LCM action does not stop execution of further LCM actions)
+::
+
+ python3 workflow.py <VNF-ID> <RANCHER_NODE_IP> <K8S_NODE_IP> True False True True 2.0
-2. The script at this stage should give simmilar output
+3. The script at this stage should give similar output
::
@@ -417,6 +446,10 @@ For now and for further use workflow script has following input parameters:
OOF Cache True, is CL vFW False, only info False, check LCM result True
+ New vFW software version 2.0
+
+ Starting OSDF Response Server...
+
vFWDT Service Information:
{
"vf-module-id": "0dce0e61-9309-449a-8e3e-f001635aaab1",
@@ -446,19 +479,21 @@ For now and for further use workflow script has following input parameters:
vofwl01vfw4407 ansible_ssh_host=10.0.110.1 ansible_ssh_user=ubuntu
vofwl02vfw4407 ansible_ssh_host=10.0.110.4 ansible_ssh_user=ubuntu
-The result should have almoast the same information for *vnf-id's* of both vFW VNFs. *vnf-type* for vPKG and vFW VNFs should be the same like those collected in previous steps.
-Ansible Inventory section contains information about the content Ansible Inventor file that will be configured later on `Configuration of Ansible Server`_
+The result should have almost the same information for *vnf-id's* of both vFW VNFs. *vnf-type* for vPKG and vFW VNFs should be the same like those collected in previous steps.
+Ansible Inventory section contains information about the content of the Ansible Inventory file that will be configured later on `Configuration of Ansible Server`_. The first phase of the workflow script will generate also the CDT artifacts which can be used for automatic configuration of the CDT tool - they can be ignored for manual CDT configuration.
Configuration of VNF in the APPC CDT tool
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+.. note:: Automated procedure can be found at the end of the section
+
Following steps aim to configure DistributeTraffic LCM action for our vPKG and vFW-SINK VNFs in APPC CDT tool.
1. Enter the Controller Design Tool portal
::
- https://<K8S-NODE-IP>:30289/index.html
+ https://K8S_NODE_IP:30289/index.html
2. Click on *MY VNFS* button and login to CDT portal giving i.e. *demo* user name
@@ -468,7 +503,7 @@ Following steps aim to configure DistributeTraffic LCM action for our vPKG and v
:scale: 70 %
:align: center
- Figure 12 Creation of new VNF type in CDT
+ Figure 15 Creation of new VNF type in CDT
4. Enter previously retrieved VNF Type for vPKG VNF and press the *NEXT* button
@@ -476,7 +511,7 @@ Following steps aim to configure DistributeTraffic LCM action for our vPKG and v
:scale: 70 %
:align: center
- Figure 13 Creation of new VNF type in CDT
+ Figure 16 Creation of new VNF type in CDT
5. For already created VNF Type (if the view does not open itself) click the *View/Edit* button. In the LCM action edit view in the first tab please choose:
@@ -495,48 +530,64 @@ Following steps aim to configure DistributeTraffic LCM action for our vPKG and v
:scale: 70 %
:align: center
- Figure 14 DistributeTraffic LCM action editing
+ Figure 17 DistributeTraffic LCM action editing
+
+6. Go to the *Template* tab and in the editor paste the request template of LCM actions for vPKG VNF type
-6. Go to the *Template* tab and in the editor paste the request template of the DistributeTraffic LCM action for vPKG VNF type
+For DistributeTraffic and DistributeTrafficCheck LCMs
::
{
"InventoryNames": "VM",
- "PlaybookName": "${()=(book_name)}",
- "NodeList": [{
- "vm-info": [{
- "ne_id": "${()=(ne_id)}",
- "fixed_ip_address": "${()=(fixed_ip_address)}"
- }],
- "site": "site",
- "vnfc-type": "vpgn"
- }],
+ "PlaybookName": "${book_name}",
+ "AutoNodeList": true,
"EnvParameters": {
"ConfigFileName": "../traffic_distribution_config.json",
+ "vnf_instance": "vfwdt"
+ },
+ "FileParameters": {
+ "traffic_distribution_config.json": "${file_parameter_content}"
+ },
+ "Timeout": 3600
+ }
+
+
+For UpgradeSoftware, UpgradePreCheck and UpgradePostCheck LCMs
+
+::
+
+ {
+ "InventoryNames": "VM",
+ "PlaybookName": "${book_name}",
+ "AutoNodeList": true,
+ "EnvParameters": {
+ "ConfigFileName": "../config.json",
"vnf_instance": "vfwdt",
+ "new_software_version": "${new-software-version}",
+ "existing_software_version": "${existing-software-version}"
},
"FileParameters": {
- "traffic_distribution_config.json": "${()=(file_parameter_content)}"
+ "config.json": "${file_parameter_content}"
},
"Timeout": 3600
}
-.. note:: For all this VNF types and for all actions CDT template is the same except **vnfc-type** parameter that for vPKG VNF type should have value *vpgn* and for vFW-SINK VNF type should have value *vfw-sink*
The meaning of selected template parameters is following:
- **EnvParameters** group contains all the parameters that will be passed directly to the Ansible playbook during the request's execution. *vnf_instance* is an obligatory parameter for VNF Ansible LCMs. In our case for simplification it has predefined value
-- **InventoryNames** parameter is obligatory if you want to have NodeList with limited VMs or VNFCs that playbook should be executed on. It can have value *VM* or *VNFC*. In our case *VM* valuye means that NodeList will have information about VMs on which playbook should be executed. In this use case this is always only one VM
-- **NodeList** parameter value must match the group of VMs like it was specified in the Ansible inventory file. *PlaybookName* must be the same as the name of playbook that was uploaded before to the Ansible server.
-- **FileParameters**
+- **InventoryNames** parameter is obligatory if you want to have NodeList with limited VMs or VNFCs that playbook should be executed on. It can have value *VM* or *VNFC*. In our case *VM* value means that NodeList will have information about VMs on which playbook should be executed. In this use case this is always only one VM
+- **AutoNodeList** parameter set to True indicates that the template does not need the NodeList section specified and it will be generated automatically based on information from AAI - this requires proper data in the vserver and vnfc objects associated with VNFs
+- **PlaybookName** must be the same as the name of playbook that was uploaded before to the Ansible server.
+- **FileParameters** section contains information about the configuration files with their content necessary to execute the playbook
.. figure:: files/vfwdt-create-template.png
:scale: 70 %
:align: center
- Figure 15 LCM DistributeTraffic request template
+ Figure 18 LCM DistributeTraffic request template
7. Afterwards press the *SYNCHRONIZE WITH TEMPLATE PARAMETERS* button. You will be moved to the *Parameter Definition* tab. The new parameters will be listed there.
@@ -544,17 +595,27 @@ The meaning of selected template parameters is following:
:scale: 70 %
:align: center
- Figure 16 Summary of parameters specified for DistributeTraffic LCM action.
+ Figure 19 Summary of parameters specified for DistributeTraffic LCM action.
.. note:: For each parameter you can define its: mandatory presence; default value; source (Manual/A&AI). For our case modification of this settings is not necessary
8. Finally, go back to the *Reference Data* tab and click *SAVE ALL TO APPC*.
-.. note:: Remember to configure DistributeTraffic and DistributeTrafficCheck actions for vPKG VNF type and DistributeTrafficCheck action for vFW-SINK
+.. note:: Remember to configure DistributeTraffic and DistributeTrafficCheck actions for vPKG VNF type and UpgradeSoftware, UpgradePreCheck, UpgradePostCheck and DistributeTrafficCheck actions for vFW-SINK
+
+9. Configuration of CDT tool is also automated and all steps above can be repeated with script *configure_ansible.sh*
+
+Enter vFWDT tutorial directory `Preparation of Workflow Script Environment`_ on Rancher server, make sure that *onap.pem* file is in *playbooks* directory and run
+
+::
+
+ ./playbooks/configure_ansible.sh
Configuration of Ansible Server
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+.. note:: Automated procedure can be found at the end of the section
+
After an instantiation of the vFWDT service the Ansible server must be configured in order to allow it a reconfiguration of vPKG VM.
1. Copy from Rancher server private key file used for vFWDT VMs' creation and used for access to Rancher server into the :file:`/opt/ansible-server/Playbooks/onap.pem` file
@@ -579,7 +640,7 @@ After an instantiation of the vFWDT service the Ansible server must be configure
chmod 400 onap.pem
chown ansible:ansible onap.pem
-4. Edit the :file:`/opt/ansible-server/Playbooks/Ansible\ \_\ inventory` file including all the hosts of vFWDT service instance used in this use case.
+4. Edit the :file:`/opt/ansible-server/Playbooks/Ansible\ \_\ inventory` file including all the hosts of vFWDT service instance used in this use case.
The content of the file is generated by workflow script `Testing Gathered Facts on Workflow Script`_
::
@@ -605,17 +666,17 @@ After an instantiation of the vFWDT service the Ansible server must be configure
private_key_file = /opt/ansible-server/Playbooks/onap.pem
-.. note:: This is the default privaye key file. In the `/opt/ansible-server/Playbooks/Ansible\ \_\ inventory` different key could be configured but APPC in time of execution of playbbok on Ansible server creates its own dedicated inventory file which does not have private key file specified. In consequence, this key file configured is mandatory for proper execution of playbooks by APPC
+.. note:: This is the default private key file. In the `/opt/ansible-server/Playbooks/Ansible\ \_\ inventory` different key could be configured but APPC in time of execution of playbook on Ansible server creates its own dedicated inventory file which does not have private key file specified. In consequence, this key file configured is mandatory for proper execution of playbooks by APPC
-6. Test that the Ansible server can access over ssh vFWDT hosts configured in the ansible inventory
+6. Test that the Ansible server can access over ssh vFWDT hosts configured in the ansible inventory
::
ansible –i Ansible_inventory vpgn,vfw-sink –m ping
-7. Download the distribute traffic playbook into the :file:`/opt/ansible-server/Playbooks` directory
+7. Download the LCM playbooks into the :file:`/opt/ansible-server/Playbooks` directory
Exit Ansible server pod and enter vFWDT tutorial directory `Preparation of Workflow Script Environment`_ on Rancher server. Afterwards, copy playbooks into Ansible server pod
@@ -624,13 +685,15 @@ Exit Ansible server pod and enter vFWDT tutorial directory `Preparation of Workf
sudo kubectl cp playbooks/vfw-sink onap/`kubectl get pods -o go-template --template '{{range .items}}{{.metadata.name}}{{"\n"}}{{end}}' | grep appc-ansible`:/opt/ansible-server/Playbooks/
sudo kubectl cp playbooks/vpgn onap/`kubectl get pods -o go-template --template '{{range .items}}{{.metadata.name}}{{"\n"}}{{end}}' | grep appc-ansible`:/opt/ansible-server/Playbooks/
-8. After the configuration of Ansible serverthe structure of `/opt/ansible-server/Playbooks` directory should be following
+8. Configuration of ansible server is also automated and all steps above can be repeated with script *configure_ansible.sh* introduced in the previous section
+
+9. After the configuration of Ansible server with script the structure of `/opt/ansible-server/Playbooks` directory should be following
::
/opt/ansible-server/Playbooks $ ls -R
.:
- Ansible_inventory onap.pem vfw-sink vpgn
+ ansible.cfg Ansible_inventory configure_ansible.sh onap.pem server.py upgrade.sh vfw-sink vpgn
./vfw-sink:
latest
@@ -639,11 +702,20 @@ Exit Ansible server pod and enter vFWDT tutorial directory `Preparation of Workf
ansible
./vfw-sink/latest/ansible:
- distributetrafficcheck
+ distributetrafficcheck upgradepostcheck upgradeprecheck upgradesoftware
./vfw-sink/latest/ansible/distributetrafficcheck:
site.yml
+ ./vfw-sink/latest/ansible/upgradepostcheck:
+ site.yml
+
+ ./vfw-sink/latest/ansible/upgradeprecheck:
+ site.yml
+
+ ./vfw-sink/latest/ansible/upgradesoftware:
+ site.yml
+
./vpgn:
latest
@@ -651,7 +723,7 @@ Exit Ansible server pod and enter vFWDT tutorial directory `Preparation of Workf
ansible
./vpgn/latest/ansible:
- distributetraffic distributetrafficcheck
+ distributetraffic distributetrafficcheck
./vpgn/latest/ansible/distributetraffic:
site.yml
@@ -663,55 +735,72 @@ Exit Ansible server pod and enter vFWDT tutorial directory `Preparation of Workf
Configuration of APPC DB for Ansible
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+.. note:: Automated procedure can be found at the end of the section
+
For each VNF that uses the Ansible protocol you need to configure *PASSWORD* and *URL* field in the *DEVICE_AUTHENTICATION* table. This step must be performed after configuration in CDT which populates data in *DEVICE_AUTHENTICATION* table.
-1. Enter the APPC DB container
+1. Read APPC DB password
+
+Enter vFWDT tutorial directory `Preparation of Workflow Script Environment`_ on Rancher server.
+
+::
+
+ ./get_secret.sh `kubectl get secrets | grep appc-db-root-pass`
+
+2. Enter the APPC DB container
::
kubectl exec -it -n onap `kubectl get pods -o go-template --template '{{range .items}}{{.metadata.name}}{{"\n"}}{{end}}' | grep appc-db-0` -- sh
-2. Enter the APPC DB CLI (password is *gamma*)
+3. Enter the APPC DB CLI
::
- mysql -u sdnctl -p
+ mysql -u root -p
-3. Execute the following SQL commands
+4. Execute the following SQL commands
::
MariaDB [(none)]> use sdnctl;
- MariaDB [sdnctl]> UPDATE DEVICE_AUTHENTICATION SET URL = 'http://appc-ansible-server:8000/Dispatch' WHERE ACTION LIKE 'DistributeTraffic%';
- MariaDB [sdnctl]> UPDATE DEVICE_AUTHENTICATION SET PASSWORD = 'admin' WHERE ACTION LIKE 'DistributeTraffic%';
- MariaDB [sdnctl]> select * from DEVICE_AUTHENTICATION;
+ MariaDB [sdnctl]> UPDATE DEVICE_AUTHENTICATION SET URL = 'http://appc-ansible-server:8000/Dispatch' WHERE PROTOCOL LIKE 'ANSIBLE' AND URL IS NULL;
+ MariaDB [sdnctl]> UPDATE DEVICE_AUTHENTICATION SET PASSWORD = 'admin' WHERE PROTOCOL LIKE 'ANSIBLE' AND PASSWORD IS NULL;
+ MariaDB [sdnctl]> select * from DEVICE_AUTHENTICATION WHERE PROTOCOL LIKE 'ANSIBLE';
-Result should be simmilar to the following one:
+Result should be similar to the following one:
::
+--------------------------+------------------------------------------------------+----------+------------------------+-----------+----------+-------------+------------------------------------------+
| DEVICE_AUTHENTICATION_ID | VNF_TYPE | PROTOCOL | ACTION | USER_NAME | PASSWORD | PORT_NUMBER | URL |
+--------------------------+------------------------------------------------------+----------+------------------------+-----------+----------+-------------+------------------------------------------+
- | 137 | vFWDT 2019-05-20 21:10:/vFWDT_vPKG a646a255-9bee 0 | ANSIBLE | DistributeTraffic | admin | admin | 8000 | http://appc-ansible-server:8000/Dispatch |
- | 143 | vFWDT 2019-05-20 21:10:/vFWDT_vFWSNK b463aa83-b1fc 0 | ANSIBLE | DistributeTraffic | admin | admin | 8000 | http://appc-ansible-server:8000/Dispatch |
- | 149 | vFWDT 2019-05-20 21:10:/vFWDT_vFWSNK b463aa83-b1fc 0 | ANSIBLE | DistributeTrafficCheck | admin | admin | 8000 | http://appc-ansible-server:8000/Dispatch |
- | 152 | vFWDT 2019-05-20 21:10:/vFWDT_vPKG a646a255-9bee 0 | ANSIBLE | DistributeTrafficCheck | admin | admin | 8000 | http://appc-ansible-server:8000/Dispatch |
+ | 118 | vFWDT 2020-04-21 17-26-/vFWDT_vFWSNK 1faca5b5-4c29 1 | ANSIBLE | DistributeTrafficCheck | admin | admin | 8000 | http://appc-ansible-server:8000/Dispatch |
+ | 121 | vFWDT 2020-04-21 17-26-/vFWDT_vFWSNK 1faca5b5-4c29 1 | ANSIBLE | UpgradeSoftware | admin | admin | 8000 | http://appc-ansible-server:8000/Dispatch |
+ | 124 | vFWDT 2020-04-21 17-26-/vFWDT_vFWSNK 1faca5b5-4c29 1 | ANSIBLE | UpgradePreCheck | admin | admin | 8000 | http://appc-ansible-server:8000/Dispatch |
+ | 127 | vFWDT 2020-04-21 17-26-/vFWDT_vFWSNK 1faca5b5-4c29 1 | ANSIBLE | UpgradePostCheck | admin | admin | 8000 | http://appc-ansible-server:8000/Dispatch |
+ | 133 | vFWDT 2020-04-21 17-26-/vFWDT_vPKG 8021eee9-3a8f 0 | ANSIBLE | DistributeTraffic | admin | admin | 8000 | http://appc-ansible-server:8000/Dispatch |
+ | 136 | vFWDT 2020-04-21 17-26-/vFWDT_vPKG 8021eee9-3a8f 0 | ANSIBLE | DistributeTrafficCheck | admin | admin | 8000 | http://appc-ansible-server:8000/Dispatch |
+--------------------------+------------------------------------------------------+----------+------------------------+-----------+----------+-------------+------------------------------------------+
- 4 rows in set (0.00 sec)
+ 6 rows in set (0.00 sec)
+
+5. Configuration of APPC DB is also automated and all steps above can be repeated with script *configure_ansible.sh* introduced in the previous sections
-Testing Traffic Distribution Workflow
--------------------------------------
-Since all the configuration of components of ONAP is already prepared it is possible to enter second phase of Traffic Distribution Workflow execution -
-the execution of DistributeTraffic and DistributeTrafficCheck LCM actions with configuration resolved before by OptimizationFramework.
+Testing Workflows
+-----------------
+
+Since all the configuration of components of ONAP is already prepared it is possible to enter second phase of workflows execution -
+the execution of APPC LCM actions with configuration resolved before by OptimizationFramework.
Workflow Execution
~~~~~~~~~~~~~~~~~~
-In order to run Traffic Distribution Workflow execute following commands from the vFWDT tutorial directory `Preparation of Workflow Script Environment`_ on Rancher server.
+In order to run workflows execute following commands from the vFWDT tutorial directory `Preparation of Workflow Script Environment`_ on Rancher server.
+
+For Traffic Distribution workflow run
::
@@ -719,65 +808,83 @@ In order to run Traffic Distribution Workflow execute following commands from th
python3 workflow.py 909d396b-4d99-4c6a-a59b-abe948873303 10.12.5.217 10.12.5.63 True False False True
-The order of executed LCM actions is following:
+The order of executed LCM actions for Traffic Distribution workflow is following:
-1. DistributeTrafficCheck on vPKG VM - ansible playbook checks if traffic destinations specified by OOF is not configued in the vPKG and traffic does not go from vPKG already.
- If vPKG send alreadyt traffic to destination the playbook will fail and workflow will break.
-2. DistributeTraffic on vPKG VM - ansible playbook reconfigures vPKG in order to send traffic to destination specified before by OOF. When everything is fine at this stage
- change of the traffic should be observed on following dashboards (please turn on automatic reload of graphs)
+1. CheckLock on vPKG, vFW-1 and vFW-2 VMs
+2. Lock on vPKG, vFW-1 and vFW-2 VMs
+3. DistributeTrafficCheck on vPKG VM - ansible playbook checks if traffic destinations specified by OOF is not configured in the vPKG and traffic does not go from vPKG already.
+ If vPKG already sends traffic to the destination the playbook will fail and workflow will break.
+4. DistributeTraffic on vPKG VM - ansible playbook reconfigures vPKG in order to send traffic to destination specified before by OOF.
+5. DistributeTrafficCheck on vFW-1 VM - ansible playbook checks if traffic is not present on vFW from which traffic should be migrated out. If traffic is still present after 30 seconds playbook fails
+6. DistributeTrafficCheck on vFW-2 VM - ansible playbook checks if traffic is present on vFW to which traffic should be migrated. If traffic is still not present after 30 seconds playbook fails
+7. Unlock on vPKG, vFW-1 and vFW-2 VMs
- ::
-
- http://<vSINK-1-IP>:667/
- http://<vSINK-2-IP>:667/
-3. DistributeTrafficCheck on vFW-1 VM - ansible playbook checks if traffic is not present on vFW from which traffic should be migrated out. If traffic is still present after 30 seconds playbook fails
-4. DistributeTrafficCheck on vFW-2 VM - ansible playbook checks if traffic is present on vFW from which traffic should be migrated out. If traffic is still not present after 30 seconds playbook fails
+For In-Place Software Upgrade with Traffic Distribution workflow run
+::
+
+ cd workflow
+ python3 workflow.py 909d396b-4d99-4c6a-a59b-abe948873303 10.12.5.217 10.12.5.63 True False False True 2.0
+
+
+The order of executed LCM actions for In-Place Software Upgrade with Traffic Distribution workflow is following:
+
+1. CheckLock on vPKG, vFW-1 and vFW-2 VMs
+2. Lock on vPKG, vFW-1 and vFW-2 VMs
+3. UpgradePreCheck on vFW-1 VM - checks if the software version on vFW is different than the one requested in the workflow input
+4. DistributeTrafficCheck on vPKG VM - ansible playbook checks if traffic destinations specified by OOF is not configured in the vPKG and traffic does not go from vPKG already.
+ If vPKG already sends traffic to the destination the playbook will fail and workflow will break.
+5. DistributeTraffic on vPKG VM - ansible playbook reconfigures vPKG in order to send traffic to destination specified before by OOF.
+6. DistributeTrafficCheck on vFW-1 VM - ansible playbook checks if traffic is not present on vFW from which traffic should be migrated out. If traffic is still present after 30 seconds playbook fails
+7. DistributeTrafficCheck on vFW-2 VM - ansible playbook checks if traffic is present on vFW to which traffic should be migrated. If traffic is still not present after 30 seconds playbook fails
+8. UpgradeSoftware on vFW-1 VM - ansible playbook modifies the software on the vFW instance and sets the version of the software to the specified one in the request
+9. UpgradePostCheck on vFW-1 VM - ansible playbook checks if the software of vFW is the same as the one specified in the workflow's input.
+10. DistributeTraffic on vPKG VM - ansible playbook reconfigures vPKG in order to send traffic to destination specified before by OOF (reverse configuration).
+11. DistributeTrafficCheck on vFW-2 VM - ansible playbook checks if traffic is not present on vFW from which traffic should be migrated out. If traffic is still present after 30 seconds playbook fails
+12. DistributeTrafficCheck on vFW-1 VM - ansible playbook checks if traffic is present on vFW to which traffic should be migrated. If traffic is still not present after 30 seconds playbook fails
+13. Unlock on vPKG, vFW-1 and vFW-2 VMs
+
+
+For both workflows, when everything is fine, change of the traffic should be observed on the following dashboards (please turn on automatic reload of graphs). The observed traffic pattern for the upgrade scenario should be similar to the one presented in Figure 2
+
+ ::
+
+ http://vSINK-1-IP:667/
+ http://vSINK-2-IP:667/
Workflow Results
~~~~~~~~~~~~~~~~
-Expected result of workflow execution, when everythin is fine, is following:
+Expected result of Traffic Distribution workflow execution, when everything is fine, is following:
::
Distribute Traffic Workflow Execution:
- APPC REQ 0 - DistributeTrafficCheck
- Request Accepted. Receiving result status...
- Checking LCM DistributeTrafficCheck Status
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
+ WORKFLOW << Migrate vFW Traffic Conditionally >>
+ APPC LCM << CheckLock >> [Check vPGN Lock Status]
+ UNLOCKED
+ APPC LCM << CheckLock >> [Check vFW-1 Lock Status]
+ UNLOCKED
+ APPC LCM << CheckLock >> [Check vFW-2 Lock ]
+ UNLOCKED
+ APPC LCM << Lock >> [Lock vPGN]
SUCCESSFUL
- APPC REQ 1 - DistributeTraffic
- Request Accepted. Receiving result status...
- Checking LCM DistributeTraffic Status
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
+ APPC LCM << Lock >> [Lock vFW-1]
+ SUCCESSFUL
+ APPC LCM << Lock >> [Lock vFW-2]
+ SUCCESSFUL
+ APPC LCM << DistributeTrafficCheck >> [Check current traffic destination on vPGN]
+ ACCEPTED
+ APPC LCM << DistributeTrafficCheck >> [Status]
IN_PROGRESS
IN_PROGRESS
IN_PROGRESS
SUCCESSFUL
- APPC REQ 2 - DistributeTrafficCheck
- Request Accepted. Receiving result status...
- Checking LCM DistributeTrafficCheck Status
- IN_PROGRESS
+ WORKFLOW << Migrate Traffic and Verify >>
+ APPC LCM << DistributeTraffic >> [Migrating source vFW traffic to destination vFW]
+ ACCEPTED
+ APPC LCM << DistributeTraffic >> [Status]
IN_PROGRESS
IN_PROGRESS
IN_PROGRESS
@@ -787,49 +894,77 @@ Expected result of workflow execution, when everythin is fine, is following:
IN_PROGRESS
IN_PROGRESS
SUCCESSFUL
- APPC REQ 3 - DistributeTrafficCheck
- Request Accepted. Receiving result status...
- Checking LCM DistributeTrafficCheck Status
- IN_PROGRESS
- IN_PROGRESS
+ APPC LCM << DistributeTrafficCheck >> [Checking traffic has been stopped on the source vFW]
+ ACCEPTED
+ APPC LCM << DistributeTrafficCheck >> [Status]
IN_PROGRESS
IN_PROGRESS
IN_PROGRESS
+ SUCCESSFUL
+ APPC LCM << DistributeTrafficCheck >> [Checking traffic has appeared on the destination vFW]
+ ACCEPTED
+ APPC LCM << DistributeTrafficCheck >> [Status]
IN_PROGRESS
IN_PROGRESS
SUCCESSFUL
+ APPC LCM << Unlock >> [Unlock vPGN]
+ SUCCESSFUL
+ APPC LCM << Unlock >> [Unlock vFW-1]
+ SUCCESSFUL
+ APPC LCM << Unlock >> [Unlock vFW-2]
+ SUCCESSFUL
+
+
+In case we want to execute an operation and one of the VNFs is locked because of another operation being executed:
+
+::
+
+ Distribute Traffic Workflow Execution:
+ WORKFLOW << Migrate vFW Traffic Conditionally >>
+ APPC LCM << CheckLock >> [Check vPGN Lock Status]
+ LOCKED
+ Traceback (most recent call last):
+ File "workflow.py", line 1235, in <module>
+ sys.argv[6].lower() == 'true', sys.argv[7].lower() == 'true', new_version)
+ File "workflow.py", line 1209, in execute_workflow
+ _execute_lcm_requests({"requests": lcm_requests, "description": "Migrate vFW Traffic Conditionally"}, onap_ip, check_result)
+ File "workflow.py", line 101, in wrap
+ ret = f(*args, **kwargs)
+ File "workflow.py", line 1007, in _execute_lcm_requests
+ raise Exception("APPC LCM << {} >> FAILED".format(req['input']['action']))
+ Exception: APPC LCM << CheckLock >> FAILED
+
In case of failure the result can be following:
::
Distribute Traffic Workflow Execution:
- APPC REQ 0 - DistributeTrafficCheck
- Request Accepted. Receiving result status...
- Checking LCM DistributeTrafficCheck Status
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
- IN_PROGRESS
+ WORKFLOW << Migrate vFW Traffic Conditionally >>
+ APPC LCM << CheckLock >> [Check vPGN Lock Status]
+ UNLOCKED
+ APPC LCM << CheckLock >> [Check vFW-1 Lock Status]
+ UNLOCKED
+ APPC LCM << CheckLock >> [Check vFW-2 Lock ]
+ UNLOCKED
+ APPC LCM << Lock >> [Lock vPGN]
+ SUCCESSFUL
+ APPC LCM << Lock >> [Lock vFW-1]
+ SUCCESSFUL
+ APPC LCM << Lock >> [Lock vFW-2]
+ SUCCESSFUL
+ APPC LCM << DistributeTrafficCheck >> [Check current traffic destination on vPGN]
+ ACCEPTED
+ APPC LCM << DistributeTrafficCheck >> [Status]
FAILED
- Traceback (most recent call last):
- File "workflow.py", line 563, in <module>
- sys.argv[5].lower() == 'true', sys.argv[6].lower() == 'true')
- File "workflow.py", line 557, in execute_workflow
- confirm_appc_lcm_action(onap_ip, req, check_result)
- File "workflow.py", line 529, in confirm_appc_lcm_action
- raise Exception("LCM {} {} - {}".format(req['input']['action'], status['status'], status['status-reason']))
- Exception: LCM DistributeTrafficCheck FAILED - FAILED
-
-.. note:: When CDT and Ansible is configured properly Traffic Distribution Workflow can fail when you pass as a vnf-id argument the ID of vFW VNF which does not handle traffic at the moment. To solve that pass the VNF ID of the other vFW VNF instance. Because of the same reason you cannot execute twice in a row workflow for the same VNF ID if first execution succedds.
+ APPC LCM <<DistributeTrafficCheck>> [FAILED - FAILED]
+ WORKFLOW << Migrate Traffic and Verify >> SKIP
+ APPC LCM << Unlock >> [Unlock vPGN]
+ SUCCESSFUL
+ APPC LCM << Unlock >> [Unlock vFW-1]
+ SUCCESSFUL
+ APPC LCM << Unlock >> [Unlock vFW-2]
+ SUCCESSFUL
+
+
+.. note:: When CDT and Ansible is configured properly Traffic Distribution Workflow can fail when you pass as a vnf-id argument the ID of vFW VNF which does not handle traffic at the moment. To solve that pass the VNF ID of the other vFW VNF instance. Because of the same reason you cannot execute twice in a row workflow for the same VNF ID if first execution succeeds.
diff --git a/docs/docs_vFW_CNF_CDS.rst b/docs/docs_vFW_CNF_CDS.rst
new file mode 100644
index 000000000..5e01df317
--- /dev/null
+++ b/docs/docs_vFW_CNF_CDS.rst
@@ -0,0 +1,1903 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+.. Copyright 2022 ONAP
+
+.. _docs_vFW_CNF_CDS:
+
+:orphan:
+
+vFirewall CNF Use Case
+----------------------
+
+Source files
+~~~~~~~~~~~~
+- Heat/Helm/CDS models: `vFW_CNF_CDS Model`_
+- Automation Scripts: `vFW_CNF_CDS Automation`_
+
+Description
+~~~~~~~~~~~
+This use case is a combination of `vFW CDS Dublin`_ and `vFW EDGEX K8S`_ use cases and it is continuously improved since Frankfurt release. The aim is to continue improving Kubernetes based Network Functions (a.k.a CNF) support in ONAP. Use case continues where `vFW EDGEX K8S`_ left and brings CDS support into picture like `vFW CDS Dublin`_ did for the old vFW Use case. Predecessor use case is also documented here `vFW EDGEX K8S In ONAP Wiki`_.
+
+This use case shows how to onboard helm packages and to instantiate them with help of ONAP. Following improvements were made in the vFW CNF Use Case:
+
+- vFW Kubernetes Helm charts support overrides (previously mostly hardcoded values)
+- SDC accepts Onboarding Package with many helm packages what allows to keep decomposition of service instance similar to `vFW CDS Dublin`_
+- Compared to `vFW EDGEX K8S`_ use case **MACRO** workflow in SO is used instead of VNF a'la carte workflow
+- No VNF data preloading used, instead resource-assignment feature of CDS is used
+- CDS is used to resolve instantiation time parameters (Helm overrides)
+ * IP addresses with IPAM
+ * Unique names for resources with ONAP naming service
+ * CDS is used to create and upload **multicloud/k8s profile** as part of instantiation flow
+- Combined all models (Heat, Helm, CBA) into the same git repo and created a single onboarding package `vFW_CNF_CDS Model`_
+- vFW CNF status is monitored prior to the completion of the instantiation process.
+- It is possible to not only provide overrides for Helm packages but we can modify Helm packages before instantiation or we can modify CNF after its deployment
+- Use case does not contain Closed Loop part of the vFW demo.
+
+All changes to related ONAP components and Use Case can be found in the following tickets:
+
+- `REQ-182`_
+- `REQ-341`_
+- `REQ-458`_
+- `REQ-627`_
+- `REQ-890`_
+
+The vFW CNF Use Case
+~~~~~~~~~~~~~~~~~~~~
+The vFW CNF CDS use case shows how to instantiate multiple CNF instances in similar way as VNFs bringing CNFs closer to first class citizens in ONAP.
+
+One of the biggest practical changes compared to the old demos (any ONAP demo) is that whole network function content (user provided content) is collected to one place and more importantly into git repository (`vFW_CNF_CDS Model`_) that provides version control (that is pretty important thing). That is very basic thing but unfortunately this is a common problem when running any ONAP demo and trying to find all content from many different git repositories and even some files only in ONAP wiki.
+
+Demo git directory has also `Data Dictionary`_ file (CDS model time resource) included.
+
+Another founding idea from the start was to provide complete content in single onboarding package available directly from that git repository. Not any revolutionary idea as that's the official package format ONAP supports and all content supposed to be in that same package for single service regardless of the models and closed loops and configurations etc.
+
+Following table describes all the source models to which this demo is based on.
+
+=============== ================= ===========
+Model Git reference Description
+--------------- ----------------- -----------
+Heat `vFW_NextGen`_ Heat templates used in original vFW demo but split into multiple vf-modules
+Helm `vFW_Helm Model`_ Helm templates used in `vFW EDGEX K8S`_ demo
+CDS model `vFW CBA Model`_ CDS CBA model used in `vFW CDS Dublin`_ demo
+=============== ================= ===========
+
+.. note:: Since the Guilin release `vFW_CNF_CDS Model`_ contains sources that allow to model and instantiate CNF with VNF/Heat orchestration approach (Frankfurt) and with native Helm orchestration approach (Guilin and beyond). VNF/Heat orchestration approach is deprecated and will not be enhanced in the future. Please follow README.txt description and further documentation here to generate and select appropriate onboarding package which will leverage appropriate SO orchestration path.
+
+Since Honolulu release vFW CNF use case supports three different scenarios where different capabilities of CNF Orchestration in ONAP can be experimented:
+
+.. figure:: files/vFW_CNF_CDS/scenarios.png
+ :scale: 60 %
+ :align: center
+
+ vFW CNF Scenarios
+
+- Scenario 1: simple deployment of vFW CNF instance
+- Scenario 2: deployment of vFW CNF instance with enrichment of the Helm deployment with profiling mechanism
+- Scenario 3: deployment of vFW CNF instance with Day2 configuration applied and CNF status checked as a part of a config-deploy operation
+
+The 3rd scenario presents the most comprehensive way of managing the CNF in ONAP, including Day 0/1/2 operations. It shows also how to combine in the Day2 operation information for the AAI and SDNC MDSAL. All scenarios can be supported by execution of the dedicated Healthcheck workflow `3-5 Verification of the CNF Status`_.
+
+Modeling of Onboarding Package/Helm
+...................................
+
+The starting point for this demo was Helm package containing one Kubernetes application, see `vFW_Helm Model`_. In this demo we decided to follow SDC/SO vf-module concept the same way as original vFW demo was split into multiple vf-modules instead of one (`vFW_NextGen`_). The same way we split Helm version of vFW into multiple Helm packages each matching one dedicated vf-module.
+
+The Jakarta version of the `vFW_CNF_CDS Model`_ contains files required to create **VSP onboarding packages in Helm Native format** where each Helm package is standalone and is natively understood in consequence by SO. The **Dummy Heat** approach (available already since the Frankfurt release), which considers association of each Helm package with dummy heat templates, is since Jakarta no longer a part of the vFW CNF demo. If you are interested to see how to onboard and orchestrate the CNF using the **Dummy Heat** approach, please open the Istanbul version of the documentation. The VSP Helm packages are matched to the vf-module concept, so basically each Helm application after instantiation is visible to ONAP as a separate vf-module. The **Native Helm** format for onboarding has **crucial** role in the further orchestration approach applied for Helm package instantiation as it leverages the **CNF Adapter** and it populates k8s resource information to AAI what plays significant role in the Day operation for CNFs, including closed-loop automation with Prometheus. Read more in `3-1 CNF Orchestration Paths in ONAP`_
+
+Produced **Native Helm** VSP onboarding package `Creating Onboarding Package`_ format has following MANIFEST file (package_native/MANIFEST.json). The Helm package is delivered as HELM package through SDC and SO. The *isBase* flag of HELM artifact is ignored by SDC but in the manifest one HELM or HEAT artifacts must be defined as isBase = true. If both HEAT and HELM are present in the same manifest file the base one must be always one of HELM artifacts. Moreover, the name of HELM type artifact **MUST** match the specified pattern: *helm_<some_name>* and the HEAT type artifacts, if present in the same manifest, cannot contain keyword *helm*. These limitations are a consequence of current limitations of the SDC onboarding and VSP validation engine and will be addressed in the future releases.
+
+::
+
+ {
+ "name": "virtualFirewall",
+ "description": "",
+ "data": [
+ {
+ "file": "CBA.zip",
+ "type": "CONTROLLER_BLUEPRINT_ARCHIVE"
+ },
+ {
+ "file": "helm_base_template.tgz",
+ "type": "HELM",
+ "isBase": "true"
+ },
+ {
+ "file": "helm_vfw.tgz",
+ "type": "HELM",
+ "isBase": "false"
+ },
+ {
+ "file": "helm_vpkg.tgz",
+ "type": "HELM",
+ "isBase": "false"
+ },
+ {
+ "file": "helm_vsn.tgz",
+ "type": "HELM",
+ "isBase": "false"
+ }
+ ]
+ }
+
+.. note:: CDS model (CBA package) is delivered as SDC supported own type CONTROLLER_BLUEPRINT_ARCHIVE but the current limitation of VSP onboarding forces to use the artifact name *CBA.zip* to automatically recognize CBA as a CONTROLLER_BLUEPRINT_ARCHIVE.
+
+CDS Model (CBA)
+...............
+
+CDS plays a crucial role in the process of CNF instantiation and is responsible for delivery of instantiation parameters, CNF customization, configuration of CNF after the deployment and may be used in the process of CNF status verification.
+
+Creating CDS model was the core of the use case work and also the most difficult and time consuming part. Current template used by use-case should be easily reusable for anyone. Once CDS GUI will be fully working, we think that CBA development should be much easier. For CBA structure reference, please visit it's documentation page `CDS Documentation`_.
+
+At first the target was to keep CDS model as close as possible to `vFW_CNF_CDS Model`_ use case model and only add smallest possible changes to enable also k8s usage. That is still the target but in practice model deviated from the original one already and time pressure pushed us to not care about sync. Basically the end result could be possible much streamlined if wanted to be smallest possible to working only for K8S based network functions.
+
+Base on this example there are demonstrated following features of CDS and CBA model
+
+- resource assignment string, integer and json types
+- sourcing of resolved value on vf-module level from vnf level assignment
+- extracting data from AAI and MD-SAL during the resource assignment
+- custom resource assignment with Kotlin script
+- templating of the vtl files
+- building of imperative workflows
+- utilization of on_succes and on_failure event in imperative workflow
+- handling of the failure in the workflow
+- implementation of custom workflow logic with Kotlin script
+- example of config-assign and config-deploy operation decomposed into many steps
+- complex parametrization of config deploy operation
+- combination and aggregation of AAI and MD-SAL data in config-assign and config-deploy operations
+
+The prepared CBA model demonstrates also how to utilize CNF specific features of CBA, suited for the deployment of CNF with k8splugin in ONAP:
+
+- building and upload of k8s profile template into k8splugin
+- building and upload of k8s configuration template into k8splugin
+- parametrization and creation of configuration instance from configuration template
+- validation of CNF status with Kotlin script
+- execution of the CNF healthcheck
+
+As K8S application is split into multiple Helm packages to match vf-modules, CBA modeling follows the same and for each vf-module there's own template in CBA package. The **Native Helm** approach requires the Helm artifact names to start with *helm_* prefix, in the same way as the names of artifacts in the MANIFEST file of the VSP do. The **Native Helm** artifacts' list is following:
+
+::
+
+ "artifacts" : {
+ "helm_base_template-template" : {
+ "type" : "artifact-template-velocity",
+ "file" : "Templates/base_template-template.vtl"
+ },
+ "helm_base_template-mapping" : {
+ "type" : "artifact-mapping-resource",
+ "file" : "Templates/base_template-mapping.json"
+ },
+ "helm_vpkg-template" : {
+ "type" : "artifact-template-velocity",
+ "file" : "Templates/vpkg-template.vtl"
+ },
+ "helm_vpkg-mapping" : {
+ "type" : "artifact-mapping-resource",
+ "file" : "Templates/vpkg-mapping.json"
+ },
+ "helm_vfw-template" : {
+ "type" : "artifact-template-velocity",
+ "file" : "Templates/vfw-template.vtl"
+ },
+ "helm_vfw-mapping" : {
+ "type" : "artifact-mapping-resource",
+ "file" : "Templates/vfw-mapping.json"
+ },
+ "vnf-template" : {
+ "type" : "artifact-template-velocity",
+ "file" : "Templates/vnf-template.vtl"
+ },
+ "vnf-mapping" : {
+ "type" : "artifact-mapping-resource",
+ "file" : "Templates/vnf-mapping.json"
+ },
+ "helm_vsn-template" : {
+ "type" : "artifact-template-velocity",
+ "file" : "Templates/vsn-template.vtl"
+ },
+ "helm_vsn-mapping" : {
+ "type" : "artifact-mapping-resource",
+ "file" : "Templates/vsn-mapping.json"
+ }
+ }
+
+SO requires for instantiation the name of the profile in the parameter: *k8s-rb-profile-name* and the name of the release of the application: *k8s-rb-instance-release-name*. The latter one, when not specified, will be replaced with a combination of profile name and vf-module-id for each Helm instance/vf-module instantiated. Both values can be found in vtl templates dedicated for vf-modules.
+
+CBA offers the possibility of automatic generation and upload of the RB profile content to the multicloud/k8s plugin. An RB profile is required if you want to deploy your CNF into a k8s namespace other than *default*. Also, if you want to ensure particular templating of your Helm charts, specific to the particular version of the cluster on which Helm packages will be deployed, the profile is used to specify the version of your cluster.
+
+RB profile can be used to enrich or to modify the content of the original helm package. Profile can be also used to add additional k8s helm templates for helm installation or can be used to modify existing k8s helm templates for each created CNF instance. It opens another level of CNF customization, much more than customization of the Helm package with override values. K8splugin offers also a *default* profile without content, for the default namespace and default cluster version.
+
+::
+
+ ---
+ version: v1
+ type:
+ values: "override_values.yaml"
+ configresource:
+ - filepath: resources/deployment.yaml
+ chartpath: templates/deployment.yaml
+
+
+Above we have exemplary manifest file of the RB profile. Since Frankfurt *override_values.yaml* file does not need to be used as instantiation values are passed to the plugin over Instance API of k8s plugin. In the example, profile contains additional k8s Helm template which will be added on demand to the helm package during its installation. In our case, depending on the SO instantiation request input parameters, vPGN helm package can be enriched with additional ssh service. Such service will be dynamically added to the profile by CDS and later on CDS will upload whole custom RB profile to multicloud/k8s plugin.
+
+In order to support generation and upload of profile, our vFW CBA model has enhanced **resource-assignment** workflow which contains additional step: **profile-upload**. It leverages dedicated functionality introduced in Guilin release that can be used to upload predefined profile or to generate and upload content of the profile with Velocity templating mechanism.
+
+::
+
+ "resource-assignment": {
+ "steps": {
+ "resource-assignment": {
+ "description": "Resource Assign Workflow",
+ "target": "resource-assignment",
+ "activities": [
+ {
+ "call_operation": "ResourceResolutionComponent.process"
+ }
+ ],
+ "on_success": [
+ "profile-upload"
+ ]
+ },
+ "profile-upload": {
+ "description": "Generate and upload K8s Profile",
+ "target": "k8s-profile-upload",
+ "activities": [
+ {
+ "call_operation": "ComponentScriptExecutor.process"
+ }
+ ]
+ }
+ },
+
+.. note:: In the Frankfurt release profile upload was implemented as a custom Kotlin script included into the CBA. It was responsible for upload of K8S profile into multicloud/k8s plugin. It is still a good example of the integration of Kotlin scripting into the CBA. For those interested in this functionality we recommend to look into the `Frankfurt CBA Definition`_ and `Frankfurt CBA Script`_. Since Honolulu we introduce more advanced use of the Kotlin script for verification of the CNF status or custom resolution of complex parameters over Kotlin script - both can be found in the further part of the documentation.
+
+In our example, for the vPKG helm package we may select the *vfw-cnf-cds-vpkg-profile* profile that is included into the CBA as a folder. The profile generation step uses the Velocity template processing functionality embedded in CDS, parametrized with the ssh port number (specified in the SO request as *vpg-management-port*).
+
+::
+
+ {
+ "name": "vpg-management-port",
+ "property": {
+ "description": "The number of node port for ssh service of vpg",
+ "type": "integer",
+ "default": "0"
+ },
+ "input-param": false,
+ "dictionary-name": "vpg-management-port",
+ "dictionary-source": "default",
+ "dependencies": []
+ }
+
+*vpg-management-port* can be included directly into the helm template and such template will be included into vPKG helm package in time of its instantiation.
+
+::
+
+ apiVersion: v1
+ kind: Service
+ metadata:
+ name: {{ .Values.vpg_name_0 }}-ssh-access
+ labels:
+ vnf-name: {{ .Values.vnf_name }}
+ vf-module-name: {{ .Values.vpg_name_0 }}
+ release: {{ .Release.Name }}
+ chart: {{ .Chart.Name }}
+ spec:
+ type: NodePort
+ ports:
+ - port: 22
+ nodePort: ${vpg-management-port}
+ selector:
+ vf-module-name: {{ .Values.vpg_name_0 }}
+ release: {{ .Release.Name }}
+ chart: {{ .Chart.Name }}
+
+.. warning:: The port value is of Integer type and CDS resolves it as an integer. If the resolved values are returned to SO during the resource resolution phase they are passed back to k8splugin only as strings. In consequence, Integer values are passed to the Instantiation API as strings and then they have to be converted in the helm template to integers. In order to avoid such conversion it is better to customize override values with Integers in the profile and to skip the return of these parameters in the resource resolution phase (they should not be included in the .vtl files).
+
+The mechanism of profile generation and upload requires a specific node template in the CBA definition. In our case, it comes with the declaration of two profiles: one static *vfw-cnf-cds-base-profile* in a form of an archive and the second complex *vfw-cnf-cds-vpkg-profile* in a form of a folder for processing and profile generation. Below is the example of the definition of node type for execution of the profile upload operation.
+
+::
+
+ "k8s-profile-upload": {
+ "type": "component-k8s-profile-upload",
+ "interfaces": {
+ "K8sProfileUploadComponent": {
+ "operations": {
+ "process": {
+ "inputs": {
+ "artifact-prefix-names": {
+ "get_input": "template-prefix"
+ },
+ "resource-assignment-map": {
+ "get_attribute": [
+ "resource-assignment",
+ "assignment-map"
+ ]
+ }
+ }
+ }
+ }
+ }
+ },
+ "artifacts": {
+ "vfw-cnf-cds-base-profile": {
+ "type": "artifact-k8sprofile-content",
+ "file": "Templates/k8s-profiles/vfw-cnf-cds-base-profile.tar.gz"
+ },
+ "vfw-cnf-cds-vpkg-profile": {
+ "type": "artifact-k8sprofile-content",
+ "file": "Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile"
+ },
+ "vfw-cnf-cds-vpkg-profile-mapping": {
+ "type": "artifact-mapping-resource",
+ "file": "Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/ssh-service-mapping.json"
+ }
+ }
+ }
+
+Artifact file determines a place of the static profile or the content of the complex profile. In the latter case we need a pair of profile folder and mapping file with a declaration of the parameters that CDS needs to resolve first, before the Velocity templating is applied to the .vtl files present in the profile content. After Velocity templating the .vtl extensions will be dropped from the file names. The embedded mechanism will include in the profile only files present in the profile MANIFEST file that needs to contain the list of final names of the files to be included into the profile. The figure below shows the idea of profile templating.
+
+.. figure:: files/vFW_CNF_CDS/profile-templating.png
+ :align: center
+
+ K8s Profile Templating
+
+SO requires for instantiation name of the profile in the parameter: *k8s-rb-profile-name*. The *component-k8s-profile-upload* that stands behind the profile uploading mechanism has input parameters that can be passed directly (checked in the first order) or can be taken from the *resource-assignment-map* parameter which can be a result of associated *component-resource-resolution* result, like in our case their values are resolved on vf-module level resource assignment. The *component-k8s-profile-upload* inputs are following:
+
+- k8s-rb-definition-name [string] - (mandatory) the name under which RB definition was created - **VF Module Model Invariant ID** in ONAP
+- k8s-rb-definition-version [string] - (mandatory) the version of created RB definition name - **VF Module Model Customization ID** in ONAP
+- k8s-rb-profile-name [string] - (mandatory) the name of the profile under which it will be created in k8s plugin. Other parameters are required only when profile must be uploaded because it does not exist yet
+- k8s-rb-profile-source [string] - the source of profile content - name of the artifact of the profile. If missing *k8s-rb-profile-name* is treated as a source
+- k8s-rb-profile-namespace [string] - (mandatory) the k8s namespace name associated with profile being created
+- k8s-rb-profile-kubernetes-version [string] - the version of the cluster on which application will be deployed - it may impact the helm templating process like selection of the api versions for resources so it should match the version of the k8s cluster in which resources are being deployed.
+- k8s-rb-profile-labels [json] - the extra labels (label-name: label-value) to add for each k8s resource created for CNF in the k8s cluster (since Jakarta release).
+- k8s-rb-profile-extra-types [list<json>] - the list of extra k8s types that should be returned by StatusAPI. It may be useful when k8s resources associated with CNF instance are created outside of the helm package (i.e. by k8s operator) but should be treated like resources of CNF. To make this happen such resources should have the instance label *k8splugin.io/rb-instance-id* which may be assured by such tools like *kyverno*. Each extra type json object needs *Group*, *Version* and *Kind* attributes. (since Jakarta release).
+- resource-assignment-map [json] - result of the associated resource assignment step - it may deliver values of inputs if they are not specified directly
+- artifact-prefix-names [list<string>] - (mandatory) the list of artifact prefixes like for resource-assigment step in the resource-assigment workflow or its subset
+
+In the SO request user can pass parameter of name *k8s-rb-profile-name* which in our case may have value: *vfw-cnf-cds-base-profile*, *vfw-cnf-cds-vpkg-profile* or *default*. The *default* profile does not contain any content inside and allows instantiation of CNF without the need to define and upload any additional profiles. *vfw-cnf-cds-vpkg-profile* has been prepared to test instantiation of the second modified vFW CNF instance.
+
+K8splugin allows to specify override parameters (similar to --set behavior of helm client) to instantiated resource bundles. This allows for providing dynamic parameters to instantiated resources without the need to create new profiles for this purpose. This mechanism should be used with *default* profile but may be used also with any custom profile.
+
+The overall flow of helm overrides parameters processing is visible on the following figure. When *rb definition* (helm package) is being instantiated for a specified *rb profile* K8splugin combines override values from the helm package, *rb profile* and from the instantiation request - in the respective order. It means that the value from the instantiation request (SO request input or CDS resource assignment result) has a precedence over the value from the *rb profile* and the value from the *rb profile* has a precedence over the helm package default override value. Similarly, profile can contain resource files that may extend or amend the existing files of the original helm package content.
+
+.. figure:: files/vFW_CNF_CDS/helm-overrides.png
+ :align: center
+
+ The overall flow of helm data processing
+
+Both the profile content (4) and the instantiation request values (5) can be generated during the resource assignment process according to its definition for the CBA associated with the helm package. CBA may generate e.g. names, IP addresses, ports and can use this information to produce the *rb-profile* (3) content. Finally, all three sources of override values, templates and additional resources files are merged together (6) by K8splugin in the order explained before.
+
+.. figure:: files/vFW_CNF_CDS/helm-overrides-steps.png
+ :align: center
+
+ The steps of processing of helm data with help of CDS
+
+Both the profile content (4) and the instantiation request values (5) can be generated during the resource assignment process according to its definition for the CBA associated with the helm package. CBA may generate e.g. names, IP addresses, ports and can use this information to produce the *rb-profile* (3) content. Finally, all three sources of override values, templates and additional resources files are merged together (6) by K8splugin in the order explained before.
+
+Besides the deployment of the Helm application the CBA of vFW demonstrates also how to use dedicated features for config-assign (7) and config-deploy (8) operations. In the use case, *config-assign* and *config-deploy* operations deal mainly with creation and instantiation of configuration template for k8s plugin. The configuration template has a form of Helm package. When k8s plugin instantiates configuration, it creates or may replace existing resources deployed on k8s cluster. In our case the configuration template is used to provide an alternative way of upload of the additional ssh-service but it could be used to modify the configmap of vfw or vpkg vf-modules.
+
+In order to provide configuration instantiation capability standard *config-assign* and *config-deploy* workflows have been changed into imperative workflows with first step responsible for collection of information for configuration templating and configuration instantiation. The source of data for this operations is AAI, MDSAL with data for vnf and vf-modules as *config-assign* and *config-deploy* does not receive dedicated input parameters from SO. In consequence both operations need to source from *resource-assignment* phase and data placed in the AAI and MDSAL.
+
+vFW CNF *config-assign* workflow is following:
+
+::
+
+ "config-assign": {
+ "steps": {
+ "config-setup": {
+ "description": "Gather necessary input for config template upload",
+ "target": "config-setup-process",
+ "activities": [
+ {
+ "call_operation": "ResourceResolutionComponent.process"
+ }
+ ],
+ "on_success": [
+ "config-template"
+ ]
+ },
+ "config-template": {
+ "description": "Generate and upload K8s config template",
+ "target": "k8s-config-template",
+ "activities": [
+ {
+ "call_operation": "K8sConfigTemplateComponent.process"
+ }
+ ]
+ }
+ },
+
+vFW CNF *config-deploy* workflow is following:
+
+::
+
+ "config-deploy": {
+ "steps": {
+ "config-setup": {
+ "description": "Gather necessary input for config init and status verification",
+ "target": "config-setup-process",
+ "activities": [
+ {
+ "call_operation": "ResourceResolutionComponent.process"
+ }
+ ],
+ "on_success": [
+ "config-apply"
+ ]
+ },
+ "config-apply": {
+ "description": "Activate K8s config template",
+ "target": "k8s-config-apply",
+ "activities": [
+ {
+ "call_operation": "K8sConfigTemplateComponent.process"
+ }
+ ],
+ "on_success": [
+ "status-verification-script"
+ ]
+ },
+
+
+In our example configuration template for vFW CNF is a helm package that contains the same resource that we can find in the vPKG *vfw-cnf-cds-vpkg-profile* profile - extra ssh service. This helm package contains Helm encapsulation for ssh-service and the values.yaml file with declaration of all the inputs that may parametrize the ssh-service. The configuration templating step leverages the *component-k8s-config-template* component that prepares the configuration template and uploads it to k8splugin. In consequence, it may be used later on for instantiation of the configuration.
+
+In this use case we have two options with *ssh-service-config* and *ssh-service-config-customizable* as a source of the same configuration template. In consequence, either we take a complete template or we have the template folder with the content of the helm package and CDS may perform dedicated resource resolution for it with templating of all the files with .vtl extensions. The process is very similar to the one described for the profile upload functionality.
+
+::
+
+ "k8s-config-template": {
+ "type": "component-k8s-config-template",
+ "interfaces": {
+ "K8sConfigTemplateComponent": {
+ "operations": {
+ "process": {
+ "inputs": {
+ "artifact-prefix-names": [
+ "helm_vpkg"
+ ],
+ "resource-assignment-map": {
+ "get_attribute": [
+ "config-setup-process",
+ "",
+ "assignment-map",
+ "config-deploy",
+ "config-deploy-setup"
+ ]
+ }
+ }
+ }
+ }
+ }
+ },
+ "artifacts": {
+ "ssh-service-config": {
+ "type": "artifact-k8sconfig-content",
+ "file": "Templates/k8s-configs/ssh-service.tar.gz"
+ },
+ "ssh-service-config-customizable": {
+ "type": "artifact-k8sconfig-content",
+ "file": "Templates/k8s-configs/ssh-service-config"
+ },
+ "ssh-service-config-customizable-mapping": {
+ "type": "artifact-mapping-resource",
+ "file": "Templates/k8s-configs/ssh-service-config/ssh-service-mapping.json"
+ }
+ }
+ }
+
+The *component-k8s-config-template* that stands behind creation of configuration template has input parameters that can be passed directly (checked in the first order) or can be taken from the *resource-assignment-map* parameter which can be a result of associated *component-resource-resolution* result, like in vFW CNF use case their values are resolved on vf-module level dedicated for *config-assign* and *config-deploy* resource assignment step. The *component-k8s-config-template* inputs are following:
+
+- k8s-rb-definition-name [string] - (mandatory) the name under which RB definition was created - **VF Module Model Invariant ID** in ONAP
+- k8s-rb-definition-version [string] - (mandatory) the version of created RB definition name - **VF Module Model Customization ID** in ONAP
+- k8s-rb-config-template-name [string] - (mandatory) the name of the configuration template under which it will be created in k8s plugin. Other parameters are required only when configuration template must be uploaded because it does not exist yet
+- k8s-rb-config-template-source [string] - the source of config template content - name of the artifact of the configuration template. When missing, the main definition helm package will be used as a configuration template source (since Jakarta release).
+- resource-assignment-map [json] - result of the associated resource assignment step - it may deliver values of inputs if they are not specified directly
+- artifact-prefix-names [list<string>] - (mandatory) the list of artifact prefixes like for resource-assigment step in the resource-assigment workflow or its subset
+
+In our case the *component-k8s-config-template* component receives all the inputs from the dedicated resource-assignment process *config-setup* that is responsible for resolution of all the inputs for configuration templating. This process generates data for *helm_vpkg* prefix and such one is specified in the list of prefixes of the configuration template component. It means that configuration template will be prepared only for vPKG function.
+
+::
+
+ "k8s-config-apply": {
+ "type": "component-k8s-config-value",
+ "interfaces": {
+ "K8sConfigValueComponent": {
+ "operations": {
+ "process": {
+ "inputs": {
+ "artifact-prefix-names": [
+ "helm_vpkg"
+ ],
+ "k8s-config-operation-type": "create",
+ "resource-assignment-map": {
+ "get_attribute": [
+ "config-setup-process",
+ "",
+ "assignment-map",
+ "config-deploy",
+ "config-deploy-setup"
+ ]
+ }
+ }
+ }
+ }
+ }
+ },
+ "artifacts": {
+ "ssh-service-default": {
+ "type": "artifact-k8sconfig-content",
+ "file": "Templates/k8s-configs/ssh-service-config/values.yaml"
+ },
+ "ssh-service-config": {
+ "type": "artifact-k8sconfig-content",
+ "file": "Templates/k8s-configs/ssh-service-values/values.yaml.vtl"
+ },
+ "ssh-service-config-mapping": {
+ "type": "artifact-mapping-resource",
+ "file": "Templates/k8s-configs/ssh-service-values/ssh-service-mapping.json"
+ }
+ }
+ }
+
+
+The *component-k8s-config-value* that stands behind creation of configuration instance has input parameters that can be passed directly (checked in the first order) or can be taken from the *resource-assignment-map* parameter which can be a result of associated *component-resource-resolution* result, like in vFW CNF use case their values are resolved on vf-module level dedicated for *config-assign* and *config-deploy* resource-assignment step. The *component-k8s-config-value* inputs are following:
+
+- k8s-rb-config-name [string] - (mandatory) the name of the configuration template under which it will be created in k8s plugin. Other parameters are required only when configuration template must be uploaded because it does not exist yet
+- k8s-rb-config-template-name [string] - (mandatory) the name of the configuration template under which it will be created in k8s plugin. Other parameters are required only when configuration template must be uploaded because it does not exist yet
+- k8s-rb-config-value-source [string] - the source of config template content - name of the artifact of the configuration template. If missing *k8s-rb-config-name* is treated as a source
+- k8s-rb-config-version [string] - the version of the configuration to restore during the *rollback* operation. First configuration after *create* has version *1* and new ones, after *update* will have versions of the following numbers. When *rollback* operation is performed all previous versions on the path to the desired one are being restored one by one. (since Jakarta)
+- k8s-instance-id [string] - (mandatory) the identifier of the rb instance for which the configuration should be applied
+- k8s-config-operation-type [string] - the type of the configuration operation to perform: *create*, *update*, *rollback*, *delete* or *delete_config*. By default *create* operation is performed. *rollback* and *delete_config* types are present since Jakarta release. The *update* operation creates a new version of the configuration. *delete* operation creates also a new version of configuration that deletes all the resources in k8s from the cluster. *delete_config* operation aims to delete configuration entirely but it does not delete or update any resources associated with the configuration.
+- resource-assignment-map [json] - result of the associated resource assignment step - it may deliver values of inputs if they are not specified directly
+- artifact-prefix-names [list<string>] - (mandatory) the list of artifact prefixes like for resource-assigment step in the resource-assigment workflow or its subset
+
+Like for the configuration template, the *component-k8s-config-value* component receives all the inputs from the dedicated resource-assignment process *config-setup* that is responsible for resolution of all the inputs for configuration. This process generates data for *helm_vpkg* prefix and such one is specified in the list of prefixes of the configuration values component. It means that configuration instance will be created only for vPKG function (component allows also update or delete of the configuration but in the vFW CNF case it is used only to create configuration instance).
+
+Finally, `Data Dictionary`_ is also included into demo git directory, re-modeling and making changes into model utilizing CDS model time / runtime is easier as used DD is also known.
+
+.. note:: CBA of vFW CNF use case is already enriched and VSP of vFW CNF has CBA included inside. In consequence, when VSP is being onboarded into SDC and service is being distributed, CBA is uploaded into CDS. Anyway, CDS contains in the starter dictionary all data dictionary values used in the use case and enrichment of CBA should work as well.
+
+Instantiation Overview
+----------------------
+
+.. note:: Since Guilin release use case is equipped with automated method **<AUTOMATED>** with python scripts to replace Postman method **<MANUAL>** used in Frankfurt. Nevertheless, Postman collection is good to understand the entire process. If a user selects to follow Postman collection, then automation scripts **must not** be used. **For the entire process use only scripts or only Postman collection**. Both options are described in the further steps of this instruction.
+
+The figure below shows all the interactions that take place during vFW CNF instantiation. It's not describing flow of actions (ordered steps) but rather component dependencies.
+
+.. figure:: files/vFW_CNF_CDS/Instantiation_topology.png
+ :align: center
+
+ vFW CNF CDS Use Case Runtime interactions.
+
+PART 1 - ONAP Installation
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+1-1 Deployment components
+.........................
+
+In order to run the vFW_CNF_CDS use case, we need ONAP Jakarta Release (or later) with at least following components:
+
+======================================================= ===========
+ONAP Component name                                     Description
+------------------------------------------------------- -----------
+AAI Required for Inventory Cloud Owner, Customer, Owning Entity, Service, Generic VNF, VF Module
+SDC VSP, VF and Service Modeling of the CNF
+DMAAP Distribution of the onboarding package including CBA to all ONAP components
+SO Required for Macro Orchestration using the generic building blocks
+CDS                                                     Resolution of cloud parameters including Helm override parameters for the CNF. Creation of the multicloud/k8s profile for CNF instantiation. Creation of configuration template and its instantiation
+SDNC (needs to include netbox and Naming Generation mS) Provides GENERIC-RESOURCE-API for cloud Instantiation orchestration via CDS.
+Policy Used to Store Naming Policy
+AAF Used for Authentication and Authorization of requests
+Portal Required to access SDC.
+MSB Exposes multicloud interfaces used by SO.
+Multicloud K8S plugin part used to pass SO instantiation requests to external Kubernetes cloud region.
+Contrib Chart containing multiple external components. Out of those, we only use Netbox utility in this use-case for IPAM
+Robot Optional. Can be used for running automated tasks, like provisioning cloud customer, cloud region, service subscription, etc ..
+Shared Cassandra DB Used as a shared storage for ONAP components that rely on Cassandra DB, like AAI
+Shared Maria DB Used as a shared storage for ONAP components that rely on Maria DB, like SDNC, and SO
+======================================================= ===========
+
+1-2 Deployment
+..............
+
+In order to deploy such an instance, follow the `ONAP Deployment Guide`_
+
+As we can see from the guide, we can use an override file that helps us customize our ONAP deployment, without modifying the OOM Folder, so you can download this override file here, that includes the necessary components mentioned above.
+
+**override.yaml** file where enabled: true is set for each component needed in demo (by default all components are disabled).
+
+::
+
+ aai:
+ enabled: true
+ aaf:
+ enabled: true
+ cassandra:
+ enabled: true
+ cds:
+ enabled: true
+ contrib:
+ enabled: true
+ dmaap:
+ enabled: true
+ mariadb-galera:
+ enabled: true
+ msb:
+ enabled: true
+ multicloud:
+ enabled: true
+ policy:
+ enabled: true
+ portal:
+ enabled: true
+ robot:
+ enabled: true
+ sdc:
+ enabled: true
+ sdnc:
+ enabled: true
+ so:
+ enabled: true
+
+Then deploy ONAP with Helm with your override file.
+
+::
+
+ helm deploy onap local/onap --namespace onap -f ~/override.yaml
+
+In case redeployment needed `Helm Healer`_ could be a faster and convenient way to redeploy.
+
+::
+
+ helm-healer.sh -n onap -f ~/override.yaml -s /dockerdata-nfs --delete-all
+
+Or redeploy (clean re-deploy also data removed) just wanted components (Helm releases), cds in this example.
+
+::
+
+ helm-healer.sh -f ~/override.yaml -s /dockerdata-nfs/ -n onap -c onap-cds
+
+There are many instructions in the ONAP wiki on how to follow your deployment status and whether it succeeded or not, mostly using Robot Health checks. One way we used is to skip the outermost Robot wrapper and use ete-k8s.sh directly to be able to select the checked components easily. The script is found in the OOM git repository *oom/kubernetes/robot/ete-k8s.sh*.
+
+::
+
+ {
+ failed=
+ for comp in {aaf,aai,dmaap,msb,multicloud,policy,portal,sdc,sdnc,so}; do
+ if ! ./ete-k8s.sh onap health-$comp; then
+ failed=$failed,$comp
+ fi
+ done
+ if [ -n "$failed" ]; then
+ echo "These components failed: $failed"
+ false
+ else
+ echo "Healthcheck successful"
+ fi
+ }
+
+And check status of pods, deployments, jobs etc.
+
+::
+
+ kubectl -n onap get pods | grep -vie 'completed' -e 'running'
+ kubectl -n onap get deploy,sts,jobs
+
+
+1-3 Post Deployment
+...................
+
+After completing the first part above, we should have a functional ONAP deployment for the Jakarta Release.
+
+We will need to apply a few modifications to the deployed ONAP Jakarta instance in order to run the use case.
+
+Retrieving logins and passwords of ONAP components
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+Since Frankfurt release hardcoded passwords were mostly removed and it is possible to configure passwords of ONAP components in time of their installation. In order to retrieve these passwords with associated logins it is required to get them with kubectl. Below is the procedure on mariadb-galera DB component example.
+
+::
+
+ kubectl get secret `kubectl get secrets | grep mariadb-galera-db-root-password | awk '{print $1}'` -o jsonpath="{.data.login}" | base64 --decode
+ kubectl get secret `kubectl get secrets | grep mariadb-galera-db-root-password | awk '{print $1}'` -o jsonpath="{.data.password}" | base64 --decode
+
+In this case login is empty as the secret is dedicated to root user.
+
+
+Postman collection setup
+++++++++++++++++++++++++
+
+In this demo we have on purpose created all manual ONAP preparation steps (which in real life are automated) by using Postman so it will be clear what exactly is needed. Some of the steps like AAI population is automated by Robot scripts in other ONAP demos (**./demo-k8s.sh onap init**) and Robot script could be used for many parts also in this demo.
+
+Postman collection is used also to trigger instantiation using SO APIs.
+
+Following steps are needed to setup Postman:
+
+- Import this Postman collection zip
+
+ :download:`Postman collection <files/vFW_CNF_CDS/postman.zip>`
+
+- Extract the zip and import the Postman collection into Postman. An environment file is provided for reference, but it's better to create your own environment providing the variables listed in the next chapter.
+ - `vFW_CNF_CDS.postman_collection.json`
+ - `vFW_CNF_CDS.postman_environment.json`
+
+- For use case debugging purposes to get Kubernetes cluster external access to SO CatalogDB (GET operations only), modify SO CatalogDB service to NodePort instead of ClusterIP. You may also create separate own NodePort if you wish, but here we have just edited directly the service with kubectl.
+
+::
+
+ kubectl -n onap edit svc so-catalog-db-adapter
+ - .spec.type: ClusterIP
+ + .spec.type: NodePort
+ + .spec.ports[0].nodePort: 30120
+
+.. note:: The port number 30120 is used in included Postman collection
+
+- You may also want to inspect after SDC distribution if CBA has been correctly delivered to CDS. In order to do it, there are created relevant calls later described in doc, however CDS since Frankfurt doesn't expose blueprints-processor's service as NodePort. This is OPTIONAL but if you'd like to use these calls later, you need to expose service in similar way as so-catalog-db-adapter above:
+
+::
+
+ kubectl edit -n onap svc cds-blueprints-processor-http
+ - .spec.type: ClusterIP
+ + .spec.type: NodePort
+ + .spec.ports[0].nodePort: 30499
+
+.. note:: The port number 30499 is used in included Postman collection
+
+**Postman variables:**
+
+Most of the Postman variables are automated by Postman scripts and environment file provided, but there are few mandatory variables to fill by user.
+
+===================== ===================
+Variable Description
+--------------------- -------------------
+k8s ONAP Kubernetes host
+sdnc_port port of sdnc service for accessing MDSAL
+service-name name of service as defined in SDC
+service-version version of service defined in SDC (if service wasn't updated, it should be set to "1.0")
+service-instance-name name of instantiated service (if ending with -{num}, will be autoincremented for each instantiation request)
+===================== ===================
+
+You can get the sdnc_port value with
+
+::
+
+ kubectl -n onap get svc sdnc -o json | jq '.spec.ports[]|select(.port==8282).nodePort'
+
+Automation Environment Setup
+............................
+
+Whole content of this use case is stored into single git repository and it contains both the required onboarding information as well as automation scripts for onboarding and instantiation of the use case.
+
+::
+
+ git clone --single-branch --branch jakarta "https://gerrit.onap.org/r/demo"
+ cd demo/heat/vFW_CNF_CDS/templates
+
+In order to prepare environment for onboarding and instantiation of the use case make sure you have *git*, *make*, *helm* and *pipenv* applications installed.
+
+The automation scripts are based on `Python SDK`_ and are adopted to automate process of service onboarding, instantiation, deletion and cloud region registration. To configure them for further use:
+
+::
+
+ cd demo/heat/vFW_CNF_CDS/automation
+
+1. Install required packages with
+::
+
+    pipenv install
+
+2. Run virtual python environment
+::
+
+ pipenv shell --fancy
+
+3. Add kubeconfig files, one for ONAP cluster, and one for k8s cluster that will host vFW
+
+.. note:: Both files can be configured after creation of k8s cluster for vFW instance `2-1 Installation of Managed Kubernetes`_. Make sure that they have configured external IP address properly. If any cluster uses self signed certificates set also *insecure-skip-tls-verify* flag in the config file.
+
+- artifacts/cluster_kubeconfig - IP address must be reachable by ONAP pods, especially *mutlicloud-k8s* pod
+
+- artifacts/onap_kubeconfig - IP address must be reachable by automation scripts
+
+4. Modify config.py file
+
+- SCENARIO - like described in the `The vFW CNF Use Case`_ section
+- NATIVE - when enabled (default) **Native Helm** path will be used, otherwise **Dummy Heat** path will be used (deprecated)
+- MACRO_INSTANTIATION - instantiation method used: macro (default) or a'la carte. A'la carte only for the purpose of use with other use cases
+- K8S_NAMESPACE - k8s namespace to use for deployment of CNF (vfirewall by default)
+- K8S_VERSION - version of the k8s cluster
+- K8S_REGION - name of the k8s region from the CLOUD_REGIONS (kud by default)
+- CLOUD_REGIONS - configuration of k8s or Openstack regions
+- GLOBAL_CUSTOMER_ID - identifier of customer in ONAP
+- VENDOR - name of the Vendor in ONAP
+- SERVICENAME - **Name of your service model in SDC**
+- SKIP_POST_INSTANTIATION - whether post instantiation configuration should be run (it is set indirectly by *SCENARIO*)
+- VNF_PARAM_LIST - list of parameters to pass for VNF creation process
+- VF_MODULE_PARAM_LIST - list of parameters to pass for VF Module creation
+
+.. note:: For automation script it is necessary to modify only SCENARIO constant. Other constants may be modified if needed.
+
+AAI
+...
+
+Some basic entries are needed in ONAP AAI. These entries are needed once per ONAP installation and do not need to be repeated when running multiple demos based on the same definitions.
+
+Create all these entries into AAI in this order. Postman collection provided in this demo can be used for creating each entry.
+
+**<MANUAL>**
+::
+
+ Postman -> Initial ONAP setup -> Create
+
+- Create Customer
+- Create Owning-entity
+- Create Platform
+- Create Project
+- Create Line Of Business
+
+Corresponding GET operations in "Check" folder in Postman can be used to verify entries created. Postman collection also includes some code that tests/verifies some basic issues e.g. gives error if entry already exists.
+
+**<AUTOMATED>**
+
+This step is performed jointly with onboarding step `3-2 Onboarding`_
+
+Naming Policy
++++++++++++++
+
+Naming policy is needed to generate unique names for all instance time resources that are wanted to be modeled in the way naming policy is used. Those are normally VNF, VNFC and VF-module names, network names etc. Naming is general ONAP feature and not limited to this use case.
+
+This usecase leverages default ONAP naming policy - "SDNC_Policy.ONAP_NF_NAMING_TIMESTAMP".
+To check that the naming policy is created and pushed OK, we can run the command below from inside any ONAP pod.
+
+::
+
+ curl --silent -k --user 'healthcheck:zb!XztG34' -X GET "https://policy-api:6969/policy/api/v1/policytypes/onap.policies.Naming/versions/1.0.0/policies/SDNC_Policy.ONAP_NF_NAMING_TIMESTAMP/versions/1.0.0"
+
+.. note:: Please change credentials respectively to your installation. The required credentials can be retrieved with instruction `Retrieving logins and passwords of ONAP components`_
+
+PART 2 - Installation of managed Kubernetes cluster
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In this demo the target cloud region is a Kubernetes cluster of your choice basically just like with Openstack. ONAP platform is a bit too much hard wired to Openstack and it's visible in many demos.
+
+2-1 Installation of Managed Kubernetes
+......................................
+
+In this demo we use Kubernetes deployment used by ONAP multicloud/k8s team to test their plugin features see `KUD github`_. There's also some outdated instructions in ONAP wiki `KUD in Wiki`_.
+
+KUD deployment is fully automated and also used in ONAP's CI/CD to automatically verify all `Multicloud k8s gerrit`_ commits (see `KUD Jenkins ci/cd verification`_) and that's quite good (and rare) level of automated integration testing in ONAP. KUD deployment is used as its installation is automated and it also includes a bunch of Kubernetes plugins used to test various k8s plugin features. In addition to deployment, the KUD repository also contains test scripts to automatically test multicloud/k8s plugin features. Those scripts are run in CI/CD.
+
+See `KUD subproject in github`_ for a list of additional plugins this Kubernetes deployment has. In this demo the tested CNF is dependent on following plugins:
+
+- ovn4nfv
+- Multus
+- Virtlet
+
+Follow instructions in `KUD github`_ and install target Kubernetes cluster in your favorite machine(s), simplest being just one machine. Your cluster node(s) needs to be accessible from ONAP Kubernetes nodes. Make sure your installed *pip* is of **version < 21.0**. Version 21 does not support python 2.7 that is used in *aio.sh* script. Also to avoid performance problems of your k8s cluster make sure you install only necessary plugins and before running *aio.sh* script execute following command
+::
+
+ export KUD_ADDONS="virtlet ovn4nfv"
+
+.. warning:: In order to run vFW CNF Use Case deployment test please make sure that this workaround does not have to be applied as well. `KUD Interface Permission`_
+
+2-2 Cloud Registration
+......................
+
+Managed Kubernetes cluster is registered here into ONAP as one cloud region. This obviously is done just one time for this particular cloud. Cloud registration information is kept in AAI.
+
+**<MANUAL>**
+
+Postman collection have folder/entry for each step. Execute in this order.
+::
+
+ Postman -> K8s Cloud Region Registration -> Create
+
+- Create Complex
+- Create Cloud Region
+- Create Complex-Cloud Region Relationship
+- Create Service
+- Create Service Subscription
+- Create Cloud Tenant
+- Create Availability Zone
+- Upload Connectivity Info
+
+.. note:: For "Upload Connectivity Info" call you need to provide kubeconfig file of existing KUD cluster. You can find that kubeconfig on deployed KUD in the directory `~/.kube/config` and this file can be easily copied e.g. via SCP. Please ensure that kubeconfig contains external IP of K8s cluster in kubeconfig and correct it, if it's not.
+
+SO database needs to be (manually) modified for SO to know that this particular cloud region is to be handled by multicloud. Values we insert needs to obviously match to the ones we populated into AAI.
+
+.. note:: Please change credentials respectively to your installation. The required credentials can be retrieved with instruction `Retrieving logins and passwords of ONAP components`_
+
+::
+
+ kubectl -n onap exec onap-mariadb-galera-0 -it -- mysql -uroot -psecretpassword -D catalogdb
+ select * from cloud_sites;
+ insert into cloud_sites(ID, REGION_ID, IDENTITY_SERVICE_ID, CLOUD_VERSION, CLLI, ORCHESTRATOR) values("k8sregionfour", "k8sregionfour", "DEFAULT_KEYSTONE", "2.5", "clli2", "multicloud");
+ select * from cloud_sites;
+ exit
+
+.. note:: The configuration of the new k8s cloud site is documented also here `K8s cloud site config`_
+
+**<AUTOMATED>**
+
+Please copy the kubeconfig file of existing KUD cluster to automation/artifacts/cluster_kubeconfig location `Automation Environment Setup`_ - step **3**. You can find that kubeconfig on deployed KUD in the directory `~/.kube/config` and this file can be easily copied e.g. via SCP. Please ensure that kubeconfig contains external IP of K8s cluster in kubeconfig and correct it, if it's not.
+
+::
+
+ python create_cloud_regions.py
+
+PART 3 - Execution of the Use Case
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This part contains all the steps to run the use case by using ONAP GUIs, Postman or Python automation scripts.
+
+3-1 CNF Orchestration Paths in ONAP
+...................................
+
+Following picture describe the overall sequential flow of the use case for **Native Helm** path (with CNF Adapter)
+
+Native Helm CNF Orchestration
+.............................
+
+Introduced in the Guilin release CNF orchestration method brings native distribution of Helm packages from SDC and native orchestration of CNFs (Helm packages) with SO. SO leverages CNF adapter to interact with K8sPlugin that takes responsibility for the communication with k8s clusters. Heat templates are not required in the SDC onboarding package and, thanks to the fact that SO knows about Helm package orchestration, synchronization of data between k8s clusters and AAI is possible. Only in this path, since Istanbul release, k8s-resource object is created in relation to tenant, vf-module and generic-vnf objects in AAI. SO CNF adapter is responsible for synchronization of data between AAI and k8s cluster, however currently it happens only once - after creation of CNF by SO, so any further changes (like new pods) will not be synchronized into AAI.
+
+.. figure:: files/vFW_CNF_CDS/Native_Helm_Flow.png
+ :align: center
+
+ vFW CNF CDS Use Case sequence flow for *Native Helm* (Guilin+) path.
+
+
+Kubernetes and Helm Compatibility
+.................................
+
+K8sPlugin, in the Istanbul release (0.9.x), supports Helm packages that can be validated by Helm 3.5 application. It means that new Helm features introduced after Helm 3.5 version are not supported currently. Moreover, since Jakarta release K8sPlugin 0.10.x implementation supports upgrade operation but CNF Upgrade orchestration workflow is not yet fully supported in SO orchestration workflows. In consequence, a new service model can be distributed with a new Helm package over SDC but the Helm upgrade procedure must be performed by a direct call to k8sPlugin. The request payload is almost the same as for instance create but the release-name comes from the already created instance.
+
+::
+
+ curl -i -X POST http://${K8S_NODE_IP}:30280/api/multicloud-k8s/v1/v1/instance/{rb-instance-id}/upgrade
+
+K8sPlugin utilizes also v0.19.4 version of K8s client and its compatibility matrix with k8s clusters can be found here `K8s Client Compatibility`_, Compatibility Matrix section.
+
+3-2 Onboarding
+..............
+
+.. note:: Make sure you have performed `Automation Environment Setup`_ steps before following actions here.
+
+Creating Onboarding Package
++++++++++++++++++++++++++++
+
+Content of the onboarding package can be created with provided Makefile in the *template* folder.
+
+Complete content of both Onboarding Packages for **Dummy Heat** and **Native Helm** is packaged to the following VSP onboarding package files:
+
+- **Dummy Heat** path: **vfw_k8s_demo.zip**
+
+- **Native Helm** path: **native_vfw_k8s_demo.zip**
+
+.. note:: Procedure requires *make* and *helm* applications installed
+
+::
+
+ git clone --single-branch --branch jakarta "https://gerrit.onap.org/r/demo"
+ cd demo/heat/vFW_CNF_CDS/templates
+ make
+
+The result of make operation execution is following:
+::
+
+ make clean
+ make[1]: Entering directory '/mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates'
+ rm -rf package_dummy/
+ rm -rf package_native/
+ rm -rf cba_dummy
+ rm -f vfw_k8s_demo.zip
+ rm -f native_vfw_k8s_demo.zip
+ make[1]: Leaving directory '/mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates'
+ make all
+ make[1]: Entering directory '/mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates'
+ mkdir package_dummy/
+ mkdir package_native/
+ make -C helm
+ make[2]: Entering directory '/mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates/helm'
+ rm -f base_template-*.tgz
+ rm -f helm_base_template.tgz
+ rm -f base_template_cloudtech_k8s_charts.tgz
+ helm package base_template
+ Successfully packaged chart and saved it to: /mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates/helm/base_template-0.2.0.tgz
+ mv base_template-*.tgz helm_base_template.tgz
+ cp helm_base_template.tgz base_template_cloudtech_k8s_charts.tgz
+ rm -f vpkg-*.tgz
+ rm -f helm_vpkg.tgz
+ rm -f vpkg_cloudtech_k8s_charts.tgz
+ helm package vpkg
+ Successfully packaged chart and saved it to: /mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates/helm/vpkg-0.2.0.tgz
+ mv vpkg-*.tgz helm_vpkg.tgz
+ cp helm_vpkg.tgz vpkg_cloudtech_k8s_charts.tgz
+ rm -f vfw-*.tgz
+ rm -f helm_vfw.tgz
+ rm -f vfw_cloudtech_k8s_charts.tgz
+ helm package vfw
+ Successfully packaged chart and saved it to: /mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates/helm/vfw-0.2.0.tgz
+ mv vfw-*.tgz helm_vfw.tgz
+ cp helm_vfw.tgz vfw_cloudtech_k8s_charts.tgz
+ rm -f vsn-*.tgz
+ rm -f helm_vsn.tgz
+ rm -f vsn_cloudtech_k8s_charts.tgz
+ helm package vsn
+ Successfully packaged chart and saved it to: /mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates/helm/vsn-0.2.0.tgz
+ mv vsn-*.tgz helm_vsn.tgz
+ cp helm_vsn.tgz vsn_cloudtech_k8s_charts.tgz
+ make[2]: Leaving directory '/mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates/helm'
+ mv helm/helm_*.tgz package_native/
+ mv helm/*.tgz package_dummy/
+ cp base_dummy/* package_dummy/
+ cp base_native/* package_native/
+ cp -r cba cba_dummy
+ sed -i 's/"helm_/"/g' cba_dummy/Definitions/vFW_CNF_CDS.json
+ cd cba_dummy/ && zip -r CBA.zip . -x pom.xml .idea/\* target/\*
+ adding: Definitions/ (stored 0%)
+ adding: Definitions/artifact_types.json (deflated 69%)
+ adding: Definitions/data_types.json (deflated 88%)
+ adding: Definitions/node_types.json (deflated 90%)
+ adding: Definitions/policy_types.json (stored 0%)
+ adding: Definitions/relationship_types.json (stored 0%)
+ adding: Definitions/resources_definition_types.json (deflated 94%)
+ adding: Definitions/vFW_CNF_CDS.json (deflated 87%)
+ adding: Scripts/ (stored 0%)
+ adding: Scripts/kotlin/ (stored 0%)
+ adding: Scripts/kotlin/README.md (stored 0%)
+ adding: Templates/ (stored 0%)
+ adding: Templates/base_template-mapping.json (deflated 89%)
+ adding: Templates/base_template-template.vtl (deflated 87%)
+ adding: Templates/k8s-profiles/ (stored 0%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-base-profile.tar.gz (stored 0%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/ (stored 0%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/manifest.yaml (deflated 35%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/override_values.yaml (stored 0%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/ssh-service-mapping.json (deflated 51%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/ssh-service-template.yaml.vtl (deflated 56%)
+ adding: Templates/nf-params-mapping.json (deflated 88%)
+ adding: Templates/nf-params-template.vtl (deflated 44%)
+ adding: Templates/vfw-mapping.json (deflated 89%)
+ adding: Templates/vfw-template.vtl (deflated 87%)
+ adding: Templates/vnf-mapping.json (deflated 89%)
+ adding: Templates/vnf-template.vtl (deflated 93%)
+ adding: Templates/vpkg-mapping.json (deflated 89%)
+ adding: Templates/vpkg-template.vtl (deflated 87%)
+ adding: Templates/vsn-mapping.json (deflated 89%)
+ adding: Templates/vsn-template.vtl (deflated 87%)
+ adding: TOSCA-Metadata/ (stored 0%)
+ adding: TOSCA-Metadata/TOSCA.meta (deflated 37%)
+ cd cba/ && zip -r CBA.zip . -x pom.xml .idea/\* target/\*
+ adding: Definitions/ (stored 0%)
+ adding: Definitions/artifact_types.json (deflated 69%)
+ adding: Definitions/data_types.json (deflated 88%)
+ adding: Definitions/node_types.json (deflated 90%)
+ adding: Definitions/policy_types.json (stored 0%)
+ adding: Definitions/relationship_types.json (stored 0%)
+ adding: Definitions/resources_definition_types.json (deflated 94%)
+ adding: Definitions/vFW_CNF_CDS.json (deflated 87%)
+ adding: Scripts/ (stored 0%)
+ adding: Scripts/kotlin/ (stored 0%)
+ adding: Scripts/kotlin/README.md (stored 0%)
+ adding: Templates/ (stored 0%)
+ adding: Templates/base_template-mapping.json (deflated 89%)
+ adding: Templates/base_template-template.vtl (deflated 87%)
+ adding: Templates/k8s-profiles/ (stored 0%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-base-profile.tar.gz (stored 0%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/ (stored 0%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/manifest.yaml (deflated 35%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/override_values.yaml (stored 0%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/ssh-service-mapping.json (deflated 51%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/ssh-service-template.yaml.vtl (deflated 56%)
+ adding: Templates/nf-params-mapping.json (deflated 88%)
+ adding: Templates/nf-params-template.vtl (deflated 44%)
+ adding: Templates/vfw-mapping.json (deflated 89%)
+ adding: Templates/vfw-template.vtl (deflated 87%)
+ adding: Templates/vnf-mapping.json (deflated 89%)
+ adding: Templates/vnf-template.vtl (deflated 93%)
+ adding: Templates/vpkg-mapping.json (deflated 89%)
+ adding: Templates/vpkg-template.vtl (deflated 87%)
+ adding: Templates/vsn-mapping.json (deflated 89%)
+ adding: Templates/vsn-template.vtl (deflated 87%)
+ adding: TOSCA-Metadata/ (stored 0%)
+ adding: TOSCA-Metadata/TOSCA.meta (deflated 37%)
+ mv cba/CBA.zip package_native/
+ mv cba_dummy/CBA.zip package_dummy/
+ cd package_dummy/ && zip -r vfw_k8s_demo.zip .
+ adding: base_template.env (deflated 22%)
+ adding: base_template.yaml (deflated 59%)
+ adding: base_template_cloudtech_k8s_charts.tgz (stored 0%)
+ adding: CBA.zip (stored 0%)
+ adding: MANIFEST.json (deflated 84%)
+ adding: vfw.env (deflated 23%)
+ adding: vfw.yaml (deflated 60%)
+ adding: vfw_cloudtech_k8s_charts.tgz (stored 0%)
+ adding: vpkg.env (deflated 13%)
+ adding: vpkg.yaml (deflated 59%)
+ adding: vpkg_cloudtech_k8s_charts.tgz (stored 0%)
+ adding: vsn.env (deflated 15%)
+ adding: vsn.yaml (deflated 59%)
+ adding: vsn_cloudtech_k8s_charts.tgz (stored 0%)
+ cd package_native/ && zip -r native_vfw_k8s_demo.zip .
+ adding: CBA.zip (stored 0%)
+ adding: helm_base_template.tgz (stored 0%)
+ adding: helm_vfw.tgz (stored 0%)
+ adding: helm_vpkg.tgz (stored 0%)
+ adding: helm_vsn.tgz (stored 0%)
+ adding: MANIFEST.json (deflated 71%)
+ mv package_dummy/vfw_k8s_demo.zip .
+ mv package_native/native_vfw_k8s_demo.zip .
+ $
+
+Import this package into SDC and follow onboarding steps.
+
+Service Creation with SDC
++++++++++++++++++++++++++
+
+**<MANUAL>**
+
+Service Creation in SDC is composed of the same steps that are performed by most other use-cases. For reference, you can look at `vLB use-case`_
+
+Onboard VSP
+
+- Remember during VSP onboard to choose "Network Package" Onboarding procedure
+
+Create VF and Service
+Service -> Properties Assignment -> Choose VF (at right box):
+
+- sdnc_artifact_name - vnf
+- sdnc_model_name - vFW_CNF_CDS
+- sdnc_model_version - 8.0.0
+- skip_post_instantiation_configuration - True
+
+.. note:: Since Honolulu skip_post_instantiation_configuration flag can be set to *False* if we want to run config-assign/config-deploy operations.
+
+::
+
+ python onboarding.py
+
+Distribution Of Service
++++++++++++++++++++++++
+
+**<MANUAL>**
+
+Distribute service.
+
+Verify in SDC UI if distribution was successful. In case of any errors (sometimes SO fails on accepting CLOUD_TECHNOLOGY_SPECIFIC_ARTIFACT), try redistribution. You can also verify distribution for few components manually:
+
+- SDC:
+
+ SDC Catalog database should have our service now defined.
+
+ ::
+
+ Postman -> LCM -> [SDC] Catalog Service
+
+ ::
+
+ {
+ "uuid": "64dd38f3-2307-4e0a-bc98-5c2cbfb260b6",
+ "invariantUUID": "cd1a5c2d-2d4e-4d62-ac10-a5fe05e32a22",
+ "name": "vfw_cnf_cds_svc",
+ "version": "1.0",
+ "toscaModelURL": "/sdc/v1/catalog/services/64dd38f3-2307-4e0a-bc98-5c2cbfb260b6/toscaModel",
+ "category": "Network L4+",
+ "lifecycleState": "CERTIFIED",
+ "lastUpdaterUserId": "cs0008",
+ "distributionStatus": "DISTRIBUTED"
+ }
+
+ Listing should contain entry with our service name **vfw_cnf_cds_svc**.
+
+.. note:: Note that it's an example name, it depends on how your model is named during Service design in SDC and must be kept in sync with Postman variables.
+
+- SO:
+
+ SO Catalog database should have our service NFs defined now.
+
+ ::
+
+ Postman -> LCM -> [SO] Catalog DB Service xNFs
+
+ ::
+
+ {
+ "serviceVnfs": [
+ {
+ "modelInfo": {
+ "modelName": "VfVfwK8sDemoCnfMc202109231",
+ "modelUuid": "70edaca8-8c79-468a-aa76-8224cfe686d0",
+ "modelInvariantUuid": "7901fc89-a94d-434a-8454-1e27b99dc0e2",
+ "modelVersion": "1.0",
+ "modelCustomizationUuid": "86dc8af4-aa17-4fc7-9b20-f12160d99718",
+ "modelInstanceName": "vfw_cnf_cds_vsp 0"
+ },
+ "toscaNodeType": "org.openecomp.resource.vf.VfwCnfCdsVsp",
+ "nfFunction": null,
+ "nfType": null,
+ "nfRole": null,
+ "nfNamingCode": null,
+ "multiStageDesign": "false",
+ "vnfcInstGroupOrder": null,
+ "resourceInput": "TBD",
+ "vfModules": [
+ {
+ "modelInfo": {
+ "modelName": "VfVfwK8sDemoCnfMc202109231..helm_base_template..module-4",
+ "modelUuid": "a9f5d65f-20c3-485c-8cf9-eda9ea94300e",
+ "modelInvariantUuid": "7888f606-3ee8-4edb-b96d-467fead6ee4f",
+ "modelVersion": "1",
+ "modelCustomizationUuid": "b9faba47-d03d-4ba1-a117-4c19632b2136"
+ },
+ "isBase": false,
+ "vfModuleLabel": "base_template",
+ "initialCount": 1,
+ "hasVolumeGroup": false
+ },
+ {
+ "modelInfo": {
+ "modelName": "VfVfwK8sDemoCnfMc202109293..helm_vsn..module-1",
+ "modelUuid": "8e72ed23-4842-471a-ad83-6a4d285c48e1",
+ "modelInvariantUuid": "4f5a8a02-0dc6-4387-b86e-bd352f711e18",
+ "modelVersion": "1",
+ "modelCustomizationUuid": "ab5614d6-25c2-4863-bad3-93e354b4d5ba"
+ },
+ "isBase": false,
+ "vfModuleLabel": "vsn",
+ "initialCount": 0,
+ "hasVolumeGroup": false
+ },
+ {
+ "modelInfo": {
+ "modelName": "VfVfwK8sDemoCnfMc202109293..helm_vpkg..module-2",
+ "modelUuid": "64f9d622-a8c1-4992-ba35-abdc13f87660",
+ "modelInvariantUuid": "88d8d71a-30c9-4e00-a6b9-bd86bae7ed37",
+ "modelVersion": "1",
+ "modelCustomizationUuid": "37ab4199-19aa-4f63-9a11-d31b8c25ce46"
+ },
+ "isBase": false,
+ "vfModuleLabel": "vpkg",
+ "initialCount": 0,
+ "hasVolumeGroup": false
+ },
+ {
+ "modelInfo": {
+ "modelName": "VfVfwK8sDemoCnfMc202109293..helm_vfw..module-3",
+ "modelUuid": "f6f62096-d5cc-474e-82c7-655e7d6628b2",
+ "modelInvariantUuid": "6077ce70-3a1d-47e6-87a0-6aed6a29b089",
+ "modelVersion": "1",
+ "modelCustomizationUuid": "879cda5e-7af9-43d2-bd6c-50e330ab328e"
+ },
+ "isBase": false,
+ "vfModuleLabel": "vfw",
+ "initialCount": 0,
+ "hasVolumeGroup": false
+ }
+ ],
+ "groups": []
+ }
+ ]
+ }
+
+.. note:: For **Native Helm** path both modelName will have prefix *helm_* i.e. *helm_vfw* and vfModuleLabel will have *helm_* keyword inside i.e. *VfVfwK8sDemoCnfMc202109293..helm_vfw..module-3*
+
+- SDNC:
+
+ SDNC should have it's database updated with *sdnc_* properties that were set during service modeling.
+
+.. note:: Please change credentials respectively to your installation. The required credentials can be retrieved with instruction `Retrieving logins and passwords of ONAP components`_
+
+
+::
+
+ kubectl -n onap exec onap-mariadb-galera-0 -it -- sh
+ mysql -uroot -psecretpassword -D sdnctl
+ MariaDB [sdnctl]> select sdnc_model_name, sdnc_model_version, sdnc_artifact_name from VF_MODEL WHERE customization_uuid = '86dc8af4-aa17-4fc7-9b20-f12160d99718';
+ +-----------------+--------------------+--------------------+
+ | sdnc_model_name | sdnc_model_version | sdnc_artifact_name |
+ +-----------------+--------------------+--------------------+
+ | vFW_CNF_CDS | 8.0.0 | vnf |
+ +-----------------+--------------------+--------------------+
+ 1 row in set (0.00 sec)
+
+
+.. note:: customization_uuid value is the modelCustomizationUuid of the VNF (serviceVnfs response in 2nd Postman call from SO Catalog DB)
+
+- CDS:
+
+ CDS should onboard CBA uploaded as part of VF.
+
+ ::
+
+ Postman -> Distribution Verification -> [CDS] List CBAs
+
+ ::
+
+ [
+ {
+ "blueprintModel": {
+ "id": "c505e516-b35d-4181-b1e2-bcba361cfd0a",
+ "artifactUUId": null,
+ "artifactType": "SDNC_MODEL",
+ "artifactVersion": "8.0.0",
+ "artifactDescription": "Controller Blueprint for vFW_CNF_CDS:8.0.0",
+ "internalVersion": null,
+ "createdDate": "2020-05-29T06:02:20.000Z",
+ "artifactName": "vFW_CNF_CDS",
+ "published": "N",
+ "updatedBy": "Samuli Silvius <s.silvius@partner.samsung.com>",
+ "tags": "Samuli Silvius, Lukasz Rajewski, vFW_CNF_CDS"
+ }
+ }
+ ]
+
+ The list should have the matching entries with SDNC database:
+
+ - sdnc_model_name == artifactName
+ - sdnc_model_version == artifactVersion
+
+ You can also use Postman to download CBA for further verification but it's fully optional.
+
+ ::
+
+ Postman -> Distribution Verification -> [CDS] CBA Download
+
+- K8splugin:
+
+ K8splugin should onboard 4 resource bundles related to helm resources:
+
+ ::
+
+ Postman -> Distribution Verification -> [K8splugin] List Resource Bundle Definitions
+
+ ::
+
+ [
+ {
+ "rb-name": "a9f5d65f-20c3-485c-8cf9-eda9ea94300e",
+ "rb-version": "b9faba47-d03d-4ba1-a117-4c19632b2136",
+ "chart-name": "base_template",
+ "description": "",
+ "labels": {
+ "vf_module_model_name": "VfVfwK8sDemoCnfMc202109231..helm_base_template..module-4",
+ "vf_module_model_uuid": "7888f606-3ee8-4edb-b96d-467fead6ee4f"
+ }
+ },
+ {
+ "rb-name": "f6f62096-d5cc-474e-82c7-655e7d6628b2",
+ "rb-version": "879cda5e-7af9-43d2-bd6c-50e330ab328e",
+ "chart-name": "vfw",
+ "description": "",
+ "labels": {
+ "vf_module_model_name": "VfVfwK8sDemoCnfMc202109293..helm_vfw..module-3",
+ "vf_module_model_uuid": "6077ce70-3a1d-47e6-87a0-6aed6a29b089"
+ }
+ },
+ {
+ "rb-name": "8e72ed23-4842-471a-ad83-6a4d285c48e1",
+ "rb-version": "ab5614d6-25c2-4863-bad3-93e354b4d5ba",
+ "chart-name": "vsn",
+ "description": "",
+ "labels": {
+ "vf_module_model_name": "VfVfwK8sDemoCnfMc202109293..helm_vsn..module-1",
+ "vf_module_model_uuid": "4f5a8a02-0dc6-4387-b86e-bd352f711e18"
+ }
+ },
+ {
+ "rb-name": "64f9d622-a8c1-4992-ba35-abdc13f87660",
+ "rb-version": "37ab4199-19aa-4f63-9a11-d31b8c25ce46",
+ "chart-name": "vpkg",
+ "description": "",
+ "labels": {
+ "vf_module_model_name": "VfVfwK8sDemoCnfMc202109293..helm_vpkg..module-2",
+ "vf_module_model_uuid": "88d8d71a-30c9-4e00-a6b9-bd86bae7ed37"
+ }
+ }
+ ]
+
+**<AUTOMATED>**
+
+Distribution is a part of the onboarding step and at this stage is performed
+
+3-3 CNF Instantiation
+.....................
+
+This is the whole beef of the use case and furthermore the core of it is that we can instantiate any amount of instances of the same CNF each running and working completely of their own. Very basic functionality in VM (VNF) side but for Kubernetes and ONAP integration this is the first milestone towards other normal use cases familiar for VNFs.
+
+**<MANUAL>**
+
+Postman collection is automated to populate needed parameters when queries are run in correct order. If you did not already run following 2 queries after distribution (to verify distribution), run those now:
+
+::
+
+ Postman -> LCM -> 1.[SDC] Catalog Service
+
+::
+
+ Postman -> LCM -> 2. [SO] Catalog DB Service xNFs
+
+Now actual instantiation can be triggered with:
+
+::
+
+ Postman -> LCM -> 3. [SO] Self-Serve Service Assign & Activate
+
+**<AUTOMATED>**
+
+Required inputs for instantiation process are taken from the *config.py* file.
+::
+
+ python instantiation.py
+
+
+Finally, to follow the progress of instantiation request with SO's GET request:
+
+**<MANUAL>**
+
+::
+
+ Postman -> LCM -> 4. [SO] Infra Active Requests
+
+The successful reply payload in that query should start like this:
+
+::
+
+ {
+ "requestStatus": "COMPLETE",
+ "statusMessage": "Macro-Service-createInstance request was executed correctly.",
+ "flowStatus": "Successfully completed all Building Blocks",
+ "progress": 100,
+ "startTime": 1590996766000,
+ "endTime": 1590996945000,
+ "source": "Postman",
+ "vnfId": "93b3350d-ed6f-413b-9cc5-a158c1676eb0",
+ "tenantId": "aaaa",
+ "requestBody": "**REDACTED FOR READABILITY**",
+ "lastModifiedBy": "CamundaBPMN",
+ "modifyTime": "2020-06-01T07:35:45.000+0000",
+ "cloudRegion": "k8sregionfour",
+ "serviceInstanceId": "8ead0480-cf44-428e-a4c2-0e6ed10f7a72",
+ "serviceInstanceName": "vfw-cnf-16",
+ "requestScope": "service",
+ "requestAction": "createInstance",
+ "requestorId": "11c2ddb7-4659-4bf0-a685-a08dcbb5a099",
+ "requestUrl": "http://infra:30277/onap/so/infra/serviceInstantiation/v7/serviceInstances",
+ "tenantName": "k8stenant",
+ "cloudApiRequests": [],
+ "requestURI": "6a369c8e-d492-4ab5-a107-46804eeb7873",
+ "_links": {
+ "self": {
+ "href": "http://infra:30277/infraActiveRequests/6a369c8e-d492-4ab5-a107-46804eeb7873"
+ },
+ "infraActiveRequests": {
+ "href": "http://infra:30277/infraActiveRequests/6a369c8e-d492-4ab5-a107-46804eeb7873"
+ }
+ }
+ }
+
+
+Progress can be also followed also with `SO Monitoring`_ dashboard.
+
+Service Instance Termination
+++++++++++++++++++++++++++++
+
+Service instance can be terminated with the following postman call:
+
+**<MANUAL>**
+::
+
+ Postman -> LCM -> 5. [SO] Service Delete
+
+**<AUTOMATED>**
+::
+
+ python delete.py
+
+.. note:: Automated service deletion mechanism takes information about the instantiated service instance from the *config.py* file and *SERVICE_INSTANCE_NAME* variable. If you modify this value before the deletion of existing service instance then you will lose the opportunity to easily delete the already created service instance.
+
+Second Service Instance Instantiation
++++++++++++++++++++++++++++++++++++++
+
+To finally verify that all the work done within this demo, it should be possible to instantiate second vFW instance successfully.
+
+Trigger new instance creation. You can use previous call or a separate one that will utilize profile templating mechanism implemented in CBA:
+
+**<MANUAL>**
+::
+
+ Postman -> LCM -> 6. [SO] Self-Serve Service Assign & Activate - Second
+
+**<AUTOMATED>**
+
+Before second instance of service is created you need to modify *config.py* file changing the *SERVICENAME* and *SERVICE_INSTANCE_NAME* to different values and by changing the value of *k8s-rb-profile-name* parameter for *vpg* module from value *default* or *vfw-cnf-cds-base-profile* to *vfw-cnf-cds-vpkg-profile* which will result in instantiation of additional ssh service for *vpg* module. Second onboarding in automated case is required due to the existing limitations of *python-sdk* libraries that create vf-module instance name based on the vf-module model name. For manual Postman option vf-module instance name is set on service instance name basis which makes it unique.
+::
+
+ python onboarding.py
+ python instantiation.py
+
+3-4 Results and Logs
+....................
+
+Now multiple instances of Kubernetes variant of vFW are running in target VIM (KUD deployment).
+
+.. figure:: files/vFW_CNF_CDS/vFW_Instance_In_Kubernetes.png
+ :align: center
+
+ vFW Instance In Kubernetes
+
+**<MANUAL>**
+
+To review situation after instantiation from different ONAP components, most of the info can be found using Postman queries provided. For each query, example response payload(s) is/are saved and can be found from top right corner of the Postman window.
+
+::
+
+ Postman -> Instantiation verification**
+
+Execute example Postman queries and check example section to see the valid results.
+
+========================== =================
+Verify Target Postman query
+-------------------------- -----------------
+Service Instances in AAI **Postman -> Instantiation verification -> [AAI] List Service Instances**
+Service Instances in MDSAL **Postman -> Instantiation verification -> [SDNC] GR-API MD-SAL Services**
+K8S Instances in KUD **Postman -> Instantiation verification -> [K8splugin] List Instances**
+========================== =================
+
+.. note:: "[AAI] List vServers <Empty>" Request won't return any vserver info from AAI, as currently such information are not provided during instantiation process.
+
+
+Query also directly from VIM:
+
+::
+
+ #
+ ubuntu@kud-host:~$ kubectl get pods,svc,networks,cm,network-attachment-definition,deployments
+ NAME READY STATUS RESTARTS AGE
+ pod/vfw-17f6f7d3-8424-4550-a188-cd777f0ab48f-7cfb9949d9-8b5vg 1/1 Running 0 22s
+ pod/vfw-19571429-4af4-49b3-af65-2eb1f97bba43-75cd7c6f76-4gqtz 1/1 Running 0 11m
+ pod/vpg-5ea0d3b0-9a0c-4e88-a2e2-ceb84810259e-f4485d485-pln8m 1/1 Running 0 11m
+ pod/vpg-8581bc79-8eef-487e-8ed1-a18c0d638b26-6f8cff54d-dvw4j 1/1 Running 0 32s
+ pod/vsn-8e7ac4fc-2c31-4cf8-90c8-5074c5891c14-5879c56fd-q59l7 2/2 Running 0 11m
+ pod/vsn-fdc9b4ba-c0e9-4efc-8009-f9414ae7dd7b-5889b7455-96j9d 2/2 Running 0 30s
+
+ NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE
+ service/vpg-5ea0d3b0-9a0c-4e88-a2e2-ceb84810259e-management-api NodePort 10.244.43.245 <none> 2831:30831/TCP 11m
+ service/vpg-8581bc79-8eef-487e-8ed1-a18c0d638b26-management-api NodePort 10.244.1.45 <none> 2831:31831/TCP 33s
+ service/vsn-8e7ac4fc-2c31-4cf8-90c8-5074c5891c14-darkstat-ui NodePort 10.244.16.187 <none> 667:30667/TCP 11m
+ service/vsn-fdc9b4ba-c0e9-4efc-8009-f9414ae7dd7b-darkstat-ui NodePort 10.244.20.229 <none> 667:31667/TCP 30s
+
+ NAME AGE
+ network.k8s.plugin.opnfv.org/55118b80-8470-4c99-bfdf-d122cd412739-management-network 40s
+ network.k8s.plugin.opnfv.org/55118b80-8470-4c99-bfdf-d122cd412739-protected-network 40s
+ network.k8s.plugin.opnfv.org/55118b80-8470-4c99-bfdf-d122cd412739-unprotected-network 40s
+ network.k8s.plugin.opnfv.org/567cecc3-9692-449e-877a-ff0b560736be-management-network 11m
+ network.k8s.plugin.opnfv.org/567cecc3-9692-449e-877a-ff0b560736be-protected-network 11m
+ network.k8s.plugin.opnfv.org/567cecc3-9692-449e-877a-ff0b560736be-unprotected-network 11m
+
+ NAME DATA AGE
+ configmap/vfw-17f6f7d3-8424-4550-a188-cd777f0ab48f-configmap 6 22s
+ configmap/vfw-19571429-4af4-49b3-af65-2eb1f97bba43-configmap 6 11m
+ configmap/vpg-5ea0d3b0-9a0c-4e88-a2e2-ceb84810259e-configmap 6 11m
+ configmap/vpg-8581bc79-8eef-487e-8ed1-a18c0d638b26-configmap 6 33s
+ configmap/vsn-8e7ac4fc-2c31-4cf8-90c8-5074c5891c14-configmap 2 11m
+ configmap/vsn-fdc9b4ba-c0e9-4efc-8009-f9414ae7dd7b-configmap 2 30s
+
+ NAME AGE
+ networkattachmentdefinition.k8s.cni.cncf.io/55118b80-8470-4c99-bfdf-d122cd412739-ovn-nat 40s
+ networkattachmentdefinition.k8s.cni.cncf.io/567cecc3-9692-449e-877a-ff0b560736be-ovn-nat 11m
+
+ NAME READY UP-TO-DATE AVAILABLE AGE
+ deployment.extensions/vfw-17f6f7d3-8424-4550-a188-cd777f0ab48f 1/1 1 1 22s
+ deployment.extensions/vfw-19571429-4af4-49b3-af65-2eb1f97bba43 1/1 1 1 11m
+ deployment.extensions/vpg-5ea0d3b0-9a0c-4e88-a2e2-ceb84810259e 1/1 1 1 11m
+ deployment.extensions/vpg-8581bc79-8eef-487e-8ed1-a18c0d638b26 1/1 1 1 33s
+ deployment.extensions/vsn-8e7ac4fc-2c31-4cf8-90c8-5074c5891c14 1/1 1 1 11m
+ deployment.extensions/vsn-fdc9b4ba-c0e9-4efc-8009-f9414ae7dd7b 1/1 1 1 30s
+
+
+Component Logs From The Execution
++++++++++++++++++++++++++++++++++
+
+**<MANUAL>**
+
+All logs from the use case execution can be retrieved with following
+
+::
+
+ kubectl -n onap logs `kubectl -n onap get pods -o go-template --template '{{range .items}}{{.metadata.name}}{{"\n"}}{{end}}' | grep -m1 <COMPONENT_NAME>` -c <CONTAINER>
+
+where <COMPONENT_NAME> and <CONTAINER> should be replaced with following keywords respectively:
+
+- so-bpmn-infra, so-bpmn-infra
+- so-openstack-adapter, so-openstack-adapter
+- so-cnf-adapter, so-cnf-adapter
+- sdnc-0, sdnc
+
+ From karaf.log all requests (payloads) to CDS can be found by searching following string:
+
+ ``'Sending request below to url http://cds-blueprints-processor-http:8080/api/v1/execution-service/process'``
+
+- cds-blueprints-processor, cds-blueprints-processor
+- multicloud-k8s, multicloud-k8s
+- network-name-gen, network-name-gen,
+
+**Debug log**
+
+In case more detailed logging is needed, here's instructions how to setup DEBUG logging for few components.
+
+- SDNC
+
+ ::
+
+ kubectl -n onap exec -it onap-sdnc-0 -c sdnc /opt/opendaylight/bin/client log:set DEBUG
+
+
+- CDS Blueprint Processor
+
+ ::
+
+ # Edit configmap
+ kubectl -n onap edit configmap onap-cds-blueprints-processor-configmap
+
+ # Edit logback.xml content change root logger level from info to debug.
+ <root level="debug">
+ <appender-ref ref="STDOUT"/>
+ </root>
+
+ # Delete the Pods to make changes effective
+ kubectl -n onap delete pods -l app=cds-blueprints-processor
+
+3-5 Verification of the CNF Status
+..................................
+
+**<MANUAL>**
+
+The Guilin release introduced a new API for verification of the status of instantiated resources in the k8s cluster. The API gives a result similar to the *kubectl describe* operation for all the resources created for a particular *rb-definition*. The Status API can be used to verify the k8s resources after instantiation, but can also be leveraged for synchronization of the information with external components, like AAI. To use the Status API call
+
+::
+
+ curl -i http://${K8S_NODE_IP}:30280/api/multicloud-k8s/v1/v1/instance/{rb-instance-id}/status
+
+where {rb-instance-id} can be taken from the list of instances resolved by the following call, or from the AAI *heat-stack-id* property of the created *vf-module* associated with each Helm package from the onboarded VSP, which holds the *rb-instance-id* value.
+
+The same API can be accessed over cnf-adapter endpoint (ClusterIP):
+
+::
+
+ curl -i http://${K8S_NODE_IP}:30280/api/multicloud-k8s/v1/v1/instance/{rb-instance-id}/status
+
+Similar to the Status API is the Query API, available since Honolulu, that allows fetching specific resources that belong to the created instance. The Query API allows filtering resources by Name, Kind, ApiVersion, Namespace and Labels. The k8splugin endpoint is:
+
+::
+
+ curl -i http://${K8S_NODE_IP}:30280/api/multicloud-k8s/v1/v1/instance/{rb-instance-id}/query?ApiVersion=v1&Kind=Deployment&Name=vfw-1-vfw&Namespace=vfirewall
+
+and cnf-adapter endpoint is:
+
+::
+
+ curl -i http://${K8S_NODE_IP}:8090/api/cnf-adapter/v1/instance/{rb-instance-id}/query?ApiVersion=v1&Kind=Deployment&Name=vfw-1-vfw&Namespace=vfirewall
+
+
+Exemplary output of the Status API is shown below (full result of the test vFW CNF helm package in the attached file). It shows the list of GVK resources created for the requested *rb-instance* (Helm and vf-module at the same time) with the associated describe result for all of them.
+
+ :download:`Full Status API Result <files/vFW_CNF_CDS/status-response.json>`
+
+::
+
+ {
+ "request": {
+ "rb-name": "vfw",
+ "rb-version": "plugin_test",
+ "profile-name": "test_profile",
+ "release-name": "",
+ "cloud-region": "kud",
+ "labels": {
+ "testCaseName": "plugin_fw.sh"
+ },
+ "override-values": {
+ "global.onapPrivateNetworkName": "onap-private-net-test"
+ }
+ },
+ "ready": true,
+ "resourceCount": 1,
+ "resourcesStatus": [
+ {
+ "name": "sink-configmap",
+ "GVK": {
+ "Group": "",
+ "Version": "v1",
+ "Kind": "ConfigMap"
+ },
+ "status": {
+ "apiVersion": "v1",
+ "data": {
+ "protected_net_gw": "192.168.20.100",
+ "protected_private_net_cidr": "192.168.10.0/24"
+ },
+ "kind": "ConfigMap",
+ "metadata": {
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "labels": {
+ "k8splugin.io/rb-instance-id": "practical_nobel"
+ },
+ "name": "sink-configmap",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720771",
+ "selfLink": "/api/v1/namespaces/plugin-tests-namespace/configmaps/sink-configmap",
+ "uid": "46c8bec4-980c-455b-9eb0-fb84ac8cc450"
+ }
+ }
+ }
+ ]
+ }
+
+**<AUTOMATED>**
+
+Since Honolulu release vFW CNF Use Case is equipped with dedicated mechanisms for verification of the CNF status automatically, during the instantiation. The process utilizes the k8sPlugin Status and Healthcheck APIs that both are natively exposed in the CDS and can be executed from the script execution functionality in the CDS.
+
+.. figure:: files/vFW_CNF_CDS/healthcheck.png
+ :scale: 60 %
+ :align: center
+
+ vFW CNF Healthcheck flow concept
+
+There is a dedicated workflow exposed in CBA, where the Status API result verification is run with the *status-verification-script* step and the execution of the healthcheck job is run with *health-check-process*. The first one verifies if all pods have the *Running* state. If yes, then verification of the health is started by execution of the dedicated Helm tests, which are jobs that verify connectivity in each component.
+
+::
+
+ "health-check": {
+ "steps": {
+ "config-setup": {
+ "description": "Gather necessary input for config init and status verification",
+ "target": "config-setup-process",
+ "activities": [
+ {
+ "call_operation": "ResourceResolutionComponent.process"
+ }
+ ],
+ "on_success": [
+ "config-apply"
+ ],
+ "on_failure": [
+ "handle_error"
+ ]
+ },
+ "status-verification-script": {
+ "description": "Simple status verification script",
+ "target": "simple-status-check",
+ "activities": [
+ {
+ "call_operation": "ComponentScriptExecutor.process"
+ }
+ ],
+ "on_success": [
+ "health-check-process"
+ ],
+ "on_failure": [
+ "handle_error"
+ ]
+ },
+ "health-check-process": {
+ "description": "Start health check script",
+ "target": "health-check-script",
+ "activities": [
+ {
+ "call_operation": "ComponentScriptExecutor.process"
+ }
+ ],
+ "on_success": [
+ "collect-results"
+ ],
+ "on_failure": [
+ "handle_error"
+ ]
+ },
+ "handle_error": {
+ "description": "Simple error verification script",
+ "target": "simple-error-check",
+ "activities": [
+ {
+ "call_operation": "ComponentScriptExecutor.process"
+ }
+ ],
+ "on_success": [
+ "collect-results"
+ ]
+ },
+ "collect-results": {
+ "description": "Final collection of results",
+ "target": "collect-results"
+ }
+ },
+
+
+Since Istanbul release, SO is equipped with dedicated workflow for verification of the CNF status. It works similarly to the workflow introduced in Honolulu, however basic CNF Status Verification step utilizes "Ready" flag of the StatusAPI response to check if k8s resources created from Helm package are up and running. Ready flag works properly in k8splugin 0.9.1 or higher. Both operations are performed by ControllerExecutionBB in SO and are realized by cnf-adapter component in SO. This workflow can be triggered by a dedicated endpoint documented here: `CNF Health Check`_. This workflow is not yet integrated into automation scripts.
+
+3-6 Synchronization of created k8s resources into AAI
+.....................................................
+
+Since the Istanbul release the `AAI v24 schema`_ version is used to store basic information about k8s resources deployed from each helm package. The AAI change is described in `K8s Resource in AAI`_. The information stored in AAI lets one identify all the deployed k8s resources, but the details about them have to be fetched from the k8s cluster on demand. Such a design is motivated by the high frequency of k8s resource status changes and the plethora of resource types available in k8s - including the CRDs that extend the predefined resource types available in k8s. In consequence, it made no sense to store in AAI the full runtime picture of the k8s resources, as keeping them synchronized would be impossible.
+
+The K8s-Resource object is stored in the cloud-infrastructure set of AAI APIs and it belongs to the tenant, and is related to both generic-vnf and vf-module. Each k8s-resource object created in AAI has a selflink for the cnf-adapter Query API, described in `3-5 Verification of the CNF Status`_, that allows fetching actual information about the resource in k8s. An exemplary set of k8s-resources with related generic-vnf and vf-modules for the vFW CNF use case is in the files attached below.
+
+ :download:`List of K8s Resources <files/vFW_CNF_CDS/k8s-resources-response.json>`
+
+ :download:`Generic VNF with modules <files/vFW_CNF_CDS/vfw-generic-vnf-aai.json>`
+
+ :download:`vPKG VF-Module with related k8s-resource relations <files/vFW_CNF_CDS/vpkg-vf-module-aai.json>`
+
+AAI synchronization is run just after creation of the vf-module by SO. Since the Jakarta release, cnf-adapter synchronizes into AAI information about any change on k8s resources performed after their initial creation. For instance, if a pod is deleted in the k8s cluster, a new one is automatically created. In consequence, K8sPlugin sends a notification about the change to cnf-adapter, and the latter performs an update of the information in AAI by removing the old pod and creating the new one in AAI. The update in AAI, after the change in the k8s cluster, should be applied with no more than a 30s delay.
+
+In order to force an immediate update of AAI information about a concrete Helm package, the following API can also be used with a properly modified body (all except the callbackUrl).
+
+::
+
+ curl -i -X POST http://${K8S_NODE_IP}:8090/api/cnf-adapter/v1/aai-update
+
+
+::
+
+ {
+ "instanceId": "keen_darwin",
+ "cloudRegion": "kud",
+ "cloudOwner": "K8sCloudOwner",
+ "tenantId": "dca807fa-0d3e-4fb1-85eb-b9e1c03108a3",
+ "callbackUrl": "http://example",
+ "genericVnfId": "8b3af2e0-fd66-460d-b928-22f5dac517a6",
+ "vfModuleId": "a0161551-9d13-47c2-ba4f-896d4ee401d4"
+ }
+
+
+PART 4 - Future improvements needed
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Future development areas for this use case:
+
+- Include Closed Loop part of the vFW CNF demo.
+- vFW service with Openstack VNF (KUD) and Kubernetes CNF
+
+Future development areas for CNF support:
+
+- Extraction of override values in time of the package onboarding
+- Upgrade of the vFW CNF similar to Helm Upgrade through the SDC and SO
+- Use multicloud/k8S API v2 (EMCO)
+
+Some of the features from the list above are covered by the Jakarta roadmap described in `REQ-890`_.
+
+
+.. _ONAP Deployment Guide: https://docs.onap.org/projects/onap-oom/en/latest/sections/guides/deployment_guides/oom_deployment.html
+.. _CDS Documentation: https://docs.onap.org/projects/onap-ccsdk-cds/en/latest/
+.. _vLB use-case: https://wiki.onap.org/pages/viewpage.action?pageId=71838898
+.. _vFW_CNF_CDS Model: https://git.onap.org/demo/tree/heat/vFW_CNF_CDS/templates?h=guilin
+.. _vFW_CNF_CDS Automation: https://git.onap.org/demo/tree/heat/vFW_CNF_CDS/automation?h=guilin
+.. _vFW CDS Dublin: https://wiki.onap.org/display/DW/vFW+CDS+Dublin
+.. _vFW CBA Model: https://git.onap.org/ccsdk/cds/tree/components/model-catalog/blueprint-model/service-blueprint/vFW?h=elalto
+.. _vFW_Helm Model: https://git.onap.org/multicloud/k8s/tree/kud/demo/firewall?h=elalto
+.. _vFW_NextGen: https://git.onap.org/demo/tree/heat/vFW_NextGen?h=elalto
+.. _vFW EDGEX K8S: https://docs.onap.org/projects/onap-integration/en/latest/docs_vfw_edgex_k8s.html
+.. _vFW EDGEX K8S In ONAP Wiki: https://wiki.onap.org/display/DW/Deploying+vFw+and+EdgeXFoundry+Services+on+Kubernets+Cluster+with+ONAP
+.. _KUD github: https://github.com/onap/multicloud-k8s/tree/honolulu/kud/hosting_providers/baremetal
+.. _KUD in Wiki: https://wiki.onap.org/display/DW/Kubernetes+Baremetal+deployment+setup+instructions
+.. _Multicloud k8s gerrit: https://gerrit.onap.org/r/q/status:open+project:+multicloud/k8s
+.. _KUD subproject in github: https://github.com/onap/multicloud-k8s/tree/honolulu/kud
+.. _KUD Interface Permission: https://jira.onap.org/browse/MULTICLOUD-1310
+.. _Frankfurt CBA Definition: https://git.onap.org/demo/tree/heat/vFW_CNF_CDS/templates/cba/Definitions/vFW_CNF_CDS.json?h=frankfurt
+.. _Frankfurt CBA Script: https://git.onap.org/demo/tree/heat/vFW_CNF_CDS/templates/cba/Scripts/kotlin/KotlinK8sProfileUpload.kt?h=frankfurt
+.. _SO-3403: https://jira.onap.org/browse/SO-3403
+.. _SO-3404: https://jira.onap.org/browse/SO-3404
+.. _REQ-182: https://jira.onap.org/browse/REQ-182
+.. _REQ-341: https://jira.onap.org/browse/REQ-341
+.. _REQ-458: https://jira.onap.org/browse/REQ-458
+.. _REQ-627: https://jira.onap.org/browse/REQ-627
+.. _REQ-890: https://jira.onap.org/browse/REQ-890
+.. _Python SDK: https://docs.onap.org/projects/onap-integration/en/latest/integration-tooling.html#python-onap-sdk
+.. _KUD Jenkins ci/cd verification: https://jenkins.onap.org/job/multicloud-k8s-master-kud-deployment-verify-shell/
+.. _K8s cloud site config: https://docs.onap.org/en/latest/guides/onap-operator/cloud_site/k8s/index.html
+.. _SO Monitoring: https://docs.onap.org/projects/onap-so/en/latest/developer_info/Working_with_so_monitoring.html
+.. _Data Dictionary: https://git.onap.org/demo/tree/heat/vFW_CNF_CDS/templates/cba-dd.json?h=guilin
+.. _Helm Healer: https://git.onap.org/oom/offline-installer/tree/tools/helm-healer.sh?h=frankfurt
+.. _infra_workload: https://docs.onap.org/projects/onap-multicloud-framework/en/latest/specs/multicloud_infra_workload.html?highlight=multicloud
+.. _K8s Client Compatibility: https://github.com/kubernetes/client-go
+.. _CNF Health Check: https://docs.onap.org/projects/onap-so/en/latest/api/apis/serviceInstances-api.html#healthcheck
+.. _K8s Resource in AAI: https://jira.onap.org/browse/ONAPMODEL-37
+.. _AAI v24 schema: https://nexus.onap.org/service/local/repositories/releases/archive/org/onap/aai/schema-service/aai-schema/1.9.2/aai-schema-1.9.2.jar/!/onap/aai_swagger_html/aai_swagger_v24.html
diff --git a/docs/docs_vfw.rst b/docs/docs_vfw.rst
index b9ed9adb7..1fdb2aaa7 100644
--- a/docs/docs_vfw.rst
+++ b/docs/docs_vfw.rst
@@ -1,5 +1,7 @@
.. _docs_vfw:
+:orphan:
+
vFirewall Use Case
------------------
@@ -28,7 +30,7 @@ These VFs run in three separate VMs. The packet generator sends packets to the
packet sink through the firewall.
The firewall reports the volume of traffic passing though to the ONAP DCAE
collector. To check the traffic volume that lands at the sink VM, you can access
-the link http://sink_ip_address:667 through your browser and enable automatic page
+the link <http://SINK_IP_ADDRESS:667> through your browser and enable automatic page
refresh by clicking the "Off" button. You can see the traffic volume in the charts.
The packet generator includes a script that periodically generates different
@@ -60,7 +62,7 @@ operation policies that are currently enabled for the vFirewall use case:
operational policy to request APPC to adjust the traffic volume to 500 packets
per 10 seconds.
- APPC sends a request to the packet generator to adjust the traffic volume.
-- Changes to the traffic volume can be observed through the link http://sink_ip_address:667.
+- Changes to the traffic volume can be observed through the link <http://SINK_IP_ADDRESS:667>.
Adjust packet generator
@@ -109,9 +111,8 @@ At the end of the test , robot sets the streams back to Medium so that it is
setup for the next test.
For documentation about running the use case manually for previous releases,
-please look at the videos and the material available at this `wiki page`__.
-
-__ https://wiki.onap.org/display/DW/Running+the+ONAP+Demos
+please look at the videos and the material available in
+`Running the ONAP Demos wiki page <https://wiki.onap.org/display/DW/Running+the+ONAP+Demos>`_
Although videos are still valid, users are encouraged to use the Heat templates
linked at the top of this page rather than the old Heat templates in that wiki page.
@@ -130,7 +131,7 @@ expire. Monitoring the DMaaP topic for DCAE_CL_OUTPUT can be used to confirm
that no TCA events are coming in from the VNF through VES/TCA.
::
- http://<k8s-host>:30227/events/unauthenticated.DCAE_CL_OUTPUT/g1/c3?timeout=5000
+ http://K8S_HOST:30227/events/unauthenticated.DCAE_CL_OUTPUT/g1/c3?timeout=5000
+-------------+------------+
| JIRA ID | Status |
diff --git a/docs/docs_vfwHPA.rst b/docs/docs_vfwHPA.rst
index 2dd229b31..147d80d2a 100644
--- a/docs/docs_vfwHPA.rst
+++ b/docs/docs_vfwHPA.rst
@@ -4,11 +4,13 @@
.. _docs_vfw_hpa:
+:orphan:
+
vFW/vDNS with HPA Tutorial: Setting Up and Configuration
--------------------------------------------------------
Description
-~~~~~~~~~~
+~~~~~~~~~~~
This use case makes modifications to the regular vFW use case in ONAP by giving the VMs certain hardware features (such as SRIOV NIC, CPU pinning, pci passthrough.. etc.) in order to enhance their performance. Multiple cloud regions with flavors that have HPA features are registered with ONAP. We then create policies that specify the HPA requirements of each VM in the use case. When a service instance is created with OOF specified as the homing solution, OOF responds with the homing solution (cloud region) and flavor directives that meet the requirements specified in the policy.
This tutorial covers enhancements 1 to 5 in Background of https://wiki.onap.org/pages/viewpage.action?pageId=41421112. It focuses on Test Plan 1.
@@ -18,7 +20,7 @@ This tutorial covers enhancements 1 to 5 in Background of https://wiki.onap.org/
`HPA Enhancements Page <https://wiki.onap.org/pages/viewpage.action?pageId=34376310>`_
-`vFW with HPA Test Status Page <https://wiki.onap.org/display/DW/vFW+with+HPA+Integration+Test+-+Test+Status>`_
+`vFW with HPA Test Status Page <https://wiki.onap.org/pages/viewpage.action?pageId=45301960>`_
`Hardware Platform Enablement in ONAP <https://wiki.onap.org/display/DW/Hardware+Platform+Enablement+In+ONAP>`_
@@ -26,16 +28,13 @@ This tutorial covers enhancements 1 to 5 in Background of https://wiki.onap.org/
Setting Up and Installation
-~~~~~~~~~~~~~~~~~~~~~~~~~~
-Some fixes for HPA support were made subsequent to the release of the Casablanca images. Several updated docker images need to be used to utilize the fixes. The details of the docker images that need to be used and the issues that are fixed are described at this link https://wiki.onap.org/display/DW/Docker+image+updates+for+HPA+vFW+testing
-
-Instructions for updating the manifest of ONAP docker images can be found here: https://onap.readthedocs.io/en/casablanca/submodules/integration.git/docs/#deploying-an-updated-docker-manifest
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Install OOM ONAP using the deploy script in the integration repo. Instructions for this can be found in this link https://wiki.onap.org/display/DW/OOM+Component. When the installation is complete (all the pods are either in running or completed state) Do the following;
+Install OOM ONAP using the deploy script in the integration repo. Instructions for this can be found in this link https://docs.onap.org/projects/onap-oom/en/latest/sections/guides/deployment_guides/oom_deployment.html. When the installation is complete (all the pods are either in running or completed state) Do the following;
1. Check that all the required components were deployed;
-
+
``oom-rancher# helm list``
2. Check the state of the pods;
@@ -44,14 +43,14 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
3. Run robot health check
- ``oom-rancher# cd oom/kubernetes/robot``
+ ``oom-rancher# cd oom/kubernetes/robot``
``oom-rancher# ./ete-k8s.sh onap health``
Ensure all the required components pass the health tests
4. Modify the SO bpmn configmap to change the SO vnf adapter endpoint to v2
-
- ``oom-rancher# kubectl -n onap edit configmap dev-so-so-bpmn-infra-app-configmap``
+
+ ``oom-rancher# kubectl -n onap edit configmap dev-so-so-bpmn-infra-app-configmap``
``- vnf:``
@@ -74,7 +73,7 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``oom-rancher# kubectl delete <pod-name> -n onap``
-5. Create HPA flavors in cloud regions to be registered with ONAP. All HPA flavor names must start with onap. During our tests, 3 cloud regions were registered and we created flavors in each cloud. The flavors match the flavors described in the test plan `here <https://wiki.onap.org/pages/viewpage.action?pageId=41421112>`_.
+5. Create HPA flavors in cloud regions to be registered with ONAP. All HPA flavor names must start with onap. During our tests, 3 cloud regions were registered and we created flavors in each cloud. The flavors match the flavors described in the test plan `here <https://wiki.onap.org/pages/viewpage.action?pageId=41421112>`_.
- **Cloud Region One**
@@ -82,7 +81,7 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``#nova flavor-create onap.hpa.flavor11 111 8 20 2``
``#nova flavor-key onap.hpa.flavor11 set hw:mem_page_size=2048``
-
+
**Flavor12**
``#nova flavor-create onap.hpa.flavor12 112 12 20 2``
@@ -91,9 +90,9 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``#openstack aggregate create --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:3 aggr121``
``#openstack flavor set onap.hpa.flavor12 --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:3``
-
+
**Flavor13**
- ``#nova flavor-create onap.hpa.flavor13 113 12 20 2``
+ ``#nova flavor-create onap.hpa.flavor13 113 12 20 2``
``#nova flavor-key onap.hpa.flavor13 set hw:mem_page_size=2048``
@@ -111,7 +110,7 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``#nova flavor-key onap.hpa.flavor21 set hw:cpu_policy=dedicated``
``#nova flavor-key onap.hpa.flavor21 set hw:cpu_thread_policy=isolate``
-
+
**Flavor22**
``#nova flavor-create onap.hpa.flavor22 222 12 20 2``
@@ -120,9 +119,9 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``#openstack aggregate create --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:2 aggr221``
``#openstack flavor set onap.hpa.flavor22 --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:2``
-
+
**Flavor23**
- ``#nova flavor-create onap.hpa.flavor23 223 12 20 2``
+ ``#nova flavor-create onap.hpa.flavor23 223 12 20 2``
``#nova flavor-key onap.hpa.flavor23 set hw:mem_page_size=2048``
@@ -140,20 +139,20 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``#nova flavor-key onap.hpa.flavor31 set hw:cpu_policy=dedicated``
``#nova flavor-key onap.hpa.flavor31 set hw:cpu_thread_policy=isolate``
-
+
**Flavor32**
``#nova flavor-create onap.hpa.flavor32 332 8192 20 2``
``#nova flavor-key onap.hpa.flavor32 set hw:mem_page_size=1048576``
-
+
**Flavor33**
- ``#nova flavor-create onap.hpa.flavor33 333 12 20 2``
+ ``#nova flavor-create onap.hpa.flavor33 333 12 20 2``
``#nova flavor-key onap.hpa.flavor33 set hw:mem_page_size=2048``
``#openstack aggregate create --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:1 aggr331``
- ``#openstack flavor set onap.hpa.flavor33 --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:1``
+ ``#openstack flavor set onap.hpa.flavor33 --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:1``
**Note: Use case can be run manually or using automation script (recommended)**
@@ -219,7 +218,7 @@ If an update is needed, the update can be done via rest using curl or postman
}'
-9. Register new cloud regions. This can be done using instructions (Step 1 to Step 3) on this `page <https://onap.readthedocs.io/en/latest/submodules/multicloud/framework.git/docs/multicloud-plugin-windriver/UserGuide-MultiCloud-WindRiver-TitaniumCloud.html#tutorial-onboard-instance-of-wind-river-titanium-cloud>`_. The already existing CloudOwner and cloud complex can be used. If step 3 does not work using the k8s ip and external port. It can be done using the internal ip address and port. Exec into any pod and run the command from the pod.
+9. Register new cloud regions. This can be done using instructions (Step 1 to Step 3) on this `page <https://docs.onap.org/projects/onap-multicloud-framework/en/latest/multicloud-plugin-windriver/UserGuide-MultiCloud-WindRiver-TitaniumCloud.html?highlight=multicloud>`_. The already existing CloudOwner and cloud complex can be used. If step 3 does not work using the k8s ip and external port. It can be done using the internal ip address and port. Exec into any pod and run the command from the pod.
- Get msb-iag internal ip address and port
@@ -229,7 +228,7 @@ If an update is needed, the update can be done via rest using curl or postman
``oom-rancher# kubectl exec dev-oof-oof-6c848594c5-5khps -it -- bash``
-10. Put required subscription list into tenant for all the newly added cloud regions. An easy way to do this is to do a get on the default cloud region, copy the tenant information with the subscription. Then paste it in your put command and modify the region id, tenant-id, tenant-name and resource-version.
+10. Put required subscription list into tenant for all the newly added cloud regions. An easy way to do this is to do a get on the default cloud region, copy the tenant information with the subscription. Then paste it in your put command and modify the region id, tenant-id, tenant-name and resource-version.
**GET COMMAND**
@@ -374,122 +373,122 @@ If an update is needed, the update can be done via rest using curl or postman
}
}'
-
+
11. Onboard the vFW HPA template. The templates can be gotten from the `demo <https://github.com/onap/demo>`_ repo. The heat and env files used are located in demo/heat/vFW_HPA/vFW/. Create a zip file using the files. For onboarding instructions see steps 4 to 9 of `vFWCL instantiation, testing and debugging <https://wiki.onap.org/display/DW/vFWCL+instantiation%2C+testing%2C+and+debuging>`_. Note that in step 5, only one VSP is created. For the VSP the option to submit for testing in step 5cii was not shown. So you can check in and certify the VSP and proceed to step 6.
12. Get the parameters (model info, model invariant id...etc) required to create a service instance via rest. This can be done by creating a service instance via VID as in step 10 of `vFWCL instantiation, testing and debugging <https://wiki.onap.org/display/DW/vFWCL+instantiation%2C+testing%2C+and+debuging>`_. After creating the service instance, exec into the SO bpmn pod and look into the /app/logs/bpmn/debug.log file. Search for the service instance and look for its request details. Then populate the parameters required to create a service instance via rest in step 13 below.
13. Create a service instance rest request but do not create service instance yet. Specify OOF as the homing solution and multicloud as the orchestrator. Be sure to use a service instance name that does not exist and populate the parameters with values gotten from step 12.
-::
+::
curl -k -X POST \
http://{{k8s}}:30277/onap/so/infra/serviceInstances/v6 \
-H 'authorization: Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA== \
-H 'content-type: application/json' \
-
- -d '{
-
- "requestDetails":{
-
- "modelInfo":{
-
+
+ -d '{
+
+ "requestDetails":{
+
+ "modelInfo":{
+
"modelInvariantId":"b7564cb9-4074-4c9b-95d6-39d4191e80d9",
-
+
"modelType":"service",
-
+
"modelName":"vfw_HPA",
-
+
"modelVersion":"1.0",
-
+
"modelVersionId":"35d184e8-1cba-46e3-9311-a17ace766eb0",
-
+
"modelUuid":"35d184e8-1cba-46e3-9311-a17ace766eb0",
-
+
"modelInvariantUuid":"b7564cb9-4074-4c9b-95d6-39d4191e80d9"
-
+
},
-
- "requestInfo":{
-
+
+ "requestInfo":{
+
"source":"VID",
-
+
"instanceName":"oof-12-homing",
-
+
"suppressRollback":false,
-
+
"requestorId":"demo"
-
+
},
-
- "subscriberInfo":{
-
+
+ "subscriberInfo":{
+
"globalSubscriberId":"Demonstration"
-
+
},
-
- "requestParameters":{
-
+
+ "requestParameters":{
+
"subscriptionServiceType":"vFW",
-
+
"aLaCarte":true,
-
+
"testApi":"VNF_API",
-
- "userParams":[
-
- {
-
+
+ "userParams":[
+
+ {
+
"name":"Customer_Location",
-
- "value":{
-
+
+ "value":{
+
"customerLatitude":"32.897480",
-
+
"customerLongitude":"97.040443",
-
+
"customerName":"some_company"
-
+
}
-
+
},
-
- {
-
+
+ {
+
"name":"Homing_Solution",
-
+
"value":"oof"
-
+
},
-
- {
-
+
+ {
+
"name":"orchestrator",
-
+
"value":"multicloud"
-
+
}
-
+
]
-
+
},
-
- "project":{
-
+
+ "project":{
+
"projectName":"Project-Demonstration"
-
+
},
-
- "owningEntity":{
-
+
+ "owningEntity":{
+
"owningEntityId":"e1564fc9-b9d0-44f9-b5af-953b4aad2f40",
-
+
"owningEntityName":"OE-Demonstration"
-
+
}
-
+
}
-
+
}'
14. Get the resourceModuleName to be used for creating policies. This can be gotten from the CSAR file of the service model created. However, an easy way to get the resourceModuleName is to send the service instance create request in step 13 above. This will fail as there are no policies but you can then go into the bpmn debug.log file and get its value by searching for resourcemodulename.
@@ -513,14 +512,14 @@ To Update a policy, use the following curl command. Modify the policy as require
"onapName": "SampleDemo",
"policyScope": "OSDF_CASABLANCA"
}' 'https://pdp:8081/pdp/api/updatePolicy'
-
+
To delete a policy, use two commands below to delete from PDP and PAP
**DELETE POLICY INSIDE PDP**
::
-
+
curl -k -v -H 'Content-Type: application/json' \
-H 'Accept: application/json' \
-H 'ClientAuth: cHl0aG9uOnRlc3Q=' \
@@ -533,14 +532,14 @@ To delete a policy, use two commands below to delete from PDP and PAP
**DELETE POLICY INSIDE PAP**
::
-
+
curl -k -v -H 'Content-Type: application/json' \
-H 'Accept: application/json' \
-H 'ClientAuth: cHl0aG9uOnRlc3Q=' \
-H 'Authorization: Basic dGVzdHBkcDphbHBoYTEyMw==' \
-H 'Environment: TEST' \
-X DELETE \
- -d '{"policyName": "OSDF_CASABLANCA.Config_MS_vnfPolicy_vFWHPA.1.xml","policyComponent":"PAP","policyType":"Optimization","deleteCondition":"ALL"}' https://pdp:8081/pdp/api/deletePolicy
+ -d '{"policyName": "OSDF_CASABLANCA.Config_MS_vnfPolicy_vFWHPA.1.xml","policyComponent":"PAP","policyType":"Optimization","deleteCondition":"ALL"}' https://pdp:8081/pdp/api/deletePolicy
Below are the 3 HPA policies for test cases in the `test plan <https://wiki.onap.org/pages/viewpage.action?pageId=41421112>`_
@@ -559,7 +558,7 @@ Create Policy
}' 'https://pdp:8081/pdp/api/createPolicy'
-Push Policy
+Push Policy
::
@@ -587,7 +586,7 @@ Create Policy
}' 'https://pdp:8081/pdp/api/createPolicy'
-Push Policy
+Push Policy
::
@@ -611,8 +610,8 @@ Create Policy
"onapName": "SampleDemo",
"policyScope": "OSDF_CASABLANCA"
}' 'https://pdp:8081/pdp/api/createPolicy'
-
-Push Policy
+
+Push Policy
::
@@ -621,7 +620,7 @@ Push Policy
"policyName": "OSDF_CASABLANCA.hpa_policy_vFW_3",
"policyType": "MicroService"
}' 'https://pdp:8081/pdp/api/pushPolicy'
-
+
17. Create Service Instance using step 13 above
18. Check bpmn logs to ensure that OOF sent homing response and flavor directives.
@@ -652,9 +651,9 @@ Push Policy
"vnf-networks": [],
"vnf-vms": []
},
-
-
- "vnf-parameters": [
+
+
+ "vnf-parameters": [
{
"vnf-parameter-name": "vfw_image_name",
"vnf-parameter-value": "ubuntu-16.04"
@@ -731,7 +730,7 @@ Push Policy
"vnf-parameter-name": "vsn_private_ip_1",
"vnf-parameter-value": "10.0.100.3"
},
-
+
{
"vnf-parameter-name": "vfw_name_0",
"vnf-parameter-value": "vfw"
@@ -774,7 +773,7 @@ Push Policy
},
{
"vnf-parameter-name": "vf_module_id",
- "vnf-parameter-value": "VfwHpa..base_vfw..module-0"
+ "vnf-parameter-value": "VfwHpa..base_vfw..module-0"
},
{
"vnf-parameter-name": "sec_group",
@@ -797,32 +796,32 @@ Push Policy
"vnf-parameter-name": "oof_directives",
"vnf-parameter-value": "{\"directives\": [{\"id\": \"vfw\", \"type\": \"vnfc\", \"directives\": [{\"attributes\": [{\"attribute_name\": \"firewall_flavor_name\", \"attribute_value\": \"onap.hpa.flavor31\"}, {\"attribute_name\": \"flavorId\", \"attribute_value\": \"2297339f-6a89-4808-a78f-68216091f904\"}, {\"attribute_name\": \"flavorId\", \"attribute_value\": \"2297339f-6a89-4808-a78f-68216091f904\"}, {\"attribute_name\": \"flavorId\", \"attribute_value\": \"2297339f-6a89-4808-a78f-68216091f904\"}], \"type\": \"flavor_directives\"}]}, {\"id\": \"vgenerator\", \"type\": \"vnfc\", \"directives\": [{\"attributes\": [{\"attribute_name\": \"packetgen_flavor_name\", \"attribute_value\": \"onap.hpa.flavor32\"}, {\"attribute_name\": \"flavorId\", \"attribute_value\": \"2297339f-6a89-4808-a78f-68216091f904\"}], \"type\": \"flavor_directives\"}]}, {\"id\": \"vsink\", \"type\": \"vnfc\", \"directives\": [{\"attributes\": [{\"attribute_name\": \"sink_flavor_name\", \"attribute_value\": \"onap.large\"}, {\"attribute_name\": \"flavorId\", \"attribute_value\": \"2297339f-6a89-4808-a78f-68216091f904\"}], \"type\": \"flavor_directives\"}]}]}"
},
-
+
{
"vnf-parameter-name": "sdnc_directives",
"vnf-parameter-value": "{}"
- },
-
+ },
+
{
"vnf-parameter-name": "template_type",
"vnf-parameter-value": "heat"
}
-
-
+
+
],
"vnf-topology-identifier": {
"generic-vnf-name": "oof-12-vnf-3",
- "generic-vnf-type": "vfw_hpa 0",
+ "generic-vnf-type": "vfw_hpa 0",
"service-type": "6b17354c-0fae-4491-b62e-b41619929c54",
- "vnf-name": "vfwhpa_stack",
+ "vnf-name": "vfwhpa_stack",
"vnf-type": "VfwHpa..base_vfw..module-0"
-
+
}
}
}}
-
-Change parameters based on your environment.
+
+Change parameters based on your environment.
**Note**
@@ -833,5 +832,5 @@ Change parameters based on your environment.
"service-type": "6b17354c-0fae-4491-b62e-b41619929c54", <-- same as Service Instance ID
"vnf-name": "vfwhpa_stack", <-- name to be given to the vf module
"vnf-type": "VfwHpa..base_vfw..module-0" <-- can be found on the VID - VF Module dialog screen - Model Name
-
+
21. Create vf module (11g of `vFWCL instantiation, testing and debugging <https://wiki.onap.org/display/DW/vFWCL+instantiation%2C+testing%2C+and+debuging>`_). If everything worked properly, you should see the stack created in your VIM(WR titanium cloud openstack in this case).
diff --git a/docs/docs_vfw_edgex_k8s.rst b/docs/docs_vfw_edgex_k8s.rst
index a25b349a2..256d65948 100644
--- a/docs/docs_vfw_edgex_k8s.rst
+++ b/docs/docs_vfw_edgex_k8s.rst
@@ -4,6 +4,8 @@
.. _docs_vfw_edgex_multicloud_k8s:
+:orphan:
+
vFW/Edgex with Multicloud Kubernetes Plugin: Setting Up and Configuration
-------------------------------------------------------------------------
@@ -201,8 +203,8 @@ It is an example of the minimal HEAT template.
Onboard the CSAR
----------------
-For onboarding instructions please refer to steps 4-9 from the document
-`here <https://wiki.onap.org/display/DW/vFWCL+instantiation%2C+testing%2C+and+debuging>`__.
+For onboarding instructions please refer to steps 4-9 from
+`vFWCL instantiation, testing and debugging wiki page <https://wiki.onap.org/display/DW/vFWCL+instantiation%2C+testing%2C+and+debuging>`_.
Steps for installing KUD Cloud
------------------------------
@@ -210,9 +212,8 @@ Steps for installing KUD Cloud
Follow the link to install KUD Kubernetes Deployment. KUD contains all the
packages required for running vfw use case.
-Kubernetes Baremetal deployment instructions can be found here_
-
-.. _here: https://wiki.onap.org/display/DW/Kubernetes+Baremetal+deployment+setup+instructions/
+Kubernetes Baremetal deployment instructions can be found in
+`Kubernetes Baremetal deployment setup instructions wiki page <https://wiki.onap.org/display/DW/Kubernetes+Baremetal+deployment+setup+instructions>`_
REGISTER KUD CLOUD REGION with K8s-Plugin
-----------------------------------------
@@ -272,15 +273,15 @@ registration) pointing to a real OpenStack tenant (e.g. the OOF tenant in
the lab where we tested).
This will cause multicloud to add the tenant to the k8s cloud region and
-then, similarly to #10 in the documentation
-`here <https://onap.readthedocs.io/en/casablanca/submodules/integration.git/docs/docs_vfwHPA.html#docs-vfw-hpa>`__,
+then, similarly to #10 in the
+`vFW HPA casablanca official documentation <https://docs.onap.org/projects/onap-integration/en/latest/docs_vfwHPA.html>`_,
the service-subscription can be added to that object.
**NOTE:** use same name cloud-region and cloud-owner name
An example is shown below for K8s cloud but following the steps 1,2,3
from
-`here <https://onap.readthedocs.io/en/latest/submodules/multicloud/framework.git/docs/multicloud-plugin-windriver/UserGuide-MultiCloud-WindRiver-TitaniumCloud.html#tutorial-onboard-instance-of-wind-river-titanium-cloud>`__.
+`Multicloud Windriver Plugin documentation <https://docs.onap.org/projects/onap-multicloud-framework/en/latest/multicloud-plugin-windriver/UserGuide-MultiCloud-WindRiver-TitaniumCloud.html?highlight=multicloud>`_.
The sample input below is for k8s cloud type.
**Step 1**: Cloud Registration/ Create a cloud region to represent the instance
@@ -647,7 +648,7 @@ using the Kubernetes API.
curl -X GET http://MSB_NODE_IP:30280/api/multicloud-k8s/v1/v1/instance/ZKMTSaxv
-`*\ https://github.com/onap/oom/blob/master/kubernetes/multicloud/resources/config/provider-plugin.json <https://github.com/onap/oom/blob/master/kubernetes/multicloud/resources/config/provider-plugin.json>`__
+`*\ https://github.com/onap/oom/blob/master/kubernetes/multicloud/resources/config/provider-plugin.json <https://github.com/onap/oom/blob/master/kubernetes/multicloud/resources/config/provider-plugin.json>`_
Create User parameters
~~~~~~~~~~~~~~~~~~~~~~
diff --git a/docs/docs_vipsec.rst b/docs/docs_vipsec.rst
index 755d4c085..db9e894ad 100644
--- a/docs/docs_vipsec.rst
+++ b/docs/docs_vipsec.rst
@@ -4,6 +4,8 @@
.. _docs_vipsec_hpa:
+:orphan:
+
vIPsec with HPA Tutorial: Setting Up and Configuration
--------------------------------------------------------
@@ -22,13 +24,11 @@ The deploy steps look just alike the one for vFW with HPA use case. It is also u
Setting Up and Installation
~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Instructions for updating the manifest of ONAP docker images can be found here: https://onap.readthedocs.io/en/casablanca/submodules/integration.git/docs/#deploying-an-updated-docker-manifest
-
Install OOM ONAP using the deploy script in the integration repo. Instructions for this can be found in this link https://wiki.onap.org/display/DW/OOM+Component. When the installation is complete (all the pods are either in running or completed state) Do the following;
1. Check that all the required components were deployed;
-
+
``oom-rancher# helm list``
2. Check the state of the pods;
@@ -37,14 +37,14 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
3. Run robot health check
- ``oom-rancher# cd oom/kubernetes/robot``
+ ``oom-rancher# cd oom/kubernetes/robot``
``oom-rancher# ./ete-k8s.sh onap health``
Ensure all the required components pass the health tests
4. Modify the SO bpmn configmap to change the SO vnf adapter endpoint to v2
-
- ``oom-rancher# kubectl -n onap edit configmap dev-so-so-bpmn-infra-app-configmap``
+
+ ``oom-rancher# kubectl -n onap edit configmap dev-so-so-bpmn-infra-app-configmap``
``- vnf:``
@@ -73,7 +73,7 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``oom-rancher# ./demo-k8s.sh onap init``
-7. Create HPA flavors in cloud regions to be registered with ONAP. All HPA flavor names must start with onap. During our tests, 3 cloud regions were registered and we created flavors in each cloud. The flavors match the flavors described in the test plan `here <https://wiki.onap.org/pages/viewpage.action?pageId=41421112>`_.
+7. Create HPA flavors in cloud regions to be registered with ONAP. All HPA flavor names must start with onap. During our tests, 3 cloud regions were registered and we created flavors in each cloud. The flavors match the flavors described in the test plan `here <https://wiki.onap.org/pages/viewpage.action?pageId=41421112>`_.
- **Cloud Region One**
@@ -81,7 +81,7 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``#nova flavor-create onap.hpa.flavor11 111 8 20 2``
``#nova flavor-key onap.hpa.flavor11 set hw:mem_page_size=2048``
-
+
**Flavor12**
``#nova flavor-create onap.hpa.flavor12 112 12 20 2``
@@ -90,9 +90,9 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``#openstack aggregate create --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:3 aggr121``
``#openstack flavor set onap.hpa.flavor12 --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:3``
-
+
**Flavor13**
- ``#nova flavor-create onap.hpa.flavor13 113 12 20 2``
+ ``#nova flavor-create onap.hpa.flavor13 113 12 20 2``
``#nova flavor-key onap.hpa.flavor13 set hw:mem_page_size=2048``
@@ -110,7 +110,7 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``#nova flavor-key onap.hpa.flavor21 set hw:cpu_policy=dedicated``
``#nova flavor-key onap.hpa.flavor21 set hw:cpu_thread_policy=isolate``
-
+
**Flavor22**
``#nova flavor-create onap.hpa.flavor22 222 12 20 2``
@@ -119,9 +119,9 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``#openstack aggregate create --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:2 aggr221``
``#openstack flavor set onap.hpa.flavor22 --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:2``
-
+
**Flavor23**
- ``#nova flavor-create onap.hpa.flavor23 223 12 20 2``
+ ``#nova flavor-create onap.hpa.flavor23 223 12 20 2``
``#nova flavor-key onap.hpa.flavor23 set hw:mem_page_size=2048``
@@ -139,20 +139,20 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``#nova flavor-key onap.hpa.flavor31 set hw:cpu_policy=dedicated``
``#nova flavor-key onap.hpa.flavor31 set hw:cpu_thread_policy=isolate``
-
+
**Flavor32**
``#nova flavor-create onap.hpa.flavor32 332 8192 20 2``
``#nova flavor-key onap.hpa.flavor32 set hw:mem_page_size=1048576``
-
+
**Flavor33**
- ``#nova flavor-create onap.hpa.flavor33 333 12 20 2``
+ ``#nova flavor-create onap.hpa.flavor33 333 12 20 2``
``#nova flavor-key onap.hpa.flavor33 set hw:mem_page_size=2048``
``#openstack aggregate create --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:1 aggr331``
- ``#openstack flavor set onap.hpa.flavor33 --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:1``
+ ``#openstack flavor set onap.hpa.flavor33 --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:1``
8. Check that the cloud complex has the right values and update if it does not. Required values are;
@@ -205,7 +205,7 @@ If an update is needed, the update can be done via rest using curl or postman
}'
-9. Register new cloud regions. This can be done using instructions (Step 1 to Step 3) on this `page <https://onap.readthedocs.io/en/latest/submodules/multicloud/framework.git/docs/multicloud-plugin-windriver/UserGuide-MultiCloud-WindRiver-TitaniumCloud.html#tutorial-onboard-instance-of-wind-river-titanium-cloud>`_. The already existing CloudOwner and cloud complex can be used. If step 3 does not work using the k8s ip and external port. It can be done using the internal ip address and port. Exec into any pod and run the command from the pod.
+9. Register new cloud regions. This can be done using instructions (Step 1 to Step 3) on this `page <https://docs.onap.org/projects/onap-multicloud-framework/en/latest/multicloud-plugin-windriver/UserGuide-MultiCloud-WindRiver-TitaniumCloud.html?highlight=multicloud>`_. The already existing CloudOwner and cloud complex can be used. If step 3 does not work using the k8s ip and external port, it can be done using the internal ip address and port. Exec into any pod and run the command from the pod.
- Get msb-iag internal ip address and port
@@ -215,7 +215,7 @@ If an update is needed, the update can be done via rest using curl or postman
``oom-rancher# kubectl exec dev-oof-oof-6c848594c5-5khps -it -- bash``
-10. Put required subscription list into tenant for all the newly added cloud regions. An easy way to do this is to do a get on the default cloud region, copy the tenant information with the subscription. Then paste it in your put command and modify the region id, tenant-id, tenant-name and resource-version.
+10. Put required subscription list into tenant for all the newly added cloud regions. An easy way to do this is to do a get on the default cloud region, copy the tenant information with the subscription. Then paste it in your put command and modify the region id, tenant-id, tenant-name and resource-version.
**GET COMMAND**
@@ -360,14 +360,14 @@ If an update is needed, the update can be done via rest using curl or postman
}
}'
-
+
11. Onboard the vFW HPA template. The templates can be gotten from the `demo <https://github.com/onap/demo>`_ repo. The heat and env files used are located in demo/heat/vFW_HPA/vFW/. Create a zip file using the files. For onboarding instructions see steps 4 to 9 of `vFWCL instantiation, testing and debugging <https://wiki.onap.org/display/DW/vFWCL+instantiation%2C+testing%2C+and+debuging>`_. Note that in step 5, only one VSP is created. For the VSP the option to submit for testing in step 5cii was not shown. So you can check in and certify the VSP and proceed to step 6.
12. Get the parameters (model info, model invariant id...etc) required to create a service instance via rest. This can be done by creating a service instance via VID as in step 10 of `vFWCL instantiation, testing and debugging <https://wiki.onap.org/display/DW/vFWCL+instantiation%2C+testing%2C+and+debuging>`_. After creating the service instance, exec into the SO bpmn pod and look into the /app/logs/bpmn/debug.log file. Search for the service instance and look for its request details. Then populate the parameters required to create a service instance via rest in step 13 below.
13. Create a service instance rest request but do not create service instance yet. Specify OOF as the homing solution and multicloud as the orchestrator. Be sure to use a service instance name that does not exist and populate the parameters with values gotten from step 12.
-::
+::
curl -k -X POST \
http://{{k8s}}:30277/onap/so/infra/serviceInstances/v6 \
@@ -448,14 +448,14 @@ To Update a policy, use the following curl command. Modify the policy as require
"onapName": "SampleDemo",
"policyScope": "OSDF_DUBLIN"
}' 'https://pdp:8081/pdp/api/updatePolicy'
-
+
To delete a policy, use two commands below to delete from PDP and PAP
**DELETE POLICY INSIDE PDP**
::
-
+
curl -k -v -H 'Content-Type: application/json' \
-H 'Accept: application/json' \
-H 'ClientAuth: cHl0aG9uOnRlc3Q=' \
@@ -468,7 +468,7 @@ To delete a policy, use two commands below to delete from PDP and PAP
**DELETE POLICY INSIDE PAP**
::
-
+
curl -k -v -H 'Content-Type: application/json' \
-H 'Accept: application/json' \
-H 'ClientAuth: cHl0aG9uOnRlc3Q=' \
@@ -495,7 +495,7 @@ Create Policy
-Push Policy
+Push Policy
::
@@ -506,7 +506,7 @@ Push Policy
}' 'https://pdp:8081/pdp/api/pushPolicy'
-
+
17. Create Service Instance using step 13 above
18. Check bpmn logs to ensure that OOF sent homing response and flavor directives.
@@ -538,7 +538,7 @@ Push Policy
"vnf-vms": []
},
-
+
"vnf-parameters": [
{
"vnf-parameter-name":"vf_module_id",
@@ -787,13 +787,13 @@ Push Policy
"service-type": "8c071bd1-c361-4157-8282-3fef7689d32e",
"vnf-name": "ipsec-test",
"vnf-type": "Ipsec..base_vipsec..module-0"
-
+
}
}
}}
-
-Change parameters based on your environment.
+
+Change parameters based on your environment.
**Note**
@@ -804,5 +804,5 @@ Change parameters based on your environment.
"service-type": "8c071bd1-c361-4157-8282-3fef7689d32e", <-- same as Service Instance ID
"vnf-name": "ipsec-test", <-- name to be given to the vf module
"vnf-type": "Ipsec..base_vipsec..module-0" <-- can be found on the VID - VF Module dialog screen - Model Name
-
+
21. Create vf module (11g of `vFWCL instantiation, testing and debugging <https://wiki.onap.org/display/DW/vFWCL+instantiation%2C+testing%2C+and+debuging>`_). If everything worked properly, you should see the stack created in your VIM(WR titanium cloud openstack in this case).
diff --git a/docs/docs_vlb.rst b/docs/docs_vlb.rst
index ded308f05..5a9f6a2fb 100644
--- a/docs/docs_vlb.rst
+++ b/docs/docs_vlb.rst
@@ -1,5 +1,7 @@
.. _docs_vlb:
+:orphan:
+
vLoadBalancer Use Case
----------------------
@@ -15,7 +17,7 @@ Source files
Description
~~~~~~~~~~~
-The use case is composed of three VFs: packet generator, load balancer, and DNS server. These VFs run in three separate VMs. The packet generator issues DNS lookup queries that reach the DNS server via the load balancer. DNS replies reach the packet generator via the load balancer as well. The load balancer reports the average amount of traffic per DNS over a time interval to the DCAE collector. When the average amount of traffic per DNS server crosses a predefined threshold, the closed-loop is triggered and a new DNS server is instantiated.
+The use case is composed of three VFs: packet generator, load balancer, and DNS server. These VFs run in three separate VMs. The packet generator issues DNS lookup queries that reach the DNS server via the load balancer. DNS replies reach the packet generator via the load balancer as well. The load balancer reports the average amount of traffic per DNS over a time interval to the DCAE collector. When the average amount of traffic per DNS server crosses a predefined threshold, the closed-loop is triggered and a new DNS server is instantiated.
To test the application, make sure that the security group in OpenStack has ingress/egress entries for protocol 47 (GRE). The user can run a DNS query from the packet generator VM:
@@ -23,7 +25,7 @@ To test the application, make sure that the security group in OpenStack has ingr
dig @vLoadBalancer_IP host1.dnsdemo.onap.org
-The output below means that the load balancer has been set up correctly, has forwarded the DNS queries to one DNS instance, and the packet generator has received the DNS reply message.
+The output below means that the load balancer has been set up correctly, has forwarded the DNS queries to one DNS instance, and the packet generator has received the DNS reply message.
::
@@ -34,26 +36,26 @@ The output below means that the load balancer has been set up correctly, has for
;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 31892
;; flags: qr aa rd; QUERY: 1, ANSWER: 1, AUTHORITY: 1, ADDITIONAL: 2
;; WARNING: recursion requested but not available
-
+
;; OPT PSEUDOSECTION:
; EDNS: version: 0, flags:; udp: 4096
;; QUESTION SECTION:
;host1.dnsdemo.onap.org. IN A
-
+
;; ANSWER SECTION:
host1.dnsdemo.onap.org. 604800 IN A 10.0.100.101
-
+
;; AUTHORITY SECTION:
dnsdemo.onap.org. 604800 IN NS dnsdemo.onap.org.
-
+
;; ADDITIONAL SECTION:
dnsdemo.onap.org. 604800 IN A 10.0.100.100
-
+
;; Query time: 0 msec
;; SERVER: 192.168.9.111#53(192.168.9.111)
;; WHEN: Fri Nov 10 17:39:12 UTC 2017
;; MSG SIZE rcvd: 97
-
+
Closedloop for vLoadBalancer/vDNS
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -69,10 +71,9 @@ To change the volume of queries generated by the packet generator, run the follo
::
+ curl -X PUT -H "Authorization: Basic YWRtaW46YWRtaW4=" -H "Content-Type: application/json" -H "Cache-Control: no-cache" -d '{"pg-streams":{"pg-stream": [{"id":"dns1", "is-enabled":"true"}]}}' "http://PacketGen_IP:8183/restconf/config/sample-plugin:sample-plugin/pg-streams"
- curl -X PUT -H "Authorization: Basic YWRtaW46YWRtaW4=" -H "Content-Type: application/json" -H "Cache-Control: no-cache" -d '{"pg-streams":{"pg-stream": [{"id":"dns1", "is-enabled":"true"}]}}' "http://PacketGen_IP:8183/restconf/config/sample-plugin:sample-plugin/pg-streams"
-
-- {"id":"dns1", "is-enabled":"true"} shows the stream "dns1" is enabled. The packet generator sends requests in the rate of 100 packets per 10 seconds;
+- {"id":"dns1", "is-enabled":"true"} shows the stream "dns1" is enabled. The packet generator sends requests in the rate of 100 packets per 10 seconds;
- To increase the amount of traffic, you can enable more streams. The packet generator has 10 streams, "dns1", "dns2", "dns3" to "dns10". Each of them generates 100 packets per 10 seconds. To enable the streams, please add {"id":"dnsX", "is-enabled":"true"} to the pg-stream bracket of the curl command, where X is the stream ID.
For example, if you want to enable 3 streams, the curl command will be:
@@ -83,18 +84,20 @@ For example, if you want to enable 3 streams, the curl command will be:
When the VNF starts, the packet generator is automatically configured to run 5 streams.
-
Running the Use Case
~~~~~~~~~~~~~~~~~~~~
-Automated closed loop via Robot Framework is not supported at this time. For documentation about running the use case manually for previous releases, please look at the videos and the material available at this `wiki page`__.
-
-__ https://wiki.onap.org/display/DW/Running+the+ONAP+Demos
+Automated closed loop via Robot Framework is not supported at this time.
+For documentation about running the use case manually for previous releases,
+please look at the videos and the material available at
+`Running the ONAP Demos wiki page <https://wiki.onap.org/display/DW/Running+the+ONAP+Demos>`_
-Although videos are still valid, users are encouraged to use the Heat templates linked at the top of this page rather than the old Heat templates in that wiki page.
+Although videos are still valid, users are encouraged to use the Heat templates
+linked at the top of this page rather than the old Heat templates in that wiki page.
Known issues and resolution
~~~~~~~~~~~~~~~~~~~~~~~~~~~
-1) The packet generator may become unresponsive and stop generating traffic. To solve the problem, reboot the packet generator.
+1) The packet generator may become unresponsive and stop generating traffic.
+ To solve the problem, reboot the packet generator.
2) The base and scaling VF module names need to follow a specific naming convention:
@@ -102,4 +105,4 @@ Known issues and resolution
b) The SDNC preload for the scaling VF module must set the VF module name to "vDNS\_xyz", where "xyz" is the same as the base module. This is required because during closed loop Policy looks for "Vfmodule\_" and replaces it with "vDNS\_"
-3) Only one scaling operation is supported. \ No newline at end of file
+3) Only one scaling operation is supported.
diff --git a/docs/files/CI/ONAP_CI_0.png b/docs/files/CI/ONAP_CI_0.png
new file mode 100644
index 000000000..a0193ec63
--- /dev/null
+++ b/docs/files/CI/ONAP_CI_0.png
Binary files differ
diff --git a/docs/files/CI/ONAP_CI_1.png b/docs/files/CI/ONAP_CI_1.png
new file mode 100644
index 000000000..46765e865
--- /dev/null
+++ b/docs/files/CI/ONAP_CI_1.png
Binary files differ
diff --git a/docs/files/CI/ONAP_CI_10.png b/docs/files/CI/ONAP_CI_10.png
new file mode 100644
index 000000000..fbc2cca0a
--- /dev/null
+++ b/docs/files/CI/ONAP_CI_10.png
Binary files differ
diff --git a/docs/files/CI/ONAP_CI_2.png b/docs/files/CI/ONAP_CI_2.png
new file mode 100644
index 000000000..d98b19112
--- /dev/null
+++ b/docs/files/CI/ONAP_CI_2.png
Binary files differ
diff --git a/docs/files/CI/ONAP_CI_3.png b/docs/files/CI/ONAP_CI_3.png
new file mode 100644
index 000000000..616440cc5
--- /dev/null
+++ b/docs/files/CI/ONAP_CI_3.png
Binary files differ
diff --git a/docs/files/CI/ONAP_CI_4.png b/docs/files/CI/ONAP_CI_4.png
new file mode 100644
index 000000000..05ab52e40
--- /dev/null
+++ b/docs/files/CI/ONAP_CI_4.png
Binary files differ
diff --git a/docs/files/CI/ONAP_CI_5.png b/docs/files/CI/ONAP_CI_5.png
new file mode 100644
index 000000000..ce53661a6
--- /dev/null
+++ b/docs/files/CI/ONAP_CI_5.png
Binary files differ
diff --git a/docs/files/CI/ONAP_CI_6.png b/docs/files/CI/ONAP_CI_6.png
new file mode 100644
index 000000000..b8a11d9a0
--- /dev/null
+++ b/docs/files/CI/ONAP_CI_6.png
Binary files differ
diff --git a/docs/files/CI/ONAP_CI_7.png b/docs/files/CI/ONAP_CI_7.png
new file mode 100644
index 000000000..13f8782b0
--- /dev/null
+++ b/docs/files/CI/ONAP_CI_7.png
Binary files differ
diff --git a/docs/files/CI/ONAP_CI_8.png b/docs/files/CI/ONAP_CI_8.png
new file mode 100755
index 000000000..3263e93f8
--- /dev/null
+++ b/docs/files/CI/ONAP_CI_8.png
Binary files differ
diff --git a/docs/files/CI/ONAP_CI_9.png b/docs/files/CI/ONAP_CI_9.png
new file mode 100644
index 000000000..db31cab6f
--- /dev/null
+++ b/docs/files/CI/ONAP_CI_9.png
Binary files differ
diff --git a/docs/files/bbs/BBS_dcae-ves-collector_config.png b/docs/files/bbs/BBS_dcae-ves-collector_config.png
new file mode 100644
index 000000000..edce5985a
--- /dev/null
+++ b/docs/files/bbs/BBS_dcae-ves-collector_config.png
Binary files differ
diff --git a/docs/files/csv/release-demo-features.csv b/docs/files/csv/release-demo-features.csv
new file mode 100644
index 000000000..22bc99eaa
--- /dev/null
+++ b/docs/files/csv/release-demo-features.csv
@@ -0,0 +1,5 @@
+Issue-ID;Description
+INT-2094;[APACHE] Add Apache CNF use case files
+INT-2069;Make Network Slicing usecase more user friendly
+INT-1960;[vFW CNF CDS] Fix issue with multiple tenants creation for k8s region
+INT-1960;[vFW CNF CDS] vf_module and vnf name generation improvement in CBA
diff --git a/docs/files/csv/release-integration-features.csv b/docs/files/csv/release-integration-features.csv
new file mode 100644
index 000000000..ed06a4284
--- /dev/null
+++ b/docs/files/csv/release-integration-features.csv
@@ -0,0 +1,5 @@
+Issue-ID;Description
+INT-2070;Automate 5G Slicing use case in CI - Manual configuration step 7 - AAI Configuration
+INT-2164;Update Apache CNF Demo
+INT-2126;Data provider - add relationships into cloud region and tenant
+INT-2135;Add CPS resources into data-provider
diff --git a/docs/files/csv/release-integration-ref.csv b/docs/files/csv/release-integration-ref.csv
new file mode 100644
index 000000000..b9f3535ef
--- /dev/null
+++ b/docs/files/csv/release-integration-ref.csv
@@ -0,0 +1,39 @@
+Repository;Revision
+demo;5dcd47bfc76ab8257fcc3b8927b78295a4376c8b
+integration;cdacb811f7acc2eb0a6e5662d8d225a967160f2c
+integration/csit;08bc9551a07da29c478ca2f6487aafa651ea95dd
+integration/data-provider;3f1416193789e00f6b09029c7e841d98803e5749
+integration/docker/onap-java11;1a5e22c812aa988fbfc27a668c1d2541db971080
+integration/docker/onap-python;e2de6cdd2d5f2e4a8067c40d1af0392b02426d61
+integration/ietf-actn-tools;3a8b2c604c13584fbc807ac21058288c424893b3
+integration/onap-component-simulators;748da401868fbf35744e292ee132be614b489623
+integration/pipelines/chained-ci;0399d9842c2a5670e4ee21d45343d2ac168eee2d
+integration/pipelines/oom-automatic-installation;4934d86bfe15a6132331f802afb4b3a062cbaf8c
+integration/pipelines/xtesting-onap;276fb7948607bd6b3fc09693619f021763b5ec6e
+integration/python-onapsdk;25cafb994c9ba3b874cd973a1e1d440fb0b98bf0
+integration/seccom;0131d962bdfcf97794ac49a2f45f5eb974249288
+integration/simulators/5G-core-nf-simulator;1eaabdb8299e49dc7e81c205edce8ce46e64a511
+integration/simulators/A1-policy-enforcement-simulator;63501fbea1b8a6dc859099c3465b1758e19e0956
+integration/simulators/core-nssmf-simulator;5ce930d4a9e8137b1fbac98a58ebe2cfd3e6a77e
+integration/simulators/nf-simulator;b4e937c4c92ef68fd1ab640ce9e30a531112b371
+integration/simulators/nf-simulator/avcn-manager;13fab3acdc7a22ca7000125112c84a2e942ea307
+integration/simulators/nf-simulator/netconf-server;8d0a5c8e95ef58e391eee31c1562dcb541c4c369
+integration/simulators/nf-simulator/pm-https-server;1f0bcaac6410f2a5497aad7c6ed5e46610a4a8a7
+integration/simulators/nf-simulator/ves-client;94891f32b3e2c4be8240b4df88830f97f2255e0b
+integration/simulators/pnf-simulator;f00c718bc3978d5b7ed55a5cda1c2a1443919af9
+integration/simulators/ran-app;746cc0a4aa1ada72d98ed161322fb2bd1e359637
+integration/simulators/ran-nssmf-simulator;1528d0a38026e3e183de2d4dcf7bbfcfec633eda
+integration/simulators/ran-simulator;41bbe166748510b4c5be7606ff27ee7ee64cb001
+integration/usecases/A1-policy-enforcement;0a885a2b3595988922b8e7af3e76bef4913de8bd
+integration/usecases/A1-policy-enforcement-r-apps;e3f79f43ba8e51fda97a5d67cd5c01b04e54c9e1
+integration/xtesting;c90366fa6ec58cd063addcf50c6948ea859f5fc9
+oparent;371de4b87ccf46f1292d68468fcfd41303ab394c
+testsuite;d7fe3776469399384a340b9a38e2c0de8771e6c1
+testsuite/cds;a5eb7d86926fac92965c35de1cae9114dc471a35
+testsuite/cds-mock-odl;b7f29dc3f03ec40369941af6e525d40c822d1ced
+testsuite/cds-mock-server;7db71adaf139e54f2186cfd19d468f5a1123835d
+testsuite/cds-mock-ssh;a43ce8950dcc36363c406b1cc4043dc7d623c9f4
+testsuite/oom;9e5fee150e86c868c0ef40f2a34494be36bd41fc
+testsuite/python-testing-utils;f9d29ad319d54cdabe63b52c20c9acd9d475347b
+testsuite/pythonsdk-tests;a9dddc1095dad400626871f3f1dc5df96d05e035
+testsuite/robot-utils;7e7fbedd13aa9c33433601c8d276f0d43fcd6c78
diff --git a/docs/files/csv/release-oparent-features.csv b/docs/files/csv/release-oparent-features.csv
new file mode 100644
index 000000000..b4a48add1
--- /dev/null
+++ b/docs/files/csv/release-oparent-features.csv
@@ -0,0 +1,4 @@
+Issue-ID;Description
+
+USECASEUI-709;Upgrade dependency versions in oparent
+USECASEUI-709;Update Logback to Version 1.2.10
diff --git a/docs/files/csv/release-pythonsdk-features.csv b/docs/files/csv/release-pythonsdk-features.csv
new file mode 100644
index 000000000..6d96dd7b7
--- /dev/null
+++ b/docs/files/csv/release-pythonsdk-features.csv
@@ -0,0 +1,2 @@
+Issue-ID;Description
+INT-2119;Use 10.0.0 version of SDK in tests \ No newline at end of file
diff --git a/docs/files/csv/release-testsuite-features.csv b/docs/files/csv/release-testsuite-features.csv
new file mode 100644
index 000000000..2b05ce227
--- /dev/null
+++ b/docs/files/csv/release-testsuite-features.csv
@@ -0,0 +1,2 @@
+Issue-ID;Description
+INT-2160;[ROBOT] Fix the DCAEMOD testcase for ServiceMesh
diff --git a/docs/files/csv/repo-archived.csv b/docs/files/csv/repo-archived.csv
new file mode 100644
index 000000000..4eedae852
--- /dev/null
+++ b/docs/files/csv/repo-archived.csv
@@ -0,0 +1,10 @@
+Repository;Description;Link
+integration/benchmark;Benchmark project;`link <https://git.onap.org/integration/benchmark>`__
+integration/devtool;Devtool project;`link <https://git.onap.org/integration/devtool>`__
+integration/simulators/dc-simulator;Data Center simulator;`link <https://git.onap.org/integration/simulators/dc-simulator>`__
+integration/simulators/masspnf-simulator;Mass PNF Simulator;`link <https://git.onap.org/integration/simulators/masspnf-simulator>`__
+integration/terraform;Terraform based alternative infrastructure installation;`link <https://git.onap.org/integration/terraform>`__
+integration/terragrunt;Companion repository of terraform;`link <https://git.onap.org/integration/terragrunt>`__
+integration/usecases/bbs;BBS use case introduced in Dublin and extracted from global repository in frankfurt;`link <https://git.onap.org/integration/usecases/bbs>`__
+integration/usecases/mdons;MDONS use case introduced in Frankfurt;`link <https://git.onap.org/integration/usecases/mdons>`__
+testsuite/heatbridge;python utils to manage the heatbridge function to enrich cloud information to AAI (deprecated);`link <https://git.onap.org/testsuite/heatbridge>`__ \ No newline at end of file
diff --git a/docs/files/csv/repo-demo.csv b/docs/files/csv/repo-demo.csv
new file mode 100644
index 000000000..2a1432693
--- /dev/null
+++ b/docs/files/csv/repo-demo.csv
@@ -0,0 +1,2 @@
+Repository;Description;Link
+demo;Historical repository to host use case artifacts (heat templates, json files,..);`link <https://git.onap.org/demo>`__
diff --git a/docs/files/csv/repo-integration-external.csv b/docs/files/csv/repo-integration-external.csv
new file mode 100644
index 000000000..dc401c0a9
--- /dev/null
+++ b/docs/files/csv/repo-integration-external.csv
@@ -0,0 +1,2 @@
+Repository;Description;Link
+integration-view;Repository integration hosting the integration portal including the hosting of the web site;`link <https://gitlab.com/Orange-OpenSource/lfn/onap/integration-view>`__
diff --git a/docs/files/csv/repo-integration.csv b/docs/files/csv/repo-integration.csv
new file mode 100644
index 000000000..b7d8a392a
--- /dev/null
+++ b/docs/files/csv/repo-integration.csv
@@ -0,0 +1,13 @@
+Repository;Description;Link
+integration;Historical main repository including documentation, simulators (e.g. mass PNF simulator), non robot tests (e.g. security tests, vCPE Tosca,..), ...;`link <https://git.onap.org/integration>`__
+integration/csit;Repository hosting some tooling to start component functional tests in Jenkins (To be deprecated in Guilin as such tests must be reinsourced by the projects);`link <https://git.onap.org/integration/csit>`__
+integration/data-provider;Project that provides a tool to automate common ONAP resource creation;`link <https://git.onap.org/integration/data-provider>`__
+integration/docker/onap-java11;Java11 baseline image conformed to SECCOM recommendations;`link <https://git.onap.org/integration/docker/onap-java11>`__
+integration/docker/onap-python;Python baseline image conformed to SECCOM recommendations;`link <https://git.onap.org/integration/docker/onap-python>`__
+integration/ietf-actn-tools;IETF ACTN tools introduced in Honolulu;`link <https://git.onap.org/integration/ietf-actn-tools>`__
+integration/onap-component-simulators;ONAP component simulators used for tests;`link <https://git.onap.org/integration/onap-component-simulators/>`__
+integration/python-onapsdk;ONAP Python SDK repository;`link <https://git.onap.org/integration/python-onapsdk/>`__
+integration/seccom;Repository hosting seccom recommended versions and security test waivers;`link <https://git.onap.org/integration/seccom>`__
+integration/usecases/A1-policy-enforcement;A1 policy enforcement introduced in Honolulu;`link <https://git.onap.org/integration/usecases/A1-policy-enforcement>`__
+integration/usecases/A1-policy-enforcement-r-apps;A1 policy enforcement (analytics part) introduced in Honolulu;`link <https://git.onap.org/integration/usecases/A1-policy-enforcement-r-apps>`__
+integration/xtesting;Repository in charge to build the xtesting dockers used in CI/CD chains;`link <https://git.onap.org/integration/xtesting>`__
diff --git a/docs/files/csv/repo-oparent.csv b/docs/files/csv/repo-oparent.csv
new file mode 100644
index 000000000..30cbdc78a
--- /dev/null
+++ b/docs/files/csv/repo-oparent.csv
@@ -0,0 +1,3 @@
+Repository;Description;Link
+oparent;Java dependencies for JAVA projects;`link <https://git.onap.org/oparent>`__
+oparent/cia;Dockerfile optimization and best practices;`link <https://git.onap.org/oparent/cia>`__
diff --git a/docs/files/csv/repo-pipelines.csv b/docs/files/csv/repo-pipelines.csv
new file mode 100644
index 000000000..27e43e82c
--- /dev/null
+++ b/docs/files/csv/repo-pipelines.csv
@@ -0,0 +1,4 @@
+Repository;Description;Link
+integration/pipelines/chained-ci;Main pipelines project which holds configuration;`link <https://git.onap.org/integration/pipelines/chained-ci/>`__
+integration/pipelines/oom-automatic-installation;Definition of pipelines to instantiate ONAP;`link <https://git.onap.org/integration/pipelines/oom-automatic-installation/>`__
+integration/pipelines/xtesting-onap;Definition of pipelines to execute tests;`link <https://git.onap.org/integration/pipelines/xtesting-onap/>`__ \ No newline at end of file
diff --git a/docs/files/csv/repo-simulators.csv b/docs/files/csv/repo-simulators.csv
new file mode 100644
index 000000000..91f75eb66
--- /dev/null
+++ b/docs/files/csv/repo-simulators.csv
@@ -0,0 +1,13 @@
+Repository;Description;Link
+integration/simulators/5G-core-nf-simulator;5G core nf simulator;`link <https://git.onap.org/integration/simulators/5G-core-nf-simulator>`__
+integration/simulators/A1-policy-enforcement-simulator;A1 Policy Enforcement Simulator;`link <https://git.onap.org/integration/simulators/A1-policy-enforcement-simulator>`__
+integration/simulators/core-nssmf-simulator;Core NSSMF Simulator;`link <https://git.onap.org/integration/simulators/core-nssmf-simulator>`__
+integration/simulators/nf-simulator;NF simulator;`link <https://git.onap.org/integration/simulators/nf-simulator>`__
+integration/simulators/nf-simulator/avcn-manager;NF simulator avcn manager;`link <https://git.onap.org/integration/simulators/nf-simulator/avcn-manager>`__
+integration/simulators/nf-simulator/netconf-server;NF simulator netconf server;`link <https://git.onap.org/integration/simulators/nf-simulator/netconf-server>`__
+integration/simulators/nf-simulator/pm-https-server;NF simulator pm https server;`link <https://git.onap.org/integration/simulators/nf-simulator/pm-https-server>`__
+integration/simulators/nf-simulator/ves-client;NF simulator ves client;`link <https://git.onap.org/integration/simulators/nf-simulator/ves-client>`__
+integration/simulators/pnf-simulator;PNF Simulator;`link <https://git.onap.org/integration/simulators/pnf-simulator>`__
+integration/simulators/ran-app;RAN App;`link <https://git.onap.org/integration/simulators/ran-app/>`__
+integration/simulators/ran-nssmf-simulator;RAN NSSMF simulator;`link <https://git.onap.org/integration/simulators/ran-nssmf-simulator>`__
+integration/simulators/ran-simulator;RAN simulator;`link <https://git.onap.org/integration/simulators/ran-simulator>`__
diff --git a/docs/files/csv/repo-testsuite.csv b/docs/files/csv/repo-testsuite.csv
new file mode 100644
index 000000000..79fc01860
--- /dev/null
+++ b/docs/files/csv/repo-testsuite.csv
@@ -0,0 +1,10 @@
+Repository;Description;Link
+testsuite;repository hosting the robot test suites;`link <https://git.onap.org/testsuite>`__
+testsuite/cds;Repository hosting (standalone) CDS test suites shared by Bell Canada team, not yet integrated in CI/CD;`link <https://git.onap.org/testsuite/cds>`__
+testsuite/cds-mock-odl;needed for cds regression tests;`link <https://git.onap.org/testsuite/cds-mock-odl>`__
+testsuite/cds-mock-server;needed for cds regression tests;`link <https://git.onap.org/testsuite/cds-mock-server>`__
+testsuite/cds-mock-ssh;needed for cds regression tests;`link <https://git.onap.org/testsuite/cds-mock-ssh>`__
+testsuite/oom;Helm chart for robot pod (to be deprecated in Honolulu and moved back to OOM);`link <https://git.onap.org/testsuite/oom>`__
+testsuite/python-testing-utils;Python and robot util libraries used for robot tests;`link <https://git.onap.org/testsuite/python-testing-utils>`__
+testsuite/pythonsdk-tests;Repository hosting the test scenarios leveraging python-onapsdk for end to end smoke tests;`link <https://git.onap.org/testsuite/pythonsdk-tests>`__
+testsuite/robot-utils;Repository aiming to provide a robot wrapper for python-onapsdk;`link <https://git.onap.org/testsuite/robot-utils>`__
diff --git a/docs/files/csv/s3p-instantiation.csv b/docs/files/csv/s3p-instantiation.csv
new file mode 100644
index 000000000..d21f2ee5e
--- /dev/null
+++ b/docs/files/csv/s3p-instantiation.csv
@@ -0,0 +1,6 @@
+Parameters;Jakarta;Istanbul;Honolulu
+Number of tests;1190;1310;1410
+Global success rate;96%;97%;96%
+Min duration;140s;193s;81s
+Max duration;2075s;2128s;2000s
+mean duration;603s;564s;530s \ No newline at end of file
diff --git a/docs/files/csv/s3p-sdc.csv b/docs/files/csv/s3p-sdc.csv
new file mode 100644
index 000000000..cd9bb9e6c
--- /dev/null
+++ b/docs/files/csv/s3p-sdc.csv
@@ -0,0 +1,6 @@
+Parameters;Jakarta;Istanbul;Honolulu
+Number of tests;1000;1085;715
+Global success rate;92%;92%;93%
+Min duration;119s;111s;80s
+Max duration;844s;799s;1128s
+mean duration;394s;366s;565s \ No newline at end of file
diff --git a/docs/files/csv/simulators.csv b/docs/files/csv/simulators.csv
new file mode 100644
index 000000000..69e6b57f8
--- /dev/null
+++ b/docs/files/csv/simulators.csv
@@ -0,0 +1,6 @@
+Name;Description;Link;Contacts
+NF Simulator;Evolution of the pnf simulator, the Network service simulator;:ref:`official doc <nf_simulator>`;K.Kuzmicki
+A1 Policy Enforcement Simulator;Simulator that supports the A1-P OSC_2.1.0 interface and also provides internal API to manage the RAN elements (Cells, Ues) and allows to customize and send VES Events;`official readme <https://git.onap.org/integration/simulators/A1-policy-enforcement-simulator/tree/README.md>`__;Krystian Kędroń
+Mass PNF Simulator;Mimic the PNF for benchmark purposes;`official readme <https://git.onap.org/integration/simulators/masspnf-simulator/tree/README.md>`__;Tamas Bakai
+RAN simulator;RAN-SIM is a Radio Access Network Simulator, it is used to simulate the various functionalities of an eNodeB;`official readme <https://git.onap.org/integration/simulators/ran-simulator/tree/README.md>`__;Priyadharshini B
+DC simulator;Data Center simulator;`official readme <https://git.onap.org/integration/simulators/dc-simulator/tree/README.md>`__;Xin Miao
diff --git a/docs/files/csv/stability_basic_vm.csv b/docs/files/csv/stability_basic_vm.csv
new file mode 100644
index 000000000..5ff8d0807
--- /dev/null
+++ b/docs/files/csv/stability_basic_vm.csv
@@ -0,0 +1,11 @@
+Basic_vm metric;Value
+Number of PASS occurrences;557
+Number of Raw FAIL Occurrences;174
+Raw Success rate; 76%
+Corrected success rate; 86%
+Average duration of the test;549s (9m9s)
+Min duration;188s (3m8s)
+Max duration;2161s (36m1s)
+Median duration;271s (4m34s)
+% of Duration < 282s; 50%
+% of duration > 660s; 29%
diff --git a/docs/files/csv/stability_cluster_metric_cpu.csv b/docs/files/csv/stability_cluster_metric_cpu.csv
new file mode 100644
index 000000000..e77d61691
--- /dev/null
+++ b/docs/files/csv/stability_cluster_metric_cpu.csv
@@ -0,0 +1,2 @@
+Namespace;CPU Utilisation (from requests);CPU utilisation (from limits);Memory Utilisation (from requests);Memory Utilisation (from limits)
+onap;2.22%;0.816%;19%;7.4%
diff --git a/docs/files/csv/stability_cluster_metric_memory.csv b/docs/files/csv/stability_cluster_metric_memory.csv
new file mode 100644
index 000000000..40c6fa566
--- /dev/null
+++ b/docs/files/csv/stability_cluster_metric_memory.csv
@@ -0,0 +1,2 @@
+Namespace;Pods;Workloads;Memory Usage;Memory Requests;Memory Requests %;Memory Limits;Memory Limits %
+onap;242;181;160.70 GiB;193.13 GiB;83.21%;493.09 GiB;32.59%
diff --git a/docs/files/csv/stability_cluster_metric_network.csv b/docs/files/csv/stability_cluster_metric_network.csv
new file mode 100644
index 000000000..46f02a7f7
--- /dev/null
+++ b/docs/files/csv/stability_cluster_metric_network.csv
@@ -0,0 +1,2 @@
+Namespace;Current Receive Bandwidth;Current Transmit Bandwidth;Rate of Received Packets;Rate of Transmitted Packets;Rate of Received Packets Dropped;Rate of Transmitted Packets Dropped
+onap; 1.03 MBs; 1.07 MBs;5.08 kpps;5.02 kpps;0 pps;0 pps
diff --git a/docs/files/csv/stability_top10_cpu.csv b/docs/files/csv/stability_top10_cpu.csv
new file mode 100644
index 000000000..cdd93c1a9
--- /dev/null
+++ b/docs/files/csv/stability_top10_cpu.csv
@@ -0,0 +1,11 @@
+Pod;CPU Usage;CPU Requests;CPU Request %;CPU Limits;CPU Limits %
+onap-robot;0.92;1;92%;4;23%
+onap-cassandra-0;0.4;0.4;101%;1.6;25%
+onap-cassandra-2;0.36;0.4;83%;1.6;22%
+onap-ejbca;0.35;0.02;1771%;3;11%
+onap-awx;0.35;6;6%;;
+onap-cassandra-1;0.33;0.4;83%;1.6;21%
+onap-oof-has-api;0.12;2;6%;8;1%
+onap-music-cassandra-2;0.12;0.32;32%;1;12%
+onap-dcae-cloudify-manager;0.11;2;6%;4;3%
+onap-music-cassandra-1;0.09;0.32;29%;1;9%
diff --git a/docs/files/csv/stability_top10_memory.csv b/docs/files/csv/stability_top10_memory.csv
new file mode 100644
index 000000000..504afc2ac
--- /dev/null
+++ b/docs/files/csv/stability_top10_memory.csv
@@ -0,0 +1,11 @@
+Pod;Memory Usage;Memory Requests;Memory Requests %;Memory Limits;Memory Limits %
+onap-portal-cassandra;3.34 GiB;6.2 GiB;53.8%;7.5 GiB;45%
+onap-cassandra-2;2.6 GiB;5 GiB;52%;8 GiB;32%
+onap-cassandra-0;2.6 GiB;5 GiB;52%;8 GiB;32%
+onap-cassandra-1;2.54 GiB;5 GiB;51%;8 GiB;32%
+onap-appc;2.46 GiB;4 GiB;62%;8 GiB; 32%
+onap-sdnc;2.43 GiB;4 GiB;61%;8 GiB; 30%
+onap-policy-mariadb-0;2.4 GiB;1.96 GiB;122%;5.5 GiB;44%
+onap-dcae-cloudify-manager;4.7 GiB;2 GiB;233%;4 GiB;115%
+onap-awx;1.72 GiB;12 GiB;14%;;
+onap-aaf-cass;1.45 GiB;2.5 GiB;58%;3.5 GiB;41.4%
diff --git a/docs/files/csv/stability_top10_net.csv b/docs/files/csv/stability_top10_net.csv
new file mode 100644
index 000000000..b86ba909f
--- /dev/null
+++ b/docs/files/csv/stability_top10_net.csv
@@ -0,0 +1,11 @@
+Pod;Current Receive Bandwidth;Current Transmit Bandwidth;Rate of Received Packets;Rate of Transmitted Packets;Rate of Received Packets Dropped;Rate of Transmitted Packets Dropped
+onap-oof-has-api;372 kB/s;670 B/s;9.21 p/s;5.4 p/s;0 p/s;0 p/s
+onap-cassandra-2;231 kB/s;155 kB/s;90.35 p/s;69.84 p/s;0 p/s;0 p/s
+onap-cassandra-1;228 kB/s;156 kB/s;87 p/s;82 p/s;0 p/s;0 p/s
+onap-cassandra-0;144 kB/s;245 kB/s;63 p/s;75 p/s;0 p/s;0 p/s
+onap-message-router-0;17 kB/s;18 kB/s;187 p/s;188 p/s;0 p/s;0 p/s
+onap-portal-app;15 kB/s;4.7 kB/s;187 p/s;188 p/s;0 p/s;0 p/s
+onap-consul;14 kB/s;3.9 kB/s;38 p/s;40 p/s;0 p/s;0 p/s
+onap-message-router-kafka-0;10kB/s;10 kB/s;112 p/s;115 p/s;0 p/s;0 p/s
+onap-dcaemodul-onboarding-apt_install;6.7 kB/s;6.1 kB/s;45 p/s;47 p/s;0 p/s;0 p/s
+onap-message-router-kafka-2;6.3 kB/s;6.1 kB/s;70 p/s;72 p/s;0 p/s;0 p/s
diff --git a/docs/files/csv/tests-healthcheck.csv b/docs/files/csv/tests-healthcheck.csv
new file mode 100644
index 000000000..32ee6cfce
--- /dev/null
+++ b/docs/files/csv/tests-healthcheck.csv
@@ -0,0 +1,11 @@
+Tests;Description;Code;Comments
+core;Robot healthcheck tests of the core components (AA&I, DMAAP, Portal, SDC, SDNC, SO);`robot tests <https://git.onap.org/testsuite/tree/robot/testsuites/health-check.robot>`__;`robotframework <https://robotframework.org/>`__
+full;Robot healthcheck tests for all the components, **holmes healthcheck** have been reintroduced;`robot tests <https://git.onap.org/testsuite/tree/robot/testsuites/health-check.robot>`__;`robotframework <https://robotframework.org/>`__
+healthdist;Check the onboarding and distribution of the vFW;`robot tests <https://git.onap.org/testsuite/tree/robot/testsuites/health-check.robot>`__;`robotframework <https://robotframework.org/>`__
+postinstall;Check dmaap and AA&I Design model DB tests;`robot tests <https://git.onap.org/testsuite/tree/robot/testsuites/post-install-tests.robot>`__;`robotframework <https://robotframework.org/>`__
+ves-collector (new);Suite for checking handling events by VES Collector;`code <https://git.onap.org/testsuite/tree/robot/testsuites/ves.robot>`__;`robotframework <https://robotframework.org/>`__
+hv-ves;HV-VES 'Sunny Scenario' Robot Framework test - message is sent to the collector and Kafka topic is checked if the message has been published. Content is decoded and checked.;`code <https://git.onap.org/testsuite/tree/robot/testsuites/hvves-ci.robot>`__;`robotframework <https://robotframework.org/>`__
+basic_onboard;onboard a model, subset of most of the other basic_* tests, created to perform stability testing;`code <https://git.onap.org/testsuite/pythonsdk-tests/tree/src/onaptests/scenario/basic_onboard.py>`__;`onap_pythonsdk <https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk>`__, `pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/tree/>`__
+cps-healthcheck;Call liveness and readiness probes of the CPS module;`robot tests <https://github.com/onap/cps/blob/master/csit/tests/actuator/actuator.robot>`__;`robotframework <https://robotframework.org/>`__
+**cps-temporal-healthcheck**;Call endpoints of CPS Temporal component;`robot tests <https://github.com/onap/cps-cps-temporal/blob/master/csit/tests/actuator/actuator.robot>`__;`robotframework <https://robotframework.org/>`__
+**cps-dmi-plugin-healthcheck**;Call endpoints of CPS DMI plugin component;`robot tests <https://github.com/onap/cps-ncmp-dmi-plugin/blob/master/csit/tests/actuator/actuator.robot>`__;`robotframework <https://robotframework.org/>`__ \ No newline at end of file
diff --git a/docs/files/csv/tests-infrastructure-healthcheck.csv b/docs/files/csv/tests-infrastructure-healthcheck.csv
new file mode 100644
index 000000000..a350f6bc8
--- /dev/null
+++ b/docs/files/csv/tests-infrastructure-healthcheck.csv
@@ -0,0 +1,4 @@
+Tests;Description;Code;Comments
+onap-helm;Verify Helm chart status, the test has been updated to take into account Helm3;`python module <https://gitlab.com/Orange-OpenSource/lfn/tools/helm-onap-status>`__;
+onap-k8s;Check common resources of the ONAP Kubernetes namespace ;`python module <https://gitlab.com/Orange-OpenSource/lfn/tools/kubernetes-status>`__;kubernetes python library
+onap-k8s-teardown;Check common resources of the ONAP Kubernetes namespace after all tests execution;`python module <https://gitlab.com/Orange-OpenSource/lfn/tools/kubernetes-status>`__;kubernetes python library
diff --git a/docs/files/csv/tests-security.csv b/docs/files/csv/tests-security.csv
new file mode 100644
index 000000000..9d949a9e0
--- /dev/null
+++ b/docs/files/csv/tests-security.csv
@@ -0,0 +1,5 @@
+Tests;Description;Code;Comments
+root_pods;check that pods are not using root user or started as root; `bash script <https://git.onap.org/integration/xtesting/tree/security/scripts/check_security_root.sh>`__; kubectl
+unlimitted_pods;check that limits are set for pods;`bash script <https://git.onap.org/integration/xtesting/tree/security/scripts/check_unlimitted_pods.sh>`__; kubectl
+nonssl_endpoints;check that all public HTTP endpoints exposed in ONAP cluster use SSL tunnels;`Go script <https://git.onap.org/integration/plain/test/security/sslendpoints/main.go>`__;kubectl, nmap
+nodeport_check_certs;This test list the nodeports and tries to get SSL information to evaluate the validity of the certificates (expiration and issuer) used on the nodeports;`python module <https://git.onap.org/integration/tree/test/security/check_certificates>`__;pyopenssl, kubernetes python libraries
diff --git a/docs/files/csv/tests-smoke.csv b/docs/files/csv/tests-smoke.csv
new file mode 100644
index 000000000..16ea7593e
--- /dev/null
+++ b/docs/files/csv/tests-smoke.csv
@@ -0,0 +1,11 @@
+Tests;Description;Code;Comments
+basic_vm;Onboard, distribute and instantiate an Openstack VM using à la carte BPMN, replaced the former basic_vm test;`code <https://git.onap.org/testsuite/pythonsdk-tests/tree/src/onaptests/scenario/basic_vm.py>`__;`onap_pythonsdk <https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk>`__, `pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/tree/>`__
+basic_network;Onboard, distribute and instantiate a Neutron network;`code <https://git.onap.org/testsuite/pythonsdk-tests/tree/src/onaptests/scenario/basic_network.py>`__;`onap_pythonsdk <https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk>`__, `pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/tree/>`__
+basic_cnf;Onboard (new), distribute and instantiate a Kubernetes pods;`code <https://git.onap.org/testsuite/pythonsdk-tests/tree/src/onaptests/scenario/basic_cnf.py>`__;`onap_pythonsdk <https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk>`__, `pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/tree/>`__
+5gbulkpm;5G Bulk PM Usecase functionality. The test has been significantly enhanced in Honolulu;`code <https://git.onap.org/testsuite/tree/robot/testsuites/usecases/5gbulkpm.robot>`__;`robotframework <https://robotframework.org/>`__
+pnf-registrate;Executes the PNF registration test cases including setup and teardown;`code <https://git.onap.org/testsuite/tree/robot/testsuites/pnf-registration.robot>`__;`robotframework <https://robotframework.org/>`__
+cmpv2;CMPv2 Usecase functionality;`code <https://git.onap.org/testsuite/tree/robot/testsuites/cmpv2.robot>`__;`robotframework <https://robotframework.org/>`__
+basic_vm_macro;Instantiate a VM using macro bpmn;`code <https://git.onap.org/testsuite/pythonsdk-tests/tree/src/onaptests/scenario/basic_vm_macro.py>`__;`onap_pythonsdk <https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk>`__, `pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/tree/>`__
+pnf_macro;Run PNF simulator, onboard, distribute and instantiate service including PNF;`code <https://git.onap.org/testsuite/pythonsdk-tests/tree/src/onaptests/scenario/pnf_macro.py>`__;`onap_pythonsdk <https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk>`__, `pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/tree/>`__
+cds_resource_resolution;Upload blueprint file into CDS, execute test workflow and check results;`code <https://git.onap.org/testsuite/pythonsdk-tests/tree/src/onaptests/scenario/cds_resource_resolution.py>`__;`onap_pythonsdk <https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk>`__, `pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/tree/>`__
+basic_cnf_macro;Onboard (new), distribute and instantiate a Kubernetes pods using SO's macro flow;`code <https://git.onap.org/testsuite/pythonsdk-tests/tree/src/onaptests/scenario/basic_cnf_macro.py>`__;`onap_pythonsdk <https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk>`__, `pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/tree/>`__
diff --git a/docs/files/csv/usecases-deprecated.csv b/docs/files/csv/usecases-deprecated.csv
new file mode 100644
index 000000000..a1c3b3b33
--- /dev/null
+++ b/docs/files/csv/usecases-deprecated.csv
@@ -0,0 +1,28 @@
+Use Case;Link;Last Valid Version;Comments
+vFirewall with closed loop;:ref:`official doc <docs_vfw>`;Guilin;Shall still be OK in Honolulu but not tested yet
+Scale Out;:ref:`official doc <docs_scaleout>`;Guilin;Shall still be OK in Honolulu but not tested yet
+vCPE Use Case;:ref:`official doc <docs_vcpe>`;El Alto;No resources to test on Frankfurt
+vIPsec with HPA Use Case;:ref:`official doc<docs_vipsec_hpa>`;El Alto;No resources to test on Frankfurt
+Change Management Schedule Optimization;:ref:`official doc<docs_CM_schedule_optimizer>`;El Alto;No resources to test on Frankfurt
+Change Management Flexible Designer and Orchestrator;:ref:`official doc<docs_CM_flexible_designer_orchestrator>`;El Alto;No resources to test on Frankfurt
+vFirewall/vDNS with HPA;:ref:`official doc <docs_vfw_hpa>`;Frankfurt;No resources to test on Guilin
+BBS (Broadband Service);:ref:`official doc <docs_bbs>`;Frankfurt;No resources to test on Guilin
+vFirewall CNF with multicloud k8s plugin;:ref:`official doc <docs_vfw_edgex_multicloud_k8s>`;Frankfurt;No resources to test on Guilin
+EdgeXFoundry CNF with multicloud k8s plugin;:ref:`official doc <docs_vfw_edgex_multicloud_k8s>`;Frankfurt;No resources to test on Guilin
+vCPE with Tosca;:ref:`official doc <docs_vcpe_tosca_local>`;Frankfurt;No resources to test on Guilin
+E2E Automation vLB with CDS;`wiki page <https://wiki.onap.org/pages/viewpage.action?pageId=71838891>`__; Frankfurt;No resources to test on Guilin
+vFirewall In-Place Software Upgrade with Traffic Distribution;:ref:`official doc <docs_vfw_traffic>`;Frankfurt;APPC in maintenance mode
+5G Bulk PM; :ref:`official doc <docs_5g_bulk_pm>`;Frankfurt;Not tested in Guilin
+5G NRM Network Resource Model (Configuration management);:ref:`official doc <docs_5G_NRM_Configuration>`;Frankfurt;Not tested in Guilin
+5G NETCONF configuration;:ref:`official doc <docs_5G_Configuration_over_NETCONF>`;Frankfurt;Not tested in Guilin
+PNF Software Upgrade using direct Netconf Yang interface with PNF;:ref:`official doc <docs_5g_pnf_software_upgrade>`;Frankfurt;Not tested in Guilin
+PNF Software Upgrade with EM with Ansible;:ref:`official doc <docs_5G_PNF_Software_Upgrade_ansible_with_EM>`;Frankfurt;Not tested in Guilin
+PNF Software Upgrade with EM with Netconf; :ref:`official doc <docs_5g_pnf_software_upgrade_netconf_with_EM>`;Frankfurt;Not tested in Guilin
+PNF Software Upgrade in association to schema updates; :ref:`official doc <docs_5G_PNF_Software_Upgrade_With_Schema_Update>`;Frankfurt;Not tested in Guilin
+VSP Compliance and Validation Check within SDC;`wiki page <https://wiki.onap.org/display/DW/VSP+Compliance+and+Validation+Check+within+SDC+%28Frankfurt%29+-+Phase+2#VSPComplianceandValidationCheckwithinSDC(Frankfurt)Phase2-VSPComplianceCheckwithinSDC-IntegrationTestPlan>`_;Frankfurt;Not tested in Guilin
+Enable PNF software version at onboarding;`wiki page <https://jira.onap.org/browse/REQ-88?src=confmacro>`__;Frankfurt;Not tested in Guilin
+xNF communication security enhancements; `wiki page <https://wiki.onap.org/display/DW/xNF+communication+security+enhancements+-+Tests+Description+and+Status>`__;Frankfurt;Not tested in Guilin
+ETSI Alignment SO plugin to support SOL003 to connect to an external VNFM;`wiki page <https://wiki.onap.org/display/DW/ETSI+Alignment+Support>`__;Frankfurt;Not tested in Guilin
+Integration of CDS as an Actor; `official doc <https://docs.onap.org/projects/onap-ccsdk-cds/en/latest/ui/designer-guide.html>`_;Frankfurt;Not tested in Guilin
+3rd Party Operational Domain Manager; `wiki page <https://wiki.onap.org/display/DW/Third-party+Operational+Domain+Manager>`__;Frankfurt;Not tested in Guilin
+Configuration & persistency; `wiki page <https://wiki.onap.org/pages/viewpage.action?pageId=64003184>`__;Frankfurt;Not tested in Guilin
diff --git a/docs/files/csv/usecases-functional-requirements.csv b/docs/files/csv/usecases-functional-requirements.csv
new file mode 100644
index 000000000..6bb6494d7
--- /dev/null
+++ b/docs/files/csv/usecases-functional-requirements.csv
@@ -0,0 +1,3 @@
+Issue key;Summary;Contact;Comment
+REQ-1215;E2E Network Slicing use case enhancements for Kohn release;Kevin Tang;
+REQ-1212;5G SON use case enhancements for Kohn release;N.K. Shankaranarayanan;
diff --git a/docs/files/csv/usecases-non-functional-requirements.csv b/docs/files/csv/usecases-non-functional-requirements.csv
new file mode 100644
index 000000000..3b489ac43
--- /dev/null
+++ b/docs/files/csv/usecases-non-functional-requirements.csv
@@ -0,0 +1,5 @@
+Issue key;Summary;Contact;Comment
+REQ-1267;General intent model and general intent interface requirements in R11;Keguang He;
+REQ-1214;Maintenance and Enhancement of Intent-driven Closed-loop Autonomous Networks in R11;Dong Wang;
+REQ-1268;CCVPN Kohn Enhancements for Intent-based Cloud Leased Line and Transport Slicing;Henry Yu;
+REQ-1342;Retirement of unmaintained repos;Amy Zwarico; \ No newline at end of file
diff --git a/docs/files/csv/usecases-old-valid.csv b/docs/files/csv/usecases-old-valid.csv
new file mode 100644
index 000000000..c10709e9d
--- /dev/null
+++ b/docs/files/csv/usecases-old-valid.csv
@@ -0,0 +1,6 @@
+Summary;Link;Contacts
+vFirewall CNF With CDS;:ref:`official doc <docs_vFW_CNF_CDS>`;L.Rajewski, K.Banka
+5G Realtime PM and High Volume Stream Data Collection; :ref:`official doc <docs_realtime_pm>`;M.Przybysz
+5G PNF Plug and Play; :ref:`official doc <docs_5g_pnf_pnp>`; M.Przybysz K.Kuzmicki
+5G PNF Pre-Onboarding & Onboarding;:ref:`official doc <docs_pnf_onboarding_preonboarding>`;M.Przybysz K.Kuzmicki D.Melia A.Walshe
+MDONS extension;:ref:`official doc <docs_CCVPN>`;X.Miao
diff --git a/docs/files/csv/usecases.csv b/docs/files/csv/usecases.csv
new file mode 100644
index 000000000..629088202
--- /dev/null
+++ b/docs/files/csv/usecases.csv
@@ -0,0 +1,4 @@
+Ref;Summary;Link;Contacts
+REQ-440;E2E Network Slicing;:ref:`official doc <docs_E2E_network_slicing>`;Kevin Tang
+REQ-429;5G OOF SON;:ref:`official doc <docs_5G_oof_son>`;N. K. Shankaranarayanan
+REQ-459;CCVPN-Transport Slicing;:ref:`official doc <docs_ccvpn>`;Henry Yu
diff --git a/docs/files/dt-use-case.png b/docs/files/dt-use-case.png
index 068e9e587..62b67d078 100755
--- a/docs/files/dt-use-case.png
+++ b/docs/files/dt-use-case.png
Binary files differ
diff --git a/docs/files/ns_automation/ns_automation_sdc_suffix.png b/docs/files/ns_automation/ns_automation_sdc_suffix.png
new file mode 100644
index 000000000..c78d27230
--- /dev/null
+++ b/docs/files/ns_automation/ns_automation_sdc_suffix.png
Binary files differ
diff --git a/docs/files/ns_automation/ns_automation_suc.png b/docs/files/ns_automation/ns_automation_suc.png
new file mode 100644
index 000000000..ff7a6d9b4
--- /dev/null
+++ b/docs/files/ns_automation/ns_automation_suc.png
Binary files differ
diff --git a/docs/files/ns_automation/ns_automation_test_class.png b/docs/files/ns_automation/ns_automation_test_class.png
new file mode 100644
index 000000000..5f7976841
--- /dev/null
+++ b/docs/files/ns_automation/ns_automation_test_class.png
Binary files differ
diff --git a/docs/files/s3p/basic_vm_duration.png b/docs/files/s3p/basic_vm_duration.png
new file mode 100644
index 000000000..71e522681
--- /dev/null
+++ b/docs/files/s3p/basic_vm_duration.png
Binary files differ
diff --git a/docs/files/s3p/basic_vm_duration_histo.png b/docs/files/s3p/basic_vm_duration_histo.png
new file mode 100644
index 000000000..d201d3b81
--- /dev/null
+++ b/docs/files/s3p/basic_vm_duration_histo.png
Binary files differ
diff --git a/docs/files/s3p/daily_frankfurt1.png b/docs/files/s3p/daily_frankfurt1.png
new file mode 100644
index 000000000..44d82870d
--- /dev/null
+++ b/docs/files/s3p/daily_frankfurt1.png
Binary files differ
diff --git a/docs/files/s3p/daily_frankfurt2.png b/docs/files/s3p/daily_frankfurt2.png
new file mode 100644
index 000000000..f2f9ae854
--- /dev/null
+++ b/docs/files/s3p/daily_frankfurt2.png
Binary files differ
diff --git a/docs/files/s3p/guilin_daily_healthcheck.png b/docs/files/s3p/guilin_daily_healthcheck.png
new file mode 100644
index 000000000..34a58ebda
--- /dev/null
+++ b/docs/files/s3p/guilin_daily_healthcheck.png
Binary files differ
diff --git a/docs/files/s3p/guilin_daily_infrastructure_healthcheck.png b/docs/files/s3p/guilin_daily_infrastructure_healthcheck.png
new file mode 100644
index 000000000..be24c02ce
--- /dev/null
+++ b/docs/files/s3p/guilin_daily_infrastructure_healthcheck.png
Binary files differ
diff --git a/docs/files/s3p/guilin_daily_security.png b/docs/files/s3p/guilin_daily_security.png
new file mode 100644
index 000000000..1d3d518c0
--- /dev/null
+++ b/docs/files/s3p/guilin_daily_security.png
Binary files differ
diff --git a/docs/files/s3p/guilin_daily_smoke.png b/docs/files/s3p/guilin_daily_smoke.png
new file mode 100644
index 000000000..5200c575e
--- /dev/null
+++ b/docs/files/s3p/guilin_daily_smoke.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_daily_healthcheck.png b/docs/files/s3p/honolulu_daily_healthcheck.png
new file mode 100644
index 000000000..01216aee4
--- /dev/null
+++ b/docs/files/s3p/honolulu_daily_healthcheck.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_daily_infrastructure_healthcheck.png b/docs/files/s3p/honolulu_daily_infrastructure_healthcheck.png
new file mode 100644
index 000000000..660902029
--- /dev/null
+++ b/docs/files/s3p/honolulu_daily_infrastructure_healthcheck.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_daily_security.png b/docs/files/s3p/honolulu_daily_security.png
new file mode 100644
index 000000000..2efc9c84a
--- /dev/null
+++ b/docs/files/s3p/honolulu_daily_security.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_daily_smoke.png b/docs/files/s3p/honolulu_daily_smoke.png
new file mode 100644
index 000000000..4192e404d
--- /dev/null
+++ b/docs/files/s3p/honolulu_daily_smoke.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_sdc_stability.png b/docs/files/s3p/honolulu_sdc_stability.png
new file mode 100644
index 000000000..4d6c4ee2c
--- /dev/null
+++ b/docs/files/s3p/honolulu_sdc_stability.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_sdc_stability_resources.png b/docs/files/s3p/honolulu_sdc_stability_resources.png
new file mode 100644
index 000000000..dd9333687
--- /dev/null
+++ b/docs/files/s3p/honolulu_sdc_stability_resources.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_so_stability_1_duration.png b/docs/files/s3p/honolulu_so_stability_1_duration.png
new file mode 100644
index 000000000..47f625604
--- /dev/null
+++ b/docs/files/s3p/honolulu_so_stability_1_duration.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_so_stability_5.png b/docs/files/s3p/honolulu_so_stability_5.png
new file mode 100644
index 000000000..fe8487524
--- /dev/null
+++ b/docs/files/s3p/honolulu_so_stability_5.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_weekly_cpu.png b/docs/files/s3p/honolulu_weekly_cpu.png
new file mode 100644
index 000000000..dbf55d272
--- /dev/null
+++ b/docs/files/s3p/honolulu_weekly_cpu.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_weekly_memory.png b/docs/files/s3p/honolulu_weekly_memory.png
new file mode 100644
index 000000000..5cbdf04be
--- /dev/null
+++ b/docs/files/s3p/honolulu_weekly_memory.png
Binary files differ
diff --git a/docs/files/s3p/istanbul-dashboard.png b/docs/files/s3p/istanbul-dashboard.png
new file mode 100644
index 000000000..f8bad42ad
--- /dev/null
+++ b/docs/files/s3p/istanbul-dashboard.png
Binary files differ
diff --git a/docs/files/s3p/istanbul_daily_healthcheck.png b/docs/files/s3p/istanbul_daily_healthcheck.png
new file mode 100644
index 000000000..e1cf16ae6
--- /dev/null
+++ b/docs/files/s3p/istanbul_daily_healthcheck.png
Binary files differ
diff --git a/docs/files/s3p/istanbul_daily_infrastructure_healthcheck.png b/docs/files/s3p/istanbul_daily_infrastructure_healthcheck.png
new file mode 100644
index 000000000..1e8877d0e
--- /dev/null
+++ b/docs/files/s3p/istanbul_daily_infrastructure_healthcheck.png
Binary files differ
diff --git a/docs/files/s3p/istanbul_daily_security.png b/docs/files/s3p/istanbul_daily_security.png
new file mode 100644
index 000000000..605edb140
--- /dev/null
+++ b/docs/files/s3p/istanbul_daily_security.png
Binary files differ
diff --git a/docs/files/s3p/istanbul_daily_smoke.png b/docs/files/s3p/istanbul_daily_smoke.png
new file mode 100644
index 000000000..cdeb999da
--- /dev/null
+++ b/docs/files/s3p/istanbul_daily_smoke.png
Binary files differ
diff --git a/docs/files/s3p/istanbul_instantiation_stability_10.png b/docs/files/s3p/istanbul_instantiation_stability_10.png
new file mode 100644
index 000000000..73749572a
--- /dev/null
+++ b/docs/files/s3p/istanbul_instantiation_stability_10.png
Binary files differ
diff --git a/docs/files/s3p/istanbul_resiliency.png b/docs/files/s3p/istanbul_resiliency.png
new file mode 100644
index 000000000..567a98c5c
--- /dev/null
+++ b/docs/files/s3p/istanbul_resiliency.png
Binary files differ
diff --git a/docs/files/s3p/istanbul_sdc_stability.png b/docs/files/s3p/istanbul_sdc_stability.png
new file mode 100644
index 000000000..67346cb0d
--- /dev/null
+++ b/docs/files/s3p/istanbul_sdc_stability.png
Binary files differ
diff --git a/docs/files/s3p/jakarta-dashboard.png b/docs/files/s3p/jakarta-dashboard.png
new file mode 100755
index 000000000..e5f2fd1b8
--- /dev/null
+++ b/docs/files/s3p/jakarta-dashboard.png
Binary files differ
diff --git a/docs/files/s3p/stability_sdnc_memory.png b/docs/files/s3p/stability_sdnc_memory.png
new file mode 100644
index 000000000..c381077f5
--- /dev/null
+++ b/docs/files/s3p/stability_sdnc_memory.png
Binary files differ
diff --git a/docs/files/scaleout/12.png b/docs/files/scaleout/12.png
deleted file mode 100644
index c6e79a8dd..000000000
--- a/docs/files/scaleout/12.png
+++ /dev/null
Binary files differ
diff --git a/docs/files/scaleout/13.png b/docs/files/scaleout/13.png
deleted file mode 100644
index b64d57759..000000000
--- a/docs/files/scaleout/13.png
+++ /dev/null
Binary files differ
diff --git a/docs/files/scaleout/14.png b/docs/files/scaleout/14.png
deleted file mode 100644
index 959fef355..000000000
--- a/docs/files/scaleout/14.png
+++ /dev/null
Binary files differ
diff --git a/docs/files/scaleout/15.png b/docs/files/scaleout/15.png
deleted file mode 100644
index ed552d9d0..000000000
--- a/docs/files/scaleout/15.png
+++ /dev/null
Binary files differ
diff --git a/docs/files/scaleout/16.png b/docs/files/scaleout/16.png
deleted file mode 100644
index 78ec99002..000000000
--- a/docs/files/scaleout/16.png
+++ /dev/null
Binary files differ
diff --git a/docs/files/scaleout/17.png b/docs/files/scaleout/17.png
deleted file mode 100644
index 4165da725..000000000
--- a/docs/files/scaleout/17.png
+++ /dev/null
Binary files differ
diff --git a/docs/files/scaleout/18.png b/docs/files/scaleout/18.png
deleted file mode 100644
index c0fc3b57a..000000000
--- a/docs/files/scaleout/18.png
+++ /dev/null
Binary files differ
diff --git a/docs/files/scaleout/19.png b/docs/files/scaleout/19.png
deleted file mode 100644
index f83e1d5a9..000000000
--- a/docs/files/scaleout/19.png
+++ /dev/null
Binary files differ
diff --git a/docs/files/scaleout/20.png b/docs/files/scaleout/20.png
deleted file mode 100644
index 71147e3de..000000000
--- a/docs/files/scaleout/20.png
+++ /dev/null
Binary files differ
diff --git a/docs/files/scaleout/clamp/1.png b/docs/files/scaleout/clamp/1.png
new file mode 100644
index 000000000..acf850843
--- /dev/null
+++ b/docs/files/scaleout/clamp/1.png
Binary files differ
diff --git a/docs/files/scaleout/clamp/10.png b/docs/files/scaleout/clamp/10.png
new file mode 100644
index 000000000..17dcaa937
--- /dev/null
+++ b/docs/files/scaleout/clamp/10.png
Binary files differ
diff --git a/docs/files/scaleout/clamp/11.png b/docs/files/scaleout/clamp/11.png
new file mode 100644
index 000000000..f41f72988
--- /dev/null
+++ b/docs/files/scaleout/clamp/11.png
Binary files differ
diff --git a/docs/files/scaleout/clamp/12.png b/docs/files/scaleout/clamp/12.png
new file mode 100644
index 000000000..8acc00439
--- /dev/null
+++ b/docs/files/scaleout/clamp/12.png
Binary files differ
diff --git a/docs/files/scaleout/clamp/13.png b/docs/files/scaleout/clamp/13.png
new file mode 100644
index 000000000..74894e9e7
--- /dev/null
+++ b/docs/files/scaleout/clamp/13.png
Binary files differ
diff --git a/docs/files/scaleout/clamp/14.png b/docs/files/scaleout/clamp/14.png
new file mode 100644
index 000000000..52d1b2a02
--- /dev/null
+++ b/docs/files/scaleout/clamp/14.png
Binary files differ
diff --git a/docs/files/scaleout/clamp/15.png b/docs/files/scaleout/clamp/15.png
new file mode 100644
index 000000000..9bfd74f3e
--- /dev/null
+++ b/docs/files/scaleout/clamp/15.png
Binary files differ
diff --git a/docs/files/scaleout/clamp/16.png b/docs/files/scaleout/clamp/16.png
new file mode 100644
index 000000000..edec29070
--- /dev/null
+++ b/docs/files/scaleout/clamp/16.png
Binary files differ
diff --git a/docs/files/scaleout/clamp/2.png b/docs/files/scaleout/clamp/2.png
new file mode 100644
index 000000000..e5ff5f6cb
--- /dev/null
+++ b/docs/files/scaleout/clamp/2.png
Binary files differ
diff --git a/docs/files/scaleout/clamp/3.png b/docs/files/scaleout/clamp/3.png
new file mode 100644
index 000000000..f0dcc352f
--- /dev/null
+++ b/docs/files/scaleout/clamp/3.png
Binary files differ
diff --git a/docs/files/scaleout/clamp/4.png b/docs/files/scaleout/clamp/4.png
new file mode 100644
index 000000000..5563b2442
--- /dev/null
+++ b/docs/files/scaleout/clamp/4.png
Binary files differ
diff --git a/docs/files/scaleout/clamp/5.png b/docs/files/scaleout/clamp/5.png
new file mode 100644
index 000000000..590b01c7e
--- /dev/null
+++ b/docs/files/scaleout/clamp/5.png
Binary files differ
diff --git a/docs/files/scaleout/clamp/6.png b/docs/files/scaleout/clamp/6.png
new file mode 100644
index 000000000..bbdd69878
--- /dev/null
+++ b/docs/files/scaleout/clamp/6.png
Binary files differ
diff --git a/docs/files/scaleout/clamp/7.png b/docs/files/scaleout/clamp/7.png
new file mode 100644
index 000000000..281f7bcf9
--- /dev/null
+++ b/docs/files/scaleout/clamp/7.png
Binary files differ
diff --git a/docs/files/scaleout/clamp/8.png b/docs/files/scaleout/clamp/8.png
new file mode 100644
index 000000000..81217e8a4
--- /dev/null
+++ b/docs/files/scaleout/clamp/8.png
Binary files differ
diff --git a/docs/files/scaleout/clamp/9.png b/docs/files/scaleout/clamp/9.png
new file mode 100644
index 000000000..ef8d3200d
--- /dev/null
+++ b/docs/files/scaleout/clamp/9.png
Binary files differ
diff --git a/docs/files/scaleout/dcae_blueprint.yaml b/docs/files/scaleout/dcae_blueprint.yaml
new file mode 100644
index 000000000..c3d1a1db9
--- /dev/null
+++ b/docs/files/scaleout/dcae_blueprint.yaml
@@ -0,0 +1,174 @@
+#
+# ============LICENSE_START====================================================
+# =============================================================================
+# Copyright (c) 2019 AT&T Intellectual Property. All rights reserved.
+# =============================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END======================================================
+
+tosca_definitions_version: cloudify_dsl_1_3
+
+description: >
+ This blueprint deploys/manages the TCA module as a Docker container
+
+imports:
+ - http://www.getcloudify.org/spec/cloudify/3.4/types.yaml
+ - https://nexus.onap.org/service/local/repositories/raw/content/org.onap.dcaegen2.platform.plugins/R4/k8splugin/1.4.13/k8splugin_types.yaml
+# - https://nexus.onap.org/service/local/repositories/raw/content/org.onap.dcaegen2.platform.plugins/R4/dcaepolicyplugin/2.3.0/dcaepolicyplugin_types.yaml
+ - https://nexus.onap.org/service/local/repositories/raw/content/org.onap.dcaegen2.platform.plugins/R4/clamppolicyplugin/1.0.0/clamppolicyplugin_types.yaml
+inputs:
+ aaiEnrichmentHost:
+ type: string
+ default: "aai.onap.svc.cluster.local"
+ aaiEnrichmentPort:
+ type: string
+ default: "8443"
+ enableAAIEnrichment:
+ type: string
+ default: true
+ dmaap_host:
+ type: string
+ default: message-router.onap
+ dmaap_port:
+ type: string
+ default: "3904"
+ enableRedisCaching:
+ type: string
+ default: false
+ redisHosts:
+ type: string
+ default: dcae-redis.onap.svc.cluster.local:6379
+ tag_version:
+ type: string
+ default: "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.tca-cdap-container:1.1.1"
+ consul_host:
+ type: string
+ default: consul-server.onap
+ consul_port:
+ type: string
+ default: "8500"
+ cbs_host:
+ type: string
+ default: "config-binding-service"
+ cbs_port:
+ type: string
+ default: "10000"
+ policy_id:
+ type: string
+ default: "onap.restart.tca"
+ external_port:
+ type: string
+ description: Kubernetes node port on which CDAPgui is exposed
+ default: "32012"
+ policy_model_id:
+ type: string
+ default: "onap.policies.monitoring.cdap.tca.hi.lo.app"
+node_templates:
+ tca_k8s:
+ type: dcae.nodes.ContainerizedServiceComponent
+ relationships:
+ - target: tca_policy
+ type: cloudify.relationships.depends_on
+ properties:
+ service_component_type: 'dcaegen2-analytics-tca'
+ docker_config: {}
+ image:
+ get_input: tag_version
+ log_info:
+ log_directory: "/opt/app/TCAnalytics/logs"
+ application_config:
+ app_config:
+ appDescription: DCAE Analytics Threshold Crossing Alert Application
+ appName: dcae-tca
+ tcaAlertsAbatementTableName: TCAAlertsAbatementTable
+ tcaAlertsAbatementTableTTLSeconds: '1728000'
+ tcaSubscriberOutputStreamName: TCASubscriberOutputStream
+ tcaVESAlertsTableName: TCAVESAlertsTable
+ tcaVESAlertsTableTTLSeconds: '1728000'
+ tcaVESMessageStatusTableName: TCAVESMessageStatusTable
+ tcaVESMessageStatusTableTTLSeconds: '86400'
+ thresholdCalculatorFlowletInstances: '2'
+ app_preferences:
+ aaiEnrichmentHost:
+ get_input: aaiEnrichmentHost
+ aaiEnrichmentIgnoreSSLCertificateErrors: 'true'
+ aaiEnrichmentPortNumber: '8443'
+ aaiEnrichmentProtocol: https
+ aaiEnrichmentUserName: dcae@dcae.onap.org
+ aaiEnrichmentUserPassword: demo123456!
+ aaiVMEnrichmentAPIPath: /aai/v11/search/nodes-query
+ aaiVNFEnrichmentAPIPath: /aai/v11/network/generic-vnfs/generic-vnf
+ enableAAIEnrichment:
+ get_input: enableAAIEnrichment
+ enableRedisCaching:
+ get_input: enableRedisCaching
+ redisHosts:
+ get_input: redisHosts
+ enableAlertCEFFormat: 'false'
+ publisherContentType: application/json
+ publisherHostName:
+ get_input: dmaap_host
+ publisherHostPort:
+ get_input: dmaap_port
+ publisherMaxBatchSize: '1'
+ publisherMaxRecoveryQueueSize: '100000'
+ publisherPollingInterval: '20000'
+ publisherProtocol: http
+ publisherTopicName: unauthenticated.DCAE_CL_OUTPUT
+ subscriberConsumerGroup: OpenDCAE-clamp
+ subscriberConsumerId: c12
+ subscriberContentType: application/json
+ subscriberHostName:
+ get_input: dmaap_host
+ subscriberHostPort:
+ get_input: dmaap_port
+ subscriberMessageLimit: '-1'
+ subscriberPollingInterval: '30000'
+ subscriberProtocol: http
+ subscriberTimeoutMS: '-1'
+ subscriberTopicName: unauthenticated.VES_MEASUREMENT_OUTPUT
+ tca_policy: ''
+ service_component_type: dcaegen2-analytics_tca
+ interfaces:
+ cloudify.interfaces.lifecycle:
+ start:
+ inputs:
+ envs:
+ DMAAPHOST:
+ { get_input: dmaap_host }
+ DMAAPPORT:
+ { get_input: dmaap_port }
+ DMAAPPUBTOPIC: "unauthenticated.DCAE_CL_OUTPUT"
+ DMAAPSUBTOPIC: "unauthenticated.VES_MEASUREMENT_OUTPUT"
+ AAIHOST:
+ { get_input: aaiEnrichmentHost }
+ AAIPORT:
+ { get_input: aaiEnrichmentPort }
+ CONSUL_HOST:
+ { get_input: consul_host }
+ CONSUL_PORT:
+ { get_input: consul_port }
+ CBS_HOST:
+ { get_input: cbs_host }
+ CBS_PORT:
+ { get_input: cbs_port }
+ CONFIG_BINDING_SERVICE: "config_binding_service"
+ ports:
+ - concat: ["11011:", { get_input: external_port }]
+ tca_policy:
+ type: clamp.nodes.policy
+ properties:
+ policy_id:
+ get_input: policy_id
+ policy_model_id:
+ get_input: policy_model_id
diff --git a/docs/files/scaleout/k8s-tca-clamp-policy-05162019.yaml b/docs/files/scaleout/k8s-tca-clamp-policy-05162019.yaml
deleted file mode 100644
index b4e545391..000000000
--- a/docs/files/scaleout/k8s-tca-clamp-policy-05162019.yaml
+++ /dev/null
@@ -1,175 +0,0 @@
-#
-# ============LICENSE_START====================================================
-# =============================================================================
-# Copyright (c) 2019 AT&T Intellectual Property. All rights reserved.
-# =============================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END======================================================
-
-tosca_definitions_version: cloudify_dsl_1_3
-
-description: >
- This blueprint deploys/manages the TCA module as a Docker container
-
-imports:
- - http://www.getcloudify.org/spec/cloudify/3.4/types.yaml
- - https://nexus.onap.org/service/local/repositories/raw/content/org.onap.dcaegen2.platform.plugins/R4/k8splugin/1.4.13/k8splugin_types.yaml
-# - https://nexus.onap.org/service/local/repositories/raw/content/org.onap.dcaegen2.platform.plugins/R4/dcaepolicyplugin/2.3.0/dcaepolicyplugin_types.yaml
- - https://nexus.onap.org/service/local/repositories/raw/content/org.onap.dcaegen2.platform.plugins/R4/clamppolicyplugin/1.0.0/clamppolicyplugin_types.yaml
-inputs:
- aaiEnrichmentHost:
- type: string
- default: "aai.onap.svc.cluster.local"
- aaiEnrichmentPort:
- type: string
- default: "8443"
- enableAAIEnrichment:
- type: string
- default: true
- dmaap_host:
- type: string
- default: message-router.onap
- dmaap_port:
- type: string
- default: "3904"
- enableRedisCaching:
- type: string
- default: false
- redisHosts:
- type: string
- default: dcae-redis.onap.svc.cluster.local:6379
- tag_version:
- type: string
- default: "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.tca-cdap-container:1.1.1"
- consul_host:
- type: string
- default: consul-server.onap
- consul_port:
- type: string
- default: "8500"
- cbs_host:
- type: string
- default: "config-binding-service"
- cbs_port:
- type: string
- default: "10000"
- policy_id:
- type: string
- default: "onap.restart.tca"
- external_port:
- type: string
- description: Kubernetes node port on which CDAPgui is exposed
- default: "32012"
- policy_model_id:
- type: string
- default: "onap.policies.monitoring.cdap.tca.hi.lo.app"
-node_templates:
- tca_k8s:
- type: dcae.nodes.ContainerizedServiceComponent
- relationships:
- - target: tca_policy
- type: cloudify.relationships.depends_on
- properties:
- service_component_type: 'dcaegen2-analytics-tca'
- application_config: {}
- docker_config: {}
- image:
- get_input: tag_version
- log_info:
- log_directory: "/opt/app/TCAnalytics/logs"
- application_config:
- app_config:
- appDescription: DCAE Analytics Threshold Crossing Alert Application
- appName: dcae-tca
- tcaAlertsAbatementTableName: TCAAlertsAbatementTable
- tcaAlertsAbatementTableTTLSeconds: '1728000'
- tcaSubscriberOutputStreamName: TCASubscriberOutputStream
- tcaVESAlertsTableName: TCAVESAlertsTable
- tcaVESAlertsTableTTLSeconds: '1728000'
- tcaVESMessageStatusTableName: TCAVESMessageStatusTable
- tcaVESMessageStatusTableTTLSeconds: '86400'
- thresholdCalculatorFlowletInstances: '2'
- app_preferences:
- aaiEnrichmentHost:
- get_input: aaiEnrichmentHost
- aaiEnrichmentIgnoreSSLCertificateErrors: 'true'
- aaiEnrichmentPortNumber: '8443'
- aaiEnrichmentProtocol: https
- aaiEnrichmentUserName: dcae@dcae.onap.org
- aaiEnrichmentUserPassword: demo123456!
- aaiVMEnrichmentAPIPath: /aai/v11/search/nodes-query
- aaiVNFEnrichmentAPIPath: /aai/v11/network/generic-vnfs/generic-vnf
- enableAAIEnrichment:
- get_input: enableAAIEnrichment
- enableRedisCaching:
- get_input: enableRedisCaching
- redisHosts:
- get_input: redisHosts
- enableAlertCEFFormat: 'false'
- publisherContentType: application/json
- publisherHostName:
- get_input: dmaap_host
- publisherHostPort:
- get_input: dmaap_port
- publisherMaxBatchSize: '1'
- publisherMaxRecoveryQueueSize: '100000'
- publisherPollingInterval: '20000'
- publisherProtocol: http
- publisherTopicName: unauthenticated.DCAE_CL_OUTPUT
- subscriberConsumerGroup: OpenDCAE-clamp
- subscriberConsumerId: c12
- subscriberContentType: application/json
- subscriberHostName:
- get_input: dmaap_host
- subscriberHostPort:
- get_input: dmaap_port
- subscriberMessageLimit: '-1'
- subscriberPollingInterval: '30000'
- subscriberProtocol: http
- subscriberTimeoutMS: '-1'
- subscriberTopicName: unauthenticated.VES_MEASUREMENT_OUTPUT
- tca_policy: ''
- service_component_type: dcaegen2-analytics_tca
- interfaces:
- cloudify.interfaces.lifecycle:
- start:
- inputs:
- envs:
- DMAAPHOST:
- { get_input: dmaap_host }
- DMAAPPORT:
- { get_input: dmaap_port }
- DMAAPPUBTOPIC: "unauthenticated.DCAE_CL_OUTPUT"
- DMAAPSUBTOPIC: "unauthenticated.VES_MEASUREMENT_OUTPUT"
- AAIHOST:
- { get_input: aaiEnrichmentHost }
- AAIPORT:
- { get_input: aaiEnrichmentPort }
- CONSUL_HOST:
- { get_input: consul_host }
- CONSUL_PORT:
- { get_input: consul_port }
- CBS_HOST:
- { get_input: cbs_host }
- CBS_PORT:
- { get_input: cbs_port }
- CONFIG_BINDING_SERVICE: "config_binding_service"
- ports:
- - concat: ["11011:", { get_input: external_port }]
- tca_policy:
- type: clamp.nodes.policy
- properties:
- policy_id:
- get_input: policy_id
- policy_model_id:
- get_input: policy_model_id \ No newline at end of file
diff --git a/docs/files/scaleout/latest-tca-guilin.yaml b/docs/files/scaleout/latest-tca-guilin.yaml
new file mode 100644
index 000000000..e7d967a26
--- /dev/null
+++ b/docs/files/scaleout/latest-tca-guilin.yaml
@@ -0,0 +1,141 @@
+# ============LICENSE_START====================================================
+# =============================================================================
+# Copyright (C) 2019-2020 AT&T Intellectual Property. All rights reserved.
+# =============================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END======================================================
+#k8s-tca-gen2-v3.yaml
+
+tosca_definitions_version: cloudify_dsl_1_3
+imports:
+ - https://www.getcloudify.org/spec/cloudify/4.5.5/types.yaml
+ - plugin:k8splugin?version=3.4.2
+ - plugin:clamppolicyplugin?version=1.1.0
+inputs:
+ service_name:
+ type: string
+ default: 'dcae-tcagen2'
+ log_directory:
+ type: string
+ default: "/opt/logs/dcae-analytics-tca"
+ replicas:
+ type: integer
+ description: number of instances
+ default: 1
+ spring.data.mongodb.uri:
+ type: string
+ default: "mongodb://dcae-mongohost/dcae-tcagen2"
+ tag_version:
+ type: string
+ default: "nexus3.onap.org:10001/onap/org.onap.dcaegen2.analytics.tca-gen2.dcae-analytics-tca-web:1.2.1"
+ tca.aai.password:
+ type: string
+ default: "DCAE"
+ tca.aai.url:
+ type: string
+ default: "http://aai.onap.svc.cluster.local"
+ tca.aai.username:
+ type: string
+ default: "DCAE"
+ tca_handle_in_subscribe_url:
+ type: string
+ default: "http://message-router.onap.svc.cluster.local:3904/events/unauthenticated.VES_MEASUREMENT_OUTPUT"
+ tca_handle_out_publish_url:
+ type: string
+ default: "http://message-router.onap.svc.cluster.local:3904/events/unauthenticated.DCAE_CL_OUTPUT"
+ tca_consumer_group:
+ type: string
+ default: "cg1"
+ policy_model_id:
+ type: string
+ default: "onap.policies.monitoring.tcagen2"
+ policy_id:
+ type: string
+ default: "onap.restart.tca"
+node_templates:
+ docker.tca:
+ type: dcae.nodes.ContainerizedServiceComponent
+ relationships:
+ - target: tcagen2_policy
+ type: cloudify.relationships.depends_on
+ interfaces:
+ cloudify.interfaces.lifecycle:
+ start:
+ inputs:
+ ports:
+ - concat: ["9091:", "0"]
+ properties:
+ application_config:
+ service_calls: []
+ streams_publishes:
+ tca_handle_out:
+ dmaap_info:
+ topic_url:
+ get_input: tca_handle_out_publish_url
+ type: message_router
+ streams_subscribes:
+ tca_handle_in:
+ dmaap_info:
+ topic_url:
+ get_input: tca_handle_in_subscribe_url
+ type: message_router
+ spring.data.mongodb.uri:
+ get_input: spring.data.mongodb.uri
+ streams_subscribes.tca_handle_in.consumer_group:
+ get_input: tca_consumer_group
+ streams_subscribes.tca_handle_in.consumer_ids[0]: c0
+ streams_subscribes.tca_handle_in.consumer_ids[1]: c1
+ streams_subscribes.tca_handle_in.message_limit: 50000
+ streams_subscribes.tca_handle_in.polling.auto_adjusting.max: 60000
+ streams_subscribes.tca_handle_in.polling.auto_adjusting.min: 30000
+ streams_subscribes.tca_handle_in.polling.auto_adjusting.step_down: 30000
+ streams_subscribes.tca_handle_in.polling.auto_adjusting.step_up: 10000
+ streams_subscribes.tca_handle_in.polling.fixed_rate: 0
+ streams_subscribes.tca_handle_in.timeout: -1
+ tca.aai.enable_enrichment: true
+ tca.aai.generic_vnf_path: aai/v11/network/generic-vnfs/generic-vnf
+ tca.aai.node_query_path: aai/v11/search/nodes-query
+ tca.aai.password:
+ get_input: tca.aai.password
+ tca.aai.url:
+ get_input: tca.aai.url
+ tca.aai.username:
+ get_input: tca.aai.username
+ tca.policy: '{"domain":"measurementsForVfScaling","metricsPerEventName":[{"eventName":"vFirewallBroadcastPackets","controlLoopSchemaType":"VM","policyScope":"DCAE","policyName":"DCAE.Config_tca-hi-lo","policyVersion":"v0.0.1","thresholds":[{"closedLoopControlName":"ControlLoop-vFirewall-d0a1dfc6-94f5-4fd4-a5b5-4630b438850a","version":"1.0.2","fieldPath":"$.event.measurementsForVfScalingFields.vNicPerformanceArray[*].receivedTotalPacketsDelta","thresholdValue":300,"direction":"LESS_OR_EQUAL","severity":"MAJOR","closedLoopEventStatus":"ONSET"},{"closedLoopControlName":"ControlLoop-vFirewall-d0a1dfc6-94f5-4fd4-a5b5-4630b438850a","version":"1.0.2","fieldPath":"$.event.measurementsForVfScalingFields.vNicPerformanceArray[*].receivedTotalPacketsDelta","thresholdValue":700,"direction":"GREATER_OR_EQUAL","severity":"CRITICAL","closedLoopEventStatus":"ONSET"}]},{"eventName":"vLoadBalancer","controlLoopSchemaType":"VM","policyScope":"DCAE","policyName":"DCAE.Config_tca-hi-lo","policyVersion":"v0.0.1","thresholds":[{"closedLoopControlName":"ControlLoop-vDNS-6f37f56d-a87d-4b85-b6a9-cc953cf779b3","version":"1.0.2","fieldPath":"$.event.measurementsForVfScalingFields.vNicPerformanceArray[*].receivedTotalPacketsDelta","thresholdValue":300,"direction":"GREATER_OR_EQUAL","severity":"CRITICAL","closedLoopEventStatus":"ONSET"}]},{"eventName":"Measurement_vGMUX","controlLoopSchemaType":"VNF","policyScope":"DCAE","policyName":"DCAE.Config_tca-hi-lo","policyVersion":"v0.0.1","thresholds":[{"closedLoopControlName":"ControlLoop-vCPE-48f0c2c3-a172-4192-9ae3-052274181b6e","version":"1.0.2","fieldPath":"$.event.measurementsForVfScalingFields.additionalMeasurements[*].arrayOfFields[0].value","thresholdValue":0,"direction":"EQUAL","severity":"MAJOR","closedLoopEventStatus":"ABATED"},{"closedLoopControlName":"ControlLoop-vCPE-48f0c2c3-a172-4192-9ae3-052274181b6e","version":"1.0.2","fieldPath":"$.event.measurementsForVfScalingFields.additionalMeasurements[*].arrayOfFields[0].value","thresholdValu
e":0,"direction":"GREATER","severity":"CRITICAL","closedLoopEventStatus":"ONSET"}]}]}'
+ tca.processing_batch_size: 10000
+ tca.enable_abatement: true
+ tca.enable_ecomp_logging: true
+ docker_config:
+ healthcheck:
+ endpoint: /actuator/health
+ interval: 30s
+ timeout: 10s
+ type: http
+ image:
+ get_input: tag_version
+ log_info:
+ log_directory:
+ get_input: log_directory
+ tls_info:
+ use_tls: true
+ cert_directory: '/etc/tca-gen2/ssl'
+ replicas:
+ get_input: replicas
+ service_component_type: { get_input: service_name }
+ tcagen2_policy:
+ type: clamp.nodes.policy
+ properties:
+ policy_id:
+ get_input: policy_id
+ policy_model_id:
+ get_input: policy_model_id
diff --git a/docs/files/simulators/NF-Simulator.png b/docs/files/simulators/NF-Simulator.png
new file mode 100644
index 000000000..b52aaa5ff
--- /dev/null
+++ b/docs/files/simulators/NF-Simulator.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/APIDecisionTree.png b/docs/files/softwareUpgrade/APIDecisionTree.png
new file mode 100644
index 000000000..dff8d38fd
--- /dev/null
+++ b/docs/files/softwareUpgrade/APIDecisionTree.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/DirectNetconfYangInterface.png b/docs/files/softwareUpgrade/DirectNetconfYangInterface.png
new file mode 100644
index 000000000..4da660793
--- /dev/null
+++ b/docs/files/softwareUpgrade/DirectNetconfYangInterface.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/OnboardingCsar.png b/docs/files/softwareUpgrade/OnboardingCsar.png
new file mode 100644
index 000000000..0a5ba2bfd
--- /dev/null
+++ b/docs/files/softwareUpgrade/OnboardingCsar.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/SWUPWorkflow.png b/docs/files/softwareUpgrade/SWUPWorkflow.png
new file mode 100644
index 000000000..6455a5ac9
--- /dev/null
+++ b/docs/files/softwareUpgrade/SWUPWorkflow.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/SchemaUpdate.png b/docs/files/softwareUpgrade/SchemaUpdate.png
new file mode 100644
index 000000000..25884bdcb
--- /dev/null
+++ b/docs/files/softwareUpgrade/SchemaUpdate.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/ServiceLevelUpgrade.png b/docs/files/softwareUpgrade/ServiceLevelUpgrade.png
new file mode 100644
index 000000000..86d856765
--- /dev/null
+++ b/docs/files/softwareUpgrade/ServiceLevelUpgrade.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/ServiceLevelWorkflow.png b/docs/files/softwareUpgrade/ServiceLevelWorkflow.png
new file mode 100644
index 000000000..ea37d1603
--- /dev/null
+++ b/docs/files/softwareUpgrade/ServiceLevelWorkflow.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/WorkflowView.png b/docs/files/softwareUpgrade/WorkflowView.png
new file mode 100644
index 000000000..79a28f1db
--- /dev/null
+++ b/docs/files/softwareUpgrade/WorkflowView.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/serviceModelVersions.png b/docs/files/softwareUpgrade/serviceModelVersions.png
new file mode 100644
index 000000000..a918ffa00
--- /dev/null
+++ b/docs/files/softwareUpgrade/serviceModelVersions.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/verifyPNF.png b/docs/files/softwareUpgrade/verifyPNF.png
new file mode 100644
index 000000000..f0aacec9f
--- /dev/null
+++ b/docs/files/softwareUpgrade/verifyPNF.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/workflowList.png b/docs/files/softwareUpgrade/workflowList.png
new file mode 100644
index 000000000..339907ac5
--- /dev/null
+++ b/docs/files/softwareUpgrade/workflowList.png
Binary files differ
diff --git a/docs/files/tests/test-basic-cnf.png b/docs/files/tests/test-basic-cnf.png
new file mode 100644
index 000000000..87701587e
--- /dev/null
+++ b/docs/files/tests/test-basic-cnf.png
Binary files differ
diff --git a/docs/files/tests/test-certif.png b/docs/files/tests/test-certif.png
new file mode 100644
index 000000000..69d6264c2
--- /dev/null
+++ b/docs/files/tests/test-certif.png
Binary files differ
diff --git a/docs/files/tests/test-dashboard.png b/docs/files/tests/test-dashboard.png
new file mode 100644
index 000000000..afd4eee49
--- /dev/null
+++ b/docs/files/tests/test-dashboard.png
Binary files differ
diff --git a/docs/files/tests/test-onap-helm.png b/docs/files/tests/test-onap-helm.png
new file mode 100644
index 000000000..e5f5b5366
--- /dev/null
+++ b/docs/files/tests/test-onap-helm.png
Binary files differ
diff --git a/docs/files/tests/test-onap-k8s.png b/docs/files/tests/test-onap-k8s.png
new file mode 100644
index 000000000..69693f7f5
--- /dev/null
+++ b/docs/files/tests/test-onap-k8s.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/Instantiation_topology.png b/docs/files/vFW_CNF_CDS/Instantiation_topology.png
new file mode 100755
index 000000000..85c50599f
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/Instantiation_topology.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/Native_Helm_Flow.png b/docs/files/vFW_CNF_CDS/Native_Helm_Flow.png
new file mode 100644
index 000000000..7e896aa8c
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/Native_Helm_Flow.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/healthcheck.png b/docs/files/vFW_CNF_CDS/healthcheck.png
new file mode 100644
index 000000000..693e9e74c
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/healthcheck.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/helm-overrides-steps.png b/docs/files/vFW_CNF_CDS/helm-overrides-steps.png
new file mode 100644
index 000000000..ad9355344
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/helm-overrides-steps.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/helm-overrides.png b/docs/files/vFW_CNF_CDS/helm-overrides.png
new file mode 100644
index 000000000..93a4a34c5
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/helm-overrides.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/k8s-resources-response.json b/docs/files/vFW_CNF_CDS/k8s-resources-response.json
new file mode 100644
index 000000000..3d0f91344
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/k8s-resources-response.json
@@ -0,0 +1,843 @@
+{
+ "k8s-resource": [
+ {
+ "id": "e5a4eca381ade9439806cf426eb7a07237fe9a8c9088038bd0c8135f728fabe2",
+ "name": "vfw-1-vfw",
+ "group": "apps",
+ "version": "v1",
+ "kind": "Deployment",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vfw",
+ "k8splugin.io/rb-instance-id=brave_brattain",
+ "release=vfw-1-vfw",
+ "vf-module-name=vfw-1-vfw",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/brave_brattain/query?ApiVersion=v1&Kind=Deployment&Name=vfw-1-vfw&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35940985",
+ "resource-version": "1635943384048",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/d56c54b9-40cc-4b7a-abce-50454571e39d",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "d56c54b9-40cc-4b7a-abce-50454571e39d"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "5b43d0c6e6b3ebb3d279dccbdad024a02995d0e66c2858c866ba9295c34cde62",
+ "name": "vfw-1-vsn-5f45887f5f-khqdd",
+ "group": "",
+ "version": "v1",
+ "kind": "Pod",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vsn",
+ "k8splugin.io/rb-instance-id=sharp_torvalds",
+ "pod-template-hash=5f45887f5f",
+ "release=vfw-1-vsn",
+ "vf-module-name=vfw-1-vsn",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/sharp_torvalds/query?ApiVersion=v1&Kind=Pod&Name=vfw-1-vsn-5f45887f5f-khqdd&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35941084",
+ "resource-version": "1635943399747",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/56f3d02b-5a32-4a97-9e7b-d3c0094c07e8",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "56f3d02b-5a32-4a97-9e7b-d3c0094c07e8"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "afce111381db5b5e61d12726d96e4d08b5dc1c7fdb5b069a24bb68c36314d45f",
+ "name": "kud-lr-onap-nf-20211103t124217642438z-ovn-nat",
+ "group": "k8s.cni.cncf.io",
+ "version": "v1",
+ "kind": "NetworkAttachmentDefinition",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=base_template",
+ "k8splugin.io/rb-instance-id=nifty_lichterman",
+ "release=vfw-1-base",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/nifty_lichterman/query?ApiVersion=v1&Kind=NetworkAttachmentDefinition&Name=kud-lr-onap-nf-20211103t124217642438z-ovn-nat&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35940925",
+ "resource-version": "1635943376139",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/abb282c8-c932-45dc-9c62-01938eab32fa",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "abb282c8-c932-45dc-9c62-01938eab32fa"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "eaa4c964f1e0559cb061cd543c381e8067fc19524195e04ca25b539fdde19712",
+ "name": "kud-lr-onap-nf-20211103t124217642438z-unprotected-network",
+ "group": "k8s.plugin.opnfv.org",
+ "version": "v1alpha1",
+ "kind": "Network",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=base_template",
+ "k8splugin.io/rb-instance-id=nifty_lichterman",
+ "release=vfw-1-base",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/nifty_lichterman/query?ApiVersion=v1alpha1&Kind=Network&Name=kud-lr-onap-nf-20211103t124217642438z-unprotected-network&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35940922",
+ "resource-version": "1635943376139",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/abb282c8-c932-45dc-9c62-01938eab32fa",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "abb282c8-c932-45dc-9c62-01938eab32fa"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "6e5fc95d95eee51f818cd3fbd8fbd40bc29d9fe4c587a1e2840ef4d17fb2fac6",
+ "name": "vfw-1-vsn-configmap",
+ "group": "",
+ "version": "v1",
+ "kind": "ConfigMap",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vsn",
+ "k8splugin.io/rb-instance-id=sharp_torvalds",
+ "release=vfw-1-vsn",
+ "vf-module-name=vfw-1-vsn",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/sharp_torvalds/query?ApiVersion=v1&Kind=ConfigMap&Name=vfw-1-vsn-configmap&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35941065",
+ "resource-version": "1635943399747",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/56f3d02b-5a32-4a97-9e7b-d3c0094c07e8",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "56f3d02b-5a32-4a97-9e7b-d3c0094c07e8"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "9e587e69bfe74762f66c59c5cb2ed41ca37bafa59b454b4e1432f4c61f0361f7",
+ "name": "kud-lr-onap-nf-20211103t124217642438z-management-network",
+ "group": "k8s.plugin.opnfv.org",
+ "version": "v1alpha1",
+ "kind": "Network",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=base_template",
+ "k8splugin.io/rb-instance-id=nifty_lichterman",
+ "release=vfw-1-base",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/nifty_lichterman/query?ApiVersion=v1alpha1&Kind=Network&Name=kud-lr-onap-nf-20211103t124217642438z-management-network&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35940926",
+ "resource-version": "1635943376139",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/abb282c8-c932-45dc-9c62-01938eab32fa",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "abb282c8-c932-45dc-9c62-01938eab32fa"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "20413fe7d44e703f3b498a442184b7e95a1e52afccf68cdc5785bfb1855a70c9",
+ "name": "vfw-1-vfw-configmap",
+ "group": "",
+ "version": "v1",
+ "kind": "ConfigMap",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vfw",
+ "k8splugin.io/rb-instance-id=brave_brattain",
+ "release=vfw-1-vfw",
+ "vf-module-name=vfw-1-vfw",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/brave_brattain/query?ApiVersion=v1&Kind=ConfigMap&Name=vfw-1-vfw-configmap&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35940973",
+ "resource-version": "1635943384048",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/d56c54b9-40cc-4b7a-abce-50454571e39d",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "d56c54b9-40cc-4b7a-abce-50454571e39d"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "e73732351195c8c10d28413ddff1d968bd53b0b0e395c24b3b0fcd39f46ea730",
+ "name": "vfw-1-vpkg-mgmt",
+ "group": "",
+ "version": "v1",
+ "kind": "Service",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vpkg",
+ "k8splugin.io/rb-instance-id=dazzling_nightingale",
+ "release=vfw-1-vpkg",
+ "vf-module-name=vfw-1-vpkg",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/dazzling_nightingale/query?ApiVersion=v1&Kind=Service&Name=vfw-1-vpkg-mgmt&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35941017",
+ "resource-version": "1635943391652",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/314795d7-6005-4462-a9fe-7006538e3ff9",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "314795d7-6005-4462-a9fe-7006538e3ff9"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "f65235da9cb098588b2db0c9e7da1ccb00954268fba6bd621bb9ef0b48cd717f",
+ "name": "vfw-1-vpkg",
+ "group": "apps",
+ "version": "v1",
+ "kind": "Deployment",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vpkg",
+ "k8splugin.io/rb-instance-id=dazzling_nightingale",
+ "release=vfw-1-vpkg",
+ "vf-module-name=vfw-1-vpkg",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/dazzling_nightingale/query?ApiVersion=v1&Kind=Deployment&Name=vfw-1-vpkg&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35941032",
+ "resource-version": "1635943391652",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/314795d7-6005-4462-a9fe-7006538e3ff9",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "314795d7-6005-4462-a9fe-7006538e3ff9"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "87cbdb83bf436703bdb9823e07e1498a7b3ec7fb12ba14193aadd4630649e0ae",
+ "name": "vfw-1-vpkg-c6bdb954c-mlpz9",
+ "group": "",
+ "version": "v1",
+ "kind": "Pod",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vpkg",
+ "k8splugin.io/rb-instance-id=dazzling_nightingale",
+ "pod-template-hash=c6bdb954c",
+ "release=vfw-1-vpkg",
+ "vf-module-name=vfw-1-vpkg",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/dazzling_nightingale/query?ApiVersion=v1&Kind=Pod&Name=vfw-1-vpkg-c6bdb954c-mlpz9&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35941033",
+ "resource-version": "1635943391652",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/314795d7-6005-4462-a9fe-7006538e3ff9",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "314795d7-6005-4462-a9fe-7006538e3ff9"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "8c8a65ef11c599eb15a1054ccd590a94305d52d2efa1e72c7581ee2094cace1b",
+ "name": "vfw-1-vsn",
+ "group": "apps",
+ "version": "v1",
+ "kind": "Deployment",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vsn",
+ "k8splugin.io/rb-instance-id=sharp_torvalds",
+ "release=vfw-1-vsn",
+ "vf-module-name=vfw-1-vsn",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/sharp_torvalds/query?ApiVersion=v1&Kind=Deployment&Name=vfw-1-vsn&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35941083",
+ "resource-version": "1635943399747",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/56f3d02b-5a32-4a97-9e7b-d3c0094c07e8",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "56f3d02b-5a32-4a97-9e7b-d3c0094c07e8"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "677e61310b562007084bc99c42aefb9106438d4c782afc6504d6a6a062b974a8",
+ "name": "kud-lr-onap-nf-20211103t124217642438z-protected-network",
+ "group": "k8s.plugin.opnfv.org",
+ "version": "v1alpha1",
+ "kind": "Network",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=base_template",
+ "k8splugin.io/rb-instance-id=nifty_lichterman",
+ "release=vfw-1-base",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/nifty_lichterman/query?ApiVersion=v1alpha1&Kind=Network&Name=kud-lr-onap-nf-20211103t124217642438z-protected-network&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35940927",
+ "resource-version": "1635943376139",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/abb282c8-c932-45dc-9c62-01938eab32fa",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "abb282c8-c932-45dc-9c62-01938eab32fa"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "5538b19871da1fd05b82366c38cbbe88bae4d3444b6a21018f83787327958617",
+ "name": "vfw-1-vpkg-configmap",
+ "group": "",
+ "version": "v1",
+ "kind": "ConfigMap",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vpkg",
+ "k8splugin.io/rb-instance-id=dazzling_nightingale",
+ "release=vfw-1-vpkg",
+ "vf-module-name=vfw-1-vpkg",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/dazzling_nightingale/query?ApiVersion=v1&Kind=ConfigMap&Name=vfw-1-vpkg-configmap&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35941014",
+ "resource-version": "1635943391652",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/314795d7-6005-4462-a9fe-7006538e3ff9",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "314795d7-6005-4462-a9fe-7006538e3ff9"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "6134c369330c7398807d550c708890b0bcef2affbe5af1f9acde02c5a58c104a",
+ "name": "vfw-1-vsn-ui",
+ "group": "",
+ "version": "v1",
+ "kind": "Service",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vsn",
+ "k8splugin.io/rb-instance-id=sharp_torvalds",
+ "release=vfw-1-vsn",
+ "vf-module-name=vfw-1-vsn",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/sharp_torvalds/query?ApiVersion=v1&Kind=Service&Name=vfw-1-vsn-ui&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35941068",
+ "resource-version": "1635943399747",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/56f3d02b-5a32-4a97-9e7b-d3c0094c07e8",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "56f3d02b-5a32-4a97-9e7b-d3c0094c07e8"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "633a8c14f7df72b14f4d8e7b77cf7a2f33b71d54136e8582f135678c586fcde3",
+ "name": "vfw-1-vfw-89bd4bfdb-mshpq",
+ "group": "",
+ "version": "v1",
+ "kind": "Pod",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vfw",
+ "k8splugin.io/rb-instance-id=brave_brattain",
+ "pod-template-hash=89bd4bfdb",
+ "release=vfw-1-vfw",
+ "vf-module-name=vfw-1-vfw",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/brave_brattain/query?ApiVersion=v1&Kind=Pod&Name=vfw-1-vfw-89bd4bfdb-mshpq&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35940986",
+ "resource-version": "1635943384048",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/d56c54b9-40cc-4b7a-abce-50454571e39d",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "d56c54b9-40cc-4b7a-abce-50454571e39d"
+ }
+ ]
+ }
+ ]
+ }
+ }
+ ]
+}
diff --git a/docs/files/vFW_CNF_CDS/postman.zip b/docs/files/vFW_CNF_CDS/postman.zip
new file mode 100644
index 000000000..11d3ef47c
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/postman.zip
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/profile-templating.png b/docs/files/vFW_CNF_CDS/profile-templating.png
new file mode 100644
index 000000000..a60b040fe
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/profile-templating.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/scenarios.png b/docs/files/vFW_CNF_CDS/scenarios.png
new file mode 100644
index 000000000..53da8668d
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/scenarios.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/status-response.json b/docs/files/vFW_CNF_CDS/status-response.json
new file mode 100644
index 000000000..78b6c836d
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/status-response.json
@@ -0,0 +1,1213 @@
+{
+ "request": {
+ "rb-name": "vfw",
+ "rb-version": "plugin_test",
+ "profile-name": "test_profile",
+ "release-name": "",
+ "cloud-region": "kud",
+ "labels": {
+ "testCaseName": "plugin_fw.sh"
+ },
+ "override-values": {
+ "global.onapPrivateNetworkName": "onap-private-net-test"
+ }
+ },
+ "ready": false,
+ "resourceCount": 12,
+ "resourcesStatus": [
+ {
+ "name": "sink-configmap",
+ "GVK": {
+ "Group": "",
+ "Version": "v1",
+ "Kind": "ConfigMap"
+ },
+ "status": {
+ "apiVersion": "v1",
+ "data": {
+ "protected_net_gw": "192.168.20.100",
+ "protected_private_net_cidr": "192.168.10.0/24"
+ },
+ "kind": "ConfigMap",
+ "metadata": {
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "labels": {
+ "k8splugin.io/rb-instance-id": "practical_nobel"
+ },
+ "name": "sink-configmap",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720771",
+ "selfLink": "/api/v1/namespaces/plugin-tests-namespace/configmaps/sink-configmap",
+ "uid": "46c8bec4-980c-455b-9eb0-fb84ac8cc450"
+ }
+ }
+ },
+ {
+ "name": "packetgen-service",
+ "GVK": {
+ "Group": "",
+ "Version": "v1",
+ "Kind": "Service"
+ },
+ "status": {
+ "apiVersion": "v1",
+ "kind": "Service",
+ "metadata": {
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "labels": {
+ "app": "packetgen",
+ "chart": "packetgen",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "release": "test-release"
+ },
+ "name": "packetgen-service",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720776",
+ "selfLink": "/api/v1/namespaces/plugin-tests-namespace/services/packetgen-service",
+ "uid": "5e1c27c8-1db8-4883-a3a2-6f4e98e2f48b"
+ },
+ "spec": {
+ "clusterIP": "10.244.8.190",
+ "externalTrafficPolicy": "Cluster",
+ "ports": [
+ {
+ "nodePort": 30831,
+ "port": 2831,
+ "protocol": "TCP",
+ "targetPort": 2831
+ }
+ ],
+ "selector": {
+ "app": "packetgen",
+ "release": "test-release"
+ },
+ "sessionAffinity": "None",
+ "type": "NodePort"
+ },
+ "status": {
+ "loadBalancer": {}
+ }
+ }
+ },
+ {
+ "name": "sink-service",
+ "GVK": {
+ "Group": "",
+ "Version": "v1",
+ "Kind": "Service"
+ },
+ "status": {
+ "apiVersion": "v1",
+ "kind": "Service",
+ "metadata": {
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "labels": {
+ "app": "sink",
+ "chart": "sink",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "release": "test-release"
+ },
+ "name": "sink-service",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720780",
+ "selfLink": "/api/v1/namespaces/plugin-tests-namespace/services/sink-service",
+ "uid": "789a14fe-1246-4cdd-ba9a-359240ba614f"
+ },
+ "spec": {
+ "clusterIP": "10.244.2.4",
+ "externalTrafficPolicy": "Cluster",
+ "ports": [
+ {
+ "nodePort": 30667,
+ "port": 667,
+ "protocol": "TCP",
+ "targetPort": 667
+ }
+ ],
+ "selector": {
+ "app": "sink",
+ "release": "test-release"
+ },
+ "sessionAffinity": "None",
+ "type": "NodePort"
+ },
+ "status": {
+ "loadBalancer": {}
+ }
+ }
+ },
+ {
+ "name": "test-release-packetgen",
+ "GVK": {
+ "Group": "apps",
+ "Version": "v1",
+ "Kind": "Deployment"
+ },
+ "status": {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "1"
+ },
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "generation": 1,
+ "labels": {
+ "app": "packetgen",
+ "chart": "packetgen",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "release": "test-release"
+ },
+ "name": "test-release-packetgen",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720804",
+ "selfLink": "/apis/apps/v1/namespaces/plugin-tests-namespace/deployments/test-release-packetgen",
+ "uid": "42578e9f-7c88-46d6-94f7-a7bcc8e69ec6"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 1,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app": "packetgen",
+ "release": "test-release"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "annotations": {
+ "VirtletCloudInitUserData": "ssh_pwauth: True\nusers:\n- name: admin\n gecos: User\n primary-group: admin\n groups: users\n sudo: ALL=(ALL) NOPASSWD:ALL\n lock_passwd: false\n passwd: \"$6$rounds=4096$QA5OCKHTE41$jRACivoPMJcOjLRgxl3t.AMfU7LhCFwOWv2z66CQX.TSxBy50JoYtycJXSPr2JceG.8Tq/82QN9QYt3euYEZW/\"\nruncmd:\n - export demo_artifacts_version=1.5.0\n - export vfw_private_ip_0=192.168.10.3\n - export vsn_private_ip_0=192.168.20.3\n - export protected_net_cidr=192.168.20.0/24\n - export dcae_collector_ip=10.0.4.1\n - export dcae_collector_port=8081\n - export protected_net_gw=192.168.20.100/24\n - export protected_private_net_cidr=192.168.10.0/24\n - wget -O - https://git.onap.org/multicloud/k8s/plain/kud/tests/vFW/packetgen | sudo -E bash\n",
+ "VirtletLibvirtCPUSetting": "mode: host-model\n",
+ "VirtletRootVolumeSize": "5Gi",
+ "app": "packetgen",
+ "k8s.plugin.opnfv.org/nfn-network": "{ \"type\": \"ovn4nfv\", \"interface\":[ { \"name\": \"unprotected-private-net\", \"ipAddress\": \"192.168.10.2\", \"interface\": \"eth1\" , \"defaultGateway\": \"false\"}, { \"name\": \"onap-private-net-test\", \"ipAddress\": \"10.0.100.2\", \"interface\": \"eth2\" , \"defaultGateway\": \"false\"} ]}",
+ "k8s.v1.cni.cncf.io/networks": "[{\"name\": \"ovn-networkobj\", \"namespace\": \"default\"}]",
+ "kubernetes.io/target-runtime": "virtlet.cloud",
+ "release": "test-release"
+ },
+ "creationTimestamp": null,
+ "labels": {
+ "app": "packetgen",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "release": "test-release"
+ }
+ },
+ "spec": {
+ "affinity": {
+ "nodeAffinity": {
+ "requiredDuringSchedulingIgnoredDuringExecution": {
+ "nodeSelectorTerms": [
+ {
+ "matchExpressions": [
+ {
+ "key": "extraRuntime",
+ "operator": "In",
+ "values": [
+ "virtlet"
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ }
+ },
+ "containers": [
+ {
+ "image": "virtlet.cloud/ubuntu/16.04:latest",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "packetgen",
+ "resources": {
+ "limits": {
+ "memory": "4Gi"
+ }
+ },
+ "stdin": true,
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "tty": true
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "terminationGracePeriodSeconds": 30
+ }
+ }
+ },
+ "status": {
+ "conditions": [
+ {
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "lastUpdateTime": "2020-09-29T13:36:25Z",
+ "message": "Deployment does not have minimum availability.",
+ "reason": "MinimumReplicasUnavailable",
+ "status": "False",
+ "type": "Available"
+ },
+ {
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "lastUpdateTime": "2020-09-29T13:36:25Z",
+ "message": "ReplicaSet \"test-release-packetgen-5647bfb56\" is progressing.",
+ "reason": "ReplicaSetUpdated",
+ "status": "True",
+ "type": "Progressing"
+ }
+ ],
+ "observedGeneration": 1,
+ "replicas": 1,
+ "unavailableReplicas": 1,
+ "updatedReplicas": 1
+ }
+ }
+ },
+ {
+ "name": "test-release-sink",
+ "GVK": {
+ "Group": "apps",
+ "Version": "v1",
+ "Kind": "Deployment"
+ },
+ "status": {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "1"
+ },
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "generation": 1,
+ "labels": {
+ "app": "sink",
+ "chart": "sink",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "release": "test-release"
+ },
+ "name": "test-release-sink",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720857",
+ "selfLink": "/apis/apps/v1/namespaces/plugin-tests-namespace/deployments/test-release-sink",
+ "uid": "1f50eecf-c924-4434-be87-daf7c64b6506"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 1,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app": "sink",
+ "release": "test-release"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "annotations": {
+ "k8s.plugin.opnfv.org/nfn-network": "{ \"type\": \"ovn4nfv\", \"interface\": [ { \"name\": \"protected-private-net\", \"ipAddress\": \"192.168.20.3\", \"interface\": \"eth1\", \"defaultGateway\": \"false\" }, { \"name\": \"onap-private-net-test\", \"ipAddress\": \"10.10.100.4\", \"interface\": \"eth2\" , \"defaultGateway\": \"false\"} ]}",
+ "k8s.v1.cni.cncf.io/networks": "[{\"name\": \"ovn-networkobj\", \"namespace\": \"default\"}]"
+ },
+ "creationTimestamp": null,
+ "labels": {
+ "app": "sink",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "release": "test-release"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "envFrom": [
+ {
+ "configMapRef": {
+ "name": "sink-configmap"
+ }
+ }
+ ],
+ "image": "rtsood/onap-vfw-demo-sink:0.2.0",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "sink",
+ "resources": {},
+ "securityContext": {
+ "privileged": true
+ },
+ "stdin": true,
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "tty": true
+ },
+ {
+ "image": "electrocucaracha/darkstat:latest",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "darkstat",
+ "ports": [
+ {
+ "containerPort": 667,
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {},
+ "stdin": true,
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "tty": true
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "terminationGracePeriodSeconds": 30
+ }
+ }
+ },
+ "status": {
+ "availableReplicas": 1,
+ "conditions": [
+ {
+ "lastTransitionTime": "2020-09-29T13:36:33Z",
+ "lastUpdateTime": "2020-09-29T13:36:33Z",
+ "message": "Deployment has minimum availability.",
+ "reason": "MinimumReplicasAvailable",
+ "status": "True",
+ "type": "Available"
+ },
+ {
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "lastUpdateTime": "2020-09-29T13:36:33Z",
+ "message": "ReplicaSet \"test-release-sink-6546c4f698\" has successfully progressed.",
+ "reason": "NewReplicaSetAvailable",
+ "status": "True",
+ "type": "Progressing"
+ }
+ ],
+ "observedGeneration": 1,
+ "readyReplicas": 1,
+ "replicas": 1,
+ "updatedReplicas": 1
+ }
+ }
+ },
+ {
+ "name": "test-release-firewall",
+ "GVK": {
+ "Group": "apps",
+ "Version": "v1",
+ "Kind": "Deployment"
+ },
+ "status": {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "1"
+ },
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "generation": 1,
+ "labels": {
+ "app": "firewall",
+ "chart": "firewall",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "release": "test-release"
+ },
+ "name": "test-release-firewall",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720823",
+ "selfLink": "/apis/apps/v1/namespaces/plugin-tests-namespace/deployments/test-release-firewall",
+ "uid": "77392f60-7d12-4846-8edb-f4a65a4be098"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 1,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app": "firewall",
+ "release": "test-release"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "annotations": {
+ "VirtletCloudInitUserData": "ssh_pwauth: True\nusers:\n- name: admin\n gecos: User\n primary-group: admin\n groups: users\n sudo: ALL=(ALL) NOPASSWD:ALL\n lock_passwd: false\n passwd: \"$6$rounds=4096$QA5OCKHTE41$jRACivoPMJcOjLRgxl3t.AMfU7LhCFwOWv2z66CQX.TSxBy50JoYtycJXSPr2JceG.8Tq/82QN9QYt3euYEZW/\"\nruncmd:\n - export demo_artifacts_version=1.5.0\n - export vfw_private_ip_0=192.168.10.3\n - export vsn_private_ip_0=192.168.20.3\n - export protected_net_cidr=192.168.20.0/24\n - export dcae_collector_ip=10.0.4.1\n - export dcae_collector_port=8081\n - export protected_net_gw=192.168.20.100/24\n - export protected_private_net_cidr=192.168.10.0/24\n - wget -O - https://git.onap.org/multicloud/k8s/plain/kud/tests/vFW/firewall | sudo -E bash\n",
+ "VirtletLibvirtCPUSetting": "mode: host-model\n",
+ "VirtletRootVolumeSize": "5Gi",
+ "k8s.plugin.opnfv.org/nfn-network": "{ \"type\": \"ovn4nfv\", \"interface\": [ { \"name\": \"unprotected-private-net\", \"ipAddress\": \"192.168.10.3\", \"interface\": \"eth1\" , \"defaultGateway\": \"false\"}, { \"name\": \"protected-private-net\", \"ipAddress\": \"192.168.20.2\", \"interface\": \"eth2\", \"defaultGateway\": \"false\" }, { \"name\": \"onap-private-net-test\", \"ipAddress\": \"10.10.100.3\", \"interface\": \"eth3\" , \"defaultGateway\": \"false\"} ]}",
+ "k8s.v1.cni.cncf.io/networks": "[{\"name\": \"ovn-networkobj\", \"namespace\": \"default\"}]",
+ "kubernetes.io/target-runtime": "virtlet.cloud"
+ },
+ "creationTimestamp": null,
+ "labels": {
+ "app": "firewall",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "release": "test-release"
+ }
+ },
+ "spec": {
+ "affinity": {
+ "nodeAffinity": {
+ "requiredDuringSchedulingIgnoredDuringExecution": {
+ "nodeSelectorTerms": [
+ {
+ "matchExpressions": [
+ {
+ "key": "extraRuntime",
+ "operator": "In",
+ "values": [
+ "virtlet"
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ }
+ },
+ "containers": [
+ {
+ "image": "virtlet.cloud/ubuntu/16.04:latest",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "firewall",
+ "resources": {
+ "limits": {
+ "memory": "4Gi"
+ }
+ },
+ "stdin": true,
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "tty": true
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "terminationGracePeriodSeconds": 30
+ }
+ }
+ },
+ "status": {
+ "conditions": [
+ {
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "lastUpdateTime": "2020-09-29T13:36:25Z",
+ "message": "Deployment does not have minimum availability.",
+ "reason": "MinimumReplicasUnavailable",
+ "status": "False",
+ "type": "Available"
+ },
+ {
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "lastUpdateTime": "2020-09-29T13:36:25Z",
+ "message": "ReplicaSet \"test-release-firewall-5bf9995f5f\" is progressing.",
+ "reason": "ReplicaSetUpdated",
+ "status": "True",
+ "type": "Progressing"
+ }
+ ],
+ "observedGeneration": 1,
+ "replicas": 1,
+ "unavailableReplicas": 1,
+ "updatedReplicas": 1
+ }
+ }
+ },
+ {
+ "name": "onap-private-net-test",
+ "GVK": {
+ "Group": "k8s.plugin.opnfv.org",
+ "Version": "v1alpha1",
+ "Kind": "Network"
+ },
+ "status": {
+ "apiVersion": "k8s.plugin.opnfv.org/v1alpha1",
+ "kind": "Network",
+ "metadata": {
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "finalizers": [
+ "nfnCleanUpNetwork"
+ ],
+ "generation": 2,
+ "labels": {
+ "k8splugin.io/rb-instance-id": "practical_nobel"
+ },
+ "name": "onap-private-net-test",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720825",
+ "selfLink": "/apis/k8s.plugin.opnfv.org/v1alpha1/namespaces/plugin-tests-namespace/networks/onap-private-net-test",
+ "uid": "43d413f1-f222-4d98-9ddd-b209d3ade106"
+ },
+ "spec": {
+ "cniType": "ovn4nfv",
+ "dns": {},
+ "ipv4Subnets": [
+ {
+ "gateway": "10.10.0.1/16",
+ "name": "subnet1",
+ "subnet": "10.10.0.0/16"
+ }
+ ]
+ },
+ "status": {
+ "state": "Created"
+ }
+ }
+ },
+ {
+ "name": "protected-private-net",
+ "GVK": {
+ "Group": "k8s.plugin.opnfv.org",
+ "Version": "v1alpha1",
+ "Kind": "Network"
+ },
+ "status": {
+ "apiVersion": "k8s.plugin.opnfv.org/v1alpha1",
+ "kind": "Network",
+ "metadata": {
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "finalizers": [
+ "nfnCleanUpNetwork"
+ ],
+ "generation": 2,
+ "labels": {
+ "k8splugin.io/rb-instance-id": "practical_nobel"
+ },
+ "name": "protected-private-net",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720827",
+ "selfLink": "/apis/k8s.plugin.opnfv.org/v1alpha1/namespaces/plugin-tests-namespace/networks/protected-private-net",
+ "uid": "75c98944-80b6-4158-afed-8efa7a1075e2"
+ },
+ "spec": {
+ "cniType": "ovn4nfv",
+ "dns": {},
+ "ipv4Subnets": [
+ {
+ "gateway": "192.168.20.100/24",
+ "name": "subnet1",
+ "subnet": "192.168.20.0/24"
+ }
+ ]
+ },
+ "status": {
+ "state": "Created"
+ }
+ }
+ },
+ {
+ "name": "unprotected-private-net",
+ "GVK": {
+ "Group": "k8s.plugin.opnfv.org",
+ "Version": "v1alpha1",
+ "Kind": "Network"
+ },
+ "status": {
+ "apiVersion": "k8s.plugin.opnfv.org/v1alpha1",
+ "kind": "Network",
+ "metadata": {
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "finalizers": [
+ "nfnCleanUpNetwork"
+ ],
+ "generation": 2,
+ "labels": {
+ "k8splugin.io/rb-instance-id": "practical_nobel"
+ },
+ "name": "unprotected-private-net",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720829",
+ "selfLink": "/apis/k8s.plugin.opnfv.org/v1alpha1/namespaces/plugin-tests-namespace/networks/unprotected-private-net",
+ "uid": "54995c10-bffd-4bb2-bbab-5de266af9456"
+ },
+ "spec": {
+ "cniType": "ovn4nfv",
+ "dns": {},
+ "ipv4Subnets": [
+ {
+ "gateway": "192.168.10.1/24",
+ "name": "subnet1",
+ "subnet": "192.168.10.0/24"
+ }
+ ]
+ },
+ "status": {
+ "state": "Created"
+ }
+ }
+ },
+ {
+ "name": "test-release-firewall-5bf9995f5f-hnvps",
+ "GVK": {
+ "Group": "",
+ "Version": "",
+ "Kind": ""
+ },
+ "status": {
+ "metadata": {
+ "annotations": {
+ "VirtletCloudInitUserData": "ssh_pwauth: True\nusers:\n- name: admin\n gecos: User\n primary-group: admin\n groups: users\n sudo: ALL=(ALL) NOPASSWD:ALL\n lock_passwd: false\n passwd: \"$6$rounds=4096$QA5OCKHTE41$jRACivoPMJcOjLRgxl3t.AMfU7LhCFwOWv2z66CQX.TSxBy50JoYtycJXSPr2JceG.8Tq/82QN9QYt3euYEZW/\"\nruncmd:\n - export demo_artifacts_version=1.5.0\n - export vfw_private_ip_0=192.168.10.3\n - export vsn_private_ip_0=192.168.20.3\n - export protected_net_cidr=192.168.20.0/24\n - export dcae_collector_ip=10.0.4.1\n - export dcae_collector_port=8081\n - export protected_net_gw=192.168.20.100/24\n - export protected_private_net_cidr=192.168.10.0/24\n - wget -O - https://git.onap.org/multicloud/k8s/plain/kud/tests/vFW/firewall | sudo -E bash\n",
+ "VirtletLibvirtCPUSetting": "mode: host-model\n",
+ "VirtletRootVolumeSize": "5Gi",
+ "k8s.plugin.opnfv.org/nfn-network": "{ \"type\": \"ovn4nfv\", \"interface\": [ { \"name\": \"unprotected-private-net\", \"ipAddress\": \"192.168.10.3\", \"interface\": \"eth1\" , \"defaultGateway\": \"false\"}, { \"name\": \"protected-private-net\", \"ipAddress\": \"192.168.20.2\", \"interface\": \"eth2\", \"defaultGateway\": \"false\" }, { \"name\": \"onap-private-net-test\", \"ipAddress\": \"10.10.100.3\", \"interface\": \"eth3\" , \"defaultGateway\": \"false\"} ]}",
+ "k8s.plugin.opnfv.org/ovnInterfaces": "[{\"ip_address\":\"192.168.10.3/24\", \"mac_address\":\"00:00:00:2b:62:71\", \"gateway_ip\": \"192.168.10.1\",\"defaultGateway\":\"false\",\"interface\":\"eth1\"},{\"ip_address\":\"192.168.20.2/24\", \"mac_address\":\"00:00:00:43:d6:f3\", \"gateway_ip\": \"192.168.20.100\",\"defaultGateway\":\"false\",\"interface\":\"eth2\"},{\"ip_address\":\"10.10.100.3/16\", \"mac_address\":\"00:00:00:03:4c:34\", \"gateway_ip\": \"10.10.0.1\",\"defaultGateway\":\"false\",\"interface\":\"eth3\"}]",
+ "k8s.v1.cni.cncf.io/networks": "[{\"name\": \"ovn-networkobj\", \"namespace\": \"default\"}]",
+ "k8s.v1.cni.cncf.io/networks-status": "[{\n \"name\": \"cni0\",\n \"interface\": \"virtlet-eth0\",\n \"ips\": [\n \"10.244.64.45\"\n ],\n \"mac\": \"0a:58:0a:f4:40:2d\",\n \"default\": true,\n \"dns\": {}\n},{\n \"name\": \"ovn4nfv-k8s-plugin\",\n \"interface\": \"eth3\",\n \"ips\": [\n \"192.168.10.3\",\n \"192.168.20.2\",\n \"10.10.100.3\"\n ],\n \"mac\": \"00:00:00:03:4c:34\",\n \"dns\": {}\n}]",
+ "kubernetes.io/target-runtime": "virtlet.cloud"
+ },
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "generateName": "test-release-firewall-5bf9995f5f-",
+ "labels": {
+ "app": "firewall",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "pod-template-hash": "5bf9995f5f",
+ "release": "test-release"
+ },
+ "name": "test-release-firewall-5bf9995f5f-hnvps",
+ "namespace": "plugin-tests-namespace",
+ "ownerReferences": [
+ {
+ "apiVersion": "apps/v1",
+ "blockOwnerDeletion": true,
+ "controller": true,
+ "kind": "ReplicaSet",
+ "name": "test-release-firewall-5bf9995f5f",
+ "uid": "8d68ff0c-c6f8-426c-8ebc-0ce5b7fb5132"
+ }
+ ],
+ "resourceVersion": "10720850",
+ "selfLink": "/api/v1/namespaces/plugin-tests-namespace/pods/test-release-firewall-5bf9995f5f-hnvps",
+ "uid": "92b169e3-2d25-449d-b029-d47674eb98e6"
+ },
+ "spec": {
+ "affinity": {
+ "nodeAffinity": {
+ "requiredDuringSchedulingIgnoredDuringExecution": {
+ "nodeSelectorTerms": [
+ {
+ "matchExpressions": [
+ {
+ "key": "extraRuntime",
+ "operator": "In",
+ "values": [
+ "virtlet"
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ }
+ },
+ "containers": [
+ {
+ "image": "virtlet.cloud/ubuntu/16.04:latest",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "firewall",
+ "resources": {
+ "limits": {
+ "memory": "4Gi"
+ },
+ "requests": {
+ "memory": "4Gi"
+ }
+ },
+ "stdin": true,
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "tty": true,
+ "volumeMounts": [
+ {
+ "mountPath": "/var/run/secrets/kubernetes.io/serviceaccount",
+ "name": "default-token-gsh95",
+ "readOnly": true
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "enableServiceLinks": true,
+ "nodeName": "localhost",
+ "priority": 0,
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "default",
+ "serviceAccountName": "default",
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoExecute",
+ "key": "node.kubernetes.io/not-ready",
+ "operator": "Exists",
+ "tolerationSeconds": 300
+ },
+ {
+ "effect": "NoExecute",
+ "key": "node.kubernetes.io/unreachable",
+ "operator": "Exists",
+ "tolerationSeconds": 300
+ }
+ ],
+ "volumes": [
+ {
+ "name": "default-token-gsh95",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "default-token-gsh95"
+ }
+ }
+ ]
+ },
+ "status": {
+ "conditions": [
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "status": "True",
+ "type": "Initialized"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "message": "containers with unready status: [firewall]",
+ "reason": "ContainersNotReady",
+ "status": "False",
+ "type": "Ready"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "message": "containers with unready status: [firewall]",
+ "reason": "ContainersNotReady",
+ "status": "False",
+ "type": "ContainersReady"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "status": "True",
+ "type": "PodScheduled"
+ }
+ ],
+ "containerStatuses": [
+ {
+ "image": "virtlet.cloud/ubuntu/16.04:latest",
+ "imageID": "",
+ "lastState": {},
+ "name": "firewall",
+ "ready": false,
+ "restartCount": 0,
+ "state": {
+ "waiting": {
+ "reason": "ContainerCreating"
+ }
+ }
+ }
+ ],
+ "hostIP": "192.168.255.3",
+ "phase": "Pending",
+ "qosClass": "Burstable",
+ "startTime": "2020-09-29T13:36:25Z"
+ }
+ }
+ },
+ {
+ "name": "test-release-packetgen-5647bfb56-ghpbs",
+ "GVK": {
+ "Group": "",
+ "Version": "",
+ "Kind": ""
+ },
+ "status": {
+ "metadata": {
+ "annotations": {
+ "VirtletCloudInitUserData": "ssh_pwauth: True\nusers:\n- name: admin\n gecos: User\n primary-group: admin\n groups: users\n sudo: ALL=(ALL) NOPASSWD:ALL\n lock_passwd: false\n passwd: \"$6$rounds=4096$QA5OCKHTE41$jRACivoPMJcOjLRgxl3t.AMfU7LhCFwOWv2z66CQX.TSxBy50JoYtycJXSPr2JceG.8Tq/82QN9QYt3euYEZW/\"\nruncmd:\n - export demo_artifacts_version=1.5.0\n - export vfw_private_ip_0=192.168.10.3\n - export vsn_private_ip_0=192.168.20.3\n - export protected_net_cidr=192.168.20.0/24\n - export dcae_collector_ip=10.0.4.1\n - export dcae_collector_port=8081\n - export protected_net_gw=192.168.20.100/24\n - export protected_private_net_cidr=192.168.10.0/24\n - wget -O - https://git.onap.org/multicloud/k8s/plain/kud/tests/vFW/packetgen | sudo -E bash\n",
+ "VirtletLibvirtCPUSetting": "mode: host-model\n",
+ "VirtletRootVolumeSize": "5Gi",
+ "app": "packetgen",
+ "k8s.plugin.opnfv.org/nfn-network": "{ \"type\": \"ovn4nfv\", \"interface\":[ { \"name\": \"unprotected-private-net\", \"ipAddress\": \"192.168.10.2\", \"interface\": \"eth1\" , \"defaultGateway\": \"false\"}, { \"name\": \"onap-private-net-test\", \"ipAddress\": \"10.0.100.2\", \"interface\": \"eth2\" , \"defaultGateway\": \"false\"} ]}",
+ "k8s.plugin.opnfv.org/ovnInterfaces": "[{\"ip_address\":\"192.168.10.2/24\", \"mac_address\":\"00:00:00:ed:8c:d1\", \"gateway_ip\": \"192.168.10.1\",\"defaultGateway\":\"false\",\"interface\":\"eth1\"},{\"ip_address\":\"10.0.100.2/16\", \"mac_address\":\"00:00:00:97:31:3f\", \"gateway_ip\": \"10.10.0.1\",\"defaultGateway\":\"false\",\"interface\":\"eth2\"}]",
+ "k8s.v1.cni.cncf.io/networks": "[{\"name\": \"ovn-networkobj\", \"namespace\": \"default\"}]",
+ "k8s.v1.cni.cncf.io/networks-status": "[{\n \"name\": \"cni0\",\n \"interface\": \"virtlet-eth0\",\n \"ips\": [\n \"10.244.64.44\"\n ],\n \"mac\": \"0a:58:0a:f4:40:2c\",\n \"default\": true,\n \"dns\": {}\n},{\n \"name\": \"ovn4nfv-k8s-plugin\",\n \"interface\": \"eth2\",\n \"ips\": [\n \"192.168.10.2\",\n \"10.0.100.2\"\n ],\n \"mac\": \"00:00:00:97:31:3f\",\n \"dns\": {}\n}]",
+ "kubernetes.io/target-runtime": "virtlet.cloud",
+ "release": "test-release"
+ },
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "generateName": "test-release-packetgen-5647bfb56-",
+ "labels": {
+ "app": "packetgen",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "pod-template-hash": "5647bfb56",
+ "release": "test-release"
+ },
+ "name": "test-release-packetgen-5647bfb56-ghpbs",
+ "namespace": "plugin-tests-namespace",
+ "ownerReferences": [
+ {
+ "apiVersion": "apps/v1",
+ "blockOwnerDeletion": true,
+ "controller": true,
+ "kind": "ReplicaSet",
+ "name": "test-release-packetgen-5647bfb56",
+ "uid": "3c227839-04ad-4d16-b9ea-d8f436426de1"
+ }
+ ],
+ "resourceVersion": "10720852",
+ "selfLink": "/api/v1/namespaces/plugin-tests-namespace/pods/test-release-packetgen-5647bfb56-ghpbs",
+ "uid": "74aad8c5-b881-4881-b634-46ad48ccb857"
+ },
+ "spec": {
+ "affinity": {
+ "nodeAffinity": {
+ "requiredDuringSchedulingIgnoredDuringExecution": {
+ "nodeSelectorTerms": [
+ {
+ "matchExpressions": [
+ {
+ "key": "extraRuntime",
+ "operator": "In",
+ "values": [
+ "virtlet"
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ }
+ },
+ "containers": [
+ {
+ "image": "virtlet.cloud/ubuntu/16.04:latest",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "packetgen",
+ "resources": {
+ "limits": {
+ "memory": "4Gi"
+ },
+ "requests": {
+ "memory": "4Gi"
+ }
+ },
+ "stdin": true,
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "tty": true,
+ "volumeMounts": [
+ {
+ "mountPath": "/var/run/secrets/kubernetes.io/serviceaccount",
+ "name": "default-token-gsh95",
+ "readOnly": true
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "enableServiceLinks": true,
+ "nodeName": "localhost",
+ "priority": 0,
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "default",
+ "serviceAccountName": "default",
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoExecute",
+ "key": "node.kubernetes.io/not-ready",
+ "operator": "Exists",
+ "tolerationSeconds": 300
+ },
+ {
+ "effect": "NoExecute",
+ "key": "node.kubernetes.io/unreachable",
+ "operator": "Exists",
+ "tolerationSeconds": 300
+ }
+ ],
+ "volumes": [
+ {
+ "name": "default-token-gsh95",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "default-token-gsh95"
+ }
+ }
+ ]
+ },
+ "status": {
+ "conditions": [
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "status": "True",
+ "type": "Initialized"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "message": "containers with unready status: [packetgen]",
+ "reason": "ContainersNotReady",
+ "status": "False",
+ "type": "Ready"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "message": "containers with unready status: [packetgen]",
+ "reason": "ContainersNotReady",
+ "status": "False",
+ "type": "ContainersReady"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "status": "True",
+ "type": "PodScheduled"
+ }
+ ],
+ "containerStatuses": [
+ {
+ "image": "virtlet.cloud/ubuntu/16.04:latest",
+ "imageID": "",
+ "lastState": {},
+ "name": "packetgen",
+ "ready": false,
+ "restartCount": 0,
+ "state": {
+ "waiting": {
+ "reason": "ContainerCreating"
+ }
+ }
+ }
+ ],
+ "hostIP": "192.168.255.3",
+ "phase": "Pending",
+ "qosClass": "Burstable",
+ "startTime": "2020-09-29T13:36:25Z"
+ }
+ }
+ },
+ {
+ "name": "test-release-sink-6546c4f698-dv529",
+ "GVK": {
+ "Group": "",
+ "Version": "",
+ "Kind": ""
+ },
+ "status": {
+ "metadata": {
+ "annotations": {
+ "k8s.plugin.opnfv.org/nfn-network": "{ \"type\": \"ovn4nfv\", \"interface\": [ { \"name\": \"protected-private-net\", \"ipAddress\": \"192.168.20.3\", \"interface\": \"eth1\", \"defaultGateway\": \"false\" }, { \"name\": \"onap-private-net-test\", \"ipAddress\": \"10.10.100.4\", \"interface\": \"eth2\" , \"defaultGateway\": \"false\"} ]}",
+ "k8s.plugin.opnfv.org/ovnInterfaces": "[{\"ip_address\":\"192.168.20.3/24\", \"mac_address\":\"00:00:00:13:40:87\", \"gateway_ip\": \"192.168.20.100\",\"defaultGateway\":\"false\",\"interface\":\"eth1\"},{\"ip_address\":\"10.10.100.4/16\", \"mac_address\":\"00:00:00:49:de:fc\", \"gateway_ip\": \"10.10.0.1\",\"defaultGateway\":\"false\",\"interface\":\"eth2\"}]",
+ "k8s.v1.cni.cncf.io/networks": "[{\"name\": \"ovn-networkobj\", \"namespace\": \"default\"}]",
+ "k8s.v1.cni.cncf.io/networks-status": "[{\n \"name\": \"cni0\",\n \"interface\": \"eth0\",\n \"ips\": [\n \"10.244.64.46\"\n ],\n \"mac\": \"0a:58:0a:f4:40:2e\",\n \"default\": true,\n \"dns\": {}\n},{\n \"name\": \"ovn4nfv-k8s-plugin\",\n \"interface\": \"eth2\",\n \"ips\": [\n \"192.168.20.3\",\n \"10.10.100.4\"\n ],\n \"mac\": \"00:00:00:49:de:fc\",\n \"dns\": {}\n}]"
+ },
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "generateName": "test-release-sink-6546c4f698-",
+ "labels": {
+ "app": "sink",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "pod-template-hash": "6546c4f698",
+ "release": "test-release"
+ },
+ "name": "test-release-sink-6546c4f698-dv529",
+ "namespace": "plugin-tests-namespace",
+ "ownerReferences": [
+ {
+ "apiVersion": "apps/v1",
+ "blockOwnerDeletion": true,
+ "controller": true,
+ "kind": "ReplicaSet",
+ "name": "test-release-sink-6546c4f698",
+ "uid": "72c9da29-af3b-4b5c-a90b-06285ae83429"
+ }
+ ],
+ "resourceVersion": "10720854",
+ "selfLink": "/api/v1/namespaces/plugin-tests-namespace/pods/test-release-sink-6546c4f698-dv529",
+ "uid": "a4e24041-65c9-4b86-8f10-a27a4dba26bb"
+ },
+ "spec": {
+ "containers": [
+ {
+ "envFrom": [
+ {
+ "configMapRef": {
+ "name": "sink-configmap"
+ }
+ }
+ ],
+ "image": "rtsood/onap-vfw-demo-sink:0.2.0",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "sink",
+ "resources": {},
+ "securityContext": {
+ "privileged": true
+ },
+ "stdin": true,
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "tty": true,
+ "volumeMounts": [
+ {
+ "mountPath": "/var/run/secrets/kubernetes.io/serviceaccount",
+ "name": "default-token-gsh95",
+ "readOnly": true
+ }
+ ]
+ },
+ {
+ "image": "electrocucaracha/darkstat:latest",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "darkstat",
+ "ports": [
+ {
+ "containerPort": 667,
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {},
+ "stdin": true,
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "tty": true,
+ "volumeMounts": [
+ {
+ "mountPath": "/var/run/secrets/kubernetes.io/serviceaccount",
+ "name": "default-token-gsh95",
+ "readOnly": true
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "enableServiceLinks": true,
+ "nodeName": "localhost",
+ "priority": 0,
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "default",
+ "serviceAccountName": "default",
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoExecute",
+ "key": "node.kubernetes.io/not-ready",
+ "operator": "Exists",
+ "tolerationSeconds": 300
+ },
+ {
+ "effect": "NoExecute",
+ "key": "node.kubernetes.io/unreachable",
+ "operator": "Exists",
+ "tolerationSeconds": 300
+ }
+ ],
+ "volumes": [
+ {
+ "name": "default-token-gsh95",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "default-token-gsh95"
+ }
+ }
+ ]
+ },
+ "status": {
+ "conditions": [
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "status": "True",
+ "type": "Initialized"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:33Z",
+ "status": "True",
+ "type": "Ready"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:33Z",
+ "status": "True",
+ "type": "ContainersReady"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "status": "True",
+ "type": "PodScheduled"
+ }
+ ],
+ "containerStatuses": [
+ {
+ "containerID": "docker://87c9af78735400606d70ccd9cd85e2545e43cb3be9c30d4b4fe173da0062dda9",
+ "image": "electrocucaracha/darkstat:latest",
+ "imageID": "docker-pullable://electrocucaracha/darkstat@sha256:a6764fcc2e15f6156ac0e56f1d220b98970f2d4da9005bae99fb518cfd2f9c25",
+ "lastState": {},
+ "name": "darkstat",
+ "ready": true,
+ "restartCount": 0,
+ "started": true,
+ "state": {
+ "running": {
+ "startedAt": "2020-09-29T13:36:33Z"
+ }
+ }
+ },
+ {
+ "containerID": "docker://a004f95e7c7a681c7f400852aade096e3ffd75b7efc64e12e65b4ce1fe326577",
+ "image": "rtsood/onap-vfw-demo-sink:0.2.0",
+ "imageID": "docker-pullable://rtsood/onap-vfw-demo-sink@sha256:15b7abb0b67a3804ea5f954254633f996fc99c680b09d86a6cf15c3d7b14ab16",
+ "lastState": {},
+ "name": "sink",
+ "ready": true,
+ "restartCount": 0,
+ "started": true,
+ "state": {
+ "running": {
+ "startedAt": "2020-09-29T13:36:32Z"
+ }
+ }
+ }
+ ],
+ "hostIP": "192.168.255.3",
+ "phase": "Running",
+ "podIP": "10.244.64.46",
+ "podIPs": [
+ {
+ "ip": "10.244.64.46"
+ }
+ ],
+ "qosClass": "BestEffort",
+ "startTime": "2020-09-29T13:36:25Z"
+ }
+ }
+ }
+ ]
+}
diff --git a/docs/files/vFW_CNF_CDS/vFW_CNF_CDS_Flow.png b/docs/files/vFW_CNF_CDS/vFW_CNF_CDS_Flow.png
new file mode 100755
index 000000000..ca2d1239a
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/vFW_CNF_CDS_Flow.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/vFW_Instance_In_Kubernetes.png b/docs/files/vFW_CNF_CDS/vFW_Instance_In_Kubernetes.png
new file mode 100755
index 000000000..0f6118b1b
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/vFW_Instance_In_Kubernetes.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/vfw-generic-vnf-aai.json b/docs/files/vFW_CNF_CDS/vfw-generic-vnf-aai.json
new file mode 100644
index 000000000..89b7f7a2d
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/vfw-generic-vnf-aai.json
@@ -0,0 +1,167 @@
+{
+ "vnf-id": "d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "vnf-name": "VF_vfw_k8s_demo_CNF_LR_1",
+ "vnf-type": "vfw_k8s_demo_CNF_LR_1/null",
+ "service-id": "vfw_k8s_demo_CNF_LR_1",
+ "prov-status": "NVTPROV",
+ "orchestration-status": "Active",
+ "in-maint": false,
+ "is-closed-loop-disabled": false,
+ "resource-version": "1635943409675",
+ "model-invariant-id": "a5c188d5-7f0a-44e9-bd92-4a60781cb2cf",
+ "model-version-id": "033d9730-549a-4ff8-b166-1581fb73aa08",
+ "model-customization-id": "f107d24c-0a2f-4eb9-96d3-7a631c973cfd",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "tenant",
+ "relationship-label": "org.onap.relationships.inventory.BelongsTo",
+ "related-link": "/aai/v21/cloud-infrastructure/cloud-regions/cloud-region/K8sCloudOwner/kud-lr/tenants/tenant/3444a566-2717-4d85-83bd-45c104657173",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "K8sCloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "kud-lr"
+ },
+ {
+ "relationship-key": "tenant.tenant-id",
+ "relationship-value": "3444a566-2717-4d85-83bd-45c104657173"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "tenant.tenant-name",
+ "property-value": "kud-lr-tenant"
+ }
+ ]
+ },
+ {
+ "related-to": "cloud-region",
+ "relationship-label": "org.onap.relationships.inventory.LocatedIn",
+ "related-link": "/aai/v21/cloud-infrastructure/cloud-regions/cloud-region/K8sCloudOwner/kud-lr",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "K8sCloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "kud-lr"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "cloud-region.owner-defined-type",
+ "property-value": "t1"
+ }
+ ]
+ },
+ {
+ "related-to": "service-instance",
+ "relationship-label": "org.onap.relationships.inventory.ComposedOf",
+ "related-link": "/aai/v21/business/customers/customer/customer_cnf/service-subscriptions/service-subscription/vfw_k8s_demo_CNF_LR_1/service-instances/service-instance/93b89241-104b-40a7-8030-32e3b6eff459",
+ "relationship-data": [
+ {
+ "relationship-key": "customer.global-customer-id",
+ "relationship-value": "customer_cnf"
+ },
+ {
+ "relationship-key": "service-subscription.service-type",
+ "relationship-value": "vfw_k8s_demo_CNF_LR_1"
+ },
+ {
+ "relationship-key": "service-instance.service-instance-id",
+ "relationship-value": "93b89241-104b-40a7-8030-32e3b6eff459"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "service-instance.service-instance-name",
+ "property-value": "INSTANCE_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "platform",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v21/business/platforms/platform/%3Conapsdk.vid.vid.Platform%20object%20at%200x7f48eddc2c40%3E",
+ "relationship-data": [
+ {
+ "relationship-key": "platform.platform-name",
+ "relationship-value": "<onapsdk.vid.vid.Platform object at 0x7f48eddc2c40>"
+ }
+ ]
+ },
+ {
+ "related-to": "line-of-business",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v21/business/lines-of-business/line-of-business/%3Conapsdk.vid.vid.LineOfBusiness%20object%20at%200x7f48eddc2040%3E",
+ "relationship-data": [
+ {
+ "relationship-key": "line-of-business.line-of-business-name",
+ "relationship-value": "<onapsdk.vid.vid.LineOfBusiness object at 0x7f48eddc2040>"
+ }
+ ]
+ }
+ ]
+ },
+ "vf-modules": {
+ "vf-module": [
+ {
+ "vf-module-id": "abb282c8-c932-45dc-9c62-01938eab32fa",
+ "vf-module-name": "INSTANCE_vfw_k8s_demo_CNF_LR_1_vf_vfw_k8s_demo_cnf_lr_10..VfVfwK8sDemoCnfLr1..helm_base_template..module-4",
+ "heat-stack-id": "nifty_lichterman",
+ "orchestration-status": "Active",
+ "is-base-vf-module": false,
+ "automated-assignment": false,
+ "resource-version": "1635943380124",
+ "model-invariant-id": "7b0bcafb-6437-461c-bb48-7240f67ee718",
+ "model-version-id": "5cc1eda3-24e2-4e5e-a4a0-cb18477834f6",
+ "model-customization-id": "b80dedcd-902e-4c75-939a-310a68acb440",
+ "module-index": 0
+ },
+ {
+ "vf-module-id": "314795d7-6005-4462-a9fe-7006538e3ff9",
+ "vf-module-name": "INSTANCE_vfw_k8s_demo_CNF_LR_1_vf_vfw_k8s_demo_cnf_lr_10..VfVfwK8sDemoCnfLr1..helm_vpkg..module-2",
+ "heat-stack-id": "dazzling_nightingale",
+ "orchestration-status": "Active",
+ "is-base-vf-module": false,
+ "automated-assignment": false,
+ "resource-version": "1635943396304",
+ "model-invariant-id": "8f3652a6-af23-4d8c-9aa2-3e8d6f1a5b6e",
+ "model-version-id": "f4e54571-7cc7-4a67-b973-1851b8e540a7",
+ "model-customization-id": "5f1445b0-9ef2-4eb3-8051-a445fa35f877",
+ "module-index": 0
+ },
+ {
+ "vf-module-id": "56f3d02b-5a32-4a97-9e7b-d3c0094c07e8",
+ "vf-module-name": "INSTANCE_vfw_k8s_demo_CNF_LR_1_vf_vfw_k8s_demo_cnf_lr_10..VfVfwK8sDemoCnfLr1..helm_vsn..module-1",
+ "heat-stack-id": "sharp_torvalds",
+ "orchestration-status": "Active",
+ "is-base-vf-module": false,
+ "automated-assignment": false,
+ "resource-version": "1635943404667",
+ "model-invariant-id": "46a8e556-6c5f-4acd-9cfc-ea29b51c919e",
+ "model-version-id": "aa1ecbc5-990e-4ed0-a03e-a135f21763d3",
+ "model-customization-id": "0e61ce72-5eef-4fd7-b790-2107b67044f6",
+ "module-index": 0
+ },
+ {
+ "vf-module-id": "d56c54b9-40cc-4b7a-abce-50454571e39d",
+ "vf-module-name": "INSTANCE_vfw_k8s_demo_CNF_LR_1_vf_vfw_k8s_demo_cnf_lr_10..VfVfwK8sDemoCnfLr1..helm_vfw..module-3",
+ "heat-stack-id": "brave_brattain",
+ "orchestration-status": "Active",
+ "is-base-vf-module": false,
+ "automated-assignment": false,
+ "resource-version": "1635943387739",
+ "model-invariant-id": "89f47572-1d25-44b4-a6e0-52d0421a0980",
+ "model-version-id": "12a89df1-9fad-4045-a90e-dcb64264eed4",
+ "model-customization-id": "c81f3c71-3f42-4831-b3b2-7ceffb567795",
+ "module-index": 0
+ }
+ ]
+ }
+}
diff --git a/docs/files/vFW_CNF_CDS/vpkg-vf-module-aai.json b/docs/files/vFW_CNF_CDS/vpkg-vf-module-aai.json
new file mode 100644
index 000000000..84d62ca6c
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/vpkg-vf-module-aai.json
@@ -0,0 +1,133 @@
+{
+ "vf-module-id": "314795d7-6005-4462-a9fe-7006538e3ff9",
+ "vf-module-name": "INSTANCE_vfw_k8s_demo_CNF_LR_1_vf_vfw_k8s_demo_cnf_lr_10..VfVfwK8sDemoCnfLr1..helm_vpkg..module-2",
+ "heat-stack-id": "dazzling_nightingale",
+ "orchestration-status": "Active",
+ "is-base-vf-module": false,
+ "automated-assignment": false,
+ "resource-version": "1635943396304",
+ "model-invariant-id": "8f3652a6-af23-4d8c-9aa2-3e8d6f1a5b6e",
+ "model-version-id": "f4e54571-7cc7-4a67-b973-1851b8e540a7",
+ "model-customization-id": "5f1445b0-9ef2-4eb3-8051-a445fa35f877",
+ "module-index": 0,
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "k8s-resource",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/cloud-infrastructure/cloud-regions/cloud-region/K8sCloudOwner/kud-lr/tenants/tenant/3444a566-2717-4d85-83bd-45c104657173/k8s-resources/k8s-resource/e73732351195c8c10d28413ddff1d968bd53b0b0e395c24b3b0fcd39f46ea730",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "K8sCloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "kud-lr"
+ },
+ {
+ "relationship-key": "tenant.tenant-id",
+ "relationship-value": "3444a566-2717-4d85-83bd-45c104657173"
+ },
+ {
+ "relationship-key": "k8s-resource.id",
+ "relationship-value": "e73732351195c8c10d28413ddff1d968bd53b0b0e395c24b3b0fcd39f46ea730"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "k8s-resource.name",
+ "property-value": "vfw-1-vpkg-mgmt"
+ }
+ ]
+ },
+ {
+ "related-to": "k8s-resource",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/cloud-infrastructure/cloud-regions/cloud-region/K8sCloudOwner/kud-lr/tenants/tenant/3444a566-2717-4d85-83bd-45c104657173/k8s-resources/k8s-resource/f65235da9cb098588b2db0c9e7da1ccb00954268fba6bd621bb9ef0b48cd717f",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "K8sCloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "kud-lr"
+ },
+ {
+ "relationship-key": "tenant.tenant-id",
+ "relationship-value": "3444a566-2717-4d85-83bd-45c104657173"
+ },
+ {
+ "relationship-key": "k8s-resource.id",
+ "relationship-value": "f65235da9cb098588b2db0c9e7da1ccb00954268fba6bd621bb9ef0b48cd717f"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "k8s-resource.name",
+ "property-value": "vfw-1-vpkg"
+ }
+ ]
+ },
+ {
+ "related-to": "k8s-resource",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/cloud-infrastructure/cloud-regions/cloud-region/K8sCloudOwner/kud-lr/tenants/tenant/3444a566-2717-4d85-83bd-45c104657173/k8s-resources/k8s-resource/87cbdb83bf436703bdb9823e07e1498a7b3ec7fb12ba14193aadd4630649e0ae",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "K8sCloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "kud-lr"
+ },
+ {
+ "relationship-key": "tenant.tenant-id",
+ "relationship-value": "3444a566-2717-4d85-83bd-45c104657173"
+ },
+ {
+ "relationship-key": "k8s-resource.id",
+ "relationship-value": "87cbdb83bf436703bdb9823e07e1498a7b3ec7fb12ba14193aadd4630649e0ae"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "k8s-resource.name",
+ "property-value": "vfw-1-vpkg-c6bdb954c-mlpz9"
+ }
+ ]
+ },
+ {
+ "related-to": "k8s-resource",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/cloud-infrastructure/cloud-regions/cloud-region/K8sCloudOwner/kud-lr/tenants/tenant/3444a566-2717-4d85-83bd-45c104657173/k8s-resources/k8s-resource/5538b19871da1fd05b82366c38cbbe88bae4d3444b6a21018f83787327958617",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "K8sCloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "kud-lr"
+ },
+ {
+ "relationship-key": "tenant.tenant-id",
+ "relationship-value": "3444a566-2717-4d85-83bd-45c104657173"
+ },
+ {
+ "relationship-key": "k8s-resource.id",
+ "relationship-value": "5538b19871da1fd05b82366c38cbbe88bae4d3444b6a21018f83787327958617"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "k8s-resource.name",
+ "property-value": "vfw-1-vpkg-configmap"
+ }
+ ]
+ }
+ ]
+ }
+}
diff --git a/docs/files/vLBMS_report.json b/docs/files/vLBMS_report.json
index b8688bba9..d06f9cf5c 100644
--- a/docs/files/vLBMS_report.json
+++ b/docs/files/vLBMS_report.json
@@ -10118,4 +10118,4 @@
"errors": []
}
]
-} \ No newline at end of file
+}
diff --git a/docs/files/vcpe_tosca/create_image.png b/docs/files/vcpe_tosca/create_image.png
new file mode 100644
index 000000000..44b226231
--- /dev/null
+++ b/docs/files/vcpe_tosca/create_image.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/create_project.png b/docs/files/vcpe_tosca/create_project.png
new file mode 100644
index 000000000..814e4089d
--- /dev/null
+++ b/docs/files/vcpe_tosca/create_project.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/create_user.png b/docs/files/vcpe_tosca/create_user.png
new file mode 100644
index 000000000..800247a94
--- /dev/null
+++ b/docs/files/vcpe_tosca/create_user.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/customer_service.png b/docs/files/vcpe_tosca/customer_service.png
new file mode 100644
index 000000000..0bd69fadd
--- /dev/null
+++ b/docs/files/vcpe_tosca/customer_service.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/image.png b/docs/files/vcpe_tosca/image.png
new file mode 100644
index 000000000..05ac77290
--- /dev/null
+++ b/docs/files/vcpe_tosca/image.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/manage_project_user.png b/docs/files/vcpe_tosca/manage_project_user.png
new file mode 100644
index 000000000..3deb7b05b
--- /dev/null
+++ b/docs/files/vcpe_tosca/manage_project_user.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/ns_active.png b/docs/files/vcpe_tosca/ns_active.png
new file mode 100644
index 000000000..4d24e29a5
--- /dev/null
+++ b/docs/files/vcpe_tosca/ns_active.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/ns_create.png b/docs/files/vcpe_tosca/ns_create.png
new file mode 100644
index 000000000..7fe2fd91b
--- /dev/null
+++ b/docs/files/vcpe_tosca/ns_create.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/ns_create_input.png b/docs/files/vcpe_tosca/ns_create_input.png
new file mode 100644
index 000000000..f6d2c88a8
--- /dev/null
+++ b/docs/files/vcpe_tosca/ns_create_input.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/ns_delete.png b/docs/files/vcpe_tosca/ns_delete.png
new file mode 100644
index 000000000..931334e81
--- /dev/null
+++ b/docs/files/vcpe_tosca/ns_delete.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/ns_deleted.png b/docs/files/vcpe_tosca/ns_deleted.png
new file mode 100644
index 000000000..2663fbe97
--- /dev/null
+++ b/docs/files/vcpe_tosca/ns_deleted.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/ns_deleting.png b/docs/files/vcpe_tosca/ns_deleting.png
new file mode 100644
index 000000000..977d771d0
--- /dev/null
+++ b/docs/files/vcpe_tosca/ns_deleting.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/ns_instance.png b/docs/files/vcpe_tosca/ns_instance.png
new file mode 100644
index 000000000..32e47da91
--- /dev/null
+++ b/docs/files/vcpe_tosca/ns_instance.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/ns_package_list.png b/docs/files/vcpe_tosca/ns_package_list.png
new file mode 100644
index 000000000..77f024fd6
--- /dev/null
+++ b/docs/files/vcpe_tosca/ns_package_list.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/ns_package_onboard.png b/docs/files/vcpe_tosca/ns_package_onboard.png
new file mode 100644
index 000000000..0ecce911e
--- /dev/null
+++ b/docs/files/vcpe_tosca/ns_package_onboard.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/ns_vnf_heal.png b/docs/files/vcpe_tosca/ns_vnf_heal.png
new file mode 100644
index 000000000..aded069f9
--- /dev/null
+++ b/docs/files/vcpe_tosca/ns_vnf_heal.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/ns_vnf_healed.png b/docs/files/vcpe_tosca/ns_vnf_healed.png
new file mode 100644
index 000000000..7fa669ce1
--- /dev/null
+++ b/docs/files/vcpe_tosca/ns_vnf_healed.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/ns_vnf_healing.png b/docs/files/vcpe_tosca/ns_vnf_healing.png
new file mode 100644
index 000000000..140e00b74
--- /dev/null
+++ b/docs/files/vcpe_tosca/ns_vnf_healing.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/ns_vnf_list.png b/docs/files/vcpe_tosca/ns_vnf_list.png
new file mode 100644
index 000000000..77af139da
--- /dev/null
+++ b/docs/files/vcpe_tosca/ns_vnf_list.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/sdc.png b/docs/files/vcpe_tosca/sdc.png
new file mode 100644
index 000000000..98e5ffaa6
--- /dev/null
+++ b/docs/files/vcpe_tosca/sdc.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/vim.png b/docs/files/vcpe_tosca/vim.png
new file mode 100644
index 000000000..e9cb0cc44
--- /dev/null
+++ b/docs/files/vcpe_tosca/vim.png
Binary files differ
diff --git a/docs/files/vcpe_tosca/vnfm.png b/docs/files/vcpe_tosca/vnfm.png
new file mode 100644
index 000000000..6315b9a14
--- /dev/null
+++ b/docs/files/vcpe_tosca/vnfm.png
Binary files differ
diff --git a/docs/files/vfw-1-preload.json b/docs/files/vfw-1-preload.json
index be42a3bd4..f4207257d 100644
--- a/docs/files/vfw-1-preload.json
+++ b/docs/files/vfw-1-preload.json
@@ -1,141 +1,176 @@
{
- "input": {
- "request-information": {
- "notification-url": "openecomp.org",
- "order-number": "1",
- "order-version": "1",
- "request-action": "PreloadVNFRequest",
- "request-id": "robot12"
- },
- "sdnc-request-header": {
- "svc-action": "reserve",
- "svc-notification-url": "http:\/\/openecomp.org:8080\/adapters\/rest\/SDNCNotify",
- "svc-request-id": "robot12"
- },
- "vnf-topology-information": {
- "vnf-assignments": {
- "availability-zones": [],
- "vnf-networks": [],
- "vnf-vms": []
- },
- "vnf-parameters":
- [{
- "vnf-parameter-name": "unprotected_private_net_id",
- "vnf-parameter-value": "unprotected_net_dt"
- }, {
- "vnf-parameter-name": "unprotected_private_subnet_id",
- "vnf-parameter-value": "unprotected_subnet_dt"
- }, {
- "vnf-parameter-name": "unprotected_private_net_cidr",
- "vnf-parameter-value": "192.168.10.0/24"
- }, {
- "vnf-parameter-name": "protected_private_net_id",
- "vnf-parameter-value": "protected_net_dt"
- }, {
- "vnf-parameter-name": "protected_private_net_cidr",
- "vnf-parameter-value": "192.168.20.0/24"
- }, {
- "vnf-parameter-name": "protected_private_subnet_id",
- "vnf-parameter-value": "protected_subnet_dt"
- }, {
- "vnf-parameter-name": "key_name",
- "vnf-parameter-value": "vfw_key"
- }, {
- "vnf-parameter-name": "cloud_env",
- "vnf-parameter-value": "openstack"
- }, {
- "vnf-parameter-name": "vsn_name_0",
- "vnf-parameter-value": "vfw-vsn-1-dt"
- }, {
- "vnf-parameter-name": "onap_private_net_id",
- "vnf-parameter-value": "oam_onap_y3id"
- }, {
- "vnf-parameter-name": "onap_private_subnet_id",
- "vnf-parameter-value": "oam_onap_y3id"
- }, {
- "vnf-parameter-name": "onap_private_net_cidr",
- "vnf-parameter-value": "10.0.0.0/16"
- }, {
- "vnf-parameter-name": "ext_private_net_id",
- "vnf-parameter-value": "onap_oam_ext"
- }, {
- "vnf-parameter-name": "ext_private_subnet_id",
- "vnf-parameter-value": "onap_oam_ext_sub"
- }, {
- "vnf-parameter-name": "ext_private_net_cidr",
- "vnf-parameter-value": "10.100.0.0/16"
- }, {
- "vnf-parameter-name": "nexus_artifact_repo",
- "vnf-parameter-value": "https://nexus.onap.org"
- }, {
- "vnf-parameter-name": "vfw_name_0",
- "vnf-parameter-value": "vfw-vfw-1-dt"
- }, {
- "vnf-parameter-name": "dcae_collector_port",
- "vnf-parameter-value": "8080"
- }, {
- "vnf-parameter-name": "public_net_id",
- "vnf-parameter-value": "external"
- }, {
- "vnf-parameter-name": "image_name",
- "vnf-parameter-value": "ubuntu-14-04-cloud-amd64"
- }, {
- "vnf-parameter-name": "flavor_name",
- "vnf-parameter-value": "m1.medium"
- }, {
- "vnf-parameter-name": "install_script_version",
- "vnf-parameter-value": "1.4.0"
- }, {
- "vnf-parameter-name": "vfw_private_ip_0",
- "vnf-parameter-value": "192.168.10.100"
- }, {
- "vnf-parameter-name": "vfw_private_ip_1",
- "vnf-parameter-value": "192.168.20.100"
- }, {
- "vnf-parameter-name": "vfw_private_ip_2",
- "vnf-parameter-value": "10.0.110.1"
- }, {
- "vnf-parameter-name": "vfw_private_ip_3",
- "vnf-parameter-value": "10.100.100.1"
- }, {
- "vnf-parameter-name": "vpg_private_ip_0",
- "vnf-parameter-value": "192.168.10.200"
- }, {
- "vnf-parameter-name": "vpg_private_ip_1",
- "vnf-parameter-value": "10.0.110.2"
- }, {
- "vnf-parameter-name": "vpg_private_ip_2",
- "vnf-parameter-value": "10.100.100.2"
- }, {
- "vnf-parameter-name": "vsn_private_ip_1",
- "vnf-parameter-value": "10.0.110.3"
- }, {
- "vnf-parameter-name": "vsn_private_ip_0",
- "vnf-parameter-value": "192.168.20.250"
- }, {
- "vnf-parameter-name": "vsn_private_ip_2",
- "vnf-parameter-value": "10.100.100.3"
- }, {
- "vnf-parameter-name": "demo_artifacts_version",
- "vnf-parameter-value": "1.4.0"
- }, {
- "vnf-parameter-name": "pub_key",
- "vnf-parameter-value": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC6KbTxfT368DNBzLyfTYJ0INI7mkRXc/xMpx349TXGBTQ06g2toN/p9OM188EGZd/qERRXcdnKY8lYULVIXZEoQbXTus2gTQWXy1zn5nhSkhste3L1uG+sjHySQHBj5NyOC32iNehRNu03bHi8XRBAOHZsXx6JpkltLXgWqQI91+h9i7I/4trpPyODtzz1DLIkbXv9woncwllKf4bmm+Bk6/OJKHuAlBdCLL5diZqODEZlKOyPPC9c07a835sskMgbscPrvBQigZFRNerjQlFXXCHjRwoBMvfS7kD7/R/K1fmSLtH0w4VEp5iwS9SIezgEeO2aDFPucmYG4MQDGzAz"
- }, {
- "vnf-parameter-name": "dcae_collector_ip",
- "vnf-parameter-value": "10.0.4.1"
- }, {
- "vnf-parameter-name": "sec_group",
- "vnf-parameter-value": "onap_sg_y3id"
- }
- ],
- "vnf-topology-identifier": {
- "service-type": "1c38ef10-0f14-4d78-876c-3cc10ff4e535",
- "vnf-type": "VfwsnkDistributetraffic..base_vfw..module-0",
- "generic-vnf-name": "vfw-vsnk-dt-1",
- "generic-vnf-type": "vFWSNK-DistributeTraffic 0",
- "vnf-name": "vfw-dt-module-1"
- }
- }
- }
+ "input": {
+ "request-information": {
+ "notification-url": "openecomp.org",
+ "order-number": "1",
+ "order-version": "1",
+ "request-action": "PreloadVNFRequest",
+ "request-id": "robot12"
+ },
+ "sdnc-request-header": {
+ "svc-action": "reserve",
+ "svc-notification-url": "http://openecomp.org:8080/adapters/rest/SDNCNotify",
+ "svc-request-id": "robot12"
+ },
+ "vnf-topology-information": {
+ "vnf-assignments": {
+ "availability-zones": [],
+ "vnf-networks": [],
+ "vnf-vms": []
+ },
+ "vnf-parameters": [
+ {
+ "vnf-parameter-name": "unprotected_private_net_id",
+ "vnf-parameter-value": "unprotected_net_dt"
+ },
+ {
+ "vnf-parameter-name": "unprotected_private_subnet_id",
+ "vnf-parameter-value": "unprotected_subnet_dt"
+ },
+ {
+ "vnf-parameter-name": "unprotected_private_net_cidr",
+ "vnf-parameter-value": "192.168.10.0/24"
+ },
+ {
+ "vnf-parameter-name": "protected_private_net_id",
+ "vnf-parameter-value": "protected_net_dt"
+ },
+ {
+ "vnf-parameter-name": "protected_private_net_cidr",
+ "vnf-parameter-value": "192.168.20.0/24"
+ },
+ {
+ "vnf-parameter-name": "protected_private_subnet_id",
+ "vnf-parameter-value": "protected_subnet_dt"
+ },
+ {
+ "vnf-parameter-name": "key_name",
+ "vnf-parameter-value": "vfw_key"
+ },
+ {
+ "vnf-parameter-name": "cloud_env",
+ "vnf-parameter-value": "openstack"
+ },
+ {
+ "vnf-parameter-name": "vsn_name_0",
+ "vnf-parameter-value": "vfw-vsn-1-dt"
+ },
+ {
+ "vnf-parameter-name": "onap_private_net_id",
+ "vnf-parameter-value": "oam_onap_y3id"
+ },
+ {
+ "vnf-parameter-name": "onap_private_subnet_id",
+ "vnf-parameter-value": "oam_onap_y3id"
+ },
+ {
+ "vnf-parameter-name": "onap_private_net_cidr",
+ "vnf-parameter-value": "10.0.0.0/16"
+ },
+ {
+ "vnf-parameter-name": "ext_private_net_id",
+ "vnf-parameter-value": "onap_oam_ext"
+ },
+ {
+ "vnf-parameter-name": "ext_private_subnet_id",
+ "vnf-parameter-value": "onap_oam_ext_sub"
+ },
+ {
+ "vnf-parameter-name": "ext_private_net_cidr",
+ "vnf-parameter-value": "10.100.0.0/16"
+ },
+ {
+ "vnf-parameter-name": "nexus_artifact_repo",
+ "vnf-parameter-value": "https://nexus.onap.org"
+ },
+ {
+ "vnf-parameter-name": "vfw_name_0",
+ "vnf-parameter-value": "vfw-vfw-1-dt"
+ },
+ {
+ "vnf-parameter-name": "dcae_collector_port",
+ "vnf-parameter-value": "8080"
+ },
+ {
+ "vnf-parameter-name": "public_net_id",
+ "vnf-parameter-value": "external"
+ },
+ {
+ "vnf-parameter-name": "image_name",
+ "vnf-parameter-value": "ubuntu-14-04-cloud-amd64"
+ },
+ {
+ "vnf-parameter-name": "flavor_name",
+ "vnf-parameter-value": "m1.medium"
+ },
+ {
+ "vnf-parameter-name": "install_script_version",
+ "vnf-parameter-value": "1.4.0"
+ },
+ {
+ "vnf-parameter-name": "vfw_private_ip_0",
+ "vnf-parameter-value": "192.168.10.100"
+ },
+ {
+ "vnf-parameter-name": "vfw_private_ip_1",
+ "vnf-parameter-value": "192.168.20.100"
+ },
+ {
+ "vnf-parameter-name": "vfw_private_ip_2",
+ "vnf-parameter-value": "10.0.110.1"
+ },
+ {
+ "vnf-parameter-name": "vfw_private_ip_3",
+ "vnf-parameter-value": "10.100.100.1"
+ },
+ {
+ "vnf-parameter-name": "vpg_private_ip_0",
+ "vnf-parameter-value": "192.168.10.200"
+ },
+ {
+ "vnf-parameter-name": "vpg_private_ip_1",
+ "vnf-parameter-value": "10.0.110.2"
+ },
+ {
+ "vnf-parameter-name": "vpg_private_ip_2",
+ "vnf-parameter-value": "10.100.100.2"
+ },
+ {
+ "vnf-parameter-name": "vsn_private_ip_1",
+ "vnf-parameter-value": "10.0.110.3"
+ },
+ {
+ "vnf-parameter-name": "vsn_private_ip_0",
+ "vnf-parameter-value": "192.168.20.250"
+ },
+ {
+ "vnf-parameter-name": "vsn_private_ip_2",
+ "vnf-parameter-value": "10.100.100.3"
+ },
+ {
+ "vnf-parameter-name": "demo_artifacts_version",
+ "vnf-parameter-value": "1.4.0"
+ },
+ {
+ "vnf-parameter-name": "pub_key",
+ "vnf-parameter-value": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC6KbTxfT368DNBzLyfTYJ0INI7mkRXc/xMpx349TXGBTQ06g2toN/p9OM188EGZd/qERRXcdnKY8lYULVIXZEoQbXTus2gTQWXy1zn5nhSkhste3L1uG+sjHySQHBj5NyOC32iNehRNu03bHi8XRBAOHZsXx6JpkltLXgWqQI91+h9i7I/4trpPyODtzz1DLIkbXv9woncwllKf4bmm+Bk6/OJKHuAlBdCLL5diZqODEZlKOyPPC9c07a835sskMgbscPrvBQigZFRNerjQlFXXCHjRwoBMvfS7kD7/R/K1fmSLtH0w4VEp5iwS9SIezgEeO2aDFPucmYG4MQDGzAz"
+ },
+ {
+ "vnf-parameter-name": "dcae_collector_ip",
+ "vnf-parameter-value": "10.0.4.1"
+ },
+ {
+ "vnf-parameter-name": "sec_group",
+ "vnf-parameter-value": "onap_sg_y3id"
+ }
+ ],
+ "vnf-topology-identifier": {
+ "service-type": "1c38ef10-0f14-4d78-876c-3cc10ff4e535",
+ "vnf-type": "VfwsnkDistributetraffic..base_vfw..module-0",
+ "generic-vnf-name": "vfw-vsnk-dt-1",
+ "generic-vnf-type": "vFWSNK-DistributeTraffic 0",
+ "vnf-name": "vfw-dt-module-1"
+ }
+ }
+ }
}
diff --git a/docs/files/vfw-2-preload.json b/docs/files/vfw-2-preload.json
index 17b473efa..956aec459 100644
--- a/docs/files/vfw-2-preload.json
+++ b/docs/files/vfw-2-preload.json
@@ -1,141 +1,176 @@
{
- "input": {
- "request-information": {
- "notification-url": "openecomp.org",
- "order-number": "1",
- "order-version": "1",
- "request-action": "PreloadVNFRequest",
- "request-id": "robot12"
- },
- "sdnc-request-header": {
- "svc-action": "reserve",
- "svc-notification-url": "http:\/\/openecomp.org:8080\/adapters\/rest\/SDNCNotify",
- "svc-request-id": "robot12"
- },
- "vnf-topology-information": {
- "vnf-assignments": {
- "availability-zones": [],
- "vnf-networks": [],
- "vnf-vms": []
- },
- "vnf-parameters":
- [{
- "vnf-parameter-name": "unprotected_private_net_id",
- "vnf-parameter-value": "unprotected_net_dt"
- }, {
- "vnf-parameter-name": "unprotected_private_subnet_id",
- "vnf-parameter-value": "unprotected_subnet_dt"
- }, {
- "vnf-parameter-name": "unprotected_private_net_cidr",
- "vnf-parameter-value": "192.168.10.0/24"
- }, {
- "vnf-parameter-name": "protected_private_net_id",
- "vnf-parameter-value": "protected_net_dt"
- }, {
- "vnf-parameter-name": "protected_private_net_cidr",
- "vnf-parameter-value": "192.168.20.0/24"
- }, {
- "vnf-parameter-name": "protected_private_subnet_id",
- "vnf-parameter-value": "protected_subnet_dt"
- }, {
- "vnf-parameter-name": "key_name",
- "vnf-parameter-value": "vfw_key"
- }, {
- "vnf-parameter-name": "cloud_env",
- "vnf-parameter-value": "openstack"
- }, {
- "vnf-parameter-name": "vsn_name_0",
- "vnf-parameter-value": "vfw-vsn-2-dt"
- }, {
- "vnf-parameter-name": "onap_private_net_id",
- "vnf-parameter-value": "oam_onap_y3id"
- }, {
- "vnf-parameter-name": "onap_private_subnet_id",
- "vnf-parameter-value": "oam_onap_y3id"
- }, {
- "vnf-parameter-name": "onap_private_net_cidr",
- "vnf-parameter-value": "10.0.0.0/16"
- }, {
- "vnf-parameter-name": "ext_private_net_id",
- "vnf-parameter-value": "onap_oam_ext"
- }, {
- "vnf-parameter-name": "ext_private_subnet_id",
- "vnf-parameter-value": "onap_oam_ext_sub"
- }, {
- "vnf-parameter-name": "ext_private_net_cidr",
- "vnf-parameter-value": "10.100.0.0/16"
- }, {
- "vnf-parameter-name": "nexus_artifact_repo",
- "vnf-parameter-value": "https://nexus.onap.org"
- }, {
- "vnf-parameter-name": "vfw_name_0",
- "vnf-parameter-value": "vfw-vfw-2-dt"
- }, {
- "vnf-parameter-name": "dcae_collector_port",
- "vnf-parameter-value": "8080"
- }, {
- "vnf-parameter-name": "public_net_id",
- "vnf-parameter-value": "external"
- }, {
- "vnf-parameter-name": "image_name",
- "vnf-parameter-value": "ubuntu-14-04-cloud-amd64"
- }, {
- "vnf-parameter-name": "flavor_name",
- "vnf-parameter-value": "m1.medium"
- }, {
- "vnf-parameter-name": "install_script_version",
- "vnf-parameter-value": "1.4.0"
- }, {
- "vnf-parameter-name": "vfw_private_ip_0",
- "vnf-parameter-value": "192.168.10.110"
- }, {
- "vnf-parameter-name": "vfw_private_ip_1",
- "vnf-parameter-value": "192.168.20.110"
- }, {
- "vnf-parameter-name": "vfw_private_ip_2",
- "vnf-parameter-value": "10.0.110.4"
- }, {
- "vnf-parameter-name": "vfw_private_ip_3",
- "vnf-parameter-value": "10.100.100.4"
- }, {
- "vnf-parameter-name": "vpg_private_ip_0",
- "vnf-parameter-value": "192.168.10.200"
- }, {
- "vnf-parameter-name": "vpg_private_ip_1",
- "vnf-parameter-value": "10.0.110.2"
- }, {
- "vnf-parameter-name": "vpg_private_ip_2",
- "vnf-parameter-value": "10.100.100.2"
- }, {
- "vnf-parameter-name": "vsn_private_ip_1",
- "vnf-parameter-value": "10.0.110.5"
- }, {
- "vnf-parameter-name": "vsn_private_ip_0",
- "vnf-parameter-value": "192.168.20.240"
- }, {
- "vnf-parameter-name": "vsn_private_ip_2",
- "vnf-parameter-value": "10.100.100.5"
- }, {
- "vnf-parameter-name": "demo_artifacts_version",
- "vnf-parameter-value": "1.4.0"
- }, {
- "vnf-parameter-name": "pub_key",
- "vnf-parameter-value": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC6KbTxfT368DNBzLyfTYJ0INI7mkRXc/xMpx349TXGBTQ06g2toN/p9OM188EGZd/qERRXcdnKY8lYULVIXZEoQbXTus2gTQWXy1zn5nhSkhste3L1uG+sjHySQHBj5NyOC32iNehRNu03bHi8XRBAOHZsXx6JpkltLXgWqQI91+h9i7I/4trpPyODtzz1DLIkbXv9woncwllKf4bmm+Bk6/OJKHuAlBdCLL5diZqODEZlKOyPPC9c07a835sskMgbscPrvBQigZFRNerjQlFXXCHjRwoBMvfS7kD7/R/K1fmSLtH0w4VEp5iwS9SIezgEeO2aDFPucmYG4MQDGzAz"
- }, {
- "vnf-parameter-name": "dcae_collector_ip",
- "vnf-parameter-value": "10.0.4.1"
- }, {
- "vnf-parameter-name": "sec_group",
- "vnf-parameter-value": "onap_sg_y3id"
- }
- ],
- "vnf-topology-identifier": {
- "service-type": "1c38ef10-0f14-4d78-876c-3cc10ff4e535",
- "vnf-type": "VfwsnkDistributetraffic..base_vfw..module-0",
- "generic-vnf-name": "vfw-vsnk-dt-2",
- "generic-vnf-type": "vFWSNK-DistributeTraffic 1",
- "vnf-name": "vfw-dt-module-2"
- }
- }
- }
+ "input": {
+ "request-information": {
+ "notification-url": "openecomp.org",
+ "order-number": "1",
+ "order-version": "1",
+ "request-action": "PreloadVNFRequest",
+ "request-id": "robot12"
+ },
+ "sdnc-request-header": {
+ "svc-action": "reserve",
+ "svc-notification-url": "http://openecomp.org:8080/adapters/rest/SDNCNotify",
+ "svc-request-id": "robot12"
+ },
+ "vnf-topology-information": {
+ "vnf-assignments": {
+ "availability-zones": [],
+ "vnf-networks": [],
+ "vnf-vms": []
+ },
+ "vnf-parameters": [
+ {
+ "vnf-parameter-name": "unprotected_private_net_id",
+ "vnf-parameter-value": "unprotected_net_dt"
+ },
+ {
+ "vnf-parameter-name": "unprotected_private_subnet_id",
+ "vnf-parameter-value": "unprotected_subnet_dt"
+ },
+ {
+ "vnf-parameter-name": "unprotected_private_net_cidr",
+ "vnf-parameter-value": "192.168.10.0/24"
+ },
+ {
+ "vnf-parameter-name": "protected_private_net_id",
+ "vnf-parameter-value": "protected_net_dt"
+ },
+ {
+ "vnf-parameter-name": "protected_private_net_cidr",
+ "vnf-parameter-value": "192.168.20.0/24"
+ },
+ {
+ "vnf-parameter-name": "protected_private_subnet_id",
+ "vnf-parameter-value": "protected_subnet_dt"
+ },
+ {
+ "vnf-parameter-name": "key_name",
+ "vnf-parameter-value": "vfw_key"
+ },
+ {
+ "vnf-parameter-name": "cloud_env",
+ "vnf-parameter-value": "openstack"
+ },
+ {
+ "vnf-parameter-name": "vsn_name_0",
+ "vnf-parameter-value": "vfw-vsn-2-dt"
+ },
+ {
+ "vnf-parameter-name": "onap_private_net_id",
+ "vnf-parameter-value": "oam_onap_y3id"
+ },
+ {
+ "vnf-parameter-name": "onap_private_subnet_id",
+ "vnf-parameter-value": "oam_onap_y3id"
+ },
+ {
+ "vnf-parameter-name": "onap_private_net_cidr",
+ "vnf-parameter-value": "10.0.0.0/16"
+ },
+ {
+ "vnf-parameter-name": "ext_private_net_id",
+ "vnf-parameter-value": "onap_oam_ext"
+ },
+ {
+ "vnf-parameter-name": "ext_private_subnet_id",
+ "vnf-parameter-value": "onap_oam_ext_sub"
+ },
+ {
+ "vnf-parameter-name": "ext_private_net_cidr",
+ "vnf-parameter-value": "10.100.0.0/16"
+ },
+ {
+ "vnf-parameter-name": "nexus_artifact_repo",
+ "vnf-parameter-value": "https://nexus.onap.org"
+ },
+ {
+ "vnf-parameter-name": "vfw_name_0",
+ "vnf-parameter-value": "vfw-vfw-2-dt"
+ },
+ {
+ "vnf-parameter-name": "dcae_collector_port",
+ "vnf-parameter-value": "8080"
+ },
+ {
+ "vnf-parameter-name": "public_net_id",
+ "vnf-parameter-value": "external"
+ },
+ {
+ "vnf-parameter-name": "image_name",
+ "vnf-parameter-value": "ubuntu-14-04-cloud-amd64"
+ },
+ {
+ "vnf-parameter-name": "flavor_name",
+ "vnf-parameter-value": "m1.medium"
+ },
+ {
+ "vnf-parameter-name": "install_script_version",
+ "vnf-parameter-value": "1.4.0"
+ },
+ {
+ "vnf-parameter-name": "vfw_private_ip_0",
+ "vnf-parameter-value": "192.168.10.110"
+ },
+ {
+ "vnf-parameter-name": "vfw_private_ip_1",
+ "vnf-parameter-value": "192.168.20.110"
+ },
+ {
+ "vnf-parameter-name": "vfw_private_ip_2",
+ "vnf-parameter-value": "10.0.110.4"
+ },
+ {
+ "vnf-parameter-name": "vfw_private_ip_3",
+ "vnf-parameter-value": "10.100.100.4"
+ },
+ {
+ "vnf-parameter-name": "vpg_private_ip_0",
+ "vnf-parameter-value": "192.168.10.200"
+ },
+ {
+ "vnf-parameter-name": "vpg_private_ip_1",
+ "vnf-parameter-value": "10.0.110.2"
+ },
+ {
+ "vnf-parameter-name": "vpg_private_ip_2",
+ "vnf-parameter-value": "10.100.100.2"
+ },
+ {
+ "vnf-parameter-name": "vsn_private_ip_1",
+ "vnf-parameter-value": "10.0.110.5"
+ },
+ {
+ "vnf-parameter-name": "vsn_private_ip_0",
+ "vnf-parameter-value": "192.168.20.240"
+ },
+ {
+ "vnf-parameter-name": "vsn_private_ip_2",
+ "vnf-parameter-value": "10.100.100.5"
+ },
+ {
+ "vnf-parameter-name": "demo_artifacts_version",
+ "vnf-parameter-value": "1.4.0"
+ },
+ {
+ "vnf-parameter-name": "pub_key",
+ "vnf-parameter-value": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC6KbTxfT368DNBzLyfTYJ0INI7mkRXc/xMpx349TXGBTQ06g2toN/p9OM188EGZd/qERRXcdnKY8lYULVIXZEoQbXTus2gTQWXy1zn5nhSkhste3L1uG+sjHySQHBj5NyOC32iNehRNu03bHi8XRBAOHZsXx6JpkltLXgWqQI91+h9i7I/4trpPyODtzz1DLIkbXv9woncwllKf4bmm+Bk6/OJKHuAlBdCLL5diZqODEZlKOyPPC9c07a835sskMgbscPrvBQigZFRNerjQlFXXCHjRwoBMvfS7kD7/R/K1fmSLtH0w4VEp5iwS9SIezgEeO2aDFPucmYG4MQDGzAz"
+ },
+ {
+ "vnf-parameter-name": "dcae_collector_ip",
+ "vnf-parameter-value": "10.0.4.1"
+ },
+ {
+ "vnf-parameter-name": "sec_group",
+ "vnf-parameter-value": "onap_sg_y3id"
+ }
+ ],
+ "vnf-topology-identifier": {
+ "service-type": "1c38ef10-0f14-4d78-876c-3cc10ff4e535",
+ "vnf-type": "VfwsnkDistributetraffic..base_vfw..module-0",
+ "generic-vnf-name": "vfw-vsnk-dt-2",
+ "generic-vnf-type": "vFWSNK-DistributeTraffic 1",
+ "vnf-name": "vfw-dt-module-2"
+ }
+ }
+ }
}
diff --git a/docs/files/vfwdt-aai-postman.json b/docs/files/vfwdt-aai-postman.json
index a37792217..aaa03a4f8 100644
--- a/docs/files/vfwdt-aai-postman.json
+++ b/docs/files/vfwdt-aai-postman.json
@@ -1,214 +1,214 @@
{
- "info": {
- "_postman_id": "a2db5e8d-6c15-4db1-9a62-9346a414b0be",
- "name": "ONAP-AAI-Test",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "VServersByName",
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "X-FromAppId",
- "value": "AAI",
- "type": "text"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr",
- "type": "text"
- },
- {
- "key": "Accept",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Content-Type",
- "value": "application/json",
- "type": "text"
- }
- ],
- "url": {
- "raw": "https://10.12.5.63:30233/aai/v14/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/RegionOne/tenants/tenant/087050388b204c73a3e418dd2c1fe30b/vservers/?vserver-name=vofwl01vfw4407",
- "protocol": "https",
- "host": [
- "10",
- "12",
- "5",
- "63"
- ],
- "port": "30233",
- "path": [
- "aai",
- "v14",
- "cloud-infrastructure",
- "cloud-regions",
- "cloud-region",
- "CloudOwner",
- "RegionOne",
- "tenants",
- "tenant",
- "087050388b204c73a3e418dd2c1fe30b",
- "vservers",
- ""
- ],
- "query": [
- {
- "key": "vserver-name",
- "value": "vofwl01vfw4407"
- }
- ]
- }
- },
- "response": []
- },
- {
- "name": "Tenants",
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "X-FromAppId",
- "value": "AAI",
- "type": "text"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr",
- "type": "text"
- },
- {
- "key": "Accept",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Content-Type",
- "value": "application/json",
- "type": "text"
- }
- ],
- "url": {
- "raw": "https://10.12.5.63:30233/aai/v14/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/RegionOne/tenants/tenant/087050388b204c73a3e418dd2c1fe30b/vservers/?vserver-name=vofwl01vfw4407",
- "protocol": "https",
- "host": [
- "10",
- "12",
- "5",
- "63"
- ],
- "port": "30233",
- "path": [
- "aai",
- "v14",
- "cloud-infrastructure",
- "cloud-regions",
- "cloud-region",
- "CloudOwner",
- "RegionOne",
- "tenants",
- "tenant",
- "087050388b204c73a3e418dd2c1fe30b",
- "vservers",
- ""
- ],
- "query": [
- {
- "key": "vserver-name",
- "value": "vofwl01vfw4407"
- }
- ]
- }
- },
- "response": []
- },
- {
- "name": "GenericVNFById",
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "X-FromAppId",
- "value": "AAI",
- "type": "text"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr",
- "type": "text"
- },
- {
- "key": "Accept",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Content-Type",
- "value": "application/json",
- "type": "text"
- }
- ],
- "url": {
- "raw": "https://10.12.5.63:30233/aai/v14/network/generic-vnfs/generic-vnf/2d125b4d-e120-4815-a0c7-4c4bec6c89f9",
- "protocol": "https",
- "host": [
- "10",
- "12",
- "5",
- "63"
- ],
- "port": "30233",
- "path": [
- "aai",
- "v14",
- "network",
- "generic-vnfs",
- "generic-vnf",
- "2d125b4d-e120-4815-a0c7-4c4bec6c89f9"
- ]
- }
- },
- "response": []
- }
- ],
- "auth": {
- "type": "basic",
- "basic": [
- {
- "key": "password",
- "value": "AAI",
- "type": "string"
- },
- {
- "key": "username",
- "value": "AAI",
- "type": "string"
- }
- ]
- },
- "event": [
- {
- "listen": "prerequest",
- "script": {
- "id": "e35b8a0f-24b0-4990-88f2-4c83421cb4a4",
- "type": "text/javascript",
- "exec": [
- ""
- ]
- }
- },
- {
- "listen": "test",
- "script": {
- "id": "74b3d3ac-e313-4570-93b0-bd6d64beebfa",
- "type": "text/javascript",
- "exec": [
- ""
- ]
- }
- }
- ]
-} \ No newline at end of file
+ "info": {
+ "_postman_id": "a2db5e8d-6c15-4db1-9a62-9346a414b0be",
+ "name": "ONAP-AAI-Test",
+ "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
+ },
+ "item": [
+ {
+ "name": "VServersByName",
+ "request": {
+ "method": "GET",
+ "header": [
+ {
+ "key": "X-FromAppId",
+ "value": "AAI",
+ "type": "text"
+ },
+ {
+ "key": "X-TransactionId",
+ "value": "get_aai_subscr",
+ "type": "text"
+ },
+ {
+ "key": "Accept",
+ "value": "application/json",
+ "type": "text"
+ },
+ {
+ "key": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "url": {
+ "raw": "https://10.12.5.63:30233/aai/v14/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/RegionOne/tenants/tenant/087050388b204c73a3e418dd2c1fe30b/vservers/?vserver-name=vofwl01vfw4407",
+ "protocol": "https",
+ "host": [
+ "10",
+ "12",
+ "5",
+ "63"
+ ],
+ "port": "30233",
+ "path": [
+ "aai",
+ "v14",
+ "cloud-infrastructure",
+ "cloud-regions",
+ "cloud-region",
+ "CloudOwner",
+ "RegionOne",
+ "tenants",
+ "tenant",
+ "087050388b204c73a3e418dd2c1fe30b",
+ "vservers",
+ ""
+ ],
+ "query": [
+ {
+ "key": "vserver-name",
+ "value": "vofwl01vfw4407"
+ }
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Tenants",
+ "request": {
+ "method": "GET",
+ "header": [
+ {
+ "key": "X-FromAppId",
+ "value": "AAI",
+ "type": "text"
+ },
+ {
+ "key": "X-TransactionId",
+ "value": "get_aai_subscr",
+ "type": "text"
+ },
+ {
+ "key": "Accept",
+ "value": "application/json",
+ "type": "text"
+ },
+ {
+ "key": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "url": {
+ "raw": "https://10.12.5.63:30233/aai/v14/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/RegionOne/tenants/tenant/087050388b204c73a3e418dd2c1fe30b/vservers/?vserver-name=vofwl01vfw4407",
+ "protocol": "https",
+ "host": [
+ "10",
+ "12",
+ "5",
+ "63"
+ ],
+ "port": "30233",
+ "path": [
+ "aai",
+ "v14",
+ "cloud-infrastructure",
+ "cloud-regions",
+ "cloud-region",
+ "CloudOwner",
+ "RegionOne",
+ "tenants",
+ "tenant",
+ "087050388b204c73a3e418dd2c1fe30b",
+ "vservers",
+ ""
+ ],
+ "query": [
+ {
+ "key": "vserver-name",
+ "value": "vofwl01vfw4407"
+ }
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "GenericVNFById",
+ "request": {
+ "method": "GET",
+ "header": [
+ {
+ "key": "X-FromAppId",
+ "value": "AAI",
+ "type": "text"
+ },
+ {
+ "key": "X-TransactionId",
+ "value": "get_aai_subscr",
+ "type": "text"
+ },
+ {
+ "key": "Accept",
+ "value": "application/json",
+ "type": "text"
+ },
+ {
+ "key": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "url": {
+ "raw": "https://10.12.5.63:30233/aai/v14/network/generic-vnfs/generic-vnf/2d125b4d-e120-4815-a0c7-4c4bec6c89f9",
+ "protocol": "https",
+ "host": [
+ "10",
+ "12",
+ "5",
+ "63"
+ ],
+ "port": "30233",
+ "path": [
+ "aai",
+ "v14",
+ "network",
+ "generic-vnfs",
+ "generic-vnf",
+ "2d125b4d-e120-4815-a0c7-4c4bec6c89f9"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "auth": {
+ "type": "basic",
+ "basic": [
+ {
+ "key": "password",
+ "value": "AAI",
+ "type": "string"
+ },
+ {
+ "key": "username",
+ "value": "AAI",
+ "type": "string"
+ }
+ ]
+ },
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "e35b8a0f-24b0-4990-88f2-4c83421cb4a4",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "74b3d3ac-e313-4570-93b0-bd6d64beebfa",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ]
+}
diff --git a/docs/files/vfwdt-general-workflow-sd.png b/docs/files/vfwdt-general-workflow-sd.png
new file mode 100644
index 000000000..89fa1f4ab
--- /dev/null
+++ b/docs/files/vfwdt-general-workflow-sd.png
Binary files differ
diff --git a/docs/files/vfwdt-identification-workflow-sd.png b/docs/files/vfwdt-identification-workflow-sd.png
new file mode 100644
index 000000000..83310f731
--- /dev/null
+++ b/docs/files/vfwdt-identification-workflow-sd.png
Binary files differ
diff --git a/docs/files/vfwdt-td-workflow-sd.png b/docs/files/vfwdt-td-workflow-sd.png
new file mode 100644
index 000000000..73c6305a0
--- /dev/null
+++ b/docs/files/vfwdt-td-workflow-sd.png
Binary files differ
diff --git a/docs/files/vfwdt-upgrade-workflow-sd.png b/docs/files/vfwdt-upgrade-workflow-sd.png
new file mode 100644
index 000000000..6b2ee5dfa
--- /dev/null
+++ b/docs/files/vfwdt-upgrade-workflow-sd.png
Binary files differ
diff --git a/docs/files/vfwdt-workflow-general.png b/docs/files/vfwdt-workflow-general.png
new file mode 100644
index 000000000..3ffe35db6
--- /dev/null
+++ b/docs/files/vfwdt-workflow-general.png
Binary files differ
diff --git a/docs/files/vfwdt-workflow-traffic.png b/docs/files/vfwdt-workflow-traffic.png
new file mode 100644
index 000000000..8bc6073dd
--- /dev/null
+++ b/docs/files/vfwdt-workflow-traffic.png
Binary files differ
diff --git a/docs/files/vfwdt-workflow-upgrade.png b/docs/files/vfwdt-workflow-upgrade.png
new file mode 100644
index 000000000..6e24c706d
--- /dev/null
+++ b/docs/files/vfwdt-workflow-upgrade.png
Binary files differ
diff --git a/docs/files/vpkg-preload.json b/docs/files/vpkg-preload.json
index 8a303b28d..4b2be182d 100644
--- a/docs/files/vpkg-preload.json
+++ b/docs/files/vpkg-preload.json
@@ -1,141 +1,176 @@
{
- "input": {
- "request-information": {
- "notification-url": "openecomp.org",
- "order-number": "1",
- "order-version": "1",
- "request-action": "PreloadVNFRequest",
- "request-id": "robot12"
- },
- "sdnc-request-header": {
- "svc-action": "reserve",
- "svc-notification-url": "http:\/\/openecomp.org:8080\/adapters\/rest\/SDNCNotify",
- "svc-request-id": "robot12"
- },
- "vnf-topology-information": {
- "vnf-assignments": {
- "availability-zones": [],
- "vnf-networks": [],
- "vnf-vms": []
- },
- "vnf-parameters":
- [{
- "vnf-parameter-name": "unprotected_private_net_id",
- "vnf-parameter-value": "unprotected_net_dt"
- }, {
- "vnf-parameter-name": "unprotected_private_subnet_id",
- "vnf-parameter-value": "unprotected_subnet_dt"
- }, {
- "vnf-parameter-name": "unprotected_private_net_cidr",
- "vnf-parameter-value": "192.168.10.0/24"
- }, {
- "vnf-parameter-name": "protected_private_net_id",
- "vnf-parameter-value": "protected_net_dt"
- }, {
- "vnf-parameter-name": "protected_private_net_cidr",
- "vnf-parameter-value": "192.168.20.0/24"
- }, {
- "vnf-parameter-name": "protected_private_subnet_id",
- "vnf-parameter-value": "protected_subnet_dt"
- }, {
- "vnf-parameter-name": "key_name",
- "vnf-parameter-value": "vfw_key"
- }, {
- "vnf-parameter-name": "cloud_env",
- "vnf-parameter-value": "openstack"
- }, {
- "vnf-parameter-name": "vsn_name_0",
- "vnf-parameter-value": "vfw-vsn-0-dt"
- }, {
- "vnf-parameter-name": "onap_private_net_id",
- "vnf-parameter-value": "oam_onap_y3id"
- }, {
- "vnf-parameter-name": "onap_private_subnet_id",
- "vnf-parameter-value": "oam_onap_y3id"
- }, {
- "vnf-parameter-name": "onap_private_net_cidr",
- "vnf-parameter-value": "10.0.0.0/16"
- }, {
- "vnf-parameter-name": "ext_private_net_id",
- "vnf-parameter-value": "onap_oam_ext"
- }, {
- "vnf-parameter-name": "ext_private_subnet_id",
- "vnf-parameter-value": "onap_oam_ext_sub"
- }, {
- "vnf-parameter-name": "ext_private_net_cidr",
- "vnf-parameter-value": "10.100.0.0/16"
- }, {
- "vnf-parameter-name": "nexus_artifact_repo",
- "vnf-parameter-value": "https://nexus.onap.org"
- }, {
- "vnf-parameter-name": "vfw_name_0",
- "vnf-parameter-value": "vfw-vfw-0-dt"
- }, {
- "vnf-parameter-name": "dcae_collector_port",
- "vnf-parameter-value": "8080"
- }, {
- "vnf-parameter-name": "public_net_id",
- "vnf-parameter-value": "external"
- }, {
- "vnf-parameter-name": "image_name",
- "vnf-parameter-value": "ubuntu-14-04-cloud-amd64"
- }, {
- "vnf-parameter-name": "flavor_name",
- "vnf-parameter-value": "m1.medium"
- }, {
- "vnf-parameter-name": "install_script_version",
- "vnf-parameter-value": "1.4.0"
- }, {
- "vnf-parameter-name": "vfw_private_ip_0",
- "vnf-parameter-value": "192.168.10.100"
- }, {
- "vnf-parameter-name": "vfw_private_ip_1",
- "vnf-parameter-value": "192.168.20.100"
- }, {
- "vnf-parameter-name": "vfw_private_ip_2",
- "vnf-parameter-value": "10.0.110.1"
- }, {
- "vnf-parameter-name": "vfw_private_ip_3",
- "vnf-parameter-value": "10.100.100.1"
- }, {
- "vnf-parameter-name": "vpg_private_ip_0",
- "vnf-parameter-value": "192.168.10.200"
- }, {
- "vnf-parameter-name": "vpg_private_ip_1",
- "vnf-parameter-value": "10.0.110.2"
- }, {
- "vnf-parameter-name": "vpg_private_ip_2",
- "vnf-parameter-value": "10.100.100.2"
- }, {
- "vnf-parameter-name": "vsn_private_ip_1",
- "vnf-parameter-value": "10.0.110.3"
- }, {
- "vnf-parameter-name": "vsn_private_ip_0",
- "vnf-parameter-value": "192.168.20.250"
- }, {
- "vnf-parameter-name": "vsn_private_ip_2",
- "vnf-parameter-value": "10.100.100.3"
- }, {
- "vnf-parameter-name": "demo_artifacts_version",
- "vnf-parameter-value": "1.4.0"
- }, {
- "vnf-parameter-name": "pub_key",
- "vnf-parameter-value": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC6KbTxfT368DNBzLyfTYJ0INI7mkRXc/xMpx349TXGBTQ06g2toN/p9OM188EGZd/qERRXcdnKY8lYULVIXZEoQbXTus2gTQWXy1zn5nhSkhste3L1uG+sjHySQHBj5NyOC32iNehRNu03bHi8XRBAOHZsXx6JpkltLXgWqQI91+h9i7I/4trpPyODtzz1DLIkbXv9woncwllKf4bmm+Bk6/OJKHuAlBdCLL5diZqODEZlKOyPPC9c07a835sskMgbscPrvBQigZFRNerjQlFXXCHjRwoBMvfS7kD7/R/K1fmSLtH0w4VEp5iwS9SIezgEeO2aDFPucmYG4MQDGzAz"
- }, {
- "vnf-parameter-name": "dcae_collector_ip",
- "vnf-parameter-value": "10.0.4.1"
- }, {
- "vnf-parameter-name": "sec_group",
- "vnf-parameter-value": "onap_sg_y3id"
- }
- ],
- "vnf-topology-identifier": {
- "service-type": "1c38ef10-0f14-4d78-876c-3cc10ff4e535",
- "vnf-type": "VpkgDistributetraffic..base_vpkg..module-0",
- "generic-vnf-name": "vpkg-dt-1",
- "generic-vnf-type": "vPKG-DistributeTraffic 0",
- "vnf-name": "vpkg-dt-module-1"
- }
- }
- }
+ "input": {
+ "request-information": {
+ "notification-url": "openecomp.org",
+ "order-number": "1",
+ "order-version": "1",
+ "request-action": "PreloadVNFRequest",
+ "request-id": "robot12"
+ },
+ "sdnc-request-header": {
+ "svc-action": "reserve",
+ "svc-notification-url": "http://openecomp.org:8080/adapters/rest/SDNCNotify",
+ "svc-request-id": "robot12"
+ },
+ "vnf-topology-information": {
+ "vnf-assignments": {
+ "availability-zones": [],
+ "vnf-networks": [],
+ "vnf-vms": []
+ },
+ "vnf-parameters": [
+ {
+ "vnf-parameter-name": "unprotected_private_net_id",
+ "vnf-parameter-value": "unprotected_net_dt"
+ },
+ {
+ "vnf-parameter-name": "unprotected_private_subnet_id",
+ "vnf-parameter-value": "unprotected_subnet_dt"
+ },
+ {
+ "vnf-parameter-name": "unprotected_private_net_cidr",
+ "vnf-parameter-value": "192.168.10.0/24"
+ },
+ {
+ "vnf-parameter-name": "protected_private_net_id",
+ "vnf-parameter-value": "protected_net_dt"
+ },
+ {
+ "vnf-parameter-name": "protected_private_net_cidr",
+ "vnf-parameter-value": "192.168.20.0/24"
+ },
+ {
+ "vnf-parameter-name": "protected_private_subnet_id",
+ "vnf-parameter-value": "protected_subnet_dt"
+ },
+ {
+ "vnf-parameter-name": "key_name",
+ "vnf-parameter-value": "vfw_key"
+ },
+ {
+ "vnf-parameter-name": "cloud_env",
+ "vnf-parameter-value": "openstack"
+ },
+ {
+ "vnf-parameter-name": "vsn_name_0",
+ "vnf-parameter-value": "vfw-vsn-0-dt"
+ },
+ {
+ "vnf-parameter-name": "onap_private_net_id",
+ "vnf-parameter-value": "oam_onap_y3id"
+ },
+ {
+ "vnf-parameter-name": "onap_private_subnet_id",
+ "vnf-parameter-value": "oam_onap_y3id"
+ },
+ {
+ "vnf-parameter-name": "onap_private_net_cidr",
+ "vnf-parameter-value": "10.0.0.0/16"
+ },
+ {
+ "vnf-parameter-name": "ext_private_net_id",
+ "vnf-parameter-value": "onap_oam_ext"
+ },
+ {
+ "vnf-parameter-name": "ext_private_subnet_id",
+ "vnf-parameter-value": "onap_oam_ext_sub"
+ },
+ {
+ "vnf-parameter-name": "ext_private_net_cidr",
+ "vnf-parameter-value": "10.100.0.0/16"
+ },
+ {
+ "vnf-parameter-name": "nexus_artifact_repo",
+ "vnf-parameter-value": "https://nexus.onap.org"
+ },
+ {
+ "vnf-parameter-name": "vfw_name_0",
+ "vnf-parameter-value": "vfw-vfw-0-dt"
+ },
+ {
+ "vnf-parameter-name": "dcae_collector_port",
+ "vnf-parameter-value": "8080"
+ },
+ {
+ "vnf-parameter-name": "public_net_id",
+ "vnf-parameter-value": "external"
+ },
+ {
+ "vnf-parameter-name": "image_name",
+ "vnf-parameter-value": "ubuntu-14-04-cloud-amd64"
+ },
+ {
+ "vnf-parameter-name": "flavor_name",
+ "vnf-parameter-value": "m1.medium"
+ },
+ {
+ "vnf-parameter-name": "install_script_version",
+ "vnf-parameter-value": "1.4.0"
+ },
+ {
+ "vnf-parameter-name": "vfw_private_ip_0",
+ "vnf-parameter-value": "192.168.10.100"
+ },
+ {
+ "vnf-parameter-name": "vfw_private_ip_1",
+ "vnf-parameter-value": "192.168.20.100"
+ },
+ {
+ "vnf-parameter-name": "vfw_private_ip_2",
+ "vnf-parameter-value": "10.0.110.1"
+ },
+ {
+ "vnf-parameter-name": "vfw_private_ip_3",
+ "vnf-parameter-value": "10.100.100.1"
+ },
+ {
+ "vnf-parameter-name": "vpg_private_ip_0",
+ "vnf-parameter-value": "192.168.10.200"
+ },
+ {
+ "vnf-parameter-name": "vpg_private_ip_1",
+ "vnf-parameter-value": "10.0.110.2"
+ },
+ {
+ "vnf-parameter-name": "vpg_private_ip_2",
+ "vnf-parameter-value": "10.100.100.2"
+ },
+ {
+ "vnf-parameter-name": "vsn_private_ip_1",
+ "vnf-parameter-value": "10.0.110.3"
+ },
+ {
+ "vnf-parameter-name": "vsn_private_ip_0",
+ "vnf-parameter-value": "192.168.20.250"
+ },
+ {
+ "vnf-parameter-name": "vsn_private_ip_2",
+ "vnf-parameter-value": "10.100.100.3"
+ },
+ {
+ "vnf-parameter-name": "demo_artifacts_version",
+ "vnf-parameter-value": "1.4.0"
+ },
+ {
+ "vnf-parameter-name": "pub_key",
+ "vnf-parameter-value": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC6KbTxfT368DNBzLyfTYJ0INI7mkRXc/xMpx349TXGBTQ06g2toN/p9OM188EGZd/qERRXcdnKY8lYULVIXZEoQbXTus2gTQWXy1zn5nhSkhste3L1uG+sjHySQHBj5NyOC32iNehRNu03bHi8XRBAOHZsXx6JpkltLXgWqQI91+h9i7I/4trpPyODtzz1DLIkbXv9woncwllKf4bmm+Bk6/OJKHuAlBdCLL5diZqODEZlKOyPPC9c07a835sskMgbscPrvBQigZFRNerjQlFXXCHjRwoBMvfS7kD7/R/K1fmSLtH0w4VEp5iwS9SIezgEeO2aDFPucmYG4MQDGzAz"
+ },
+ {
+ "vnf-parameter-name": "dcae_collector_ip",
+ "vnf-parameter-value": "10.0.4.1"
+ },
+ {
+ "vnf-parameter-name": "sec_group",
+ "vnf-parameter-value": "onap_sg_y3id"
+ }
+ ],
+ "vnf-topology-identifier": {
+ "service-type": "1c38ef10-0f14-4d78-876c-3cc10ff4e535",
+ "vnf-type": "VpkgDistributetraffic..base_vpkg..module-0",
+ "generic-vnf-name": "vpkg-dt-1",
+ "generic-vnf-type": "vPKG-DistributeTraffic 0",
+ "vnf-name": "vpkg-dt-module-1"
+ }
+ }
+ }
}
diff --git a/docs/files/windriver/windriver_CPU.png b/docs/files/windriver/windriver_CPU.png
new file mode 100644
index 000000000..abf334b81
--- /dev/null
+++ b/docs/files/windriver/windriver_CPU.png
Binary files differ
diff --git a/docs/files/windriver/windriver_RAM.png b/docs/files/windriver/windriver_RAM.png
new file mode 100644
index 000000000..1333f01cb
--- /dev/null
+++ b/docs/files/windriver/windriver_RAM.png
Binary files differ
diff --git a/docs/files/windriver/windriver_disks.png b/docs/files/windriver/windriver_disks.png
new file mode 100644
index 000000000..1f7fc5265
--- /dev/null
+++ b/docs/files/windriver/windriver_disks.png
Binary files differ
diff --git a/docs/files/windriver/windriver_servers.png b/docs/files/windriver/windriver_servers.png
new file mode 100644
index 000000000..39671782f
--- /dev/null
+++ b/docs/files/windriver/windriver_servers.png
Binary files differ
diff --git a/docs/files/windriver/windrivers_servers2.png b/docs/files/windriver/windrivers_servers2.png
new file mode 100644
index 000000000..90d63c35a
--- /dev/null
+++ b/docs/files/windriver/windrivers_servers2.png
Binary files differ
diff --git a/docs/heat.rst b/docs/heat.rst
deleted file mode 100644
index 78796df95..000000000
--- a/docs/heat.rst
+++ /dev/null
@@ -1,236 +0,0 @@
-ONAP HEAT Template
-------------------
-
-Source files
-~~~~~~~~~~~~
-
-- Template file: https://git.onap.org/demo/plain/heat/ONAP/onap_openstack.yaml
-- Environment file: https://git.onap.org/demo/plain/heat/ONAP/onap_openstack.env
-
-Description
-~~~~~~~~~~~
-
-The ONAP HEAT template spins up the entire ONAP platform. The template,
-onap_openstack.yaml, comes with an environment file,
-onap_openstack.env, in which all the default values are defined.
-
-.. note::
- onap_openstack.yaml AND onap_openstack.env ARE THE HEAT TEMPLATE
- AND ENVIRONMENT FILE CURRENTLY SUPPORTED.
- onap_openstack_float.yaml/env AND onap_openstack_nofloat.yaml/env
- AREN'T UPDATED AND THEIR USAGE IS NOT SUGGESTED.
-
-The HEAT template is composed of two sections: (i) parameters, and (ii)
-resources.
-The parameter section contains the declaration and
-description of the parameters that will be used to spin up ONAP, such as
-public network identifier, URLs of code and artifacts repositories, etc.
-The default values of these parameters can be found in the environment
-file.
-
-The resource section contains the definition of:
-
-- ONAP Private Management Network, which ONAP components use to communicate with each other and with VNFs
-- ONAP Virtual Machines (VMs)
-- Public/private key pair used to access ONAP VMs
-- Virtual interfaces towards the ONAP Private Management Network
-- Disk volumes.
-
-Each VM specification includes Operating System image name, VM size
-(i.e. flavor), VM name, etc. Each VM has two virtual network interfaces:
-one towards the public network and one towards the ONAP Private
-Management network, as described above. Furthermore, each VM runs a
-post-instantiation script that downloads and installs software
-dependencies (e.g. Java JDK, gcc, make, Python, ...) and ONAP software
-packages and docker containers from remote repositories.
-
-When the HEAT template is executed, the Openstack HEAT engine creates
-the resources defined in the HEAT template, based on the parameters
-values defined in the environment file.
-
-Environment file
-~~~~~~~~~~~~~~~~
-
-Before running HEAT, it is necessary to customize the environment file.
-Indeed, some parameters, namely public_net_id, pub_key,
-openstack_tenant_id, openstack_username, and openstack_api_key,
-need to be set depending on the user's environment:
-
-**Global parameters**
-
-::
-
- public_net_id: PUT YOUR NETWORK ID/NAME HERE
- pub_key: PUT YOUR PUBLIC KEY HERE
- openstack_tenant_id: PUT YOUR OPENSTACK PROJECT ID HERE
- openstack_username: PUT YOUR OPENSTACK USERNAME HERE
- openstack_api_key: PUT YOUR OPENSTACK PASSWORD HERE
- horizon_url: PUT THE HORIZON URL HERE
- keystone_url: PUT THE KEYSTONE URL HERE (do not include version number)
-
-openstack_region parameter is set to RegionOne (OpenStack default). If
-your OpenStack is using another Region, please modify this parameter.
-
-public_net_id is the unique identifier (UUID) or name of the public
-network of the cloud provider. To get the public_net_id, use the
-following OpenStack CLI command (ext is the name of the external
-network, change it with the name of the external network of your
-installation)
-
-::
-
- openstack network list | grep ext | awk '{print $2}'
-
-pub_key is string value of the public key that will be installed in
-each ONAP VM. To create a public/private key pair in Linux, please
-execute the following instruction:
-
-::
-
- user@ubuntu:~$ ssh-keygen -t rsa
-
-The following operations to create the public/private key pair occur:
-
-::
-
- Generating public/private rsa key pair.
- Enter file in which to save the key (/home/user/.ssh/id_rsa):
- Created directory '/home/user/.ssh'.
- Enter passphrase (empty for no passphrase):
- Enter same passphrase again:
- Your identification has been saved in /home/user/.ssh/id_rsa.
- Your public key has been saved in /home/user/.ssh/id_rsa.pub.
-
-openstack_username, openstack_tenant_id (password), and
-openstack_api_key are user's credentials to access the
-OpenStack-based cloud.
-
-**Images and flavors parameters**
-
-::
-
- ubuntu_1404_image: PUT THE UBUNTU 14.04 IMAGE NAME HERE
- ubuntu_1604_image: PUT THE UBUNTU 16.04 IMAGE NAME HERE
- flavor_small: PUT THE SMALL FLAVOR NAME HERE
- flavor_medium: PUT THE MEDIUM FLAVOR NAME HERE
- flavor_large: PUT THE LARGE FLAVOR NAME HERE
- flavor_xlarge: PUT THE XLARGE FLAVOR NAME HERE
- flavor_xxlarge: PUT THE XXLARGE FLAVOR NAME HERE
-
-To get the images in your OpenStack environment, use the following
-OpenStack CLI command:
-
-::
-
- openstack image list | grep 'ubuntu'
-
-To get the flavor names used in your OpenStack environment, use the
-following OpenStack CLI command:
-
-::
-
- openstack flavor list
-
-**DNS parameters**
-
-::
-
- dns_list: PUT THE ADDRESS OFTHE EXTERNAL DNS HERE (e.g. a comma-separated list of IP addresses in your /etc/resolv.conf in UNIX-based Operating Systems). THIS LIST MUST INCLUDE THE DNS SERVER THAT OFFERS DNS AS AS SERVICE (see DCAE section below for more details)
- external_dns: PUT THE FIRST ADDRESS OF THE EXTERNAL DNS LIST HERE oam_network_cidr: 10.0.0.0/16
-
-You can use the Google Public DNS 8.8.8.8 and 4.4.4.4 address or your internal DNS servers
-
-**DCAE Parameters**
-
-DCAE spins up ONAP's data collection and analytics system in two phases.
-The first is the launching of a bootstrap VM that is specified in the
-ONAP Heat template. This VM requires a number of deployment specific
-conifiguration parameters being provided so that it can subsequently
-bring up the DCAE system. There are two groups of parameters.
-
-The first group relates to the launching of DCAE VMs, including parameters such as
-the keystone URL and additional VM image IDs/names. DCAE VMs are
-connected to the same internal network as the rest of ONAP VMs, but
-dynamically spun up by the DCAE core platform. Hence these parameters
-need to be provided to DCAE. Note that although DCAE VMs will be
-launched in the same tenant as the rest of ONAP, because DCAE may use
-MultiCloud node as the agent for interfacing with the underying cloud,
-it needs a separate keystone URL (which points to MultiCloud node
-instead of the underlying cloud).
-
-The second group of configuration parameters relate to DNS As A Service support (DNSaaS).
-DCAE requires DNSaaS for registering its VMs into organization-wide DNS service. For
-OpenStack, DNSaaS is provided by Designate. Designate support can be
-provided via an integrated service endpoint listed under the service
-catalog of the OpenStack installation; or proxyed by the ONAP MultiCloud
-service. For the latter case, a number of parameters are needed to
-configure MultiCloud to use the correct Designate service. These
-parameters are described below:
-
-::
-
- dcae_keystone_url: PUT THE KEYSTONE URL OF THE OPENSTACK INSTANCE WHERE DCAE IS DEPLOYED (Note: put the MultiCloud proxy URL if the DNSaaS is proxyed by MultiCloud)
- dcae_centos_7_image: PUT THE CENTOS7 IMAGE ID/NAME AVAILABLE AT THE OPENSTACK INSTANCE WHERE DCAE IS DEPLOYED
- dcae_security_group: PUT THE SECURITY GROUP ID/NAME TO BE USED AT THE OPENSTACK INSTANCE WHERE DCAE IS DEPLOYED
- dcae_key_name: PUT THE ACCESS KEY-PAIR NAME REGISTER AT THE OPENSTACK INSTANCE WHERE DCAE IS DEPLOYED
- dcae_public_key: PUT THE PUBLIC KEY OF A KEY-PAIR USED FOR DCAE BOOTSTRAP NODE TO COMMUNICATE WITH DCAE VMS
- dcae_private_key: PUT THE PRIVATE KEY OF A KEY-PAIR USED FOR DCAE BOOTSTRAP NODE TO COMMUNICATE WITH DCAE VMS
-
- dnsaas_config_enabled: true or false FOR WHETHER DNSAAS IS PROXYED
- dnsaas_region: PUT THE REGION OF THE OPENSTACK INSTANCE WHERE DNSAAS IS PROVIDED
- dnsaas_tenant_id: PUT THE TENANT ID/NAME OF THE OPENSTACK INSTANCE WHERE DNSAAS IS PROVIDED
- dnsaas_keystone_url: PUT THE KEYSTONE URL OF THE OPENSTACK INSTANCE WHERE DNSAAS IS PROVIDED
- dnsaas_username: PUT THE USERNAME OF THE OPENSTACK INSTANCE WHERE DNSAAS IS PROVIDED
- dnsaas_password: PUT THE PASSWORD OF THE OPENSTACK INSTANCE WHERE DNSAAS IS PROVIDED
-
-Instantiation
-~~~~~~~~~~~~~
-
-The ONAP platform can be instantiated via Horizon (OpenStack dashboard)
-or Command Line.
-
-**Instantiation via Horizon:**
-
-- Login to Horizon URL with your personal credentials
-- Click "Stacks" from the "Orchestration" menu
-- Click "Launch Stack"
-- Paste or manually upload the HEAT template file (onap_openstack.yaml) in the "Template Source" form
-- Paste or manually upload the HEAT environment file (onap_openstack.env) in the "Environment Source" form
-- Click "Next" - Specify a name in the "Stack Name" form
-- Provide the password in the "Password" form
-- Click "Launch"
-
-**Instantiation via Command Line:**
-
-- Install the HEAT client on your machine, e.g. in Ubuntu (ref. http://docs.openstack.org/user-guide/common/cli-install-openstack-command-line-clients.html):
-
-::
-
- apt-get install python-dev python-pip
- pip install python-heatclient # Install heat client
- pip install python-openstackclient # Install the Openstack client to support multiple services
-
-- Create a file (named i.e. ~/openstack/openrc) that sets all the
- environmental variables required to access Rackspace:
-
-::
-
- export OS_AUTH_URL=INSERT THE AUTH URL HERE
- export OS_USERNAME=INSERT YOUR USERNAME HERE
- export OS_TENANT_ID=INSERT YOUR TENANT ID HERE
- export OS_REGION_NAME=INSERT THE REGION HERE
- export OS_PASSWORD=INSERT YOUR PASSWORD HERE
-
-- Run the script from command line:
-
-::
-
- source ~/openstack/openrc
-
-- In order to install the ONAP platform, type:
-
-::
-
- heat stack-create STACK_NAME -f PATH_TO_HEAT_TEMPLATE(YAML FILE) -e PATH_TO_ENV_FILE # Old HEAT client, OR
- openstack stack create -t PATH_TO_HEAT_TEMPLATE(YAML FILE) -e PATH_TO_ENV_FILE STACK_NAME # New Openstack client
-
diff --git a/docs/index.rst b/docs/index.rst
index 57ac59220..eb10f12a8 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -1,11 +1,13 @@
-.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. SPDX-License-Identifier: CC-BY-4.0
+.. _master_index:
INTEGRATION
===========
-The Integration project provides the following artifacts:
-- Heat template to deploy the virtual ressources needed for the ONAP deployment
-- Test suites and tools to check the various ONAP components based on Robot Framework
-- Artifacts and documentation for the use-case deployments
+.. toctree::
+ :maxdepth: 1
-.. include:: onap-oom-heat.rst
+ release-notes.rst
+ docs_usecases_release.rst
+ integration-resources.rst
+ integration-missions.rst
diff --git a/docs/integration-CICD.rst b/docs/integration-CICD.rst
new file mode 100644
index 000000000..5ac342008
--- /dev/null
+++ b/docs/integration-CICD.rst
@@ -0,0 +1,53 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
+.. integration-CICD:
+
+.. integration_main-doc:
+
+CI/CD
+=====
+
+.. important::
+ Integration team deals with 2 different CI/CD systems.
+
+ - Jenkins CI/CD, CI managed by LF IT and CD by Integration team
+ - GitLab-CI managed by Integration team
+
+Continuous Integration
+----------------------
+
+The CI part provides the following features:
+
+- Repository verification (format of the INFO.yaml)
+- Patchset verification thanks to json/yaml/python/go/rst/md linters. These Jenkins
+ verification jobs are hosted in the ci-management repository. They can vote
+ +1/-1 on patchset submission. Integration team systematically enables linters
+ on any new repository
+- Docker build: Integration team builds testsuite dockers and xtesting dockers.
+ These dockers are built then pushed to Nexus through a jjb also hosted in the
+ ci-management repository.
+
+The different verification chains are defined in https://jenkins.onap.org/:
+
+- CSIT: https://jenkins.onap.org/view/CSIT/
+- testsuite: https://jenkins.onap.org/view/testsuite/
+- integration: https://jenkins.onap.org/view/integration/
+- testsuite-robot-utils: https://jenkins.onap.org/view/testsuite-robot-utils/
+
+The Jenkins jobs (jjb) are hosted in https://git.onap.org/ci-management/.
+
+Continuous Deployment
+---------------------
+
+GitLab CD
+.........
+
+This CD is leveraging public gitlab-ci mechanism and used to deploy several ONAP
+labs:
+
+- Daily Master: daily run using OOM Master
+- Weekly Master: run once a week with longer tests
+- Gating: run on OOM, clamp or SO patchset submission. It means a full ONAP
+ deployment on demand based on new patchset declared in gerrit.
+
+See :ref:`Integration CI guideline <integration-ci>` for details.
diff --git a/docs/integration-labs.rst b/docs/integration-labs.rst
new file mode 100644
index 000000000..49915c846
--- /dev/null
+++ b/docs/integration-labs.rst
@@ -0,0 +1,38 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
+.. integration-labs:
+
+.. integration_main-doc:
+
+Integration Labs
+================
+
+.. important::
+ The Integration team deals with several community labs:
+
+ - The Azure staging lab
+ - The DT lab
+ - The University of New Hampshire lab
+
+Additionally, integration contributors may deal with their own labs, pushing
+results to the integration portal (see DT http://testresults.opnfv.org/onap-integration/dt/dt.html)
+
+Azure staging lab
+-----------------
+
+An additional Azure staging lab has been created for Guilin. It is installed
+like any of the daily/weekly/gating labs (see the CI/CD sections).
+Contact the Integration team to get access.
+
+DT lab
+------
+
+The DT lab reported Master daily results in addition to Istanbul daily results.
+Results are shared with the community in
+`<https://logs.onap.org/onap-integration/daily/onap-master-daily-dell/>`_
+
+University of New Hampshire lab
+-------------------------------
+
+Lab for community use. See `ONAP UNH lab <https://wiki.onap.org/display/DW/ONAP+UNH-IOL+Lab>`_
+for more information.
diff --git a/docs/integration-missions.rst b/docs/integration-missions.rst
new file mode 100644
index 000000000..421519859
--- /dev/null
+++ b/docs/integration-missions.rst
@@ -0,0 +1,44 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
+.. _integration-missions:
+
+Integration Missions
+====================
+
+.. important::
+ The Integration project is in charge of:
+
+ - Providing testing environment
+ - Supporting the use case teams
+ - Managing ONAP CI/CD chains
+ - Developing tests
+ - Providing baseline images
+ - Validating the ONAP releases
+
+The different activities may be summarized as follows (proportions are indicative):
+
+- Community support
+- Lab support
+- Use case support
+- Test development
+- Management of daily/weekly CI chains
+- Build baseline images
+- Automate tests
+- Validate the release
+
+For each release, the integration team provides the following artifacts:
+
+- A daily CI chain corresponding to the release
+- Staging labs to perform the pairwise testing (when not automated) and support
+ the use case teams
+- Baseline Java and Python images
+- oparent library to manage Java dependencies
+- Test suites and tools to check the various ONAP components
+- Use-case documentation and artifacts
+- A testsuite docker included in the ONAP cluster to execute the robot based tests
+- Configuration files (scripts, Heat templates, CSAR files) to help installing
+ and testing ONAP
+- Wiki release follow-up tables (blocking points, docker versions,...)
+
+Please see the `integration wiki page <https://wiki.onap.org/display/DW/Integration+Project>`_
+for details.
diff --git a/docs/integration-repositories.rst b/docs/integration-repositories.rst
new file mode 100644
index 000000000..2501c7321
--- /dev/null
+++ b/docs/integration-repositories.rst
@@ -0,0 +1,115 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
+.. integration-repositories:
+
+Integration repositories
+========================
+
+.. important::
+ The Integration project deals with lots of code repositories.
+
+Integration
+-----------
+
+The integration repository is the historical repository.
+As a consequence it includes several elements in the same repository:
+
+- Deployment scripts (deployment directory)
+- Tests: the first non robot tests (security, vCPE,..)
+- Simulators/emulators (test/mocks)
+- Integration and use cases documentation (docs)
+- Tools (bootstrap, S3Ptools)
+
+Since the Frankfurt version, we have created more, smaller repositories,
+especially for the use cases and the simulators.
+It shall help improving the maintenance of the different elements.
+It shall also help identifying, leveraging and adopting existing simulators
+rather than systematically re-inventing the wheel.
+
+.. note::
+ There is a new section of repositories - pipelines. These repositories were migrated from the Orange GitLab project.
+ Code in these repositories is planned to be used to run ONAP test GitLab CI/CD pipelines in the `GitLab ONAP integration group <https://gitlab.com/onap/integration>`__
+
+.. csv-table:: Integration Repositories
+ :file: ./files/csv/repo-integration.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
+
+.. csv-table:: Integration Simulators
+ :file: ./files/csv/repo-simulators.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
+
+.. csv-table:: Integration Pipelines
+ :file: ./files/csv/repo-pipelines.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
+
+Testsuite
+---------
+
+The testsuite repository and its sub repositories deal exclusively with tests.
+
+The testsuite repository includes all the robotframework scripts.
+The robot pod that can be installed as part of the ONAP cluster is built from
+this repository.
+
+Several tooling repositories are associated with the robot tests (heatbridge,
+robot-python-testing-utils).
+
+.. csv-table:: Testsuite Repositories
+ :file: ./files/csv/repo-testsuite.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
+
+Demo
+----
+
+In this repository you will find any artifacts needed for demo, PoC and use cases
+if they do not have their own repository (mainly old use cases).
+
+.. csv-table:: Demo Repository
+ :file: ./files/csv/repo-demo.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
+
+Oparent
+-------
+
+.. csv-table:: Oparent Repository
+ :file: ./files/csv/repo-oparent.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
+
+Archived repositories
+---------------------
+
+Some repositories are archived and marked as "read-only" due to the lack of any activity in them.
+
+.. csv-table:: Archived Repositories
+ :file: ./files/csv/repo-archived.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
+
+
+External repositories
+---------------------
+
+Additionally, the Integration team also deals with external gitlab.com
+repositories.
+
+.. important::
+ All of these repositories should be migrated into ONAP's Gerrit
+
+.. csv-table:: Integration external repositories table
+ :file: ./files/csv/repo-integration-external.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
diff --git a/docs/integration-resources.rst b/docs/integration-resources.rst
new file mode 100644
index 000000000..4af90c15d
--- /dev/null
+++ b/docs/integration-resources.rst
@@ -0,0 +1,16 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
+.. _integration-resources:
+
+Integration Resources
+=====================
+
+.. toctree::
+ :glob:
+
+ integration-repositories.rst
+ integration-labs.rst
+ integration-tests.rst
+ integration-CICD.rst
+ integration-simulators.rst
+ integration-tooling.rst
diff --git a/docs/integration-s3p.rst b/docs/integration-s3p.rst
index f42b48911..13e36c17a 100644
--- a/docs/integration-s3p.rst
+++ b/docs/integration-s3p.rst
@@ -1,99 +1,207 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
.. _integration-s3p:
-ONAP Maturity Testing Notes
----------------------------
+:orphan:
-For the El Alto release, ONAP continues to improve in multiple
-areas of Scalability, Security, Stability and Performance (S3P)
-metrics.
+Stability
+=========
+.. important::
+ The Release stability has been evaluated by:
+ - The daily CI/CD chain
+ - Stability tests
-Stability
-=========
+.. note:
+ The scope of these tests remains limited and does not provide a full set of
+ KPIs to determinate the limits and the dimensioning of the ONAP solution.
+
+CI results
+----------
+
+As usual, a daily CI chain dedicated to the release is created after RC0.
+
+The daily results can be found in `LF DT lab daily results web site <https://logs.onap.org/onap-integration/daily/onap-daily-dt-oom-master/>`_.
+
+.. image:: files/s3p/jakarta-dashboard.png
+ :align: center
+
+
+Infrastructure Healthcheck Tests
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+These tests deal with the Kubernetes/Helm tests on ONAP cluster.
+
+The global expected criteria is **100%**.
+
+The onap-k8s and onap-k8s-teardown, providing a snapshot of the onap namespace
+in Kubernetes, as well as the onap-helm tests are expected to be PASS.
+
+.. image:: files/s3p/istanbul_daily_infrastructure_healthcheck.png
+ :align: center
-** TODO **
+Healthcheck Tests
+~~~~~~~~~~~~~~~~~
-Integration Stability Testing verifies that the ONAP platform remains fully
-functional after running for an extended amounts of time.
-This is done by repeated running tests against an ONAP instance for a period of
-72 hours.
+These tests are the traditional robot healthcheck tests and additional tests
+dealing with a single component.
-Methodology
+The expectation is **100% OK**.
+
+.. image:: files/s3p/istanbul_daily_healthcheck.png
+ :align: center
+
+Smoke Tests
~~~~~~~~~~~
-The Stability Test has two main components:
+These tests are end to end and automated use case tests.
+See the :ref:`the Integration Test page <integration-tests>` for details.
+
+The expectation is **100% OK**.
+
+.. figure:: files/s3p/istanbul_daily_smoke.png
+ :align: center
+
+Security Tests
+~~~~~~~~~~~~~~
+
+These tests are tests dealing with security.
+See the :ref:`the Integration Test page <integration-tests>` for details.
+
+Waivers have been granted on different projects for the different tests.
+The list of waivers can be found in
+https://git.onap.org/integration/seccom/tree/waivers?h=jakarta.
-- Running "ete stability72hr" Robot suite periodically. This test suite
- verifies that ONAP can instantiate vDNS, vFWCL, and VVG.
-- Set up vFW Closed Loop to remain running, then check periodically that the
- closed loop functionality is still working.
+nodeport_check_certs test is expected to fail. Even though tremendous progress
+has been made in this area, some certificates (unmaintained, upstream or integration
+robot pods) are still not correct due to bad certificate issuers (Root CA
+certificate non valid) or extra long validity. Most of the certificates have
+been installed using cert-manager and will be easily renewable.
+The expectation is **80% OK**. The criteria is met.
-Results: 100% PASS
+.. figure:: files/s3p/istanbul_daily_security.png
+ :align: center
+
+Stability tests
+---------------
+
+Stability tests have been performed on Istanbul release:
+
+- SDC stability test
+- Parallel instantiation test
+
+The results can be found in the weekly backend logs
+https://logs.onap.org/onap-integration/weekly/onap_weekly_pod4_istanbul.
+
+SDC stability test
~~~~~~~~~~~~~~~~~~
-=================== ======== ========== ======== ========= =========
-Test Case Attempts Env Issues Failures Successes Pass Rate
-=================== ======== ========== ======== ========= =========
-Stability 72 hours 72 34 0 38 100%
-vFW Closed Loop 75 7 0 68 100%
-**Total** 147 41 0 106 **100%**
-=================== ======== ========== ======== ========= =========
-
-Detailed results can be found at https://wiki.onap.org/display/DW/Dublin+Release+Stability+Testing+Status .
-
-.. note::
- - Overall results were good. All of the test failures were due to
- issues with the unstable environment and tooling framework.
- - JIRAs were created for readiness/liveness probe issues found while
- testing under the unstable environment. Patches applied to oom and
- testsuite during the testing helped reduce test failures due to
- environment and tooling framework issues.
- - The vFW Closed Loop test was very stable and self recovered from
- environment issues.
-
-
-Resilience
-==========
-
-Integration Resilience Testing verifies that ONAP can automatically recover
-from failures of any of its components.
-This is done by deleting the ONAP pods that are involved in each particular Use
-Case flow and then checking that the Use Case flow can again be executed
-successfully after ONAP recovers.
-
-Methodology
-~~~~~~~~~~~
-For each Use Case, a list of the ONAP components involved is identified.
-The pods of each of those components are systematically deleted one-by-one;
-after each pod deletion, we wait for the pods to recover, then execute the Use
-Case again to verify successful ONAP platform recovery.
-
-
-Results: 99.4% PASS
-~~~~~~~~~~~~~~~~~~~
-=============================== ======== ========== ======== ========= =========
-Use Case Attempts Env Issues Failures Successes Pass Rate
-=============================== ======== ========== ======== ========= =========
-VNF Onboarding and Distribution 49 0 0 49 100%
-VNF Instantiation 64 19 1 44 97.8%
-vFW Closed Loop 66 0 0 66 100%
-**Total** 179 19 1 159 **99.4%**
-=============================== ======== ========== ======== ========= =========
-
-Detailed results can be found at https://wiki.onap.org/display/DW/Dublin+Release+Resilience+Testing+Status .
-
-
-Deployability
-=============
-
-Smaller ONAP container images footprint reduces resource consumption,
-time to deploy, time to heal, as well as scale out resources.
-
-Minimizing the footprint of ONAP container images reduces resource
-consumption, time to deploy, time and time to heal. It also reduces
-the resources needed to scale out and time to scale in. For those
-reasons footprint minimization postively impacts the scalability of
-the ONAP platform. Smaller ONAP container images footprint reduces
-resource consumption, time to deploy, time to heal, as well as scale
-out resources.
+
+In this test, we consider the basic_onboard automated test and we run 5
+simultaneous onboarding procedures in parallel during 24h.
+
+The basic_onboard test consists of the following steps:
+
+- [SDC] VendorOnboardStep: Onboard vendor in SDC.
+- [SDC] YamlTemplateVspOnboardStep: Onboard vsp described in YAML file in SDC.
+- [SDC] YamlTemplateVfOnboardStep: Onboard vf described in YAML file in SDC.
+- [SDC] YamlTemplateServiceOnboardStep: Onboard service described in YAML file
+ in SDC.
+
+The test has been initiated on the Istanbul weekly lab on the 14th of November.
+
+As already observed in daily|weekly|gating chain, we got race conditions on
+some tests (https://jira.onap.org/browse/INT-1918).
+
+The success rate is expected to be above 95% on the first 100 model uploads
+and above 80% until more than 500 models have been onboarded.
+
+We may also notice that the function test_duration=f(time) increases
+continuously. At the beginning the test takes about 200s, 24h later the same
+test will take around 1000s.
+Finally after 36h, the SDC systematically answers with a 500 HTTP answer code
+explaining the linear decrease of the success rate.
+
+The following graphs provide a good view of the SDC stability test.
+
+.. image:: files/s3p/istanbul_sdc_stability.png
+ :align: center
+
+.. csv-table:: S3P Onboarding stability results
+ :file: ./files/csv/s3p-sdc.csv
+ :widths: 60,20,20,20
+ :delim: ;
+ :header-rows: 1
+
+.. important::
+ The onboarding duration increases linearly with the number of on-boarded
+ models, which is already reported and may be due to the fact that models
+ cannot be deleted. In fact the test client has to retrieve the list of
+ models, which is continuously increasing. No limit tests have been
+ performed.
+ However, 1085 on-boarded models is already a very high figure regarding the
+ possible ONAP usage.
+ Moreover the mean duration time is much lower in Istanbul.
+ It explains why it was possible to run 35% more tests within the same
+ time frame.
+
+Parallel instantiations stability test
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The test is based on the single test (basic_vm) that can be described as follows:
+
+- [SDC] VendorOnboardStep: Onboard vendor in SDC.
+- [SDC] YamlTemplateVspOnboardStep: Onboard vsp described in YAML file in SDC.
+- [SDC] YamlTemplateVfOnboardStep: Onboard vf described in YAML file in SDC.
+- [SDC] YamlTemplateServiceOnboardStep: Onboard service described in YAML file
+ in SDC.
+- [AAI] RegisterCloudRegionStep: Register cloud region.
+- [AAI] ComplexCreateStep: Create complex.
+- [AAI] LinkCloudRegionToComplexStep: Connect cloud region with complex.
+- [AAI] CustomerCreateStep: Create customer.
+- [AAI] CustomerServiceSubscriptionCreateStep: Create customer's service
+ subscription.
+- [AAI] ConnectServiceSubToCloudRegionStep: Connect service subscription with
+ cloud region.
+- [SO] YamlTemplateServiceAlaCarteInstantiateStep: Instantiate service described
+ in YAML using SO a'la carte method.
+- [SO] YamlTemplateVnfAlaCarteInstantiateStep: Instantiate vnf described in YAML
+ using SO a'la carte method.
+- [SO] YamlTemplateVfModuleAlaCarteInstantiateStep: Instantiate VF module
+ described in YAML using SO a'la carte method.
+
+10 instantiation attempts are done simultaneously on the ONAP solution during 24h.
+
+The results can be described as follows:
+
+.. image:: files/s3p/istanbul_instantiation_stability_10.png
+ :align: center
+
+.. csv-table:: S3P Instantiation stability results
+ :file: ./files/csv/s3p-instantiation.csv
+ :widths: 60,20,20,20
+ :delim: ;
+ :header-rows: 1
+
+The results are good with a success rate above 95%. After 24h more than 1300
+VNFs have been created and deleted.
+
+As for SDC, we can observe a linear increase of the test duration. This issue
+has been reported since Guilin. For SDC as it is not possible to delete the
+models, it is possible to imagine that the duration increases due to the fact
+that the database of models continuously increases. Therefore the client has
+to retrieve an always bigger list of models.
+But for the instantiations, it is not the case as the references
+(module, VNF, service) are cleaned at the end of each test and all the tests
+use the same model. Then the duration of an instantiation test should be
+almost constant, which is not the case. Further investigations are needed.
+
+.. important::
+ The test has been executed with the mariadb-galera replicaset set to 1
+ (3 by default). With this configuration the results during 24h are very
+ good. When set to 3, the error rate is higher and after some hours
+ most of the instantiations are failing.
+ However, even with a replicaset set to 1, a test on Master weekly chain
+ showed that the system is hitting another limit after about 35h
+ (https://jira.onap.org/browse/SO-3791).
diff --git a/docs/integration-simulators.rst b/docs/integration-simulators.rst
new file mode 100644
index 000000000..7f6688f05
--- /dev/null
+++ b/docs/integration-simulators.rst
@@ -0,0 +1,111 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
+.. integration-tooling:
+
+.. _integration-simulators:
+
+Simulators
+==========
+
+Simulators are regularly created for use cases. The goal of this section is to:
+
+- Highlight the existing Simulators
+- Provide recommendations when starting developing a new simulator
+
+.. important::
+ Before developing a new simulator, check that it does not exist...and
+ refactor/contribute to existing simulators rather than recreating new ones.
+
+
+Existing simulators
+-------------------
+
+.. csv-table:: Simulators
+ :file: ./files/csv/simulators.csv
+ :widths: 10,50,20,20
+ :delim: ;
+ :header-rows: 1
+
+
+Recommendations
+---------------
+
+The simulator code
+..................
+
+We recommend to create a dedicated repository (ask Integration team).
+
+.. csv-table:: Simulator repositories
+ :file: ./files/csv/repo-simulators.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
+
+
+Dockerization
+.............
+
+From this repository, create a jenkins job to automatically build the dockers.
+
+Helm Chart
+..........
+
+It is recommended to create a helm chart in order to run the simulators.
+
+
+Wrapper for simulators
+......................
+
+1. In order to deploy the Helm release with a simulator, place a YAML file
+describing the Helm release in src/onaptests/templates/helm_charts.
+
+ The structure of the YAML file should be like in the example below.
+ Dependencies contain all the charts that need to be pulled.
+
+ .. code-block:: YAML
+
+ # Helm release information
+ api_version: # API_VERSION
+ app_version: # APP_VERSION
+ chart_name: # SIMULATOR_NAME
+ version: # CHART_VERSION
+
+ # Helm charts that need to be pulled
+ dependencies:
+ - name: # SIMULATOR_NAME
+ version: # CHART_VERSION
+ repository: # URL
+ local_repo_name: # REPO_NAME
+
+2. Install the Helm release:
+
+ .. code-block:: Python
+
+ from onaptests.steps.wrapper.helm_charts import HelmChartStep
+
+ chart = HelmChartStep(
+ cleanup = BOOLEAN,
+ chart_info_file = YAML_FILE_NAME # name, not the path
+ )
+ chart.execute()
+
+3. Start the simulator via an API call:
+
+ .. code-block:: Python
+
+ start = SimulatorStartStep(
+ cleanup = BOOLEAN,
+ https = BOOLEAN,
+ host = HOSTNAME,
+ port = PORT,
+ endpoint = START_ENDPOINT, # if applicable
+ method = REQUEST_METHOD, # GET, POST etc.
+ data = PAYLOAD # {"json": {...}, ...}
+ )
+ start.execute()
+
+4. Undeploy the Helm release:
+
+ .. code-block:: Python
+
+ chart.cleanup()
diff --git a/docs/integration-tests.rst b/docs/integration-tests.rst
new file mode 100644
index 000000000..6453e55ef
--- /dev/null
+++ b/docs/integration-tests.rst
@@ -0,0 +1,159 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
+.. _integration-tests:
+
+Tests
+=====
+
+.. important::
+ Integration is in charge of several types of tests:
+
+ - Use Cases: developed by use case teams, usually complex, demonstrating high value capabilities of ONAP. They may be partially automated and even
+ integrated in CD.
+ - CSIT Tests: functional tests created by the projects, partially hosted in CSIT repository
+ - Automatic Test Cases: these use cases are usually more simple and aim to validate that ONAP is working properly.
+ These tests have been developed to validate ONAP as a software solution.
+ In theory all the main functions shall be covered by such tests in order to have more robust CI/CD and then avoid regressions.
+ These tests are usually developed and maintained by the integration team.
+
+We may also note that the development of the test framework python-onapsdk
+follows standard development quality rules and imposes the creation of
+unit/functional/integration tests.
+As an example python-onapsdk requires a unit test coverage of 98% before merging
+a new feature, which is far above the project criteria in SonarCloud today.
+
+Use Cases
+---------
+
+The use cases of the last release are described in
+:ref:`Verified Use cases <docs_usecases_release>`.
+
+CSIT Tests
+----------
+
+The CSIT tests are functional tests executed by the projects on mocked
+environment to validate their components.
+Historically they were hosted in a CSIT repository.
+
+Integration team invited the projects to bring such tests back to their home
+repository for 2 main reasons:
+
+- integration cannot be a bottleneck: +2/merge from integration needed for each
+ project
+- most of the tests are abandoned and not maintained when hosted in a third party
+ repository leading to CI/CD resource waste and misleading test reporting
+
+Automated Tests
+---------------
+
+These tests are run daily/weekly on each new gate (new patchset in OOM, CLAMP
+or SO). They can be in any language (bash, go, python,...), leveraging any test
+framework (robotframework, MTS, python-onapsdk).
+They are all embedded in `xtesting <https://pypi.org/project/xtesting/>`_ dockers.
+
+.. hint::
+ Automatic tests are currently divided in 4 different categories:
+
+ - infrastructure-healthcheck: tests from OOM checking the ONAP namespace, certificates...
+ - healthcheck: basic tests on components
+ - smoke tests: end to end tests
+ - security tests
+
+A dashboard summarizing the status and providing the links to the test result
+page or the logs is automatically created at the end of the execution of the
+tests.
+
+.. figure:: files/tests/test-dashboard.png
+
+ Test dashboard (Guilin version)
+
+All the pages and artifacts are pushed to LF backend:
+
+- Daily chains: https://logs.onap.org/onap-integration/daily
+- Weekly chains: https://logs.onap.org/onap-integration/weekly
+- Gating chains: the result link is indicated in gerrit
+
+A video has been recorded to help launching some of the automated tests on ONAP Guilin.
+See `Running ONAP tests in Guilin Video <https://www.youtube.com/watch?v=ABvuJfyGDmw>`_
+
+Infrastructure Healthcheck Tests
+................................
+
+.. csv-table:: Infrastructure Healthcheck Tests
+ :file: ./files/csv/tests-infrastructure-healthcheck.csv
+ :widths: 20,40,20,20
+ :delim: ;
+ :header-rows: 1
+
+See `Infrastructure Healthcheck README <https://git.onap.org/integration/xtesting/tree/infra-healthcheck/README.md>`_
+to adapt then run infrastructure healthcheck tests on your own system.
+
+Please note that the onap-k8s is run 2 times in CD chains. It is run just after
+the installation (onap-k8s) and at the end of the test execution (onap-k8s-teardown)
+in order to collect the logs of the different components during the test execution.
+
+.. figure:: files/tests/test-onap-k8s.png
+
+Healthcheck Tests
+.................
+
+.. csv-table:: Healthcheck Tests
+ :file: ./files/csv/tests-healthcheck.csv
+ :widths: 20,40,20,20
+ :delim: ;
+ :header-rows: 1
+
+See `Healthcheck README <https://git.onap.org/integration/xtesting/tree/healthcheck/README.md>`_
+to adapt then run healthcheck tests on your own system.
+
+Smoke Tests
+...........
+
+.. csv-table:: Smoke Tests
+ :file: ./files/csv/tests-smoke.csv
+ :widths: 20,40,20,20
+ :delim: ;
+ :header-rows: 1
+
+There are 2 main families of smoke tests:
+
+* RobotFramework based tests, usually run from inside the cluster as a k8s job
+* Pythonsdk based tests. These tests (also known as onaptests) are consuming
+ several SDKs: the Openstack and Kubernetes SDK for the management of the cloud
+ resources and the python ONAP SDK for the interactions with ONAP
+
+To launch the robot based tests, please see
+`Robot smoke test README <https://git.onap.org/integration/xtesting/tree/smoke-usecases-robot/README.md>`_
+Standard Robot html pages are generated. See :ref:`Robot page <docs_robot>`.
+
+To launch the pythonsdk based tests, please see
+`Python smoke test README <https://git.onap.org/integration/xtesting/tree/smoke-usecases-pythonsdk/README.md>`_
+
+.. note::
+ Please note that the OpenStack minimal version is pike.
+
+An html page is generated by the pythonsdk-test tests.
+
+.. figure:: files/tests/test-basic-cnf.png
+
+Security Tests
+...............
+
+.. csv-table:: Security Tests
+ :file: ./files/csv/tests-security.csv
+ :widths: 20,40,20,20
+ :delim: ;
+ :header-rows: 1
+
+See `Security test README <https://git.onap.org/integration/xtesting/tree/security/README.md>`_
+to adapt then run the security tests on your own system.
+
+Note for security tests, integration team follows `SECCOM recommendations and
+applies waivers granted by SECCOM if needed through xfail lists <https://git.onap.org/integration/seccom/tree/>`_.
+
+Stability tests
+---------------
+
+Ensuring the stability of ONAP is one of the missions of the Integration team.
+CI chains and stability tests are performed to help stabilise the release.
+See :ref:`Integration stability tests <integration-s3p>` for details.
diff --git a/docs/integration-tooling.rst b/docs/integration-tooling.rst
new file mode 100644
index 000000000..d615e7e27
--- /dev/null
+++ b/docs/integration-tooling.rst
@@ -0,0 +1,214 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
+.. _integration-tooling:
+
+Tooling
+=======
+
+.. important::
+ Integration team deals with lots of tools to complete its missions. The goal
+ of this section is to highlight some of them and redirect to their official
+ documentation. These tools can be used for CI/CD, Testing or platform management.
+
+ **Upstream tools** are privileged but when needed specific developments can be done.
+
+ Please note that none of these tools are imposed to test developers, in other
+ words, any kind of test is accepted and can be integrated, the list of tools
+ is just indicative.
+
+Integration Project
+-------------------
+
+Integration portal
+~~~~~~~~~~~~~~~~~~
+
+A portal is built to report the status of the different labs collaborating in
+Integration, see http://testresults.opnfv.org/onap-integration/
+
+.. figure:: files/CI/ONAP_CI_3.png
+ :align: center
+ :width: 6.5in
+
+The code of this web site is shared on a public gitlab project.
+
+
+Communication channels
+~~~~~~~~~~~~~~~~~~~~~~
+
+The main communication channel for real time support is the official ONAP
+Slack #integration-team chan (https://onapproject.slack.com/).
+
+You can also send a mail to onap-discuss AT lists.onap.org
+with [ONAP] [Integration] prefix in the title.
+
+Repository management
+~~~~~~~~~~~~~~~~~~~~~
+
+Since Integration team manages few dozens of different repositories a tool was provided to aid the process of mass editing the INFO.yaml files. It can be found `here <https://git.onap.org/integration/tree/ptl/edit_committers_info>`__.
+
+Testing
+-------
+
+Test frameworks
+~~~~~~~~~~~~~~~
+
+Robotframework
+..............
+
+`robotframework <https://robotframework.org/>`_ is a well known test framework.
+Lots of ONAP tests are leveraging this framework.
+This framework is fully developed upstream even if some extensions (python
+modules) were created especially to deal with OpenStack (see
+`python-testing-utils project <https://git.onap.org/testsuite/python-testing-utils/>`_).
+
+Some GUI tests (using Robotframework Selenium extension) had been initiated but
+not maintained, as a consequence they are not integrated in CI/CD.
+
+
+Python ONAP SDK
+...............
+
+The Openstack and Kubernetes python SDK are references widely adopted by the
+developers and the industry. Developing a python ONAP SDK aimed to follow the
+examples of the infrastructure SDK with the same expectations in terms of code
+quality.
+After an evaluation of the CLI project (JAVA SDK re-exposing primitives through
+python system calls), and a first prototype (onap_tests used until Frankfurt for
+end to end tests) it was decided to develop a new python SDK.
+
+This SDK has been developed in gitlab.com to benefit from the numerous built-in
+options offered by gitlab and ensure the best possible code quality.
+
+- `python SDK repository <https://gerrit.onap.org/r/admin/repos/integration/python-onapsdk>`_
+- `python SDK documentation <https://python-onapsdk.readthedocs.io/en/latest/?badge=develop>`_
+
+The project is fully Open Source, released under the Apache v2 license.
+Integration committers are invited to join the project. The main maintainers are
+ONAP integration and OOM committers.
+
+Any new feature shall respect the code quality criteria:
+
+- unit test coverage > 98%
+- functional tests (several components mock objects have been developed)
+
+.. attention::
+ Python-onapsdk is a **SDK**, it means it is a tool allowing to communicate
+ with ONAP. It is a **middleware** that can be used by test projects but it is
+ **NOT a test**.
+
+A companion project has been created in ONAP:
+`pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/>`_.
+
+The pythonsdk-test project defines tests based on python-onapsdk.
+
+The tests are hosted in this repository. They consume the different needed SDK:
+python-onapsdk but also the kubernetes, the OpenStack SDK and/or any needed
+additional middleware.
+The project developed the notion of steps that can be combined and reorganized
+as needed to design a test. This project interacts with ONAP only through the
+python-onapsdk library.
+The tests are described in :ref:`The Integration Test page <integration-tests>`.
+
+The available steps are:
+
+- [CLAMP] OnboardClampStep: Onboard a SDC including a TCA blueprint
+- [CDS] ExposeCDSBlueprintprocessorNodePortStep: expose CDS blueprint nodeport (Guilin workaround)
+- [CDS] BootstrapBlueprintprocessor: Bootstrap a blueprint processor
+- [CDS] DataDictionaryUploadStep: Upload a Data Dictionary to CDS
+- [CDS] CbaEnrichStep: Enrich CBA
+- [K8S plugin] K8SProfileStep: Create K8S profile
+- [SO] YamlTemplateVfModuleAlaCarteInstantiateStep: Instantiate VF module described in YAML using SO a la carte method
+- [SO] YamlTemplateVlAlaCarteInstantiateStep: Instantiate network link described in YAML using SO a la carte method.
+- [SO] YamlTemplateVfModuleAlaCarteInstantiateStep: Instantiate VF module described in YAML using SO a la carte method
+- [SO] YamlTemplateVnfAlaCarteInstantiateStep: Instantiate vnf described in YAML using SO a la carte method
+- [SO] YamlTemplateServiceAlaCarteInstantiateStep: Instantiate service described in YAML using SO a la carte method
+- [AAI] ConnectServiceSubToCloudRegionStep: Connect service subscription with cloud region
+- [AAI] CustomerServiceSubscriptionCreateStep: Create customer's service subscription
+- [AAI] CustomerCreateStep: Create customer
+- [AAI] LinkCloudRegionToComplexStep: Connect cloud region with complex
+- [AAI] ComplexCreateStep: Create complex
+- [AAI] RegisterCloudRegionStep: Register cloud region
+- [SDC] YamlTemplateServiceOnboardStep: Onboard service described in YAML file in SDC
+- [SDC] YamlTemplateVfOnboardStep: Onboard vf described in YAML file in SDC
+- [SDC] YamlTemplateVspOnboardStep: Onboard vsp described in YAML file in SDC
+- [SDC] VendorOnboardStep: Onboard vendor in SDC
+
+You can reuse the existing steps to compose your test and/or code your own step
+if it is not supported yet.
+
+The procedure to start a test is described in `pythonsdk-test README <https://git.onap.org/testsuite/pythonsdk-tests/tree/README.md>`_
+
+CI/CD
+-----
+
+The CI/CD is key for integration. It consolidates the trustability in the solution
+by the automated verification of the deployment and the execution of tests.
+Integration tests complete the component tests (unit and functional known as
+CSIT tests).
+
+Xtesting
+~~~~~~~~
+
+As the tests can be very heterogeneous (framework, language, outputs), the
+integration team integrates the tests in simple isolated execution context based
+on docker called **xtesting dockers**.
+
+Xtesting is a python library harmonizing the way to setup, run, teardown,
+manage the artifacts, manage the reporting of the tests (automatic push of the
+results on a DB backend). It was developed by
+`OPNFV functest project <https://git.opnfv.org/functest-xtesting/>`_.
+This python library is included in an alpine docker and contains the needed
+tests, their associated libraries as well as a testcases.yaml listing these tests.
+These docker files are built on any change in the integration/xtesting repository
+and daily to take into account the upstream changes.
+
+The integration project manages 5 xtesting dockers, see
+:ref:`Integration Test page <integration-tests>`.
+
+.. important::
+ **xtesting is a CI/CD framework, neither a test nor a test framework**
+
+ Testers can provide tests independently from xtesting.
+ However to be part of the CI/CD chains, an integration of the test in xtesting
+ will be required.
+
+The configuration files are provided as volumes and defined in each docker.
+The use of this CI/CD abstraction for the tests simplifies the integration
+of the test suites in any CI/CD system and harmonizes the inputs and the outputs.
+
+The official documentation can be found on
+`xtesting official web site <https://xtesting.readthedocs.io/en/latest/>`_
+
+Integration Test database
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The integration team shares a Test Result Database with the OPNFV project. All
+the test results of the CD are automatically pushed to this database.
+It is possible to retrieve the results through the Test API associated with this
+test Database.
+
+The following information is available:
+
+- List of pods allowed to push results: http://testresults.opnfv.org/onap/api/v1/pods
+- List of projects that declared test cases for CI/CD: http://testresults.opnfv.org/onap/api/v1/projects
+- List of integration test cases:
+ http://testresults.opnfv.org/onap/api/v1/projects/integration/cases
+- List of security test cases:
+ http://testresults.opnfv.org/onap/api/v1/projects/security/cases
+- Results with lots of possible filter combinations: http://testresults.opnfv.org/onap/api/v1/results?last=3
+
+It is possible to get results according to several criteria (version, case name,
+lab, period, last, CI id,..)
+See the `OPNFV test API documentation
+<https://docs.anuket.io/en/stable-lakelse/testing/ecosystem/overview.html#test-api-description>`_.
+
+Any company running ONAP Integration tests can be referenced to push their results
+to this database.
+This Database is hosted on a LF OPNFV server. Results are backed up daily.
+Integration committers can have access to this server.
+
+VNF demo Artifacts
+~~~~~~~~~~~~~~~~~~
+
+VNF demo artifacts are hosted in the demo repositories and published in
+https://nexus.onap.org/content/repositories/releases/org/onap/demo/vnf/.
diff --git a/docs/onap-integration-ci.rst b/docs/onap-integration-ci.rst
new file mode 100644
index 000000000..150c82b40
--- /dev/null
+++ b/docs/onap-integration-ci.rst
@@ -0,0 +1,131 @@
+.. _integration-ci:
+
+:orphan:
+
+Integration Continuous Integration Guide
+----------------------------------------
+
+.. important::
+ Continuous Integration is key due to the complexity of the ONAP projects.
+ Several chains have been created:
+
+ - Daily stable chain
+ - Daily master chain
+ - Gating: On demand deployment of a full ONAP solution to validate patchsets
+
+They are run on different environments (Orange labs, DT labs, Azure Cloud).
+
+The following document will detail these chains and how you could set up such
+chains and/or provide test results to the community.
+
+Integration CI Ecosystem
+------------------------
+
+Overview
+~~~~~~~~
+
+The global ecosystem can be described as follows:
+
+.. figure:: files/CI/ONAP_CI_10.png
+ :align: center
+
+Several chains are run in ONAP.
+The CI chains are triggered from different CI systems (Jenkins or gitlab-ci) (1)
+on different target environments hosted on community labs (Windriver,
+Orange, DT, E///) or Azure clouds. Jobs (installation, tests) are executed on
+these labs (2). At the end, the results are pushed through the OPNFV test API (3)
+to a test database (4) hosted by Linux Foundation on
+http://testresults.opnfv.org.
+Results can be reported in different web pages hosted on LF or on gitlab.com (5).
+
+
+Daily Chains
+~~~~~~~~~~~~
+
+CI daily chains (Master and last Stable) are run on Orange, DT using gitlab-ci
+jobs and Ericsson using jenkins jobs.
+
+Gating
+~~~~~~
+
+OOM gating has been introduced for El Alto. It consists of a deployment followed
+by a set of tests on patchsets submitted to OOM repository.
+
+The CI part is managed on gitlab.com and the deployment is executed on ONAP
+Orange lab and Azure clouds.
+The goal is to provide a feedback - and ultimately to vote - on code change
+prior to merge to consolidate the OOM Master branch.
+
+The developer can evaluate the consequences of his/her patchset on a fresh
+installation.
+
+The gating is triggered in 2 scenarios:
+
+ - new patchset in OOM
+ - comment with the magic word **oom_redeploy** is posted in the Gerrit's comment
+ section
+
+The procedure to submit new feature in CI is done in 3 steps as described in the
+figure below:
+
+.. figure:: files/CI/ONAP_CI_0.png
+ :align: center
+
+
+Visualization of the CI pipelines
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+As the CI chains are triggered from different systems, several web interfaces
+can be used to visualize them.
+
+A web site has been created to centralize the links on http://testresults.opnfv.org/onap-integration/index.html
+
+For Gating and gitlab.com based CI chains, the pipelines consist of pipelines of
+pipelines managed through the chaining of .gitlab-ci.yml file thanks to an Open
+Source deployment called chained-ci (https://gitlab.com/Orange-OpenSource/lfn/ci_cd/chained-ci).
+A visualization tool is available to list all your chains as described in the
+figure below:
+
+.. figure:: files/CI/ONAP_CI_1.png
+ :align: center
+ :width: 10in
+
+If you click on any element of the chain, you will open a new window:
+
+.. figure:: files/CI/ONAP_CI_2.png
+ :align: center
+
+In order to provide the logs to the developer an additional web page has been
+created to summarize the tests and grant access to their associated logs:
+
+.. figure:: files/CI/ONAP_CI_8.png
+ :align: center
+ :width: 6.5in
+
+Additionally, for the daily chain, another page displays the results as time
+series, making it possible to see the evolution of the tests over time.
+
+.. figure:: files/CI/ONAP_CI_9.png
+ :align: center
+ :width: 6.5in
+
+
+Setup Your Own CI Chains
+------------------------
+
+If you want to setup a gitlab.com based CI chain, and want to use chained-ci,
+you can follow the tutorial on https://gitlab.com/Orange-OpenSource/lfn/ci_cd/chained-ci-handson
+
+You should be able to chain your automation projects:
+
+* Create resources
+* Deployment of Kubernetes
+* Test of your Kubernetes (using OPNFV functest-k8s tests)
+* Deployment of your ONAP (you can use your own automatic installation
+ procedure or https://gitlab.com/Orange-OpenSource/lfn/onap/onap_oom_automatic_installation/)
+* Test ONAP thanks to the different ONAP xtesting dockers covering infrastructure
+ healthcheck, components healthcheck tests, end to end tests, security tests.
+
+If you want to report your results to the community, do not hesitate to contact
+the integration team. The Test database is public but the pods must be declared
+to be allowed to report results from third party labs.
diff --git a/docs/onap-oom-heat.rst b/docs/onap-oom-heat.rst
index bb9c1abff..13e6ca6db 100644
--- a/docs/onap-oom-heat.rst
+++ b/docs/onap-oom-heat.rst
@@ -1,6 +1,8 @@
-.. _onap-oom-heat:
+.. _integration-installation:
-Integration Environement Installation
+:orphan:
+
+Integration Environment Installation
-------------------------------------
ONAP is deployed on top of kubernetes through the OOM installer.
@@ -16,16 +18,16 @@ are provided, they correspond to files used on windriver environment.
This environment is used by the integration team to validate the installation,
perform tests and troubleshoot.
-If you intend to deploy your own environement, they can be used as reference but
+If you intend to deploy your own environment, they can be used as reference but
must be adapted according to your context.
Source files
~~~~~~~~~~~~
-- HEAT template files: https://git.onap.org/integration/tree/deployment/heat/onap-rke?h=elalto
-- Sample OpenStack RC file: https://git.onap.org/integration/tree/deployment/heat/onap-rke/env/windriver/Integration-SB-00-openrc?h=elalto
-- Sample environment file: https://git.onap.org/integration/tree/deployment/heat/onap-rke/env/windriver/onap-oom.env?h=elatlo
-- Deployment script: https://git.onap.org/integration/tree/deployment/heat/onap-rke/scripts/deploy.sh?h=elalto
+- HEAT template files: https://git.onap.org/integration/tree/deployment/heat/onap-rke
+- Sample OpenStack RC file: https://git.onap.org/integration/tree/deployment/heat/onap-rke/env/windriver/Integration-SB-00-openrc
+- Sample environment file: https://git.onap.org/integration/tree/deployment/heat/onap-rke/env/windriver/onap-oom.env
+- Deployment script: https://git.onap.org/integration/tree/deployment/heat/onap-rke/scripts/deploy.sh
Heat Template Description
@@ -33,7 +35,7 @@ Heat Template Description
The ONAP Integration Project provides a sample HEAT template that
fully automates the deployment of ONAP using OOM as described in
-:ref:`ONAP Operations Manager (OOM) over Kubernetes<installing-onap>`.
+OOM documentation.
The ONAP OOM HEAT template deploys the entire ONAP platform. It spins
up an HA-enabled Kubernetes cluster, and deploys ONAP using OOM onto
@@ -126,8 +128,7 @@ Exploring the Rancher VM
The Rancher VM that is spun up by this HEAT template serves the
following key roles:
-- Hosts the /dockerdata-nfs/ NFS export shared by all the k8s VMs for persistent
- volumes
+- Hosts the /dockerdata-nfs/ NFS export shared by all the k8s VMs for persistent volumes
- git clones the oom repo into /root/oom
- git clones the integration repo into /root/integration
- Creates the helm override file at /root/integration-override.yaml
diff --git a/docs/release-notes.rst b/docs/release-notes.rst
index 884998fa1..07ba0b9f3 100644
--- a/docs/release-notes.rst
+++ b/docs/release-notes.rst
@@ -1,238 +1,119 @@
+.. _release_notes:
.. This work is licensed under a Creative Commons Attribution 4.0
International License. http://creativecommons.org/licenses/by/4.0
-.. _doc-release-notes:
+Integration Kohn Release Notes
+==============================
-Integration Release Notes
-=========================
+.. csv-table:: Integration Releases
+ :file: ./files/csv/release-integration-ref.csv
+ :widths: 50,50
+ :delim: ;
+ :header-rows: 1
+.. important::
-Integration Repo
-----------------
-
-Version: 4.0.0
-..............
-
-:Release Date: 2019-10-21
-
-**New Features**
-
-* Add new integration labs
-* Introduction of OOM Gating
-* Updated scripts for OOM daily automated deployment tests
-* Refactoring of the Integration wiki home page
-* Automation script for use cases
-* Updated java artifact versions for ONAP El Alto release
-* Cleaning of CSIT jobs
-* Update oparent library to fix security Vulnerabilities
-* Update Postman collection for test
-
-Quick Links:
- - `Integration project page <https://wiki.onap.org/display/DW/Integration+Project>`_
- - ` El Alto use testing status page <https://wiki.onap.org/display/DW/2%3A+El+Alto+Release+Integration+Testing+Status>`
-
-ONAP Maturity Testing Notes
----------------------------
-
-For El Alto release, ONAP continues to improve in multiple areas of
-Scalability, Security, Stability and Performance (S3P) metrics.
-
-In addition of the windriver lab, Master and El Alto use cases have been tested
-on Ericcson (Daily Master CI chain), Orange (Daily Master chain, Gating) and
-windriver labs (use cases, daily, long duration). See `Integration Lab portal
-<http://testresults.opnfv.org/onap-integration>`
-
-
-A gating chain has been setup for OOM. This CI chain provides feedback to the
-integration team. For each OOM change, a full ONAP deployment is triggered then
-several tests are executed (k8s verification, helm chart verification, 61 robot
-healthcheck, healthdist and end to end basic VNF tests).
-For El Alto, more than 1000 pipelines have been executed (gating, daily master
-and stable).
-The results of the tests for the OOM gating can be found ` here
-<https://orange-opensource.gitlab.io/lfn/onap/xtesting-onap-view/index.html>`
-
-Tests dealing with more than 25 test cases are executed on Windriver
-environment.
-
-The Integration team ran the 72 hours stability testing (xx% passing rate) and
-full resilience testing (xx% passing rate) at ONAP OpenLabs.
-More details in :ref:`ONAP Maturity Testing Notes <integration-s3p>`.
-
-
-Verified Use Cases and Functional Requirements
-----------------------------------------------
-
-The Integration team verified 22 use cases and functional requirements.
-The details can be found at
-:ref:`Verified Use Cases and Functional Requirements <docs_usecases>` session.
-
-O-Parent
---------
-
-Version: 2.2.0
-..............
-
-:Release Date: 2019-09-03
-
-**New Features**
-
-* Updated oparent POM files to support LF's new global job template.
-* commons-beanutils 1.9.4
-* tomcat-embed-core 9.0.24
-* jquery 3.4.1
-
-
-Version: 2.0.0
-..............
-
-:Release Date: 2019-03-08
-
-**New Features**
-
-* Updated oparent POM files to support LF's new global job template.
+ - New repositories (see dedicated section)
+ - Bug fixes
-Version: 1.2.3
-..............
+ Quick Links:
-:Release Date: 2019-02-11
+ - `Kohn Integration page <https://wiki.onap.org/display/DW/Integration+Kohn>`_
+ - `Kohn Integration JIRA follow-up <https://wiki.onap.org/display/DW/Kohn+Integration+Blocking+points>`_
+ - `Kohn Integration weather Board <https://wiki.onap.org/display/DW/0%3A+Integration+Weather+Board+for+Kohn+Release>`_
-**Bug Fixes**
+Code changes
+------------
-* Updated various library dependency versions per SECCOM input
-* Fixed Checkstyle configuration issues
-
-
-Version: 1.2.2
-..............
-
-:Release Date: 2018-11-11
-
-**Bug Fixes**
-
-* Updated Spring dependency version to fix CLM issues
-* Remove hard-coding of ONAP nexus repos
-
-
-Version: 1.2.1
-..............
-
-:Release Date: 2018-09-14
-
-**New Features**
-
-* Refactor oparent pom to support running builds against local nexus
- repos without having to modify oparent source code
-* Standardize 3rd party library versions
-
-Version: 1.2.0
-..............
-
-:Release Date: 2018-08-24
-
-**New Features**
-
-* Add depedencyManagement sub-module to declare dependecies
-
-
-Demo Artifacts (HEAT Templates)
--------------------------------
-
-Version: 1.5.0
-..............
-
-:Release Date: 2019-10-11
-
-**New Features**
-
-* vFW DT tutorial improvement
-* Helm chart for visualization operator
-* bug fixes
-* Robot enhancements for various use cases
-
-
-Version: 1.4.0
-..............
-
-:Release Date: 2019-06-13
-
-**New Features**
-
-The main changes for this release are the additional templates and
-other changes to support Use Cases such as vFWCL, vFWDT, vCPE, Scale-out,
-and TOSCA templates.
+Integration Repo
+.................
+:Release Date: 2022-10-27
-Version: 1.3.0
-..............
-:Release Date: 2018-11-15
+Version: 11.0.0 (aka Kohn)
+^^^^^^^^^^^^^^^^^^^^^^^^^^
-**New Features**
+.. csv-table:: Integration Changes
+ :file: ./files/csv/release-integration-features.csv
+ :widths: 30,70
+ :delim: ;
+ :header-rows: 1
-The main changes for this release are the additional templates and
-other changes to support Use Cases such as HPA, vCPE, Scale-out,
-and TOSCA templates.
+Onaptests (pythonsdk_tests)
+...........................
-Robot Test Suites
------------------
+Main changes:
-Version: 1.5.4
+.. csv-table:: pythonsdk_tests Changes
+ :file: ./files/csv/release-pythonsdk-features.csv
+ :widths: 30,70
+ :delim: ;
+ :header-rows: 1
-:Release Date: 2019-10-24
+Robot (Testsuite)
+.................
-**New Features**
+Version: 1.11.0
+^^^^^^^^^^^^^^^
-* bug Fixes(Teardown, control loop, alotteed properties)
-* Add repush Policy
-* CDS support
-* HV-VES SSL support
-* Add testsuite for VNF Lifecycle validation
-* Cleaning (remaining openecomp ref, ocata and lenovo healthcheck, unused or
- redundant variables and scripts)
+Main changes:
+.. csv-table:: Testsuite Changes
+ :file: ./files/csv/release-testsuite-features.csv
+ :widths: 30,70
+ :delim: ;
+ :header-rows: 1
-Version: 1.4.1
-:Release Date: 2019-06-09
+O-Parent
+........
-**New Features**
+Version: 3.3.2
+^^^^^^^^^^^^^^
-* Update vFWCL use case test script
+.. csv-table:: Oparent Changes
+ :file: ./files/csv/release-oparent-features.csv
+ :widths: 30,70
+ :delim: ;
+ :header-rows: 1
+Demo Artifacts (Heat Templates)
+...............................
-Version: 1.4.0
+Version: 1.11.0
+^^^^^^^^^^^^^^^
-:Release Date: 2019-05-24
+.. csv-table:: Demo Changes
+ :file: ./files/csv/release-demo-features.csv
+ :widths: 30,70
+ :delim: ;
+ :header-rows: 1
-**New Features**
+The demo artifacts are pushed to https://nexus.onap.org/content/repositories/releases/org/onap/demo/vnf
-* Update vFWCL use case closed loop policy
-* Fix vCPE use case test bugs
-* Support resource VL type in test script
-* Add test script for new use cases
-* Enhance existing use cases test script
-Version: 1.3.2
-..............
+Use Cases and Requirements
+--------------------------
-:Release Date: 2018-11-20
+See dedicated :ref:`Kohn Use Cases and requirements page <docs_usecases_release>`
-**New Features**
+Maturity Testing Notes
+----------------------
-* Fully automated vFW Closed Loop instantiation and testing
-* Instantiation of 5 new vCPE models
+:ref:`Maturity testing page <integration-s3p>`
+Open JIRAs/Known issues
+-----------------------
-Version: 1.3.1
-..............
+Integration
+...........
-:Release Date: 2018-11-14
+`Integration JIRA page <https://jira.onap.org/issues/?jql=project%20%3D%20Integration%20>`_
-**New Features**
+Testsuite
+.........
-* Additional health checks for new ONAP components in Casablanca
-* New ETE test suite to test Portal functionality
-* Various enhancements to improve stability of Robot tests
+`Testsuite JIRA page <https://jira.onap.org/issues/?jql=project%20%3D%20Test>`_
diff --git a/docs/requirements-docs.txt b/docs/requirements-docs.txt
new file mode 100644
index 000000000..097282b97
--- /dev/null
+++ b/docs/requirements-docs.txt
@@ -0,0 +1,8 @@
+sphinx>=4.2.0 # BSD
+sphinx-rtd-theme>=1.0.0 # MIT
+sphinxcontrib-blockdiag # BSD
+sphinxcontrib-seqdiag # BSD
+sphinxcontrib-swaggerdoc
+sphinxcontrib-spelling
+sphinxcontrib-plantuml
+six
diff --git a/docs/schema-update-apis.csv b/docs/schema-update-apis.csv
new file mode 100644
index 000000000..391615363
--- /dev/null
+++ b/docs/schema-update-apis.csv
@@ -0,0 +1,49 @@
+API,Service level workflow retrieving API,Service level workflow execution API
+Name,RetrieveServiceLevelWorkflow,ExecuteServiceLevelWorkflow
+Type,Get,Post
+URL,/onap/so/infra/workflowSpecifications/v1/workflows?resourceTarget=service,/onap/so/infra/instanceManagement/v1/serviceInstances/{serviceInstanceId}/workflows/{workflow_UUID}
+Request,"Headers: application/json
+
+Path parameters: resourceTarget=service
+
+Body={
+
+}
+
+
+
+","Headers: application/json
+
+Path parameters: serviceInstances; workflow_UUID
+
+Body={
+
+ ""modelInfo"":{ #targetServiceModelVersionId
+
+ ""modelType"":""service"",
+
+ ""modelInvariantUuid"":""fe41489e-1563-46a3-b90a-1db629e4375b"",
+
+ ""modelVersionId"" : ""cd4decf6-4f27-4775-9561-0e683ed43635"",
+
+ ""modelVersion"":""1.0""
+
+ }
+
+}"
+Response,"200​ – Successful retrieval of workflows
+
+400 - Bad Request
+
+500 - Internal Server Error
+
+
+
+
+","202​ - Request has been accepted for processing
+
+400 - Bad Request
+
+500 - Internal Server Error"
+,,
+,,
diff --git a/docs/simulators/nf_simulator.rst b/docs/simulators/nf_simulator.rst
new file mode 100644
index 000000000..64a6ac3b5
--- /dev/null
+++ b/docs/simulators/nf_simulator.rst
@@ -0,0 +1,148 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+.. International License. http://creativecommons.org/licenses/by/4.0
+
+.. _nf_simulator:
+
+:orphan:
+
+NF Simulator
+============
+
+Description
+-----------
+An idea behind NF (Network Function) Simulator is to introduce simulator, which supports ORAN defined O1 interface (reporting of NF events to Service Management Orchestrators).
+Within the use-case, it is expected that an NF configuration change, happening due to multiple reasons (network mechanism triggered change - e.g. discovery of neighbours) is reported to the network management system, using ONAP's VES REST events.
+The simulator is expected to cover planned NF behaviour - receive the config change via a NetConf protocol and report that change (also potentially other related changes) to the network management system using ONAP's VES event.
+
+|image1|
+
+**Figure 1. Architecture Overview**
+
+1. NF Simulator code is stored in https://github.com/onap/integration-simulators-nf-simulator and all it's sub repos are:
+
+ - for VES Client - https://github.com/onap/integration-simulators-nf-simulator-ves-client
+ - for Netconf Server - https://github.com/onap/integration-simulators-nf-simulator-netconf-server
+ - for AVCN Manager - https://github.com/onap/integration-simulators-nf-simulator-avcn-manager
+ - for PM HTTPS Server - https://github.com/onap/integration-simulators-nf-simulator-pm-https-server
+
+2. For above components have been prepared:
+
+ - docker images in ONAP Nexus:
+
+ - VES Client image - nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.vesclient
+ - AVCN Manager image - nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.avcnmanager
+ - PM HTTPS Server image - nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver
+ - Netconf Server images - nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.netconfserver
+
+ - example helm charts:
+
+ - `Helm charts <https://github.com/onap/integration-simulators-nf-simulator/tree/master/helm>`_
+
+**VES Client, Netconf Server and PM HTTPS Server can be used and deployed separately depending on needs.**
+
+Only AVCN Manger connects VES Client with Netconf Server in order to support O1 interface.
+
+1. VES Client:
+--------------
+
+1. Purpose of VES Client
+
+ Application that generates VES events on demand.
+
+2. What does it do?
+
+ - Support both basic auth and TLS CMPv2 method of authentication.
+  - Allow to turn on and turn off hostname verification in SSL.
+ - Allow to send one-time event and periodic events, based on event templates.
+ - Expose API to manage VES Client
+ - Provide template mechanism (Template is a draft event. Merging event with patch will result in valid VES event.
+ Template itself should be a correct VES event as well as valid json object. )
+ - Patching - User is able to provide patch in request, which will be merged into template.
+ - Simulator support corresponding keywords in templates: RandomInteger(start,end), RandomPrimitiveInteger(start,end), RandomInteger,
+ RandomString(length), RandomString, Timestamp, TimestampPrimitive, Increment
+ - In place variables support - Simulator supports dynamic keywords e.g. #dN to automatically substitute selected phrases in defined json schema.
+ - Logging - Every start of simulator will generate new logs that can be found in docker ves-client container.
+ - Swagger - Detailed view of simulator REST API is available via Swagger UI
+ - History - User is able to view events history.
+
+2. Netconf Server:
+------------------
+
+1. Purpose of Netconf Server
+
+ This server uses sysrepo to simulate network configuration.
+ It is based on sysrepo-netopeer2 image.
+
+2. What does it do?
+
+ Server allows to:
+
+ - Install custom configuration models on start up.
+ - Change configuration of that modules on runtime.
+ - Use TLS custom certificates
+ - Configure change subscription for particular YANG modules (Netconf server image run python application on the startup.)
+ - Manage netconf server using REST interface, with endpoints:
+
+ - GET /healthcheck returns 200 "UP" if server is up and running
+ - POST /readiness return 200 "Ready" if server is ready, if not, returns 503 "Not Ready"
+    - POST /change_config/<path:module_name> changes configuration and returns 202 "Accepted"
+ - GET /change_history returns 200 and change history as json
+ - GET /get_config/<path:module_name> returns 200 and current configuration
+
+3. AVCN Manager:
+----------------
+
+1. Purpose of AVCN Manager
+
+   Manager that fetches changes of configuration from Kafka and sends them to VES client.
+
+2. What does it do?
+
+   The manager processes notifications from NETCONF server. It does this by being a subscriber of a Kafka topic that is fed with NETCONF notifications. Incoming notifications are then processed and output of this processing is sent to VES client.
+
+4. PM HTTPS Server:
+-------------------
+
+1. Purpose of PM HTTPS Server
+
+ Server that is used in Bulk PM usecase over HTTPS
+
+2. What does it do?
+
+ - Support TLS (CMPv2) method of authentication (used during connection to Data File Collector)
+ - Allow to use custom certificates
+ - Expose REST API in order to manage PM files stored in HTTPS server
+
+
+Guides
+======
+
+User And Developer Guide
+------------------------
+1. User guides:
+
+ - `VES Client user guide. <https://github.com/onap/integration-simulators-nf-simulator-ves-client/blob/master/README.md>`_
+ - `AVCN Manager user guide. <https://github.com/onap/integration-simulators-nf-simulator-avcn-manager/blob/master/README.md>`_
+ - `PM HTTPS Server user guide. <https://github.com/onap/integration-simulators-nf-simulator-pm-https-server/blob/master/README.md>`_
+ - `Netconf Server user guide. <https://github.com/onap/integration-simulators-nf-simulator-netconf-server/blob/master/README.md>`_
+ - `Netconf Notification Application user guide. <https://github.com/onap/integration-simulators-nf-simulator-netconf-server/blob/master/src/python/README.md>`_
+ - `NF Simulator CLI user guide <https://github.com/onap/integration-simulators-nf-simulator/blob/master/simulator-cli/README.md>`_
+
+2. Jenkins builds:
+
+ - `VES Client jenkins builds <https://jenkins.onap.org/view/integration-simulators-nf-simulator-ves-client/>`_
+ - `AVCN Manager jenkins builds <https://jenkins.onap.org/view/integration-simulators-nf-simulator-avcn-manager/>`_
+ - `PM HTTPS Server jenkins builds <https://jenkins.onap.org/view/integration-simulators-nf-simulator-pm-https-server/>`_
+ - `Netconf Server jenkins builds <https://jenkins.onap.org/view/integration-simulators-nf-simulator-netconf-server/>`_
+
+3. NF Simulator CSIT test cases:
+
+ - `Project integration-simulators-nf-simulator-netconf-server-master-verify-csit-testsuites <https://jenkins.onap.org/view/integration-simulators-nf-simulator-netconf-server/job/integration-simulators-nf-simulator-netconf-server-master-verify-csit-testsuites/>`_
+ - `Project integration-simulators-nf-simulator-netconf-server-master-csit-testsuites <https://jenkins.onap.org/view/integration-simulators-nf-simulator-netconf-server/job/integration-simulators-nf-simulator-netconf-server-master-csit-testsuites/>`_
+
+4. NF Simulator sanity checks:
+
+ - https://github.com/onap/integration-simulators-nf-simulator/tree/master/sanitycheck
+
+.. |image1| image:: ../files/simulators/NF-Simulator.png
+ :width: 10in \ No newline at end of file
diff --git a/docs/tox.ini b/docs/tox.ini
new file mode 100644
index 000000000..46075fa6a
--- /dev/null
+++ b/docs/tox.ini
@@ -0,0 +1,31 @@
+[tox]
+minversion = 1.6
+envlist = docs,docs-linkcheck,docs-spellcheck
+skipsdist = true
+
+[testenv:docs]
+basepython = python3.8
+deps =
+ -r{toxinidir}/requirements-docs.txt
+ -chttps://releases.openstack.org/constraints/upper/yoga
+ -chttps://git.onap.org/doc/plain/etc/upper-constraints.onap.txt?h=master
+commands =
+ sphinx-build -W -q -b html -n -d {envtmpdir}/doctrees {toxinidir} {toxinidir}/_build/html
+
+[testenv:docs-linkcheck]
+basepython = python3.8
+deps =
+ -r{toxinidir}/requirements-docs.txt
+ -chttps://releases.openstack.org/constraints/upper/yoga
+ -chttps://git.onap.org/doc/plain/etc/upper-constraints.onap.txt?h=master
+commands =
+ sphinx-build -W -q -b linkcheck -d {envtmpdir}/doctrees {toxinidir} {toxinidir}/_build/linkcheck
+
+[testenv:docs-spellcheck]
+basepython = python3.8
+deps =
+ -r{toxinidir}/requirements-docs.txt
+ -chttps://releases.openstack.org/constraints/upper/yoga
+ -chttps://git.onap.org/doc/plain/etc/upper-constraints.onap.txt?h=master
+commands =
+ sphinx-build -W -q -b spelling -d {envtmpdir}/doctrees {toxinidir} {toxinidir}/_build/spellcheck
diff --git a/docs/usecases/deprecated_usecases.rst b/docs/usecases/deprecated_usecases.rst
new file mode 100644
index 000000000..6d82140db
--- /dev/null
+++ b/docs/usecases/deprecated_usecases.rst
@@ -0,0 +1,28 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+.. _deprecated_usecases:
+
+:orphan:
+
+Deprecated Use Cases and Functional Requirements
+================================================
+
+Each ONAP release deals with lots of use cases and functional requirements.
+When possible, it is strongly recommended to automate the use cases.
+In this case Integration team can take over the maintenance part of the use case.
+If not automated, the use cases are fully under the responsibility of the use
+case team and usually valid for the release the team was involved in.
+However, these use cases, their artifacts remain in the repository.
+Anyone can give a try even if the use cases are no more supported.
+
+This section deals with such use cases.
+These use cases have been part of one release but have not been tested on the
+last releases. They might be fully deprecated or usable through minor adaptations.
+The entry points are the use case owners.
+
+.. csv-table:: deprecated use case table
+ :file: ../files/csv/usecases-deprecated.csv
+ :widths: 50,20,10,20
+ :header-rows: 1
+ :delim: ;
diff --git a/docs/usecases/release_automated_usecases.rst b/docs/usecases/release_automated_usecases.rst
new file mode 100644
index 000000000..932a0d4f3
--- /dev/null
+++ b/docs/usecases/release_automated_usecases.rst
@@ -0,0 +1,37 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+.. _release_automated_usecases:
+
+:orphan:
+
+Automated Use Cases
+-------------------
+
+These use cases have been run on the Daily CI chains and are used to
+validate the integration of any new dockers in OOM.
+New tests are indicated in **bold**.
+
+.. csv-table:: Infrastructure Healthcheck Tests
+ :file: ../files/csv/tests-infrastructure-healthcheck.csv
+ :widths: 20,40,20,20
+ :delim: ;
+ :header-rows: 1
+
+.. csv-table:: Healthcheck Tests
+ :file: ../files/csv/tests-healthcheck.csv
+ :widths: 20,40,20,20
+ :delim: ;
+ :header-rows: 1
+
+.. csv-table:: Smoke Tests
+ :file: ../files/csv/tests-smoke.csv
+ :widths: 20,40,20,20
+ :delim: ;
+ :header-rows: 1
+
+.. csv-table:: Security Tests
+ :file: ../files/csv/tests-security.csv
+ :widths: 20,40,20,20
+ :delim: ;
+ :header-rows: 1
diff --git a/docs/usecases/release_non_functional_requirements.rst b/docs/usecases/release_non_functional_requirements.rst
new file mode 100644
index 000000000..b3f5a0b54
--- /dev/null
+++ b/docs/usecases/release_non_functional_requirements.rst
@@ -0,0 +1,15 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+.. _release_non_functional_requirements:
+
+:orphan:
+
+Non Functional Requirements
+----------------------------
+
+.. csv-table:: non functional requirements table
+ :file: ../files/csv/usecases-non-functional-requirements.csv
+ :widths: 5,45,12,38
+ :delim: ;
+ :header-rows: 1
diff --git a/docs/usecases/release_requirements.rst b/docs/usecases/release_requirements.rst
new file mode 100644
index 000000000..0ec4b1b95
--- /dev/null
+++ b/docs/usecases/release_requirements.rst
@@ -0,0 +1,15 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+.. _release_requirements:
+
+:orphan:
+
+Functional Requirements
+-----------------------
+
+.. csv-table:: functional requirements table
+ :file: ../files/csv/usecases-functional-requirements.csv
+ :widths: 5,45,12,38
+ :delim: ;
+ :header-rows: 1
diff --git a/docs/usecases/release_usecases.rst b/docs/usecases/release_usecases.rst
new file mode 100644
index 000000000..749c4483a
--- /dev/null
+++ b/docs/usecases/release_usecases.rst
@@ -0,0 +1,37 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+.. _release_usecases:
+
+:orphan:
+
+Kohn Use Cases
+==============
+
+Description
+-----------
+
+This section includes use cases and functional requirements which have been
+officially verified in Kohn release by the ONAP community.
+
+For each use case or functional requirement, you can find contact names and a
+link to the associated documentation.
+
+This documentation deals with
+
+ 1. What has been implemented
+ 2. Step by step instructions to deploy and execute the tests, including the
+ links to download the related assets and resources
+ 3. Known issues and workarounds
+
+Use cases
+---------
+
+Kohn Official Use Cases
+~~~~~~~~~~~~~~~~~~~~~~~
+
+.. csv-table::
+ :file: ../files/csv/usecases.csv
+ :widths: 10,40,20,30
+ :delim: ;
+ :header-rows: 1
diff --git a/documentation/api-dependencies/README.md b/documentation/api-dependencies/README.md
index f82ddcf41..342539823 100644
--- a/documentation/api-dependencies/README.md
+++ b/documentation/api-dependencies/README.md
@@ -1,3 +1 @@
-
-This directory contains the documentation for API dependencies between
-ONAP projects.
+This directory contains the documentation for API dependencies between ONAP projects.
diff --git a/pipelines/docker-onap-k8s-toolbox/Dockerfile b/pipelines/docker-onap-k8s-toolbox/Dockerfile
new file mode 100644
index 000000000..93c30bff6
--- /dev/null
+++ b/pipelines/docker-onap-k8s-toolbox/Dockerfile
@@ -0,0 +1,20 @@
+FROM ubuntu:jammy
+
+ARG KUBECTL_VERSION="v1.28.4"
+ARG KUBECTL_URL="https://dl.k8s.io/release/${KUBECTL_VERSION}/bin/linux/amd64/kubectl"
+
+ARG HELM_VERSION="v3.10.3"
+ARG HELM_ARCHIVE="helm-${HELM_VERSION}-linux-amd64.tar.gz"
+ARG HELM_URL="https://get.helm.sh/${HELM_ARCHIVE}"
+
+WORKDIR /root
+
+RUN apt-get update && apt-get install -y git curl make \
+ && curl -L $KUBECTL_URL -o /usr/local/bin/kubectl \
+ && chmod +x /usr/local/bin/kubectl \
+ && curl -L $HELM_URL -o $HELM_ARCHIVE \
+ && tar -zxf ${HELM_ARCHIVE} \
+ && mv linux-amd64/helm /usr/local/bin/helm \
+ && rm -rf linux-amd64 $HELM_ARCHIVE
+
+CMD ["/bin/bash"] \ No newline at end of file
diff --git a/pipelines/docker-onap-k8s-toolbox/README.md b/pipelines/docker-onap-k8s-toolbox/README.md
new file mode 100644
index 000000000..8b4714551
--- /dev/null
+++ b/pipelines/docker-onap-k8s-toolbox/README.md
@@ -0,0 +1,4 @@
+# ONAP k8s toolbox
+
+ONAP k8s toolbox is a simple docker image designed to run gating jobs of building and deploying ONAP
+in a managed cluster.
diff --git a/pipelines/docker-onap-k8s-toolbox/container-tag.yaml b/pipelines/docker-onap-k8s-toolbox/container-tag.yaml
new file mode 100644
index 000000000..e0bf6aaf3
--- /dev/null
+++ b/pipelines/docker-onap-k8s-toolbox/container-tag.yaml
@@ -0,0 +1 @@
+tag: "1.1.0"
diff --git a/ptl/edit_committers_info/README.md b/ptl/edit_committers_info/README.md
new file mode 100644
index 000000000..271550aff
--- /dev/null
+++ b/ptl/edit_committers_info/README.md
@@ -0,0 +1,73 @@
+# Edit your repositories INFO.yaml quickly!
+
+Using that tool it's possible to edit as many INFO.yaml files as you wish. It's not needed to execute the same operations for each of the repository you maintain.
+
+Nowadays only the committer deletion action is available, but addition option should be added soon.
+
+## Requirements
+
+### System requirements
+
+Python 3.10 version is needed to run that tool.
+
+### Virtual environment
+
+It's recommended to create a virtual environment to install all dependencies. Create a virtual env using below command
+
+```
+$ python3.10 -m venv .virtualenv
+```
+
+Virtual environment will be created under `.virtualenv` directory.
+To activate the virtual environment, call
+
+```
+$ source .virtualenv/bin/activate
+```
+
+### Python requirements
+
+- [click](https://click.palletsprojects.com/en/8.0.x/)
+- [GitPython](https://gitpython.readthedocs.io/en/stable/index.html)
+- [ruamel.yaml](https://yaml.readthedocs.io/en/latest/)
+
+Install Python requirements calling
+
+```
+$ pip install -r requirements.txt
+```
+
+## Usage
+
+You need to create a `config` YAML file where you describe what changes you want to perform.
+Required YAML config structure:
+
+```
+---
+repos: # List of the repositories which are going to be updated.
+ # That tool is not smart enough to resolve some conflicts etc.
+ # Please be sure that it would be possible to push the change to the gerrit.
+ # Remember that commit-msg hook should be executed so add that script into .git/hooks dir
+ - path: abs_path_to_the_repo # Local path to the repository
+  branch: master # Branch which needs to be updated
+committers: # List of the committers which are going to be edited
+ - name: Committer Name # The name of the committer which we would delete or add
+ action: Deletion|Addition # Addition or deletion action
+ link: https://link.to.the.tcs.confirmation # Link to the ONAP TSC action confirmation
+commit: # Configure the commit message
+ message: # List of the commit message lines. That's optional
+ - "[INTEGRATION] My awesome first line!"
+ - "Even better second one!"
+ issue_id: INT-2008 # ONAP's JIRA Issue ID is required in the commit message
+```
+
+## Contribute
+
+- Create ONAP Jira ticket
+- Edit the code
+- Check the linters
+ - install tox
+ `$ pip install tox`
+ - call linters
+ `$ tox .`
+ - if no errors: push the change
diff --git a/ptl/edit_committers_info/edit_committers_list.py b/ptl/edit_committers_info/edit_committers_list.py
new file mode 100644
index 000000000..8ed97b6c8
--- /dev/null
+++ b/ptl/edit_committers_info/edit_committers_list.py
@@ -0,0 +1,588 @@
+"""Automate the INFO.yaml update."""
+"""
+ Copyright 2021 Deutsche Telekom AG
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+"""
+
+from enum import Enum
+from itertools import chain, zip_longest
+from pathlib import Path
+from typing import Dict, Iterator, List, Optional, Tuple
+
+import click
+import git
+from ruamel.yaml import YAML
+from ruamel.yaml.scalarstring import SingleQuotedScalarString
+
+
+class CommitterActions(Enum):
+ """Committer Actions enum.
+
+ Available actions:
+    * Addition - will add the committer with their info into
+      the committers list and the tsc information would be added
+    * Deletion - committer will be deleted from the committers list
+      and the tsc information would be added
+
+ """
+
+ ADDITION = "Addition"
+ DELETION = "Deletion"
+
+
+class CommitterChange:
+ """Class representing the change on the committers list which needs to be done."""
+
+ def __init__(
+ self,
+ name: str,
+ action: CommitterActions,
+ link: str,
+ email: str = "",
+ company: str = "",
+ committer_id: str = "",
+ timezone: str = "",
+ ) -> None:
+ """Initialize the change object.
+
+ Args:
+ name (str): Committer name
+ action (CommitterActions): Action to be done
+ link (str): Link to the TSC confirmation
+ email (str, optional): Committer's e-mail. Needed only for addition.
+ Defaults to "".
+ company (str, optional): Committer's company name. Needed only for addition.
+ Defaults to "".
+ committer_id (str, optional): Committer's LF ID. Needed only for addition.
+ Defaults to "".
+ timezone (str, optional): Committer's timezone. Needed only for addition.
+ Defaults to "".
+
+ """
+ self._committer_name: str = name
+ self._action: CommitterActions = action
+ self._link: str = link
+ self._email: str = email
+ self._company: str = company
+ self._commiter_id: str = committer_id
+ self._timezone: str = timezone
+
+ @property
+ def action(self) -> CommitterActions:
+ """Enum representing an action which is going to be done by the change.
+
+ Returns:
+ CommitterActions: One of the CommittersActions enum value.
+
+ """
+ return self._action
+
+ @property
+ def committer_name(self) -> str:
+ """Committer name property.
+
+ Returns:
+ str: Name provided during the initialization.
+
+ """
+ return self._committer_name
+
+ @property
+ def email(self) -> str:
+ """Committer email property.
+
+ Returns:
+ str: Email provided during initialization.
+
+ """
+ return self._email
+
+ @property
+ def company(self) -> str:
+ """Committer company property.
+
+ Returns:
+ str: Company name provided during initialization
+
+ """
+ return self._company
+
+ @property
+ def committer_id(self) -> str:
+ """Committer id property.
+
+ Returns:
+ str: Committer ID provided during initialization
+
+ """
+ return self._commiter_id
+
+ @property
+ def timezone(self) -> str:
+ """Committer timezone property.
+
+ Returns:
+ str: Committer timezone provided during initialization
+
+ """
+ return self._timezone
+
+ @property
+ def addition_change(self) -> Dict[str, str]:
+ """Addition change property.
+
+ Returns:
+ Dict[str, str]: Values which are going to be added into committers section
+
+ """
+ return {
+ "name": self.committer_name,
+ "email": self.email,
+ "company": self.company,
+ "id": self.committer_id,
+ "timezone": self.timezone,
+ }
+
+
+class TscChange:
+ """TSC section change class."""
+
+ def __init__(self, action: CommitterActions, link: str) -> None:
+ """Initialize tsc change class instance.
+
+ Args:
+ action (CommitterActions): TSC section change action.
+ link (str): Link to the TSC confirmation
+
+ """
+ self._action: CommitterActions = action
+ self._link: str = link
+ self._names: List[str] = []
+
+ def add_name(self, name: str) -> None:
+ """Add committer name into tsc change.
+
+ For both actions: deletion and addition there is an option to add multiple names
+ for each. That method adds name into the list which will be used then.
+
+ Args:
+ name (str): Committer name to be added to the list of the names in tsc section change.
+ """
+ self._names.append(name)
+
+ @property
+ def tsc_change(self) -> Dict[str, str]:
+ """Tsc section change property.
+
+ Returns:
+ Dict[str, str]: Dictionary with values to be added into TSC section.
+
+ """
+ return {
+ "type": self._action.value,
+ "name": ", ".join(self._names),
+ "link": self._link,
+ }
+
+
+class YamlConfig:
+ """YAML config class which corresponds the configuration YAML file needed to be provided by the user.
+
+ Required YAML config structure:
+
+ ---
+    repos: # List of the repositories which are going to be updated.
+ # That tool is not smart enough to resolve some conflicts etc.
+ # Please be sure that it would be possible to push the change to the gerrit.
+ # Remember that commit-msg hook should be executed so add that script into .git/hooks dir
+ - path: abs_path_to_the_repo # Local path to the repository
+        branch: master # Branch which needs to be updated
+ committers: # List of the committers which are going to be edited
+ - name: Committer Name # The name of the committer which we would delete or add
+ action: Deletion|Addition # Addition or deletion action
+ link: https://link.to.the.tcs.confirmation # Link to the ONAP TSC action confirmation
+ commit: # Configure the commit message
+ message: # List of the commit message lines. That's optional
+ - "[INTEGRATION] My awesome first line!"
+ - "Even better second one!"
+ issue_id: INT-2008 # ONAP's JIRA Issue ID is required in the commit message
+ """
+
+ def __init__(self, yaml_file_path: Path) -> None:
+ """Initialize yaml config object.
+
+ Args:
+ yaml_file_path (Path): Path to the config file provided by the user
+
+ """
+ with yaml_file_path.open("r") as yaml_file:
+ self._yaml = YAML().load(yaml_file.read())
+
+ @property
+ def repos_data(self) -> Iterator[Tuple[Path, str]]:
+ """Repositories information iterator.
+
+ Returns the generator with the tuples on which:
+ * first element is a path to the repo
+ * second element is a branch name which
+ is going to be used to prepare a change
+ and later push into
+
+ Yields:
+ Iterator[Tuple[Path, str]]: Tuples of repository data: repo local abs path and branch name
+
+ """
+ for repo_info in self._yaml["repos"]:
+ yield (Path(repo_info["path"]), repo_info["branch"])
+
+ @property
+ def committers_changes(self) -> Iterator[CommitterChange]:
+ """Committer changes iterator.
+
+ Returns the generator with `CommitterChange` class instances
+
+ Yields:
+ Iterator[CommitterChange]: Committer changes generator
+
+ """
+ for committer_change in self._yaml["committers"]:
+ # Start ignoring PyLintBear
+ match action := CommitterActions(committer_change["action"]):
+ case CommitterActions.ADDITION:
+ yield CommitterChange(
+ name=committer_change["name"],
+ action=action,
+ link=committer_change["link"],
+ email=committer_change["email"],
+ company=committer_change["company"],
+ committer_id=committer_change["id"],
+ timezone=committer_change["timezone"],
+ )
+ case CommitterActions.DELETION:
+ yield CommitterChange(
+ name=committer_change["name"],
+ action=action,
+ link=committer_change["link"],
+ )
+ # Stop ignoring
+
+ @property
+ def tsc_changes(self) -> Iterator[TscChange]:
+ """Iterate through tsc section changes.
+
+ Instead of create TSC for every committers change that method
+ groups them.
+
+ Yields:
+ Iterator[TscChange]: TSC section change which is going to be added into INFO.yaml file
+
+ """
+ deletion_tsc_change: Optional[TscChange] = None
+ addition_tsc_change: Optional[TscChange] = None
+ for committer_change in self._yaml["committers"]:
+ # Start ignoring PyLintBear
+ match action := CommitterActions(committer_change["action"]):
+ case CommitterActions.ADDITION:
+ if not addition_tsc_change:
+ addition_tsc_change = TscChange(
+ action, committer_change["link"]
+ )
+ addition_tsc_change.add_name(committer_change["name"])
+ case CommitterActions.DELETION:
+ if not deletion_tsc_change:
+ deletion_tsc_change = TscChange(
+ action, committer_change["link"]
+ )
+ deletion_tsc_change.add_name(committer_change["name"])
+ # Stop ignoring
+ return (
+ change for change in [deletion_tsc_change, addition_tsc_change] if change
+ )
+
+ @property
+ def issue_id(self) -> str:
+ """Onap's Jira issue id.
+
+ That issue id would be used in the commit message.
+
+ Returns:
+ str: ONAP's Jira issue ID
+
+ """
+ return self._yaml["commit"]["issue_id"]
+
+ @property
+ def commit_msg(self) -> Optional[List[str]]:
+ """Commit message lines list.
+
+ Optional, if user didn't provide it in the config file
+ it will returns None
+
+ Returns:
+ Optional[List[str]]: List of the commit message lines or None
+
+ """
+ return self._yaml["commit"].get("message")
+
+
+class OnapRepo:
+ """ONAP repo class."""
+
+ def __init__(self, git_repo_path: Path, git_repo_branch: str) -> None:
+ """Initialize the Onap repo class object.
+
+ During that method an attempt will be made to change the branch to the one specified by the user.
+
+ Args:
+ git_repo_path (Path): Repository local abstract path
+ git_repo_branch (str): Branch name
+
+ Raises:
+ ValueError: Branch provided by the user doesn't exist
+
+ """
+ self._repo: git.Repo = git.Repo(git_repo_path)
+ self._branch: str = git_repo_branch
+ if self._repo.head.ref.name != self._branch:
+ for branch in self._repo.branches:
+ if branch.name == self._branch:
+ branch.checkout()
+ break
+ else:
+ raise ValueError(
+ f"Branch {self._branch} doesn't exist in {self._repo.working_dir} repo"
+ )
+
+ @property
+ def git(self) -> git.Repo:
+ """Git repository object.
+
+ Returns:
+ git.Repo: Repository object.
+
+ """
+ return self._repo
+
+ @property
+ def info_file_path_abs(self) -> Path:
+ """Absolute path to the repositories INFO.yaml file.
+
+        Concatenated repository working tree directory and INFO.yaml
+
+ Returns:
+ Path: Repositories INFO.yaml file abs path
+
+ """
+ return Path(self._repo.working_tree_dir, "INFO.yaml")
+
+ def push_the_change(self, issue_id: str, commit_msg: List[str] = None) -> None:
+ """Push the change to the repository.
+
+ INFO.yaml file will be added to index and then the commit message has to be created.
+        If the user doesn't provide a commit message in the config file, the default one will be used.
+ Commit command will look:
+ `git commit -m <First line> -m <Second line> ... -m <Last line> -m Issue-ID: <issue ID> -s`
+ And push command:
+ `git push origin HEAD:refs/for/<branch defined by user>`
+
+ Args:
+ issue_id (str): ONAP's Jira issue ID
+ commit_msg (List[str], optional): Commit message lines. Defaults to None.
+
+ """
+ index = self.git.index
+ index.add(["INFO.yaml"])
+ if not commit_msg:
+ commit_msg = ["Edit INFO.yaml file."]
+ commit_msg_with_m = list(
+ chain.from_iterable(zip_longest([], commit_msg, fillvalue="-m"))
+ )
+ self.git.git.execute(
+ [
+ "git",
+ "commit",
+ *commit_msg_with_m,
+ "-m",
+ "That change was done by automated integration tool to maintain commiters list in INFO.yaml",
+ "-m",
+ f"Issue-ID: {issue_id}",
+ "-s",
+ ]
+ )
+ self.git.git.execute(["git", "push", "origin", f"HEAD:refs/for/{self._branch}"])
+ print(f"Pushed successfully to {self._repo} respository")
+
+
+class InfoYamlLoader(YAML):
+ """Yaml loader class.
+
+ Contains the options which are same as used in the INFO.yaml file.
+ After making changes and save INFO.yaml file would have same format as before.
+ Several options are set:
+ * indent - 4
+ * sequence dash indent - 4
+ * sequence item indent - 6
+ * explicit start (triple dashes at the file beginning '---')
+ * preserve quotes - keep the quotes for all strings loaded from the file.
+        It doesn't mean that all new strings would also have quotes.
+        To make new strings be stored with quotes ruamel.yaml.scalarstring.SingleQuotedScalarString
+ class needs to be used.
+ """
+
+ def __init__(self, *args, **kwargs) -> None:
+ """Initialize loader object."""
+ super().__init__(*args, **kwargs)
+ self.preserve_quotes = True
+ self.indent = 4
+ self.sequence_dash_offset = 4
+ self.sequence_indent = 6
+ self.explicit_start = True
+
+
+class InfoYamlFile:
+ """Class to store information about INFO.yaml file.
+
+ It's context manager class, so it's possible to use it by
+ ```
+    with InfoYamlFile(Path(...)) as info_file:
+ ...
+ ```
+ It's recommended because at the end all changes are going to be
+ saved on the same path as provided by the user (INFO.yaml will
+    be overwritten)
+
+ """
+
+ def __init__(self, info_yaml_file_path: Path) -> None:
+ """Initialize the object.
+
+ Args:
+ info_yaml_file_path (Path): Path to the INFO.yaml file
+
+ """
+ self._info_yaml_file_path: Path = info_yaml_file_path
+ self._yml = InfoYamlLoader()
+ with info_yaml_file_path.open("r") as info:
+ self._info = self._yml.load(info.read())
+
+ def __enter__(self):
+ """Enter context manager."""
+ return self
+
+ def __exit__(self, *_):
+ """Exit context manager.
+
+ File is going to be saved now.
+
+ """
+ with self._info_yaml_file_path.open("w") as info:
+ self._yml.dump(self._info, info)
+
+ def perform_committer_change(self, committer_change: CommitterChange) -> None:
+ """Perform the committer change action.
+
+        Depending on the action the right method is going to be executed:
+        * add_committer for Addition,
+        * delete_committer for Deletion.
+
+        Args:
+            committer_change (CommitterChange): Committer change object
+
+        Raises:
+            ValueError: Committer to be deleted is not on the committers list
+
+
+ """
+ match committer_change.action:
+ case CommitterActions.ADDITION:
+ self.add_committer(committer_change)
+ case CommitterActions.DELETION:
+ self.delete_committer(committer_change.committer_name)
+ # self.add_tsc_change(committer_change)
+
+ def delete_committer(self, name: str) -> None:
+        """Delete committer action execution.
+
+        Based on the name the committer is going to be removed from the INFO.yaml 'committers' section.
+
+ Args:
+ name (str): Committer name to delete.
+
+ Raises:
+ ValueError: Committer not found on the list
+
+ """
+ for index, committer in enumerate(self._info["committers"]):
+ if committer["name"] == name:
+ del self._info["committers"][index]
+ return
+ raise ValueError(f"Committer {name} is not on the committer list")
+
+ def add_committer(self, commiter_change: CommitterChange) -> None:
+ """Add committer action.
+
+ All provided data are going to be formatted properly and added into INFO.yaml file 'committers' section.
+
+ Args:
+ commiter_change (CommitterChange): Change to be added
+
+ """
+ self._info["committers"].append(
+ {
+ key: SingleQuotedScalarString(value)
+ for key, value in commiter_change.addition_change.items()
+ }
+ )
+
+ def add_tsc_change(self, tsc_change: TscChange) -> None:
+ """Add Technical Steering Committee entry.
+
+ All actions need to be confirmed by the TSC. That entry proves that
+ TSC was informed and approved the change.
+
+ Args:
+            tsc_change (TscChange): Technical Steering Committee change object.
+
+ """
+ self._info["tsc"]["changes"].append(
+ {
+ key: SingleQuotedScalarString(value)
+ for key, value in tsc_change.tsc_change.items()
+ }
+ )
+
+
+@click.command()
+@click.option(
+ "--changes_yaml_file_path",
+ "changes_yaml_file_path",
+ required=True,
+ type=click.Path(exists=True),
+ help="Path to the file where chages are described",
+)
+def update_infos(changes_yaml_file_path):
+ """Run the tool."""
+ yaml_config = YamlConfig(Path(changes_yaml_file_path))
+ for repo, branch in yaml_config.repos_data:
+ onap_repo = OnapRepo(repo, branch)
+ with InfoYamlFile(onap_repo.info_file_path_abs) as info:
+ for committer_change in yaml_config.committers_changes:
+ info.perform_committer_change(committer_change)
+ for tsc_change in yaml_config.tsc_changes:
+ info.add_tsc_change(tsc_change)
+ onap_repo.push_the_change(yaml_config.issue_id, yaml_config.commit_msg)
+
+
+if __name__ == "__main__":
+ update_infos()
diff --git a/ptl/edit_committers_info/requirements.txt b/ptl/edit_committers_info/requirements.txt
new file mode 100644
index 000000000..466b954bc
--- /dev/null
+++ b/ptl/edit_committers_info/requirements.txt
@@ -0,0 +1,3 @@
+click==8.0.3
+GitPython==3.1.24
+ruamel.yaml==0.17.17 \ No newline at end of file
diff --git a/ptl/edit_committers_info/tox.ini b/ptl/edit_committers_info/tox.ini
new file mode 100644
index 000000000..5674a5caa
--- /dev/null
+++ b/ptl/edit_committers_info/tox.ini
@@ -0,0 +1,17 @@
+[tox]
+envlist = black,mypy,docstyle
+skipsdist = True
+
+[testenv:black]
+deps = black
+commands = black --check edit_committers_list.py --target-version py310
+
+[testenv:mypy]
+deps =
+ mypy
+ -rrequirements.txt
+commands = mypy --strict edit_committers_list.py
+
+[testenv:docstyle]
+deps = pydocstyle
+commands = pydocstyle edit_committers_list.py
diff --git a/test/README.md b/test/README.md
index 3fee9a34f..b05f768fe 100644
--- a/test/README.md
+++ b/test/README.md
@@ -1,9 +1,7 @@
-
# ONAP Integration - Test
## Description
-* Code and tools for automatic system testing and continuous integration test flows across ONAP projects
-* Common guidelines, templates, and best practices to help project developers to write unit and system test code
-* Framework and tools for security testing
-
+- Code and tools for automatic system testing and continuous integration test flows across ONAP projects
+- Common guidelines, templates, and best practices to help project developers to write unit and system test code
+- Framework and tools for security testing
diff --git a/test/csit/README.md b/test/csit/README.md
deleted file mode 100644
index 912c4c38b..000000000
--- a/test/csit/README.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# CSIT Scripts - Deprecated
-All CSIT test suites have been relocated to the "integration/csit" repo.
diff --git a/test/ete/scripts/probe-onap.py b/test/ete/scripts/probe-onap.py
index c41b97b0a..2ab849387 100644
--- a/test/ete/scripts/probe-onap.py
+++ b/test/ete/scripts/probe-onap.py
@@ -189,7 +189,7 @@ class Pod(object):
return self.shas_images.values()
def __cmp__(self, other):
- return cmp(self.name, other.name)
+ return cmp(self.name, other.name) # pylint: disable=E0602
def __str__(self):
desc = self.name + "\n"
diff --git a/test/hpa_automation/heat/README.md b/test/hpa_automation/heat/README.md
index 404ddbea1..75a96e121 100644
--- a/test/hpa_automation/heat/README.md
+++ b/test/hpa_automation/heat/README.md
@@ -4,26 +4,26 @@ These guide describes how to run the hpa_automation.py script. It can be used to
## Prerequisites
- - Install ONAP CLI. See [link](https://onap.readthedocs.io/en/dublin/submodules/cli.git/docs/installation_guide.html)
- - Install python mysql.connector (pip install mysql-connector-python)
- - Must have connectivity to the ONAP, a k8s vm already running is recommended as connectivity to the ONAP k8s network is required for the SDC onboarding section.
- - Create policies for homing using the temp_resource_module_name specified in hpa_automation_config.json. Sample policies can be seen in the sample_vfw_policies directory. Be sure to specify the right path to the directory in hpa_automation_config.json, only policies should exist in the directory
- - Create Nodeport for Policy pdp using the pdp_service_expose.yaml file (copy pdp_service_expose.yaml in hpa_automation/heat to rancher and run kubectl apply -f pdp_expose.yaml)
- - Put in the CSAR file to be used to create service models and specify its path in hpa_automation_config.json
- - Modify the SO bpmn configmap to change the SO vnf adapter endpoint to v2. See step 4 [here](https://onap.readthedocs.io/en/casablanca/submodules/integration.git/docs/docs_vfwHPA.html#docs-vfw-hpa)
- - Prepare sdnc_preload file and put in the right path to its location in hpa_automation_config.json
- - Put in the right parameters for automation in hpa_automation_config.json
- - Ensure the insert_policy_models_heat.py script is in the same location as the hpa_automation.py script as the automation scripts calls the insert_policy_models_heat.py script.
+- Install ONAP CLI. See [link](https://onap.readthedocs.io/en/dublin/submodules/cli.git/docs/installation_guide.html)
+- Install python mysql.connector (pip install mysql-connector-python)
+- Must have connectivity to the ONAP, a k8s vm already running is recommended as connectivity to the ONAP k8s network is required for the SDC onboarding section.
+- Create policies for homing using the temp_resource_module_name specified in hpa_automation_config.json. Sample policies can be seen in the sample_vfw_policies directory. Be sure to specify the right path to the directory in hpa_automation_config.json, only policies should exist in the directory
+- Create Nodeport for Policy pdp using the pdp_service_expose.yaml file (copy pdp_service_expose.yaml in hpa_automation/heat to rancher and run kubectl apply -f pdp_expose.yaml)
+- Put in the CSAR file to be used to create service models and specify its path in hpa_automation_config.json
+- Modify the SO bpmn configmap to change the SO vnf adapter endpoint to v2. See step 4 [here](https://onap.readthedocs.io/en/casablanca/submodules/integration.git/docs/docs_vfwHPA.html#docs-vfw-hpa)
+- Prepare sdnc_preload file and put in the right path to its location in hpa_automation_config.json
+- Put in the right parameters for automation in hpa_automation_config.json
+- Ensure the insert_policy_models_heat.py script is in the same location as the hpa_automation.py script as the automation scripts calls the insert_policy_models_heat.py script.
**Points to Note:**
- - The hpa_automation.py runs end to end. It does the following;
- - Create cloud complex
- - Register cloud regions
- - Create service type
- - Create customer and adds customer subscription
- - SDC Onboarding (Create VLM, VSP, VF Model, and service model)
- - Upload policy models and adds policies
- - Create Service Instance and VNF Instance
- - SDNC Preload and Creates VF module
-
- - There are well named functions that do the above items every time the script is run. If you do not wish to run any part of that, you can go into the script and comment out the section at the bottom that handles that portion.
+
+1. The hpa_automation.py runs end to end. It does the following;
+ - Create cloud complex
+ - Register cloud regions
+ - Create service type
+ - Create customer and adds customer subscription
+ - SDC Onboarding (Create VLM, VSP, VF Model, and service model)
+ - Upload policy models and adds policies
+ - Create Service Instance and VNF Instance
+ - SDNC Preload and Creates VF module
+2. There are well named functions that do the above items every time the script is run. If you do not wish to run any part of that, you can go into the script and comment out the section at the bottom that handles that portion.
diff --git a/test/hpa_automation/heat/hpa_automation.py b/test/hpa_automation/heat/hpa_automation.py
index ab1d8d07f..5f90a3be3 100755
--- a/test/hpa_automation/heat/hpa_automation.py
+++ b/test/hpa_automation/heat/hpa_automation.py
@@ -463,25 +463,25 @@ create_customer(parameters)
add_customer_subscription(parameters)
vlm_output = create_vlm(parameters)
-print "vlm parameters={}".format(vlm_output)
+print("vlm parameters={}".format(vlm_output))
vsp_id = create_vsp(parameters, vlm_output)
-print "vsp id={}".format(vsp_id)
+print("vsp id={}".format(vsp_id))
vf_model_dict = create_vf_model(parameters, vsp_id)
-print "vf model parameters={}".format(vf_model_dict)
+print("vf model parameters={}".format(vf_model_dict))
vf_id = vf_model_dict["vf_id"]
vf_unique_id = vf_model_dict["vf_unique_id"]
service_model_list = create_service_model(parameters, vf_unique_id)
-print "service model parameters={}".format(service_model_list)
+print("service model parameters={}".format(service_model_list))
upload_policy_models(parameters)
add_policies(parameters)
#Create Service Instance
service_dict = create_service_instance(parameters, service_model_list)
-print "service instance parameters={}".format(service_dict)
+print("service instance parameters={}".format(service_dict))
service_model_uuid = service_dict["service_uuid"]
time.sleep(2)
db_dict = query_db(parameters, service_model_uuid, vf_id)
@@ -492,16 +492,16 @@ while True:
check_service_instance = os.popen("oclip service-instance-list -u {} -p {} -m {} |grep {}-{}".format(parameters["aai_username"], \
parameters["aai_password"], parameters["aai_url"], parameters["instance-name"], parameters["service_name"])).read()
if check_service_instance:
- print "service instance created successfully"
+ print("service instance created successfully")
#Create VNF Instance
vnf_dict = create_vnf(parameters, service_dict, db_dict, vf_model_dict)
time.sleep(10)
- print "vnf instance parameters={}".format(vnf_dict)
+ print("vnf instance parameters={}".format(vnf_dict))
break
- print "service instance create in progress"
+ print("service instance create in progress")
time.sleep(30)
#Preload VF module and create VF module
sdnc_preload(parameters, db_dict, service_dict)
create_vf_module(parameters, service_dict, vnf_dict, db_dict)
-print "Deployment complete!!!, check cloud to confirm that vf module has been created"
+print("Deployment complete!!!, check cloud to confirm that vf module has been created")
diff --git a/test/hpa_automation/heat/hpa_automation_config.json b/test/hpa_automation/heat/hpa_automation_config.json
index 9587ffd42..3142557fa 100755
--- a/test/hpa_automation/heat/hpa_automation_config.json
+++ b/test/hpa_automation/heat/hpa_automation_config.json
@@ -1,192 +1,165 @@
{
-
-"open_cli_product" : "onap-elalto",
-"open_cli_home" : "/opt/oclip",
-"aai_url" : "https://10.12.5.110:30233",
-"aai_username" : "AAI",
-"aai_password" : "AAI",
-"sdc_onboarding_url" : "https://10.12.5.110:30207/sdc1/feProxy",
-"sdc_catalog_url" : "https://10.12.5.110:30204",
-"sdc_password" : "demo123456!",
-"sdc_creator" : "cs0008",
-"sdc_tester" : "jm0007",
-"sdc_governor" : "gv0001",
-"sdc_operator" : "op0001",
-"csar-file-path" : "/root/vfw_hpa.zip",
-"sdnc_url" : "https://10.12.5.110:30267",
-"sdnc_user" : "admin",
-"sdnc_password" : "Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U",
-"sdnc_preload_file" : "/root/integration/test/hpa_automation/heat/sample_vfw_hpa_preload.txt",
-"so_url" : "http://10.12.5.110:30277",
-"so_username" : "InfraPortalClient",
-"so_password" : "password1$",
-"mariadb_host" : "10.42.8.3",
-"so_mariadb_user" : "root",
-"so_mariadb_password" : "secretpassword",
-"so_mariadb_db" : "catalogdb",
-"multicloud_url" : "http://10.12.5.110:30280",
-"policy_url" : "https://10.12.5.110:30694",
-"policy_username" : "testpdp",
-"policy_password" : "alpha123",
-"policy_directory" : "/root/integration/test/hpa_automation/heat/sample_vfw_policies",
-"policy_scope" : "OSDF_DUBLIN",
-"policy_onapName" : "SampleDemo",
-"policy_config_type": "MicroService",
-"policy_pdp_group" : "default",
-"policy_db_ip" : "10.42.10.14",
-"policy_db_user" : "root",
-"policy_db_password" : "secret",
-"//" : "Put in a temp resource module name, should be the same in policy files, script will replace it in policies",
-"temp_resource_module_name" : "resource_name",
-
-"//" : "#Parameters required to create cloud complex",
-"complex_name" : "clli1",
-"street1" : "street1",
-"street2" : "street2",
-"physical_location" : "phy_type",
-"data_center_code" : "code1",
-"latitude" : "32.89948",
-"longitude" : "97.045443",
-"lata" : "example-lata-val-28399",
-"elevation" : "example-elevation-val-28399",
-"region" : "northwest",
-"state" : "oregon",
-"city" : "hillsboro",
-"postal-code" : "00000",
-"country" : "USA",
-"identity_url" : "example-identity-url-val-56898",
-
-"//" : "#Dictionary containing cloud regions and their Parameters",
-
-
- "cloud_region_data":{
- "ONAP-POD-01-Rail-04":{
- "cloud-region-version" : "titanium_cloud",
- "esr-system-info-id":"8c85ce1f-aa78-45bf-8d6f-4b62784e9bc7",
- "service-url": "http://10.12.11.1:5000/v3",
- "user-name":"PUT IN OPENSTACK USERNAME",
- "password":"PUT IN OPENSTACK PASSWORD",
- "system-type": "VIM",
- "ssl-insecure":true,
- "cloud-domain":"Default",
- "default-tenant":"Integration-HPA",
- "tenant-id" : "709ba629fe194f8699b12f9d6ffd86a0",
- "cloud-type" : "openstack",
- "identity-url": "WillBeUpdatedByMultiCloud",
- "system-status":"active"
- },
- "ONAP-POD-01-Rail-05":{
- "cloud-region-version" : "titanium_cloud",
- "esr-system-info-id":"5c85ce1f-aa78-44bf-8d6f-4b62784e9bc7",
- "service-url": "http://10.12.11.1:5000/v3",
- "user-name":"PUT IN OPENSTACK USERNAME",
- "password":"PUT IN OPENSTACK PASSWORD",
- "system-type": "VIM",
- "ssl-insecure":true,
- "cloud-domain":"Default",
- "default-tenant":"Integration-HPA",
- "tenant-id" : "709ba629fe194f8699b12f9d6ffd86a0",
- "cloud-type" : "openstack",
- "identity-url": "WillBeUpdatedByMultiCloud",
- "system-status":"active"
- },
-
- "ONAP-POD-01-Rail-06":{
- "cloud-region-version" : "titanium_cloud",
- "esr-system-info-id":"5c85ce1f-ab77-4ebf-8d6f-4b62773e9bc7",
- "service-url": "http://10.12.11.1:5000/v3",
- "user-name":"PUT IN OPENSTACK USERNAME",
- "password":"PUT IN OPENSTACK PASSWORD",
- "system-type": "VIM",
- "ssl-insecure":true,
- "cloud-domain":"Default",
- "default-tenant":"Integration-HPA",
- "tenant-id" : "709ba629fe194f8699b12f9d6ffd86a0",
- "cloud-type" : "openstack",
- "identity-url": "WillBeUpdatedByMultiCloud",
- "system-status":"active"
- },
-
- "ONAP-POD-01-Rail-07":{
- "cloud-region-version" : "titanium_cloud",
- "esr-system-info-id":"4c85ce1f-aa78-4ebf-8d6f-4b62773e9bc7",
- "service-url": "http://10.12.11.1:5000/v3",
- "user-name":"PUT IN OPENSTACK USERNAME",
- "password":"PUT IN OPENSTACK PASSWORD",
- "system-type": "VIM",
- "ssl-insecure":true,
- "cloud-domain":"Default",
- "default-tenant":"Integration-HPA",
- "tenant-id" : "709ba629fe194f8699b12f9d6ffd86a0",
- "cloud-type" : "openstack",
- "identity-url": "WillBeUpdatedByMultiCloud",
- "system-status":"active"
- }
- },
-
-
-"//" : "#Parameters to register cloud region",
-"cloud-owner" : "CloudOwner",
-"owner-defined-type" : "t1",
-"cloud-zone" : "CloudZone",
-
-"service_name" : "vFW",
-"customer_name" : "hpa_cust",
-"subscriber_name" : "hpa_cust",
-
-"//" : "Onboarding parameters",
-"vendor-name" : "hpa-vendor",
-"entitlement-pool-name" : "hpa-pool",
-"start-date" : "10/08/2019",
-"expiry-date" : "12/31/2040",
-"key-group-name" : "hpa-key",
-"key-group-type" : "Universal",
-"feature-grp-name" : "hpa-feature",
-"part-no" : "hpa-part",
-"agreement-name" : "hpa-agreement",
-
-"onboarding-method" : "NetworkPackage",
-"vsp-name" : "hpa-vsp",
-"vsp-desc" : "hpa-vsp-desc",
-"vsp-version" : "1.0",
-
-"vf-name" : "hpa-vf",
-"vf-description" : "hpa-vf",
-"vf-remarks" :"remarkss",
-"vf-version" : "1.0",
-
-"//" : "Be sure to include single quotes in parameters that have spaces",
-"project-code" : "000000",
-"service-model-name" : "vfw-hpa",
-"service-model-desc" : "'hpa service model'",
-"icon-id" : "network_l_1-3",
-"category-display" : "'Network L1-3'",
-"category" : "'network l1-3'",
-
-"service-test-remarks" : "test",
-"service-accept-remarks" : "accepted",
-"service-approve-remarks" : "approved",
-
-
-"//" : "service instance parameters",
-"instance-name" : "hpa-instance",
-"requestor-id" : "demo",
-"customer-latitude":"32.897480",
-"customer-longitude":"97.040443",
-"company-name":"some_company",
-"homing-solution" : "oof",
-"orchestrator" : "multicloud",
-"owning-entity-name":"OE-Demonstration",
-"projectName" : "Project-Demonstration",
-"a-la-carte" : true,
-"test-api" : "VNF_API",
-
-"//" : "vnf parameters",
-"platform-name" : "Platform-Demonstration",
-"lob-name" : "LOB-Demonstration",
-"generic-vnf-name" : "vfw-hpa-vnf",
-
-"vf-module-name" : "vfw-sriov-cli",
-"supress-rollback" : "false",
-"use-preload" : "true"
+ "open_cli_product": "onap-elalto",
+ "open_cli_home": "/opt/oclip",
+ "aai_url": "https://10.12.5.110:30233",
+ "aai_username": "AAI",
+ "aai_password": "AAI",
+ "sdc_onboarding_url": "https://10.12.5.110:30207/sdc1/feProxy",
+ "sdc_catalog_url": "https://10.12.5.110:30204",
+ "sdc_password": "demo123456!",
+ "sdc_creator": "cs0008",
+ "sdc_tester": "jm0007",
+ "sdc_governor": "gv0001",
+ "sdc_operator": "op0001",
+ "csar-file-path": "/root/vfw_hpa.zip",
+ "sdnc_url": "https://10.12.5.110:30267",
+ "sdnc_user": "admin",
+ "sdnc_password": "Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U",
+ "sdnc_preload_file": "/root/integration/test/hpa_automation/heat/sample_vfw_hpa_preload.txt",
+ "so_url": "http://10.12.5.110:30277",
+ "so_username": "InfraPortalClient",
+ "so_password": "password1$",
+ "mariadb_host": "10.42.8.3",
+ "so_mariadb_user": "root",
+ "so_mariadb_password": "secretpassword",
+ "so_mariadb_db": "catalogdb",
+ "multicloud_url": "http://10.12.5.110:30280",
+ "policy_url": "https://10.12.5.110:30694",
+ "policy_username": "testpdp",
+ "policy_password": "alpha123",
+ "policy_directory": "/root/integration/test/hpa_automation/heat/sample_vfw_policies",
+ "policy_scope": "OSDF_DUBLIN",
+ "policy_onapName": "SampleDemo",
+ "policy_config_type": "MicroService",
+ "policy_pdp_group": "default",
+ "policy_db_ip": "10.42.10.14",
+ "policy_db_user": "root",
+ "policy_db_password": "secret",
+ "temp_resource_module_name": "resource_name",
+ "complex_name": "clli1",
+ "street1": "street1",
+ "street2": "street2",
+ "physical_location": "phy_type",
+ "data_center_code": "code1",
+ "latitude": "32.89948",
+ "longitude": "97.045443",
+ "lata": "example-lata-val-28399",
+ "elevation": "example-elevation-val-28399",
+ "region": "northwest",
+ "state": "oregon",
+ "city": "hillsboro",
+ "postal-code": "00000",
+ "country": "USA",
+ "identity_url": "example-identity-url-val-56898",
+ "cloud_region_data": {
+ "ONAP-POD-01-Rail-04": {
+ "cloud-region-version": "titanium_cloud",
+ "esr-system-info-id": "8c85ce1f-aa78-45bf-8d6f-4b62784e9bc7",
+ "service-url": "http://10.12.11.1:5000/v3",
+ "user-name": "PUT IN OPENSTACK USERNAME",
+ "password": "PUT IN OPENSTACK PASSWORD",
+ "system-type": "VIM",
+ "ssl-insecure": true,
+ "cloud-domain": "Default",
+ "default-tenant": "Integration-HPA",
+ "tenant-id": "709ba629fe194f8699b12f9d6ffd86a0",
+ "cloud-type": "openstack",
+ "identity-url": "WillBeUpdatedByMultiCloud",
+ "system-status": "active"
+ },
+ "ONAP-POD-01-Rail-05": {
+ "cloud-region-version": "titanium_cloud",
+ "esr-system-info-id": "5c85ce1f-aa78-44bf-8d6f-4b62784e9bc7",
+ "service-url": "http://10.12.11.1:5000/v3",
+ "user-name": "PUT IN OPENSTACK USERNAME",
+ "password": "PUT IN OPENSTACK PASSWORD",
+ "system-type": "VIM",
+ "ssl-insecure": true,
+ "cloud-domain": "Default",
+ "default-tenant": "Integration-HPA",
+ "tenant-id": "709ba629fe194f8699b12f9d6ffd86a0",
+ "cloud-type": "openstack",
+ "identity-url": "WillBeUpdatedByMultiCloud",
+ "system-status": "active"
+ },
+ "ONAP-POD-01-Rail-06": {
+ "cloud-region-version": "titanium_cloud",
+ "esr-system-info-id": "5c85ce1f-ab77-4ebf-8d6f-4b62773e9bc7",
+ "service-url": "http://10.12.11.1:5000/v3",
+ "user-name": "PUT IN OPENSTACK USERNAME",
+ "password": "PUT IN OPENSTACK PASSWORD",
+ "system-type": "VIM",
+ "ssl-insecure": true,
+ "cloud-domain": "Default",
+ "default-tenant": "Integration-HPA",
+ "tenant-id": "709ba629fe194f8699b12f9d6ffd86a0",
+ "cloud-type": "openstack",
+ "identity-url": "WillBeUpdatedByMultiCloud",
+ "system-status": "active"
+ },
+ "ONAP-POD-01-Rail-07": {
+ "cloud-region-version": "titanium_cloud",
+ "esr-system-info-id": "4c85ce1f-aa78-4ebf-8d6f-4b62773e9bc7",
+ "service-url": "http://10.12.11.1:5000/v3",
+ "user-name": "PUT IN OPENSTACK USERNAME",
+ "password": "PUT IN OPENSTACK PASSWORD",
+ "system-type": "VIM",
+ "ssl-insecure": true,
+ "cloud-domain": "Default",
+ "default-tenant": "Integration-HPA",
+ "tenant-id": "709ba629fe194f8699b12f9d6ffd86a0",
+ "cloud-type": "openstack",
+ "identity-url": "WillBeUpdatedByMultiCloud",
+ "system-status": "active"
+ }
+ },
+ "cloud-owner": "CloudOwner",
+ "owner-defined-type": "t1",
+ "cloud-zone": "CloudZone",
+ "service_name": "vFW",
+ "customer_name": "hpa_cust",
+ "subscriber_name": "hpa_cust",
+ "vendor-name": "hpa-vendor",
+ "entitlement-pool-name": "hpa-pool",
+ "start-date": "10/08/2019",
+ "expiry-date": "12/31/2040",
+ "key-group-name": "hpa-key",
+ "key-group-type": "Universal",
+ "feature-grp-name": "hpa-feature",
+ "part-no": "hpa-part",
+ "agreement-name": "hpa-agreement",
+ "onboarding-method": "NetworkPackage",
+ "vsp-name": "hpa-vsp",
+ "vsp-desc": "hpa-vsp-desc",
+ "vsp-version": "1.0",
+ "vf-name": "hpa-vf",
+ "vf-description": "hpa-vf",
+ "vf-remarks": "remarkss",
+ "vf-version": "1.0",
+ "project-code": "000000",
+ "service-model-name": "vfw-hpa",
+ "service-model-desc": "'hpa service model'",
+ "icon-id": "network_l_1-3",
+ "category-display": "'Network L1-3'",
+ "category": "'network l1-3'",
+ "service-test-remarks": "test",
+ "service-accept-remarks": "accepted",
+ "service-approve-remarks": "approved",
+ "instance-name": "hpa-instance",
+ "requestor-id": "demo",
+ "customer-latitude": "32.897480",
+ "customer-longitude": "97.040443",
+ "company-name": "some_company",
+ "homing-solution": "oof",
+ "orchestrator": "multicloud",
+ "owning-entity-name": "OE-Demonstration",
+ "projectName": "Project-Demonstration",
+ "a-la-carte": true,
+ "test-api": "VNF_API",
+ "platform-name": "Platform-Demonstration",
+ "lob-name": "LOB-Demonstration",
+ "generic-vnf-name": "vfw-hpa-vnf",
+ "vf-module-name": "vfw-sriov-cli",
+ "supress-rollback": "false",
+ "use-preload": "true"
}
diff --git a/test/hpa_automation/heat/insert_policy_models_heat.py b/test/hpa_automation/heat/insert_policy_models_heat.py
index 68a1d23bf..549763897 100644
--- a/test/hpa_automation/heat/insert_policy_models_heat.py
+++ b/test/hpa_automation/heat/insert_policy_models_heat.py
@@ -1,13 +1,13 @@
import mysql.connector
import sys
-
+
mydb = mysql.connector.connect(
host= sys.argv[1],
user= sys.argv[2],
passwd= sys.argv[3],
database="onap_sdk",
)
-
+
mycursor = mydb.cursor()
sql = "INSERT INTO microservicemodels (modelname, description, dependency, imported_by, attributes, ref_attributes, sub_attributes, version, annotation, enumValues, dataOrderInfo) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
@@ -22,9 +22,9 @@ val = [
('pciPolicy', 'Pci policy model', '[]', 'demo', 'identity=string:defaultValue-null:required-true:MANY-false:description-null', 'policyScope=MANY-true,policyType=POLICYTYPE:MANY-false,resources=MANY-true,pciProperties=pciProperties_properties:MANY-true:description-null', '{"pciProperties_properties":{"pciOptimizationTimeConstraint":"string:defaultValue-null:required-false:MANY-false:description-null","pciOptimizationNwConstraint":"string:defaultValue-null:required-false:MANY-false:description-null","algoCategory":"string:defaultValue-null:required-false:MANY-false:description-null","pciOptmizationAlgoName":"string:defaultValue-null:required-false:MANY-false:description-null","pciOptimizationPriority":"string:defaultValue-null:required-false:MANY-false:description-null"}}', 'test1', 'olicyScope=matching-true, policyType=matching-true ', ' POLICYTYPE=[pciPolicy,]', '""'),
('subscriberPolicy', 'Subscriber Policy Model', '[]', 'demo', 'identity=string:defaultValue-null:required-true:MANY-false:description-null', 'policyScope=MANY-true,policyType=POLICYTYPE:MANY-false,properties=properties_properties:MANY-false:description-type of a policy', '{"properties_properties":{"provStatus":"PROVSTATUS:defaultValue-null:required-true:MANY-false:description-null","subscriberName":"SUBSCRIBERNAME:defaultValue-null:required-true:MANY-false:description-null","subscriberRole":"SUBSCRIBERROLE:defaultValue-null:required-true:MANY-false:description-null"}}', 'test1', 'policyScope=matching-true, policyType=matching-true, properties=matching-true ', ' SUBSCRIBERNAME=[], SUBSCRIBERROLE=[], POLICYTYPE=[subscriberPolicy,], PROVSTATUS=[]', '""')
]
-
+
mycursor.executemany(sql, val)
-
+
mydb.commit()
-
+
print(mycursor.rowcount, "was inserted.")
diff --git a/test/hpa_automation/tosca/hpa_automation.py b/test/hpa_automation/tosca/hpa_automation.py
index 86b51f562..ac6a161d2 100755
--- a/test/hpa_automation/tosca/hpa_automation.py
+++ b/test/hpa_automation/tosca/hpa_automation.py
@@ -3,7 +3,7 @@
#Prerequisites for machine to run this
#Put in required parameters in hpa_automation_config.json
#Install python-pip (apt install python-pip)
-#Install python mysql.connector (pip install mysql-connector-python)
+#Install python mysql.connector (pip install --no-cache-dir mysql-connector-python)
#Install ONAP CLI
#Must have connectivity to the ONAP, a k8s vm already running is recommended
#Create Preload File, the script will modify the parameters required from serivce model, service instance
@@ -14,7 +14,6 @@
import json
import os
-import time
import argparse
import sys
import requests
@@ -224,16 +223,16 @@ def create_customer(parameters):
def add_customer_subscription(parameters):
subscription_check = 0
for cloud_region, cloud_region_values in (parameters["cloud_region_data"]).iteritems():
- if subscription_check == 0 :
- subscription_string = "oclip subscription-create -x {} -c {} -z {} -e {} -y {} -r {} -m {} -u {} -p {}".format(\
- parameters["customer_name"], cloud_region_values.get("tenant-id"), parameters["cloud-owner"], parameters["service_name"],\
- cloud_region_values.get("default-tenant"), cloud_region, parameters["aai_url"], parameters["aai_username"], parameters["aai_password"] )
- else:
- subscription_string = "oclip subscription-cloud-add -x {} -c {} -z {} -e {} -y {} -r {} -m {} -u {} -p {}".format(\
- parameters["customer_name"], cloud_region_values.get("tenant-id"), parameters["cloud-owner"], parameters["service_name"],\
- cloud_region_values.get("default-tenant"), cloud_region, parameters["aai_url"], parameters["aai_username"], parameters["aai_password"] )
- os.system(subscription_string)
- subscription_check+=1
+ if subscription_check == 0 :
+ subscription_string = "oclip subscription-create -x {} -c {} -z {} -e {} -y {} -r {} -m {} -u {} -p {}".format(\
+ parameters["customer_name"], cloud_region_values.get("tenant-id"), parameters["cloud-owner"], parameters["service_name"],\
+ cloud_region_values.get("default-tenant"), cloud_region, parameters["aai_url"], parameters["aai_username"], parameters["aai_password"] )
+ else:
+ subscription_string = "oclip subscription-cloud-add -x {} -c {} -z {} -e {} -y {} -r {} -m {} -u {} -p {}".format(\
+ parameters["customer_name"], cloud_region_values.get("tenant-id"), parameters["cloud-owner"], parameters["service_name"],\
+ cloud_region_values.get("default-tenant"), cloud_region, parameters["aai_url"], parameters["aai_username"], parameters["aai_password"] )
+ os.system(subscription_string)
+ subscription_check+=1
def register_vnfm_helper(vnfm_key, values, parameters):
#Create vnfm
@@ -250,7 +249,7 @@ def register_vnfm(parameters):
for vnfm_key, vnfm_values in vnfm_params.iteritems():
register_vnfm_helper(vnfm_key, vnfm_values, parameters)
-def add_policy_models(parameters):
+def add_policy_models():
mydb = mysql.connector.connect(
host="policydb",
user="policy_user",
@@ -287,7 +286,7 @@ def add_policy_models(parameters):
'OPERATOR=[<,<equal-sign,>,>equal-sign,equal-sign,!equal-sign,any,all,subset,], POLICYTYPE=[hpa,]', '""')
mycursor.execute(hpa_sql, hpa_val)
-
+
sql = "INSERT INTO microservicemodels (modelname, description, dependency, imported_by, \
attributes, ref_attributes, sub_attributes, version, annotation, enumValues, \
dataOrderInfo) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
@@ -358,19 +357,19 @@ def add_policy_models(parameters):
mycursor.executemany(sql, val)
mydb.commit()
print(mycursor.rowcount, "was inserted.")
-
+
def add_policies(parameters):
#Loop through policy, put in resource_model_name and create policies
for policy in os.listdir(parameters["policy_directory"]):
- policy_name = "{}.{}".format(parameters["policy_scope"], os.path.splitext(policy)[0])
- policy_file = (os.path.join(parameters["policy_directory"], policy))
- #Create policy
- os.system("oclip policy-create-outdated -m {} -u {} -p {} -x {} -S {} -T {} -o {} -b $(cat {})".format(parameters["policy_url"],\
- parameters["policy_username"], parameters["policy_password"], policy_name, parameters["policy_scope"], \
- parameters["policy_config_type"], parameters["policy_onapName"], policy_file))
-
- #Push policy
- os.system("oclip policy-push-outdated -m {} -u {} -p {} -x {} -b {} -c {}".format(parameters["policy_url"], \
+ policy_name = "{}.{}".format(parameters["policy_scope"], os.path.splitext(policy)[0])
+ policy_file = (os.path.join(parameters["policy_directory"], policy))
+ # Create policy
+ os.system("oclip policy-create-outdated -m {} -u {} -p {} -x {} -S {} -T {} -o {} -b $(cat {})".format(parameters["policy_url"],\
+ parameters["policy_username"], parameters["policy_password"], policy_name, parameters["policy_scope"], \
+ parameters["policy_config_type"], parameters["policy_onapName"], policy_file))
+
+ # Push policy
+ os.system("oclip policy-push-outdated -m {} -u {} -p {} -x {} -b {} -c {}".format(parameters["policy_url"], \
parameters["policy_username"], parameters["policy_password"], policy_name, parameters["policy_config_type"],\
parameters["policy_pdp_group"]))
@@ -394,30 +393,30 @@ def create_ns(parameters, csar_id):
ns = parameters["ns"]
ns_create_string = 'oclip vfc-nslcm-create -m {} -c {} -n {} -q {} -S {}'.format(parameters["vfc-url"], \
csar_id, ns.get("name"), parameters["customer_name"], parameters["service_name"])
- print ns_create_string
+ print(ns_create_string)
ns_create_out = (os.popen(ns_create_string)).read()
- print ns_create_out
+ print(ns_create_out)
ns_instance_id = (get_out_helper_2(ns_create_out))[4]
return ns_instance_id
def instantiate_ns(parameters, ns_instance_id):
ns_instantiate_string = 'oclip vfc-nslcm-instantiate -m {} -i {} -c {} -n {}'.format(parameters["vfc-url"], \
ns_instance_id, parameters["location"], parameters["sdc-controller-id"])
- print ns_instantiate_string
+ print(ns_instantiate_string)
ns_instantiate_out = (os.popen(ns_instantiate_string)).read()
return ns_instantiate_out
def terminate_ns(parameters, ns_instance_id):
ns_terminate_string = 'oclip vfc-nslcm-terminate -m {} -i {}'.format(parameters["vfc-url"], ns_instance_id)
- print ns_terminate_string
+ print(ns_terminate_string)
ns_terminate_out = (os.popen(ns_terminate_string)).read()
- print ns_terminate_out
+ print(ns_terminate_out)
return ns_terminate_out
def delete_ns(parameters, ns_instance_id):
ns_delete_string = 'oclip vfc-nslcm-delete -m {} -c {}'.format(parameters["vfc-url"], ns_instance_id)
- print ns_delete_string
+ print(ns_delete_string)
ns_delete_out = (os.popen(ns_delete_string)).read()
return ns_delete_out
@@ -426,7 +425,7 @@ def create_ns_package(parameters):
create_ns_string = 'oclip vfc-catalog-create-ns -m {} -c {} -e {}'.format(parameters["vfc-url"], \
ns.get("key"), ns.get("value"))
cmd_out = (os.popen(create_ns_string)).read()
- out_list = get_out_helper_2(cmd_out)
+ out_list = get_out_helper_2(cmd_out)
return out_list[4]
def create_vnf_package(parameters):
@@ -437,7 +436,7 @@ def create_vnf_package(parameters):
create_vnf_string = 'oclip vfc-catalog-create-vnf -m {} -c {} -e {}'.format(parameters["vfc-url"], \
vnf_values.get("key"), vnf_values.get("value"))
cmd_out = (os.popen(create_vnf_string)).read()
- out_list = get_out_helper_2(cmd_out)
+ out_list = get_out_helper_2(cmd_out)
outputs[vnf_key] = out_list[4]
return outputs
@@ -445,8 +444,8 @@ def create_vnf_package(parameters):
def upload_ns_package(parameters, ns_package_output):
ns = parameters["ns"]
ns_upload_string = '{}/api/nsd/v1/ns_descriptors/{}/nsd_content'.format(parameters["vfc-url"], ns_package_output)
- print ns_upload_string
- print ns.get("path")
+ print(ns_upload_string)
+ print(ns.get("path"))
resp = requests.put(ns_upload_string, files={'file': open(ns.get("path"), 'rb')})
return resp
@@ -496,48 +495,48 @@ register_vnfm(parameters)
# 5.1 upload csar file to catalog
# 5.2 FIXME:Because SDC internal API will change without notice, so I will maually design VNF and Service.
# SDC output data model is not align with VFC, we use an workaround method
-# We just do run time automation
+# We just do run time automation
ns_package_output = ""
if model == "sdc":
- print "use csar file is distributed by sdc"
+ print("use csar file is distributed by sdc")
# output = create_vlm(parameters)
# vsp_dict = create_vsp(parameters, output)
# vf_dict = create_vf_model(parameters, vsp_dict)
# service_model_list = create_service_model(parameters, vf_dict)
-
+
vnf_onboard_output = onboard_vnf(parameters)
- print vnf_onboard_output
+ print(vnf_onboard_output)
ns_out = onboard_ns(parameters)
- print ns_out
+ print(ns_out)
else:
- print "use csar file is uploaded by local"
+ print("use csar file is uploaded by local")
vnf_package_output = create_vnf_package(parameters)
- print vnf_package_output
+ print(vnf_package_output)
ns_package_output = create_ns_package(parameters)
- print ns_package_output
+ print(ns_package_output)
upload_vnf_out = upload_vnf_package(parameters, vnf_package_output)
- print upload_vnf_out
+ print(upload_vnf_out)
ns_out = upload_ns_package(parameters, ns_package_output)
- print ns_out
+ print(ns_out)
# 6.add_policies function not currently working, using curl commands
-add_policy_models(parameters)
+add_policy_models()
add_policies(parameters)
# 7. VFC part
ns_instance_id = create_ns(parameters, ns_out)
-print ns_instance_id
+print(ns_instance_id)
instantiate_ns_output = instantiate_ns(parameters, ns_instance_id)
-print instantiate_ns_output
+print(instantiate_ns_output)
#terminate and delete ns;
-#option args add the end of json file
+#option args add the end of json file
if sys.argv[3] == "terminate":
terminate_ns_output = terminate_ns(parameters, ns_instance_id)
- print terminate_ns_output
+ print(terminate_ns_output)
elif sys.argv[3] == "delete":
delete_ns_output = delete_ns(parameters, ns_instance_id)
- print delete_ns_output
+ print(delete_ns_output)
diff --git a/test/hpa_automation/tosca/vcpe_config.json b/test/hpa_automation/tosca/vcpe_config.json
index 5f306883c..47ca8b371 100755
--- a/test/hpa_automation/tosca/vcpe_config.json
+++ b/test/hpa_automation/tosca/vcpe_config.json
@@ -1,221 +1,203 @@
{
- "open_cli_product" : "onap-dublin",
- "open_cli_home" : "/opt/oclip",
- "aai_url" : "https://10.12.5.224:30233",
- "aai_username" : "AAI",
- "aai_password" : "AAI",
- "sdc_onboarding_url" : "http://10.43.89.129:8081",
- "sdc_catalog_url" : "http://10.12.5.224:30205",
- "sdc_password" : "demo123456!",
- "sdc_creator" : "cs0008",
- "sdc_tester" : "jm0007",
- "sdc_governor" : "gv0001",
- "sdc_operator" : "op0001",
-
- "multicloud_url" : "http://10.12.5.224:30280",
- "policy_url" : "https://10.12.5.224:30694",
- "policy_username" : "testpdp",
- "policy_password" : "alpha123",
- "policy_directory" : "/opt/oclip/dublin/vcpe_policies",
- "policy_scope" : "OSDF_DUBLIN",
- "policy_onapName" : "SampleDemo",
- "policy_config_type": "MicroService",
- "policy_pdp_group" : "default",
- "//" : "Put in a temp resource module name, should be the same in policy files, script will replace it in policies",
- "temp_resource_module_name" : "resource_name",
-
- "//" : "#Parameters required to create cloud complex",
- "complex_name" : "clli1",
- "street1" : "street1",
- "street2" : "street2",
- "physical_location" : "phy_type",
- "data_center_code" : "code1",
- "latitude" : "32.89948",
- "longitude" : "97.045443",
- "lata" : "example-lata-val-28399",
- "elevation" : "example-elevation-val-28399",
- "region" : "northwest",
- "state" : "oregon",
- "city" : "hillsboro",
- "postal-code" : "00000",
- "country" : "USA",
- "identity_url" : "example-identity-url-val-56898",
- "service-model-name" : "vfw-hpa",
- "//" : "#Dictionary containing cloud regions and their Parameters",
-
- "cloud_region_data":{
- "ONAP-POD-01-Rail-05":{
- "cloud-region-version" : "titanium_cloud",
- "esr-system-info-id":"5c85ce1f-aa78-4ebf-8d6f-4b62784e9bc7",
- "service-url": "http://10.12.11.1:5000/v3",
- "user-name":"${cloud-username}",
- "password":"${cloud-password}",
- "system-type": "VIM",
- "ssl-insecure":true,
- "cloud-domain":"Default",
- "default-tenant":"Integration-HPA",
- "tenant-id" : "709ba629fe194f8699b12f9d6ffd86a0",
- "cloud-type" : "openstack",
- "identity-url": "WillBeUpdatedByMultiCloud",
- "system-status":"active"
- },
- "ONAP-POD-01-Rail-06":{
- "cloud-region-version" : "titanium_cloud",
- "esr-system-info-id":"5c85ce1f-aa78-4ebf-8d6f-4b62773e9bc7",
- "service-url": "http://10.12.11.1:5000/v3",
- "user-name":"${cloud-username}",
- "password":"${cloud-password}",
- "system-type": "VIM",
- "ssl-insecure":true,
- "cloud-domain":"Default",
- "default-tenant":"Integration-HPA",
- "tenant-id" : "709ba629fe194f8699b12f9d6ffd86a0",
- "cloud-type" : "openstack",
- "identity-url": "WillBeUpdatedByMultiCloud",
- "system-status":"active"
- },
- "ONAP-POD-01-Rail-07":{
- "cloud-region-version" : "titanium_cloud",
- "esr-system-info-id":"4c85ce1f-aa78-4ebf-8d6f-4b62773e9bc7",
- "service-url": "http://10.12.11.1:5000/v3",
- "user-name":"${cloud-username}",
- "password":"${cloud-password}",
- "system-type": "VIM",
- "ssl-insecure":true,
- "cloud-domain":"Default",
- "default-tenant":"Integration-HPA",
- "tenant-id" : "709ba629fe194f8699b12f9d6ffd86a0",
- "cloud-type" : "openstack",
- "identity-url": "WillBeUpdatedByMultiCloud",
- "system-status":"active"
- }
+ "open_cli_product": "onap-dublin",
+ "open_cli_home": "/opt/oclip",
+ "aai_url": "https://10.12.5.224:30233",
+ "aai_username": "AAI",
+ "aai_password": "AAI",
+ "sdc_onboarding_url": "http://10.43.89.129:8081",
+ "sdc_catalog_url": "http://10.12.5.224:30205",
+ "sdc_password": "demo123456!",
+ "sdc_creator": "cs0008",
+ "sdc_tester": "jm0007",
+ "sdc_governor": "gv0001",
+ "sdc_operator": "op0001",
+ "multicloud_url": "http://10.12.5.224:30280",
+ "policy_url": "https://10.12.5.224:30694",
+ "policy_username": "testpdp",
+ "policy_password": "alpha123",
+ "policy_directory": "/opt/oclip/dublin/vcpe_policies",
+ "policy_scope": "OSDF_DUBLIN",
+ "policy_onapName": "SampleDemo",
+ "policy_config_type": "MicroService",
+ "policy_pdp_group": "default",
+ "temp_resource_module_name": "resource_name",
+ "complex_name": "clli1",
+ "street1": "street1",
+ "street2": "street2",
+ "physical_location": "phy_type",
+ "data_center_code": "code1",
+ "latitude": "32.89948",
+ "longitude": "97.045443",
+ "lata": "example-lata-val-28399",
+ "elevation": "example-elevation-val-28399",
+ "region": "northwest",
+ "state": "oregon",
+ "city": "hillsboro",
+ "postal-code": "00000",
+ "country": "USA",
+ "identity_url": "example-identity-url-val-56898",
+ "service-model-name": "vcpe-hpa",
+ "cloud_region_data": {
+ "ONAP-POD-01-Rail-05": {
+ "cloud-region-version": "titanium_cloud",
+ "esr-system-info-id": "5c85ce1f-aa78-4ebf-8d6f-4b62784e9bc7",
+ "service-url": "http://10.12.11.1:5000/v3",
+ "user-name": "${cloud-username}",
+ "password": "${cloud-password}",
+ "system-type": "VIM",
+ "ssl-insecure": true,
+ "cloud-domain": "Default",
+ "default-tenant": "Integration-HPA",
+ "tenant-id": "709ba629fe194f8699b12f9d6ffd86a0",
+ "cloud-type": "openstack",
+ "identity-url": "WillBeUpdatedByMultiCloud",
+ "system-status": "active"
},
-
- "//" : "#Parameters to register cloud region",
- "cloud-owner" : "CloudOwner",
- "owner-defined-type" : "t1",
- "cloud-zone" : "CloudZone",
-
- "service_name" : "vCPE",
- "customer_name" : "hpa_cust",
- "subscriber_name" : "hpa_cust",
-
- "//" : "Onboarding parameters",
- "vendor-name" : "Intel",
- "entitlement-pool-name" : "hpa-pool",
- "entitlement-description" : "hpa-pool",
- "start-date" : "04/23/2019",
- "expiry-date" : "12/31/2040",
- "key-group-name" : "hpa-key",
- "key-group-type" : "Universal",
- "feature-grp-name" : "hpa-feature",
- "feature-grp-desc" : "hpa-feature",
- "part-no" : "hpa-part",
- "agreement-name" : "hpa-agreement",
- "agreement-desc" : "hpa-agreement",
-
- "onboarding-method" : "NetworkPackage",
-
- "//" : "Be sure to include single quotes in parameters that have spaces",
- "project-code" : "000000",
- "service-model-name" : "vcpe-hpa",
- "service-model-desc" : "'hpa service model'",
- "icon-id" : "network_l_1-3",
- "category-display" : "'Network L1-3'",
- "category" : "'network l1-3'",
-
- "service-test-remarks" : "test",
- "service-accept-remarks" : "accepted",
- "service-approve-remarks" : "approved",
-
- "//" : "#Parameters to vfc",
- "vfc-url": "http://10.12.5.224:30280",
- "vnfs":{
- "infra":{
- "path": "/opt/oclip/dublin/infra.csar",
- "csar-id": "You need change it",
- "vsp-name" : "infra-hpa-vsp",
- "vsp-desc" : "infra-hpa-vsp-desc",
- "vsp-version" : "1.0",
- "vf-name" : "infra-hpa-vf",
- "vf-description" : "infra-hpa-vf",
- "vf-remarks" :"remarkss",
- "vf-version" : "1.0",
- "key": "key2",
- "value": "value2"
- },
- "vgmux":{
- "path": "/opt/oclip/dublin/vgmux.csar",
- "csar-id": "You need change it",
- "vsp-name" : "vgmux-hpa-vsp",
- "vsp-desc" : "vgmux-hpa-vsp-desc",
- "vsp-version" : "1.0",
- "vf-name" : "vgmux-hpa-vf",
- "vf-description" : "vgmux-hpa-vf",
- "vf-remarks" :"remarkss",
- "vf-version" : "1.0",
- "key": "key2",
- "value": "value2"
- },
- "vbng":{
- "path": "/opt/oclip/dublin/vbng.csar",
- "csar-id": "You need change it",
- "vsp-name" : "vbng-hpa-vsp",
- "vsp-desc" : "vbng-hpa-vsp-desc",
- "vsp-version" : "1.0",
- "vf-name" : "vbng-hpa-vf",
- "vf-description" : "vbng-hpa-vf",
- "vf-remarks" :"remarkss",
- "vf-version" : "1.0",
- "key": "key2",
- "value": "value2"
- },
- "vbrgemu":{
- "path": "/opt/oclip/dublin/vbrgemu.csar",
- "csar-id": "You need change it",
- "vsp-name" : "vbrgemu-hpa-vsp",
- "vsp-desc" : "vbgremu-hpa-vsp-desc",
- "vsp-version" : "1.0",
- "vf-name" : "vbgremu-hpa-vf",
- "vf-description" : "vbgremu-hpa-vf",
- "vf-remarks" :"remarkss",
- "vf-version" : "1.0",
- "key": "key2",
- "value": "value2"
- },
- "vgw":{
- "path": "/opt/oclip/dublin/vgw.csar",
- "csar-id": "You need change it",
- "vsp-name" : "vgw-hpa-vsp",
- "vsp-desc" : "vgw-hpa-vsp-desc",
- "vsp-version" : "1.0",
- "vf-name" : "vgw-hpa-vf",
- "vf-description" : "vgw-hpa-vf",
- "vf-remarks" :"remarkss",
- "vf-version" : "1.0",
- "key": "key2",
- "value": "value2"
- }
+ "ONAP-POD-01-Rail-06": {
+ "cloud-region-version": "titanium_cloud",
+ "esr-system-info-id": "5c85ce1f-aa78-4ebf-8d6f-4b62773e9bc7",
+ "service-url": "http://10.12.11.1:5000/v3",
+ "user-name": "${cloud-username}",
+ "password": "${cloud-password}",
+ "system-type": "VIM",
+ "ssl-insecure": true,
+ "cloud-domain": "Default",
+ "default-tenant": "Integration-HPA",
+ "tenant-id": "709ba629fe194f8699b12f9d6ffd86a0",
+ "cloud-type": "openstack",
+ "identity-url": "WillBeUpdatedByMultiCloud",
+ "system-status": "active"
},
- "ns":{
- "csar-id": "You need change it",
- "key": "key1",
- "value": "value1",
- "path": "/opt/oclip/dublin/ns_vgw.csar",
- "name": "vcpe1"
+ "ONAP-POD-01-Rail-07": {
+ "cloud-region-version": "titanium_cloud",
+ "esr-system-info-id": "4c85ce1f-aa78-4ebf-8d6f-4b62773e9bc7",
+ "service-url": "http://10.12.11.1:5000/v3",
+ "user-name": "${cloud-username}",
+ "password": "${cloud-password}",
+ "system-type": "VIM",
+ "ssl-insecure": true,
+ "cloud-domain": "Default",
+ "default-tenant": "Integration-HPA",
+ "tenant-id": "709ba629fe194f8699b12f9d6ffd86a0",
+ "cloud-type": "openstack",
+ "identity-url": "WillBeUpdatedByMultiCloud",
+ "system-status": "active"
+ }
+ },
+ "cloud-owner": "CloudOwner",
+ "owner-defined-type": "t1",
+ "cloud-zone": "CloudZone",
+ "service_name": "vCPE",
+ "customer_name": "hpa_cust",
+ "subscriber_name": "hpa_cust",
+ "vendor-name": "Intel",
+ "entitlement-pool-name": "hpa-pool",
+ "entitlement-description": "hpa-pool",
+ "start-date": "04/23/2019",
+ "expiry-date": "12/31/2040",
+ "key-group-name": "hpa-key",
+ "key-group-type": "Universal",
+ "feature-grp-name": "hpa-feature",
+ "feature-grp-desc": "hpa-feature",
+ "part-no": "hpa-part",
+ "agreement-name": "hpa-agreement",
+ "agreement-desc": "hpa-agreement",
+ "onboarding-method": "NetworkPackage",
+ "project-code": "000000",
+ "service-model-desc": "'hpa service model'",
+ "icon-id": "network_l_1-3",
+ "category-display": "'Network L1-3'",
+ "category": "'network l1-3'",
+ "service-test-remarks": "test",
+ "service-accept-remarks": "accepted",
+ "service-approve-remarks": "approved",
+ "vfc-url": "http://10.12.5.224:30280",
+ "vnfs": {
+ "infra": {
+ "path": "/opt/oclip/dublin/infra.csar",
+ "csar-id": "You need change it",
+ "vsp-name": "infra-hpa-vsp",
+ "vsp-desc": "infra-hpa-vsp-desc",
+ "vsp-version": "1.0",
+ "vf-name": "infra-hpa-vf",
+ "vf-description": "infra-hpa-vf",
+ "vf-remarks": "remarkss",
+ "vf-version": "1.0",
+ "key": "key2",
+ "value": "value2"
},
- "location": "CloudOwner_ONAP-POD-01-Rail-07",
- "vnfm_params":{
- "GVNFMDRIVER":{
- "type": "gvnfmdriver",
- "vendor": "vfc",
- "version": "v1.0",
- "url": "http://msb-iag:80/",
- "vim-id": "CloudOwner_ONAP-POD-01-Rail-07",
- "user-name": "admin",
- "user-password": "admin",
- "vnfm-version": "v1.0"
- }
+ "vgmux": {
+ "path": "/opt/oclip/dublin/vgmux.csar",
+ "csar-id": "You need change it",
+ "vsp-name": "vgmux-hpa-vsp",
+ "vsp-desc": "vgmux-hpa-vsp-desc",
+ "vsp-version": "1.0",
+ "vf-name": "vgmux-hpa-vf",
+ "vf-description": "vgmux-hpa-vf",
+ "vf-remarks": "remarkss",
+ "vf-version": "1.0",
+ "key": "key2",
+ "value": "value2"
},
- "sdc-controller-id": "2"
+ "vbng": {
+ "path": "/opt/oclip/dublin/vbng.csar",
+ "csar-id": "You need change it",
+ "vsp-name": "vbng-hpa-vsp",
+ "vsp-desc": "vbng-hpa-vsp-desc",
+ "vsp-version": "1.0",
+ "vf-name": "vbng-hpa-vf",
+ "vf-description": "vbng-hpa-vf",
+ "vf-remarks": "remarkss",
+ "vf-version": "1.0",
+ "key": "key2",
+ "value": "value2"
+ },
+ "vbrgemu": {
+ "path": "/opt/oclip/dublin/vbrgemu.csar",
+ "csar-id": "You need change it",
+ "vsp-name": "vbrgemu-hpa-vsp",
+ "vsp-desc": "vbgremu-hpa-vsp-desc",
+ "vsp-version": "1.0",
+ "vf-name": "vbgremu-hpa-vf",
+ "vf-description": "vbgremu-hpa-vf",
+ "vf-remarks": "remarkss",
+ "vf-version": "1.0",
+ "key": "key2",
+ "value": "value2"
+ },
+ "vgw": {
+ "path": "/opt/oclip/dublin/vgw.csar",
+ "csar-id": "You need change it",
+ "vsp-name": "vgw-hpa-vsp",
+ "vsp-desc": "vgw-hpa-vsp-desc",
+ "vsp-version": "1.0",
+ "vf-name": "vgw-hpa-vf",
+ "vf-description": "vgw-hpa-vf",
+ "vf-remarks": "remarkss",
+ "vf-version": "1.0",
+ "key": "key2",
+ "value": "value2"
+ }
+ },
+ "ns": {
+ "csar-id": "You need change it",
+ "key": "key1",
+ "value": "value1",
+ "path": "/opt/oclip/dublin/ns_vgw.csar",
+ "name": "vcpe1"
+ },
+ "location": "CloudOwner_ONAP-POD-01-Rail-07",
+ "vnfm_params": {
+ "GVNFMDRIVER": {
+ "type": "gvnfmdriver",
+ "vendor": "vfc",
+ "version": "v1.0",
+ "url": "http://msb-iag:80/",
+ "vim-id": "CloudOwner_ONAP-POD-01-Rail-07",
+ "user-name": "admin",
+ "user-password": "admin",
+ "vnfm-version": "v1.0"
+ }
+ },
+ "sdc-controller-id": "2"
}
diff --git a/test/hpa_automation/tosca/vcpe_vgw_config.json b/test/hpa_automation/tosca/vcpe_vgw_config.json
index d4bcef901..0291e22c3 100755
--- a/test/hpa_automation/tosca/vcpe_vgw_config.json
+++ b/test/hpa_automation/tosca/vcpe_vgw_config.json
@@ -1,171 +1,153 @@
{
- "open_cli_product" : "onap-dublin",
- "open_cli_home" : "/opt/oclip",
- "aai_url" : "https://10.12.5.224:30233",
- "aai_username" : "AAI",
- "aai_password" : "AAI",
- "sdc_onboarding_url" : "http://10.43.89.129:8081",
- "sdc_catalog_url" : "http://10.12.5.224:30205",
- "sdc_password" : "demo123456!",
- "sdc_creator" : "cs0008",
- "sdc_tester" : "jm0007",
- "sdc_governor" : "gv0001",
- "sdc_operator" : "op0001",
-
- "multicloud_url" : "http://10.12.5.224:30280",
- "policy_url" : "https://10.12.5.224:30694",
- "policy_username" : "testpdp",
- "policy_password" : "alpha123",
- "policy_directory" : "/opt/oclip/dublin/vcpe_policies",
- "policy_scope" : "OSDF_DUBLIN",
- "policy_onapName" : "SampleDemo",
- "policy_config_type": "MicroService",
- "policy_pdp_group" : "default",
- "//" : "Put in a temp resource module name, should be the same in policy files, script will replace it in policies",
- "temp_resource_module_name" : "resource_name",
-
- "//" : "#Parameters required to create cloud complex",
- "complex_name" : "clli1",
- "street1" : "street1",
- "street2" : "street2",
- "physical_location" : "phy_type",
- "data_center_code" : "code1",
- "latitude" : "32.89948",
- "longitude" : "97.045443",
- "lata" : "example-lata-val-28399",
- "elevation" : "example-elevation-val-28399",
- "region" : "northwest",
- "state" : "oregon",
- "city" : "hillsboro",
- "postal-code" : "00000",
- "country" : "USA",
- "identity_url" : "example-identity-url-val-56898",
- "service-model-name" : "vfw-hpa",
- "//" : "#Dictionary containing cloud regions and their Parameters",
-
- "cloud_region_data":{
- "ONAP-POD-01-Rail-05":{
- "cloud-region-version" : "titanium_cloud",
- "esr-system-info-id":"5c85ce1f-aa78-4ebf-8d6f-4b62784e9bc7",
- "service-url": "http://10.12.11.1:5000/v3",
- "user-name":"${cloud-username}",
- "password":"${cloud-password}",
- "system-type": "VIM",
- "ssl-insecure":true,
- "cloud-domain":"Default",
- "default-tenant":"Integration-HPA",
- "tenant-id" : "709ba629fe194f8699b12f9d6ffd86a0",
- "cloud-type" : "openstack",
- "identity-url": "WillBeUpdatedByMultiCloud",
- "system-status":"active"
- },
- "ONAP-POD-01-Rail-06":{
- "cloud-region-version" : "titanium_cloud",
- "esr-system-info-id":"5c85ce1f-aa78-4ebf-8d6f-4b62773e9bc7",
- "service-url": "http://10.12.11.1:5000/v3",
- "user-name":"${cloud-username}",
- "password":"${cloud-password}",
- "system-type": "VIM",
- "ssl-insecure":true,
- "cloud-domain":"Default",
- "default-tenant":"Integration-HPA",
- "tenant-id" : "709ba629fe194f8699b12f9d6ffd86a0",
- "cloud-type" : "openstack",
- "identity-url": "WillBeUpdatedByMultiCloud",
- "system-status":"active"
- },
- "ONAP-POD-01-Rail-07":{
- "cloud-region-version" : "titanium_cloud",
- "esr-system-info-id":"4c85ce1f-aa78-4ebf-8d6f-4b62773e9bc7",
- "service-url": "http://10.12.11.1:5000/v3",
- "user-name":"${cloud-username}",
- "password":"${cloud-password}",
- "system-type": "VIM",
- "ssl-insecure":true,
- "cloud-domain":"Default",
- "default-tenant":"Integration-HPA",
- "tenant-id" : "709ba629fe194f8699b12f9d6ffd86a0",
- "cloud-type" : "openstack",
- "identity-url": "WillBeUpdatedByMultiCloud",
- "system-status":"active"
- }
+ "open_cli_product": "onap-dublin",
+ "open_cli_home": "/opt/oclip",
+ "aai_url": "https://10.12.5.224:30233",
+ "aai_username": "AAI",
+ "aai_password": "AAI",
+ "sdc_onboarding_url": "http://10.43.89.129:8081",
+ "sdc_catalog_url": "http://10.12.5.224:30205",
+ "sdc_password": "demo123456!",
+ "sdc_creator": "cs0008",
+ "sdc_tester": "jm0007",
+ "sdc_governor": "gv0001",
+ "sdc_operator": "op0001",
+ "multicloud_url": "http://10.12.5.224:30280",
+ "policy_url": "https://10.12.5.224:30694",
+ "policy_username": "testpdp",
+ "policy_password": "alpha123",
+ "policy_directory": "/opt/oclip/dublin/vcpe_policies",
+ "policy_scope": "OSDF_DUBLIN",
+ "policy_onapName": "SampleDemo",
+ "policy_config_type": "MicroService",
+ "policy_pdp_group": "default",
+ "temp_resource_module_name": "resource_name",
+ "complex_name": "clli1",
+ "street1": "street1",
+ "street2": "street2",
+ "physical_location": "phy_type",
+ "data_center_code": "code1",
+ "latitude": "32.89948",
+ "longitude": "97.045443",
+ "lata": "example-lata-val-28399",
+ "elevation": "example-elevation-val-28399",
+ "region": "northwest",
+ "state": "oregon",
+ "city": "hillsboro",
+ "postal-code": "00000",
+ "country": "USA",
+ "identity_url": "example-identity-url-val-56898",
+ "service-model-name": "vcpe-hpa",
+ "cloud_region_data": {
+ "ONAP-POD-01-Rail-05": {
+ "cloud-region-version": "titanium_cloud",
+ "esr-system-info-id": "5c85ce1f-aa78-4ebf-8d6f-4b62784e9bc7",
+ "service-url": "http://10.12.11.1:5000/v3",
+ "user-name": "${cloud-username}",
+ "password": "${cloud-password}",
+ "system-type": "VIM",
+ "ssl-insecure": true,
+ "cloud-domain": "Default",
+ "default-tenant": "Integration-HPA",
+ "tenant-id": "709ba629fe194f8699b12f9d6ffd86a0",
+ "cloud-type": "openstack",
+ "identity-url": "WillBeUpdatedByMultiCloud",
+ "system-status": "active"
},
-
- "//" : "#Parameters to register cloud region",
- "cloud-owner" : "CloudOwner",
- "owner-defined-type" : "t1",
- "cloud-zone" : "CloudZone",
-
- "service_name" : "vCPE",
- "customer_name" : "hpa_cust",
- "subscriber_name" : "hpa_cust",
-
- "//" : "Onboarding parameters",
- "vendor-name" : "Intel",
- "entitlement-pool-name" : "hpa-pool",
- "entitlement-description" : "hpa-pool",
- "start-date" : "04/23/2019",
- "expiry-date" : "12/31/2040",
- "key-group-name" : "hpa-key",
- "key-group-type" : "Universal",
- "feature-grp-name" : "hpa-feature",
- "feature-grp-desc" : "hpa-feature",
- "part-no" : "hpa-part",
- "agreement-name" : "hpa-agreement",
- "agreement-desc" : "hpa-agreement",
-
- "onboarding-method" : "NetworkPackage",
-
- "//" : "Be sure to include single quotes in parameters that have spaces",
- "project-code" : "000000",
- "service-model-name" : "vcpe-hpa",
- "service-model-desc" : "'hpa service model'",
- "icon-id" : "network_l_1-3",
- "category-display" : "'Network L1-3'",
- "category" : "'network l1-3'",
-
- "service-test-remarks" : "test",
- "service-accept-remarks" : "accepted",
- "service-approve-remarks" : "approved",
-
- "//" : "#Parameters to vfc",
- "vfc-url": "http://10.12.5.224:30280",
- "vnfs":{
- "vgw":{
- "path": "/opt/oclip/dublin/vgw.csar",
- "csar-id": "You need change it",
- "url": "http://msb-iag:80",
- "vsp-name" : "vgw-hpa-vsp",
- "vsp-desc" : "vgw-hpa-vsp-desc",
- "vsp-version" : "1.0",
- "vf-name" : "vgw-hpa-vf",
- "vf-description" : "vgw-hpa-vf",
- "vf-remarks" :"remarkss",
- "vf-version" : "1.0",
- "key": "key2",
- "value": "value2"
- }
+ "ONAP-POD-01-Rail-06": {
+ "cloud-region-version": "titanium_cloud",
+ "esr-system-info-id": "5c85ce1f-aa78-4ebf-8d6f-4b62773e9bc7",
+ "service-url": "http://10.12.11.1:5000/v3",
+ "user-name": "${cloud-username}",
+ "password": "${cloud-password}",
+ "system-type": "VIM",
+ "ssl-insecure": true,
+ "cloud-domain": "Default",
+ "default-tenant": "Integration-HPA",
+ "tenant-id": "709ba629fe194f8699b12f9d6ffd86a0",
+ "cloud-type": "openstack",
+ "identity-url": "WillBeUpdatedByMultiCloud",
+ "system-status": "active"
},
- "ns":{
- "url": "http://msb-iag:80",
- "csar-id": "You need change it",
- "key": "key1",
- "value": "value1",
- "path": "/opt/oclip/dublin/ns_vgw.csar",
- "name": "vcpe1"
- },
- "location": "CloudOwner_ONAP-POD-01-Rail-07",
- "vnfm_params":{
- "GVNFMDRIVER":{
- "type": "gvnfmdriver",
- "vendor": "vfc",
- "version": "v1.0",
- "url": "http://msb-iag:80/",
- "vim-id": "CloudOwner_ONAP-POD-01-Rail-07",
- "user-name": "admin",
- "user-password": "admin",
- "vnfm-version": "v1.0"
- }
- },
- "sdc-controller-id": "2"
+ "ONAP-POD-01-Rail-07": {
+ "cloud-region-version": "titanium_cloud",
+ "esr-system-info-id": "4c85ce1f-aa78-4ebf-8d6f-4b62773e9bc7",
+ "service-url": "http://10.12.11.1:5000/v3",
+ "user-name": "${cloud-username}",
+ "password": "${cloud-password}",
+ "system-type": "VIM",
+ "ssl-insecure": true,
+ "cloud-domain": "Default",
+ "default-tenant": "Integration-HPA",
+ "tenant-id": "709ba629fe194f8699b12f9d6ffd86a0",
+ "cloud-type": "openstack",
+ "identity-url": "WillBeUpdatedByMultiCloud",
+ "system-status": "active"
+ }
+ },
+ "cloud-owner": "CloudOwner",
+ "owner-defined-type": "t1",
+ "cloud-zone": "CloudZone",
+ "service_name": "vCPE",
+ "customer_name": "hpa_cust",
+ "subscriber_name": "hpa_cust",
+ "vendor-name": "Intel",
+ "entitlement-pool-name": "hpa-pool",
+ "entitlement-description": "hpa-pool",
+ "start-date": "04/23/2019",
+ "expiry-date": "12/31/2040",
+ "key-group-name": "hpa-key",
+ "key-group-type": "Universal",
+ "feature-grp-name": "hpa-feature",
+ "feature-grp-desc": "hpa-feature",
+ "part-no": "hpa-part",
+ "agreement-name": "hpa-agreement",
+ "agreement-desc": "hpa-agreement",
+ "onboarding-method": "NetworkPackage",
+ "project-code": "000000",
+ "service-model-desc": "'hpa service model'",
+ "icon-id": "network_l_1-3",
+ "category-display": "'Network L1-3'",
+ "category": "'network l1-3'",
+ "service-test-remarks": "test",
+ "service-accept-remarks": "accepted",
+ "service-approve-remarks": "approved",
+ "vfc-url": "http://10.12.5.224:30280",
+ "vnfs": {
+ "vgw": {
+ "path": "/opt/oclip/dublin/vgw.csar",
+ "csar-id": "You need change it",
+ "url": "http://msb-iag:80",
+ "vsp-name": "vgw-hpa-vsp",
+ "vsp-desc": "vgw-hpa-vsp-desc",
+ "vsp-version": "1.0",
+ "vf-name": "vgw-hpa-vf",
+ "vf-description": "vgw-hpa-vf",
+ "vf-remarks": "remarkss",
+ "vf-version": "1.0",
+ "key": "key2",
+ "value": "value2"
+ }
+ },
+ "ns": {
+ "url": "http://msb-iag:80",
+ "csar-id": "You need change it",
+ "key": "key1",
+ "value": "value1",
+ "path": "/opt/oclip/dublin/ns_vgw.csar",
+ "name": "vcpe1"
+ },
+ "location": "CloudOwner_ONAP-POD-01-Rail-07",
+ "vnfm_params": {
+ "GVNFMDRIVER": {
+ "type": "gvnfmdriver",
+ "vendor": "vfc",
+ "version": "v1.0",
+ "url": "http://msb-iag:80/",
+ "vim-id": "CloudOwner_ONAP-POD-01-Rail-07",
+ "user-name": "admin",
+ "user-password": "admin",
+ "vnfm-version": "v1.0"
+ }
+ },
+ "sdc-controller-id": "2"
}
diff --git a/test/legal/docker_license_analysis/Dockerfile.sample b/test/legal/docker_license_analysis/Dockerfile.sample
new file mode 100644
index 000000000..851f92d6d
--- /dev/null
+++ b/test/legal/docker_license_analysis/Dockerfile.sample
@@ -0,0 +1,2 @@
+FROM scratch
+RUN echo "This is dummy image."
diff --git a/test/legal/docker_license_analysis/README.rst b/test/legal/docker_license_analysis/README.rst
new file mode 100644
index 000000000..71a4a3394
--- /dev/null
+++ b/test/legal/docker_license_analysis/README.rst
@@ -0,0 +1,80 @@
+#####################################
+License Analysis of Docker Containers
+#####################################
+
+Vagrantfile that includes tern + scancode for performing dynamic license analysis
+of Docker containers. It takes either a Dockerfile or an image name to analyse.
+
+
+*********
+Reasoning
+*********
+
+While there are tools supporting ONAP development that perform license analysis
+and produce an SBoM, they do it via static analysis. When a base image
+introduces a licensing issue, we have no way to know from those tools.
+Additionally, the tools performing that static analysis require special access
+rights which only a few people have. This Vagrant box is meant to be run as close
+to the Docker build as possible to give feedback directly to developers.
+
+It has been placed in a VM for the following reasons:
+
+- reproducibility
+- tern requires:
+
+ * access to /dev/fuse
+ * access to docker.sock
+
+Due to the above requirements, running in Docker would require:
+
+ * running container in --privileged mode
+ * passing host's /dev/fuse to the container
+ * passing host's docker.sock to the container
+
+Running it in a VM creates new instances of both, which should alleviate security
+issues that could be present when running on the host or in Docker.
+
+
+***************
+Getting started
+***************
+
+Prerequisites
+=============
+
+`Vagrant <https://www.vagrantup.com/downloads>`_
+
+
+Running
+=======
+
+Dockerfile analysis
+-------------------
+
+Substitute the DOCKER_FILE_ANALYSE value with the location of the Dockerfile
+you want to analyse::
+
+ DOCKER_FILE_ANALYSE="/path/to/Dockerfile" vagrant up
+
+Please mind that the Docker on the VM needs to be able to download the base
+image for analysis to take place.
+
+Docker image analysis
+---------------------
+
+
+Substitute the DOCKER_IMAGE_ANALYSE value with your image of choice::
+
+ DOCKER_IMAGE_ANALYSE="debian:buster" vagrant up
+
+Please mind that the Docker on the VM needs to be able to download the image
+for analysis to take place.
+
+Gathering results
+=================
+
+::
+
+ vagrant ssh-config > ssh-config
+ scp -F ssh-config default:~/ternvenv/report-scancode.json report-scancode.json
+
diff --git a/test/legal/docker_license_analysis/Vagrantfile b/test/legal/docker_license_analysis/Vagrantfile
new file mode 100644
index 000000000..5a0333942
--- /dev/null
+++ b/test/legal/docker_license_analysis/Vagrantfile
@@ -0,0 +1,102 @@
+# -*- mode: ruby -*-
+# vi: set ft=ruby :
+
+VM_MEMORY = 2 * 1024
+VM_CPUS = 2
+VM_DISK = 128
+VM_STORAGE_POOL = "default"
+VM_USER = "vagrant"
+
+# Dockerfile to analyse
+DOCKER_FILE = ENV["DOCKER_FILE_ANALYSE"] || "Dockerfile.sample"
+DOCKER_FILE_PATH = "/home/vagrant/ternvenv/Dockerfile"
+# Docker image to analyse (in form of "debian:latest").
+# Takes precedence over DOCKER_FILE
+DOCKER_IMAGE = ENV['DOCKER_IMAGE_ANALYSE']
+
+$install_docker = <<-SCRIPT
+ apt-get update
+ apt-get install --yes \
+ apt-transport-https \
+ ca-certificates \
+ curl \
+ gnupg-agent \
+ software-properties-common
+ curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
+ add-apt-repository \
+ "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
+ $(lsb_release -cs) \
+ stable"
+ apt-get update
+ apt-get install --yes \
+ docker-ce docker-ce-cli containerd.io
+ groupadd -f docker
+ usermod -a -G docker $USER
+SCRIPT
+
+$install_python = <<-SCRIPT
+ apt-get update
+ apt-get install --yes \
+ python3.8 libpython3.8-dev python3-pip python3.8-venv python3-setuptools\
+ python3-apt \
+ attr bzip2 xz-utils zlib1g libxml2-dev libxslt1-dev \
+ findutils git gnupg2 tar util-linux
+ pip3 install --upgrade pip
+SCRIPT
+
+$install_tern = <<-SCRIPT
+ cd /home/$USER
+ python3 -m venv ternvenv
+ cd ternvenv
+ source bin/activate
+ pip3 install --upgrade pip
+ pip3 install --no-cache-dir tern scancode-toolkit[full]
+SCRIPT
+
+Vagrant.configure("2") do |config|
+ config.vm.box = "generic/ubuntu2004"
+ config.vm.hostname = "vagrant"
+
+ config.vm.provider :virtualbox do |v|
+ v.name = config.vm.hostname
+ v.memory = VM_MEMORY
+ v.cpus = VM_CPUS
+ end
+
+ config.vm.provider :libvirt do |v|
+ v.memory = VM_MEMORY
+ v.cpus = VM_CPUS
+ v.machine_virtual_size = VM_DISK
+ v.storage_pool_name = VM_STORAGE_POOL
+ end
+
+ config.vm.synced_folder '.', '/vagrant', disabled: true
+
+ config.vm.provision "install_docker", type: "shell" do |s|
+ s.privileged = true
+    s.env = {"DEBIAN_FRONTEND" => "noninteractive", "USER" => VM_USER}
+ s.inline = $install_docker
+ s.reset = true
+ end
+
+ config.vm.provision "install_python", type: "shell" do |s|
+ s.privileged = true
+ s.env = {"DEBIAN_FRONTEND" => "noninteractive"}
+ s.inline = $install_python
+ end
+
+ config.vm.provision "install_tern", type: "shell" do |s|
+ s.privileged = false
+    s.env = {"USER" => VM_USER}
+ s.inline = $install_tern
+ end
+
+ # Add the Dockerfile for analysis to the Vagrant box
+ config.vm.provision "file", source: DOCKER_FILE, destination: DOCKER_FILE_PATH
+
+ config.vm.provision "license_analysis", type: "shell" do |s|
+ s.privileged = false
+    s.env = {"IMAGE" => DOCKER_IMAGE, "FILE" => DOCKER_FILE_PATH}
+ s.path = "tools/analysis.sh"
+ end
+end
diff --git a/test/legal/docker_license_analysis/tools/analysis.sh b/test/legal/docker_license_analysis/tools/analysis.sh
new file mode 100755
index 000000000..a667ce69c
--- /dev/null
+++ b/test/legal/docker_license_analysis/tools/analysis.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env sh
+# Analysis is run twice to populate tern cache:
+# https://github.com/tern-tools/tern/issues/818
+
+TERNVENV="${TERNVENV:-$HOME/ternvenv}"
+
+if [ -d "$TERNVENV" ]; then
+ cd $TERNVENV
+ if [ -f bin/activate ]; then
+ . bin/activate
+ else
+ echo "Tern virtual environment is not initialized!" >&2;
+ exit 1
+ fi
+else
+  echo "Ternvenv directory not found, if it is not in $HOME/ternvenv set the \$TERNVENV to your location." >&2;
+ exit 1
+fi
+
+if [ -n "$IMAGE" ]; then
+ echo 'Running Docker Image analysis'
+ tern report -f json -o /dev/null -i "$IMAGE"
+ tern report -f json -o report-scancode.json -x scancode -i "$IMAGE"
+elif [ -f "$FILE" ]; then
+ echo 'Running Dockerfile analysis'
+ tern report -f json -o /dev/null -d $FILE
+ tern report -f json -o report-scancode.json -x scancode -d $FILE
+else
+ echo "\$IMAGE is not set and \$FILE does not point to a file." >&2;
+fi
+
diff --git a/test/mocks/aai-simulator/aai-sim/pom.xml b/test/mocks/aai-simulator/aai-sim/pom.xml
new file mode 100755
index 000000000..96cc6b6ca
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/pom.xml
@@ -0,0 +1,81 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.onap.aai-simulator</groupId>
+ <artifactId>aai-simulator</artifactId>
+ <version>1.0-SNAPSHOT</version>
+ </parent>
+ <artifactId>aai-sim</artifactId>
+ <properties>
+ <version.aai.schema>1.0.0</version.aai.schema>
+ </properties>
+ <dependencies>
+ <dependency>
+ <groupId>${project.parent.groupId}</groupId>
+ <artifactId>common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.onap.aai.schema-service</groupId>
+ <artifactId>aai-schema</artifactId>
+ <version>${version.aai.schema}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.module</groupId>
+ <artifactId>jackson-module-jaxb-annotations</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-security</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-tomcat</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpclient</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>javax.xml.bind</groupId>
+ <artifactId>jaxb-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.xml.bind</groupId>
+ <artifactId>jaxb-core</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.xml.bind</groupId>
+ <artifactId>jaxb-impl</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>javax.activation</groupId>
+ <artifactId>activation</artifactId>
+ </dependency>
+ </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-maven-plugin</artifactId>
+ <configuration>
+ <mainClass>org.onap.aaisimulator.AaiSimulatorApplication</mainClass>
+ </configuration>
+ <executions>
+ <execution>
+ <goals>
+ <goal>repackage</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-maven-plugin</artifactId>
+ </plugin>
+ </plugins>
+ </build>
+</project> \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/main/java/org/onap/pnfsimulator/integration/Main.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/AaiSimulatorApplication.java
index 7288c2a28..abe186a6c 100644..100755
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/main/java/org/onap/pnfsimulator/integration/Main.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/AaiSimulatorApplication.java
@@ -1,35 +1,38 @@
-/*
+/*-
* ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
-package org.onap.pnfsimulator.integration;
+package org.onap.aaisimulator;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
-import org.springframework.scheduling.annotation.EnableAsync;
-
-@SpringBootApplication
-@EnableAsync
-public class Main {
+import org.springframework.boot.web.servlet.support.SpringBootServletInitializer;
+import org.springframework.cache.annotation.EnableCaching;
- public static void main(String[] args) {
- SpringApplication.run(Main.class, args);
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@EnableCaching
+@SpringBootApplication(scanBasePackages = {"org.onap"})
+public class AaiSimulatorApplication extends SpringBootServletInitializer {
+
+ public static void main(final String[] args) {
+ SpringApplication.run(AaiSimulatorApplication.class, args);
}
}
-
-
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/configration/ApplicationConfigration.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/configration/ApplicationConfigration.java
new file mode 100755
index 000000000..6e53c1483
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/configration/ApplicationConfigration.java
@@ -0,0 +1,109 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.configration;
+
+import java.util.ArrayList;
+import java.util.List;
+import javax.net.ssl.SSLContext;
+import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.ssl.SSLContextBuilder;
+import org.onap.aaisimulator.utils.CacheName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.jackson.Jackson2ObjectMapperBuilderCustomizer;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.cache.concurrent.ConcurrentMapCache;
+import org.springframework.cache.support.SimpleCacheManager;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Profile;
+import org.springframework.core.io.Resource;
+import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
+import org.springframework.web.client.RestTemplate;
+import com.fasterxml.jackson.module.jaxb.JaxbAnnotationModule;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Configuration
+public class ApplicationConfigration {
+ private static final Logger LOGGER = LoggerFactory.getLogger(ApplicationConfigration.class);
+
+
+ @Bean
+ public Jackson2ObjectMapperBuilderCustomizer jacksonCustomizer() {
+ return (mapperBuilder) -> mapperBuilder.modulesToInstall(new JaxbAnnotationModule());
+ }
+
+ @Bean
+ public CacheManager cacheManager() {
+ final SimpleCacheManager manager = new SimpleCacheManager();
+
+ final List<Cache> caches = new ArrayList<>();
+ for (final CacheName cacheName : CacheName.values()) {
+ caches.add(getCache(cacheName.getName()));
+ }
+ manager.setCaches(caches);
+ return manager;
+ }
+
+ private Cache getCache(final String name) {
+ LOGGER.info("Creating cache with name: {}", name);
+ return new ConcurrentMapCache(name);
+ }
+
+ @Profile("!test")
+ @Bean
+ public RestTemplate restTemplate(@Value("${http.client.ssl.trust-store:#{null}}") final Resource trustStore,
+ @Value("${http.client.ssl.trust-store-password:#{null}}") final String trustStorePassword)
+ throws Exception {
+ LOGGER.info("Setting up RestTemplate .... ");
+ final RestTemplate restTemplate = new RestTemplate();
+
+ final HttpComponentsClientHttpRequestFactory factory =
+ new HttpComponentsClientHttpRequestFactory(httpClient(trustStore, trustStorePassword));
+
+ restTemplate.setRequestFactory(factory);
+ return restTemplate;
+ }
+
+ private CloseableHttpClient httpClient(final Resource trustStore, final String trustStorePassword)
+ throws Exception {
+ LOGGER.info("Creating SSLConnectionSocketFactory with custom SSLContext and HostnameVerifier ... ");
+ return HttpClients.custom().setSSLSocketFactory(getSSLConnectionSocketFactory(trustStore, trustStorePassword))
+ .build();
+ }
+
+ private SSLConnectionSocketFactory getSSLConnectionSocketFactory(final Resource trustStore,
+ final String trustStorePassword) throws Exception {
+ return new SSLConnectionSocketFactory(getSslContext(trustStore, trustStorePassword));
+ }
+
+ private SSLContext getSslContext(final Resource trustStore, final String trustStorePassword)
+            throws Exception {
+ return new SSLContextBuilder().loadTrustMaterial(trustStore.getURL(), trustStorePassword.toCharArray()).build();
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/configration/WebSecurityConfigImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/configration/WebSecurityConfigImpl.java
new file mode 100755
index 000000000..2a2d04d8c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/configration/WebSecurityConfigImpl.java
@@ -0,0 +1,49 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.configration;
+
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.configuration.SimulatorSecurityConfigurer;
+import org.onap.aaisimulator.model.UserCredentials;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.security.config.annotation.web.builders.HttpSecurity;
+import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Configuration
+@EnableWebSecurity
+public class WebSecurityConfigImpl extends SimulatorSecurityConfigurer {
+
+ @Autowired
+ public WebSecurityConfigImpl(final UserCredentials userCredentials) {
+ super(userCredentials.getUsers());
+ }
+
+ @Override
+ protected void configure(final HttpSecurity http) throws Exception {
+ http.csrf().disable().authorizeRequests().antMatchers(Constants.BUSINESS_URL + "/**/**").authenticated().and()
+ .httpBasic();
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/AaiSimulatorController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/AaiSimulatorController.java
new file mode 100755
index 000000000..f2ce98ecc
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/AaiSimulatorController.java
@@ -0,0 +1,51 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.HEALTHY;
+import javax.ws.rs.core.MediaType;
+import org.onap.aaisimulator.utils.Constants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.http.HttpStatus;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.ResponseBody;
+import org.springframework.web.bind.annotation.ResponseStatus;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Controller
+@RequestMapping(path = Constants.BASE_URL)
+public class AaiSimulatorController {
+ private static final Logger LOGGER = LoggerFactory.getLogger(AaiSimulatorController.class);
+
+ @ResponseBody
+ @GetMapping(value = "/healthcheck", produces = MediaType.TEXT_PLAIN)
+ @ResponseStatus(code = HttpStatus.OK)
+ public String healthCheck() {
+ LOGGER.info("Running health check ...");
+ return HEALTHY;
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/BusinessController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/BusinessController.java
new file mode 100755
index 000000000..8559e8aa7
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/BusinessController.java
@@ -0,0 +1,356 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.CUSTOMER_TYPE;
+import static org.onap.aaisimulator.utils.Constants.CUSTOMER_URL;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF_VNF_ID;
+import static org.onap.aaisimulator.utils.Constants.SERVICE_RESOURCE_TYPE;
+import static org.onap.aaisimulator.utils.Constants.SERVICE_SUBSCRIPTION;
+import static org.onap.aaisimulator.utils.Constants.X_HTTP_METHOD_OVERRIDE;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.Customer;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.GenericVnfs;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aai.domain.yang.ServiceInstances;
+import org.onap.aai.domain.yang.ServiceSubscription;
+import org.onap.aaisimulator.models.NodeServiceInstance;
+import org.onap.aaisimulator.service.providers.CustomerCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.GenericVnfCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.NodesCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.DeleteMapping;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestHeader;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Controller
+@RequestMapping(path = CUSTOMER_URL)
+public class BusinessController {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(BusinessController.class);
+ private final CustomerCacheServiceProvider cacheServiceProvider;
+ private final NodesCacheServiceProvider nodesCacheServiceProvider;
+ private final GenericVnfCacheServiceProvider genericVnfCacheServiceProvider;
+
+ @Autowired
+ public BusinessController(final CustomerCacheServiceProvider cacheServiceProvider,
+ final NodesCacheServiceProvider nodesCacheServiceProvider,
+ final GenericVnfCacheServiceProvider genericVnfCacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ this.nodesCacheServiceProvider = nodesCacheServiceProvider;
+ this.genericVnfCacheServiceProvider = genericVnfCacheServiceProvider;
+ }
+
+ @GetMapping(value = "{global-customer-id}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getCustomer(@PathVariable("global-customer-id") final String globalCustomerId,
+ final HttpServletRequest request) {
+ LOGGER.info("Will retrieve customer for 'global customer id': {} ...", globalCustomerId);
+
+ final Optional<Customer> optional = cacheServiceProvider.getCustomer(globalCustomerId);
+ if (optional.isPresent()) {
+ final Customer customer = optional.get();
+ LOGGER.info("found customer {} in cache", customer);
+ return ResponseEntity.ok(customer);
+ }
+
+ LOGGER.error("Couldn't find {} in cache", globalCustomerId);
+ return getRequestErrorResponseEntity(request, CUSTOMER_TYPE);
+ }
+
+ @PutMapping(value = "/{global-customer-id}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putCustomer(@RequestBody final Customer customer,
+ @PathVariable("global-customer-id") final String globalCustomerId, final HttpServletRequest request) {
+ LOGGER.info("Will put customer for 'global customer id': {} ...", globalCustomerId);
+
+ if (customer.getResourceVersion() == null || customer.getResourceVersion().isEmpty()) {
+ customer.setResourceVersion(getResourceVersion());
+
+ }
+ cacheServiceProvider.putCustomer(globalCustomerId, customer);
+ return ResponseEntity.accepted().build();
+
+ }
+
+ @GetMapping(value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getCustomer(@PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType, final HttpServletRequest request) {
+ LOGGER.info("Will retrieve service subscription for 'global customer id': {} and 'service type': {} ...",
+ globalCustomerId, serviceType);
+
+ final Optional<ServiceSubscription> optional =
+ cacheServiceProvider.getServiceSubscription(globalCustomerId, serviceType);
+ if (optional.isPresent()) {
+ final ServiceSubscription serviceSubscription = optional.get();
+ LOGGER.info("found service subscription {} in cache", serviceSubscription);
+ return ResponseEntity.ok(serviceSubscription);
+ }
+
+ LOGGER.error("Couldn't find 'global customer id': {} and 'service type': {} in cache", globalCustomerId,
+ serviceType);
+ return getRequestErrorResponseEntity(request, SERVICE_SUBSCRIPTION);
+ }
+
+ @PutMapping(value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putServiceSubscription(@PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType,
+ @RequestBody final ServiceSubscription serviceSubscription, final HttpServletRequest request) {
+ LOGGER.info("Will add service subscription for 'global customer id': {} and 'service type': {} ...",
+ globalCustomerId, serviceType);
+
+ if (cacheServiceProvider.putServiceSubscription(globalCustomerId, serviceType, serviceSubscription)) {
+ LOGGER.info("Successfully add service subscription in cache ...");
+ return ResponseEntity.accepted().build();
+ }
+
+ LOGGER.error("Couldn't add service subscription using 'global customer id': {} and 'service type': {}",
+ globalCustomerId, serviceType);
+ return getRequestErrorResponseEntity(request, SERVICE_SUBSCRIPTION);
+ }
+
+ @GetMapping(
+ value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    public ResponseEntity<?> getServiceInstances(@PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType,
+ @RequestParam(name = "service-instance-name") final String serviceInstanceName,
+ @RequestParam(name = "depth", required = false) final Integer depth, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "Will retrieve service instances for 'global customer id': {}, 'service type': {} and 'service instance name: '{} with depth: {}...",
+ globalCustomerId, serviceType, serviceInstanceName, depth);
+
+ final Optional<ServiceInstances> optional =
+ cacheServiceProvider.getServiceInstances(globalCustomerId, serviceType, serviceInstanceName);
+ if (optional.isPresent()) {
+ final ServiceInstances serviceInstances = optional.get();
+ LOGGER.info("found service instance {} in cache", serviceInstances);
+ return ResponseEntity.ok(serviceInstances);
+ }
+ LOGGER.error(
+ "Couldn't find 'global customer id': {}, 'service type': {} and 'service instance name': {} with depth: {} in cache",
+ globalCustomerId, serviceType, serviceInstanceName, depth);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ @GetMapping(
+ value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances/service-instance/{service-instance-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    public ResponseEntity<?> getServiceInstance(@PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType,
+ @PathVariable(name = "service-instance-id") final String serviceInstanceId,
+ @RequestParam(name = "depth", required = false) final Integer depth,
+ @RequestParam(name = "resultIndex", required = false) final Integer resultIndex,
+ @RequestParam(name = "resultSize", required = false) final Integer resultSize,
+ @RequestParam(name = "format", required = false) final String format, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "Will retrieve service instances for 'global customer id': {}, 'service type': {} and 'service instance id: '{} with depth: {}, resultIndex:{}, resultSize: {} and format: {}...",
+ globalCustomerId, serviceType, serviceInstanceId, depth, resultIndex, resultSize, format);
+
+ final Optional<ServiceInstance> optional =
+ cacheServiceProvider.getServiceInstance(globalCustomerId, serviceType, serviceInstanceId);
+ if (optional.isPresent()) {
+ final ServiceInstance serviceInstance = optional.get();
+ LOGGER.info("found service instance {} in cache", serviceInstance);
+ return ResponseEntity.ok(serviceInstance);
+ }
+ LOGGER.error(
+ "Couldn't find 'global customer id': {}, 'service type': {} and 'service instance id': {} with depth: {}, resultIndex:{}, resultSize: {} and format: {} in cache",
+ globalCustomerId, serviceType, serviceInstanceId, depth, resultIndex, resultSize, format);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ @PutMapping(
+ value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances/service-instance/{service-instance-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    public ResponseEntity<?> putServiceInstance(@PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType,
+ @PathVariable(name = "service-instance-id") final String serviceInstanceId,
+ @RequestHeader(value = X_HTTP_METHOD_OVERRIDE, required = false) final String invocationId,
+ @RequestBody final ServiceInstance serviceInstance, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "Will add service instance for 'global customer id': {}, 'service type': {} and 'service instance id: '{} ...",
+ globalCustomerId, serviceType, serviceInstanceId);
+
+ if (serviceInstance.getResourceVersion() == null || serviceInstance.getResourceVersion().isEmpty()) {
+ serviceInstance.setResourceVersion(getResourceVersion());
+ }
+
+ if (cacheServiceProvider.putServiceInstance(globalCustomerId, serviceType, serviceInstanceId,
+ serviceInstance)) {
+ nodesCacheServiceProvider.putNodeServiceInstance(serviceInstanceId, new NodeServiceInstance(
+ globalCustomerId, serviceType, serviceInstanceId, SERVICE_RESOURCE_TYPE, request.getRequestURI()));
+ return ResponseEntity.accepted().build();
+ }
+
+ LOGGER.error("Couldn't add 'global customer id': {}, 'service type': {} and 'service instance id': {} to cache",
+ globalCustomerId, serviceType, serviceInstanceId);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ @PostMapping(
+ value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances/service-instance/{service-instance-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+    public ResponseEntity<?> patchServiceInstance(@PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType,
+ @PathVariable(name = "service-instance-id") final String serviceInstanceId,
+ @RequestHeader(value = X_HTTP_METHOD_OVERRIDE, required = false) final String xHttpHeaderOverride,
+ @RequestBody final ServiceInstance serviceInstance, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "Will post service instance for 'global customer id': {}, 'service type': {}, 'service instance id: '{} and '{}': {}...",
+ globalCustomerId, serviceType, serviceInstanceId, X_HTTP_METHOD_OVERRIDE, xHttpHeaderOverride);
+
+ if (HttpMethod.PATCH.toString().equalsIgnoreCase(xHttpHeaderOverride)) {
+ cacheServiceProvider.patchServiceInstance(globalCustomerId, serviceType, serviceInstanceId,
+ serviceInstance);
+ return ResponseEntity.accepted().build();
+ }
+ LOGGER.error("{} not supported ... ", xHttpHeaderOverride);
+
+ return getRequestErrorResponseEntity(request);
+ }
+
+
+ // GET handler: resolves the generic-vnf related to a cached service instance.
+ // Walks relationship -> relationship-data (GENERIC_VNF_VNF_ID key) -> cached
+ // GenericVnf, and returns it wrapped in a GenericVnfs list; any missing link in
+ // that chain falls through to the standard request-error response.
+ @GetMapping(
+ value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances/service-instance/{service-instance-id}/related-to/generic-vnfs",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getRelatedToGenericVnf(@PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType,
+ @PathVariable(name = "service-instance-id") final String serviceInstanceId,
+ @RequestParam(name = "vnf-name", required = true) final String vnfName, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "Will retrieve generic vnf related to information for 'global customer id': {}, 'service type': {} and 'service instance id: '{} with vnfname: {}...",
+ globalCustomerId, serviceType, serviceInstanceId, vnfName);
+
+ final Optional<Relationship> optional =
+ cacheServiceProvider.getRelationship(globalCustomerId, serviceType, serviceInstanceId, vnfName);
+
+ if (optional.isPresent()) {
+
+ final Relationship relationship = optional.get();
+ // Pick the relationship-data entry that carries the generic-vnf's vnf-id.
+ final Optional<RelationshipData> relationshipDataOptional = relationship.getRelationshipData().stream()
+ .filter(existing -> GENERIC_VNF_VNF_ID.equals(existing.getRelationshipKey())).findFirst();
+
+ if (relationshipDataOptional.isPresent()) {
+ final RelationshipData relationshipData = relationshipDataOptional.get();
+ final String vnfId = relationshipData.getRelationshipValue();
+ final Optional<GenericVnf> genericVnfOptional = genericVnfCacheServiceProvider.getGenericVnf(vnfId);
+ if (genericVnfOptional.isPresent()) {
+ final GenericVnfs genericVnfs = new GenericVnfs();
+ genericVnfs.getGenericVnf().add(genericVnfOptional.get());
+ // Bug fix: the previous message claimed a service instance was found while
+ // logging the relationship; what was actually resolved is a GenericVnf.
+ LOGGER.info("found GenericVnf {} in cache for relationship {}", genericVnfOptional.get(),
+ relationship);
+ return ResponseEntity.ok(genericVnfs);
+ }
+ }
+ }
+ LOGGER.error(
+ "Couldn't find generic vnf related to information for 'global customer id': {}, 'service type': {} and 'service instance id: '{} with vnfname: {}...",
+ globalCustomerId, serviceType, serviceInstanceId, vnfName);
+ return getRequestErrorResponseEntity(request, GENERIC_VNF);
+ }
+
+ // PUT handler: attaches a bi-directional relationship to a cached service instance
+ // and echoes the resultant relationship back to the caller on success.
+ @PutMapping(
+ value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances/service-instance/{service-instance-id}"
+ + BI_DIRECTIONAL_RELATIONSHIP_LIST_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putSericeInstanceRelationShip(
+ @PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType,
+ @PathVariable(name = "service-instance-id") final String serviceInstanceId,
+ @RequestBody final Relationship relationship, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "Will add {} relationship for 'global customer id': {}, 'service type': {} and 'service instance id: '{} ...",
+ relationship.getRelatedTo(), globalCustomerId, serviceType, serviceInstanceId);
+
+ final Optional<Relationship> result = cacheServiceProvider.addRelationShip(globalCustomerId, serviceType,
+ serviceInstanceId, relationship, request.getRequestURI());
+
+ // Failure first: no resultant relationship means the cache rejected the add.
+ if (!result.isPresent()) {
+ LOGGER.error(
+ "Couldn't add {} relationship for 'global customer id': {}, 'service type': {} and 'service instance id: '{} ...",
+ relationship.getRelatedTo(), globalCustomerId, serviceType, serviceInstanceId);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ final Relationship resultantRelationship = result.get();
+ LOGGER.info("Relationship add, sending resultant relationship: {} in response ...", resultantRelationship);
+ return ResponseEntity.accepted().body(resultantRelationship);
+ }
+
+ // DELETE handler: removes a service instance from the cache; the supplied
+ // resource-version must match the cached one (enforced by the cache provider) for
+ // the delete to succeed. Returns 204 No Content on success.
+ // NOTE(review): "Serice"/"SericeInstance" are typos for "Service" in the method
+ // name, the provider call and the log messages; left untouched because the method
+ // name is interface and the log strings are runtime output.
+ @DeleteMapping(
+ value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances/service-instance/{service-instance-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> deleteSericeInstance(@PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType,
+ @PathVariable(name = "service-instance-id") final String serviceInstanceId,
+ @RequestParam(name = "resource-version") final String resourceVersion, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "Will delete SericeInstance for 'global-customer-id': {}, 'service-type': {}, 'service-instance-id': {} and 'resource-version': {}",
+ globalCustomerId, serviceType, serviceInstanceId, resourceVersion);
+
+ if (cacheServiceProvider.deleteSericeInstance(globalCustomerId, serviceType, serviceInstanceId,
+ resourceVersion)) {
+ LOGGER.info(
+ "Successfully deleted SericeInstance from cache for 'global-customer-id': {}, 'service-type': {}, 'service-instance-id': {} and 'resource-version': {}",
+ globalCustomerId, serviceType, serviceInstanceId, resourceVersion);
+ return ResponseEntity.noContent().build();
+ }
+
+ LOGGER.error(
+ "Unable to delete SericeInstance from cache for 'global-customer-id': {}, 'service-type': {}, 'service-instance-id': {} and 'resource-version': {}",
+ globalCustomerId, serviceType, serviceInstanceId, resourceVersion);
+
+ return getRequestErrorResponseEntity(request);
+
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/CloudRegionsController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/CloudRegionsController.java
new file mode 100755
index 000000000..39bdb7d4b
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/CloudRegionsController.java
@@ -0,0 +1,396 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.CLOUD_REGION;
+import static org.onap.aaisimulator.utils.Constants.CLOUD_REGIONS;
+import static org.onap.aaisimulator.utils.Constants.ESR_SYSTEM_INFO_LIST;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.Constants.VSERVER;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getHeaders;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.CloudRegion;
+import org.onap.aai.domain.yang.EsrSystemInfo;
+import org.onap.aai.domain.yang.EsrSystemInfoList;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.Tenant;
+import org.onap.aai.domain.yang.Vserver;
+import org.onap.aaisimulator.models.CloudRegionKey;
+import org.onap.aaisimulator.service.providers.CloudRegionCacheServiceProvider;
+import org.onap.aaisimulator.utils.HttpServiceUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.DeleteMapping;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Controller
+@RequestMapping(path = CLOUD_REGIONS)
+public class CloudRegionsController {
+ private static final Logger LOGGER = LoggerFactory.getLogger(CloudRegionsController.class);
+
+ private final CloudRegionCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ public CloudRegionsController(final CloudRegionCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
+ @PutMapping(value = "{cloud-owner}/{cloud-region-id}",
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putCloudRegion(@RequestBody final CloudRegion cloudRegion,
+ @PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId, final HttpServletRequest request) {
+
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+
+ if (key.isValid()) {
+ LOGGER.info("Will add CloudRegion to cache with key 'key': {} ....", key);
+ if (cloudRegion.getResourceVersion() == null || cloudRegion.getResourceVersion().isEmpty()) {
+ cloudRegion.setResourceVersion(getResourceVersion());
+ }
+ cacheServiceProvider.putCloudRegion(key, cloudRegion);
+ return ResponseEntity.accepted().build();
+ }
+
+ LOGGER.error("Unable to add CloudRegion in cache because of invalid key {}", key);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
+ @GetMapping(value = "{cloud-owner}/{cloud-region-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getCloudRegion(@PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @RequestParam(name = "depth", required = false) final Integer depth, final HttpServletRequest request) {
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ LOGGER.info("Retrieving CloudRegion using key : {} with depth: {}...", key, depth);
+ if (key.isValid()) {
+ final Optional<CloudRegion> optional = cacheServiceProvider.getCloudRegion(key);
+ if (optional.isPresent()) {
+ final CloudRegion cloudRegion = optional.get();
+ LOGGER.info("found CloudRegion {} in cache", cloudRegion);
+ return ResponseEntity.ok(cloudRegion);
+ }
+ }
+ LOGGER.error("Unable to find CloudRegion in cache using {}", key);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
+ @PutMapping(value = "{cloud-owner}/{cloud-region-id}" + BI_DIRECTIONAL_RELATIONSHIP_LIST_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putRelationShip(@PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId, @RequestBody final Relationship relationship,
+ final HttpServletRequest request) {
+ LOGGER.info("Will add {} relationship to : {} ...", relationship.getRelatedTo());
+
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+
+ final Optional<Relationship> optional =
+ cacheServiceProvider.addRelationShip(key, relationship, request.getRequestURI());
+
+ if (optional.isPresent()) {
+ final Relationship resultantRelationship = optional.get();
+ LOGGER.info("Relationship add, sending resultant relationship: {} in response ...", resultantRelationship);
+ return ResponseEntity.accepted().body(resultantRelationship);
+ }
+
+ LOGGER.error("Couldn't add {} relationship for 'key': {} ...", relationship.getRelatedTo(), key);
+ return getRequestErrorResponseEntity(request, VSERVER);
+
+ }
+
+ @PutMapping(value = "{cloud-owner}/{cloud-region-id}/tenants/tenant/{tenant-id}",
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putTenant(@RequestBody final Tenant tenant,
+ @PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @PathVariable("tenant-id") final String tenantId, final HttpServletRequest request) {
+
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+
+ if (key.isValid()) {
+ LOGGER.info("Will add Tenant to cache with key 'key': {} ....", key);
+ if (tenant.getResourceVersion() == null || tenant.getResourceVersion().isEmpty()) {
+ tenant.setResourceVersion(getResourceVersion());
+ }
+ if (cacheServiceProvider.putTenant(key, tenantId, tenant)) {
+ return ResponseEntity.accepted().build();
+ }
+ }
+
+ LOGGER.error("Unable to add Tenant in cache using key {}", key);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
+ @GetMapping(value = "{cloud-owner}/{cloud-region-id}/tenants/tenant/{tenant-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getTenant(@PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @PathVariable("tenant-id") final String tenantId, final HttpServletRequest request) {
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ LOGGER.info("Retrieving Tenant using key : {} and tenant-id:{} ...", key, tenantId);
+ if (key.isValid()) {
+ final Optional<Tenant> optional = cacheServiceProvider.getTenant(key, tenantId);
+ if (optional.isPresent()) {
+ final Tenant tenant = optional.get();
+ LOGGER.info("found Tenant {} in cache", tenant);
+ return ResponseEntity.ok(tenant);
+ }
+ }
+ LOGGER.error("Unable to find Tenant in cache key : {} and tenant-id:{} ...", key, tenantId);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
+ @PutMapping(
+ value = "{cloud-owner}/{cloud-region-id}/tenants/tenant/{tenant-id}" + RELATIONSHIP_LIST_RELATIONSHIP_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putRelationShip(@RequestBody final Relationship relationship,
+ @PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @PathVariable("tenant-id") final String tenantId, final HttpServletRequest request) {
+
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ LOGGER.info("Will put RelationShip for key : {} and tenant-id:{} ...", key, tenantId);
+
+ if (relationship.getRelatedLink() != null) {
+ final String targetBaseUrl = HttpServiceUtils.getBaseUrl(request).toString();
+ final HttpHeaders incomingHeader = getHeaders(request);
+ final boolean result = cacheServiceProvider.addRelationShip(incomingHeader, targetBaseUrl,
+ request.getRequestURI(), key, tenantId, relationship);
+ if (result) {
+ LOGGER.info("added created bi directional relationship with {}", relationship.getRelatedLink());
+ return ResponseEntity.accepted().build();
+ }
+
+ }
+ LOGGER.error("Unable to add relationship for related link: {}", relationship.getRelatedLink());
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
+ @PutMapping(value = "{cloud-owner}/{cloud-region-id}/esr-system-info-list/esr-system-info/{esr-system-info-id}",
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putEsrSystemInfo(@RequestBody final EsrSystemInfo esrSystemInfo,
+ @PathVariable("esr-system-info-id") final String esrSystemInfoId,
+ @PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId, final HttpServletRequest request) {
+
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+
+ LOGGER.info("Will put esrSystemInfo for 'key': {} ...", key);
+
+ if (esrSystemInfo.getResourceVersion() == null || esrSystemInfo.getResourceVersion().isEmpty()) {
+ esrSystemInfo.setResourceVersion(getResourceVersion());
+
+ }
+
+ if (cacheServiceProvider.putEsrSystemInfo(key, esrSystemInfoId, esrSystemInfo)) {
+ LOGGER.info("Successfully added EsrSystemInfo key : {} ...", key, esrSystemInfo);
+ return ResponseEntity.accepted().build();
+ }
+ LOGGER.error("Unable to add EsrSystemInfo in cache for key : {} ...", key);
+
+ return getRequestErrorResponseEntity(request, ESR_SYSTEM_INFO_LIST);
+ }
+
+ @GetMapping(value = "{cloud-owner}/{cloud-region-id}/esr-system-info-list",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getEsrSystemInfoList(@PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId, final HttpServletRequest request) {
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ LOGGER.info("Retrieving EsrSystemInfoList using key : {} ...", key);
+ if (key.isValid()) {
+ final Optional<EsrSystemInfoList> optional = cacheServiceProvider.getEsrSystemInfoList(key);
+ if (optional.isPresent()) {
+ final EsrSystemInfoList esrSystemInfoList = optional.get();
+ LOGGER.info("found EsrSystemInfoList {} in cache", esrSystemInfoList);
+ return ResponseEntity.ok(esrSystemInfoList);
+ }
+ }
+ LOGGER.error("Unable to find EsrSystemInfoList in cache using key : {} ...", key);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
+ @PutMapping(value = "{cloud-owner}/{cloud-region-id}/tenants/tenant/{tenant-id}/vservers/vserver/{vserver-id}",
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putVserver(@RequestBody final Vserver vServer,
+ @PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @PathVariable("tenant-id") final String tenantId, @PathVariable("vserver-id") final String vServerId,
+ final HttpServletRequest request) {
+
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ if (vServer.getResourceVersion() == null || vServer.getResourceVersion().isEmpty()) {
+ vServer.setResourceVersion(getResourceVersion());
+ }
+ LOGGER.info("Will put Vserver in cache using using key: {}, tenantId: {}, vServerId: {} ...", key, tenantId,
+ vServerId);
+
+ if (cacheServiceProvider.putVserver(key, tenantId, vServerId, vServer)) {
+
+ if (vServer.getRelationshipList() != null) {
+ for (final Relationship relationship : vServer.getRelationshipList().getRelationship()) {
+ if (relationship.getRelatedLink() != null) {
+ final String requestUri = request.getRequestURI();
+ final String targetBaseUrl =
+ HttpServiceUtils.getBaseUrl(request.getRequestURL(), requestUri).toString();
+ final HttpHeaders incomingHeader = getHeaders(request);
+ final boolean result = cacheServiceProvider.addVServerRelationShip(incomingHeader,
+ targetBaseUrl, requestUri, key, tenantId, vServerId, relationship);
+ if (!result) {
+ LOGGER.error(
+ "Unable to add Vserver relationship in cache using key: {}, tenantId: {}, vServerId: {}",
+ key, tenantId, vServerId);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+ LOGGER.info("Successfully added relationship with {}", relationship.getRelatedLink());
+ }
+ }
+ }
+
+ LOGGER.info("Successfully added Vserver for key: {}, tenantId: {}, vServerId: {} ...", key, tenantId,
+ vServerId);
+ return ResponseEntity.accepted().build();
+ }
+ LOGGER.error("Unable to add Vserver in cache using key: {}, tenantId: {}, vServerId: {}", key, tenantId,
+ vServerId);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
+ @GetMapping(value = "{cloud-owner}/{cloud-region-id}/tenants/tenant/{tenant-id}/vservers/vserver/{vserver-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getVserver(@PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @PathVariable("tenant-id") final String tenantId, @PathVariable("vserver-id") final String vServerId,
+ final HttpServletRequest request) {
+
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ LOGGER.info("Retrieving Vserver using key: {}, tenant-id: {} and vserver-id: {}...", key, tenantId, vServerId);
+ final Optional<Vserver> optional = cacheServiceProvider.getVserver(key, tenantId, vServerId);
+ if (optional.isPresent()) {
+ final Vserver vServer = optional.get();
+ LOGGER.info("found Vserver {} in cache", vServer);
+ return ResponseEntity.ok(vServer);
+ }
+ LOGGER.error("Unable to find Vserver in cache using key: {}, tenant-id: {} and vserver-id: {}...", key,
+ tenantId, vServerId);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
+
+ @DeleteMapping(value = "{cloud-owner}/{cloud-region-id}/tenants/tenant/{tenant-id}/vservers/vserver/{vserver-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> deleteVserver(@PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @PathVariable("tenant-id") final String tenantId, @PathVariable("vserver-id") final String vServerId,
+ @RequestParam(name = "resource-version") final String resourceVersion, final HttpServletRequest request) {
+
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ LOGGER.info("Will delete Vserver using key: {}, tenant-id: {}, vserver-id: {} and resource-version: {}...", key,
+ tenantId, vServerId, resourceVersion);
+
+
+ if (cacheServiceProvider.deleteVserver(key, tenantId, vServerId, resourceVersion)) {
+ LOGGER.info(
+ "Successfully delete Vserver from cache for key: {}, tenant-id: {}, vserver-id: {} and resource-version: {}",
+ key, tenantId, vServerId, resourceVersion);
+ return ResponseEntity.noContent().build();
+ }
+
+ LOGGER.error(
+ "Unable to delete Vserver from cache using key: {}, tenant-id: {}, vserver-id: {} and resource-version: {} ...",
+ key, tenantId, vServerId, resourceVersion);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
+ @PutMapping(
+ value = "{cloud-owner}/{cloud-region-id}/tenants/tenant/{tenant-id}/vservers/vserver/{vserver-id}"
+ + RELATIONSHIP_LIST_RELATIONSHIP_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putVserverRelationShip(@PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @PathVariable("tenant-id") final String tenantId, @PathVariable("vserver-id") final String vServerId,
+ @RequestBody final Relationship relationship, final HttpServletRequest request) {
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ LOGGER.info("Will add {} relationship to : {} ...", relationship.getRelatedTo());
+
+ if (relationship.getRelatedLink() != null) {
+ final String targetBaseUrl = HttpServiceUtils.getBaseUrl(request).toString();
+ final HttpHeaders incomingHeader = getHeaders(request);
+ final boolean result = cacheServiceProvider.addVServerRelationShip(incomingHeader, targetBaseUrl,
+ request.getRequestURI(), key, tenantId, vServerId, relationship);
+ if (result) {
+ LOGGER.info("added created bi directional relationship with {}", relationship.getRelatedLink());
+ return ResponseEntity.accepted().build();
+ }
+ }
+ LOGGER.error("Couldn't add {} relationship for 'key': {} ...", relationship.getRelatedTo(), key);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+
+ }
+
+ @PutMapping(
+ value = "{cloud-owner}/{cloud-region-id}/tenants/tenant/{tenant-id}/vservers/vserver/{vserver-id}"
+ + BI_DIRECTIONAL_RELATIONSHIP_LIST_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putBiDirectionalVServerRelationShip(@PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @PathVariable("tenant-id") final String tenantId, @PathVariable("vserver-id") final String vServerId,
+ @RequestBody final Relationship relationship, final HttpServletRequest request) {
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ LOGGER.info("Will add {} relationship to : {} ...", relationship.getRelatedTo());
+
+ final Optional<Relationship> optional = cacheServiceProvider.addvServerRelationShip(key, tenantId, vServerId,
+ relationship, request.getRequestURI());
+
+ if (optional.isPresent()) {
+ final Relationship resultantRelationship = optional.get();
+ LOGGER.info("Relationship add, sending resultant relationship: {} in response ...", resultantRelationship);
+ return ResponseEntity.accepted().body(resultantRelationship);
+ }
+ LOGGER.error("Couldn't add {} relationship for 'key': {} ...", relationship.getRelatedTo(), key);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ExternalSystemEsrController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ExternalSystemEsrController.java
new file mode 100755
index 000000000..00c296f49
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ExternalSystemEsrController.java
@@ -0,0 +1,175 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.ESR_SYSTEM_INFO;
+import static org.onap.aaisimulator.utils.Constants.ESR_SYSTEM_INFO_LIST;
+import static org.onap.aaisimulator.utils.Constants.ESR_VNFM;
+import static org.onap.aaisimulator.utils.Constants.EXTERNAL_SYSTEM_ESR_VNFM_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getHeaders;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+import java.util.List;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.EsrSystemInfo;
+import org.onap.aai.domain.yang.EsrSystemInfoList;
+import org.onap.aai.domain.yang.EsrVnfm;
+import org.onap.aai.domain.yang.EsrVnfmList;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aaisimulator.service.providers.ExternalSystemCacheServiceProvider;
+import org.onap.aaisimulator.utils.HttpServiceUtils;
+import org.onap.aaisimulator.utils.RequestErrorResponseUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * Simulator REST controller for the external-system ESR VNFM list: ESR VNFM entries,
+ * their ESR system info lists and relationships, backed by an in-memory cache.
+ *
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Controller
+@RequestMapping(path = EXTERNAL_SYSTEM_ESR_VNFM_LIST_URL)
+public class ExternalSystemEsrController {
+ private static final Logger LOGGER = LoggerFactory.getLogger(ExternalSystemEsrController.class);
+
+ // Backing cache for all ESR resources served by this controller.
+ private final ExternalSystemCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ public ExternalSystemEsrController(final ExternalSystemCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
+ // Stores an EsrVnfm under the path's vnfm-id; assigns a resource-version when the
+ // payload carries none. Always answers 202 Accepted.
+ // NOTE(review): the log prints esrVnfm.getVnfmId() while the cache key is the path
+ // variable 'vnfmId' — presumably these always match; confirm against callers.
+ @PutMapping(value = "/esr-vnfm/{vnfm-id}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putEsrVnfm(@RequestBody final EsrVnfm esrVnfm,
+ @PathVariable("vnfm-id") final String vnfmId, final HttpServletRequest request) {
+ LOGGER.info("Will put esr-vnfm to cache for 'vnfm id': {} ...", esrVnfm.getVnfmId());
+
+ if (esrVnfm.getResourceVersion() == null || esrVnfm.getResourceVersion().isEmpty()) {
+ esrVnfm.setResourceVersion(getResourceVersion());
+
+ }
+ cacheServiceProvider.putEsrVnfm(vnfmId, esrVnfm);
+ return ResponseEntity.accepted().build();
+ }
+
+ // Retrieves a cached EsrVnfm; 'depth' is accepted for API compatibility but only
+ // logged, not used to trim the response.
+ @GetMapping(value = "/esr-vnfm/{vnfm-id}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getEsrVnfm(@PathVariable("vnfm-id") final String vnfmId,
+ @RequestParam(name = "depth", required = false) final Integer depth, final HttpServletRequest request) {
+ LOGGER.info("Will retrieve ESR VNFM for 'vnfm id': {} with depth: {}...", vnfmId, depth);
+
+ final Optional<EsrVnfm> optional = cacheServiceProvider.getEsrVnfm(vnfmId);
+ if (optional.isPresent()) {
+ final EsrVnfm esrVnfm = optional.get();
+ LOGGER.info("found esrVnfm {} in cache", esrVnfm);
+ return ResponseEntity.ok(esrVnfm);
+ }
+
+ // Bug fix: message previously read "Couldn't Esr Vnfm ..." — missing "find".
+ LOGGER.error("Couldn't find Esr Vnfm for 'vnfm id': {} with depth: {}...", vnfmId, depth);
+ return getRequestErrorResponseEntity(request, ESR_VNFM);
+ }
+
+ // Returns every cached EsrVnfm wrapped in an EsrVnfmList; never errors.
+ @GetMapping(produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getEsrVnfmList(final HttpServletRequest request) {
+ LOGGER.info("Will retrieve a list of all ESR VNFMs");
+
+ final List<EsrVnfm> esrVnfms = cacheServiceProvider.getAllEsrVnfm();
+ LOGGER.info("found {} Esr Vnfms in cache", esrVnfms.size());
+
+ final EsrVnfmList esrVnfmList = new EsrVnfmList();
+ esrVnfmList.getEsrVnfm().addAll(esrVnfms);
+
+ return ResponseEntity.ok(esrVnfmList);
+ }
+
+ // Stores an EsrSystemInfo entry under an ESR VNFM; assigns a resource-version when
+ // the payload carries none.
+ @PutMapping(value = "/esr-vnfm/{vnfm-id}/esr-system-info-list/esr-system-info/{esr-system-info-id}",
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putEsrSystemInfo(@RequestBody final EsrSystemInfo esrSystemInfo,
+ @PathVariable("vnfm-id") final String vnfmId,
+ @PathVariable("esr-system-info-id") final String esrSystemInfoId, final HttpServletRequest request) {
+ // Bug fix: these three log statements claim to print 'esr-system-info-id' but
+ // previously passed the whole esrSystemInfo object; pass the id instead.
+ LOGGER.info("Will put esrSystemInfo for 'vnfm id': {} and 'esr-system-info-id': {} ...", vnfmId,
+ esrSystemInfoId);
+
+ if (esrSystemInfo.getResourceVersion() == null || esrSystemInfo.getResourceVersion().isEmpty()) {
+ esrSystemInfo.setResourceVersion(getResourceVersion());
+
+ }
+
+ if (cacheServiceProvider.putEsrSystemInfo(vnfmId, esrSystemInfoId, esrSystemInfo)) {
+ LOGGER.info("Successfully added EsrSystemInfo for 'vnfm id': {} and 'esr-system-info-id': {} ...", vnfmId,
+ esrSystemInfoId);
+ return ResponseEntity.accepted().build();
+ }
+ LOGGER.error("Unable to add esrSystemInfo for 'vnfm id': {} and 'esr-system-info-id': {} ...", vnfmId,
+ esrSystemInfoId);
+ return getRequestErrorResponseEntity(request, ESR_SYSTEM_INFO_LIST);
+ }
+
+ // Retrieves the EsrSystemInfoList cached for an ESR VNFM.
+ @GetMapping(value = "/esr-vnfm/{vnfm-id}/esr-system-info-list",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getEsrSystemInfoList(@PathVariable("vnfm-id") final String vnfmId,
+ final HttpServletRequest request) {
+ LOGGER.info("Will retrieve esrSystemInfoList for 'vnfm id': {} ...", vnfmId);
+
+ final Optional<EsrSystemInfoList> optional = cacheServiceProvider.getEsrSystemInfoList(vnfmId);
+ if (optional.isPresent()) {
+ final EsrSystemInfoList esrSystemInfoList = optional.get();
+ LOGGER.info("found esrSystemInfoList {} in cache", esrSystemInfoList);
+ return ResponseEntity.ok(esrSystemInfoList);
+ }
+
+ LOGGER.error("Couldn't find esrSystemInfoList for 'vnfm id': {} ...", vnfmId);
+ return getRequestErrorResponseEntity(request, ESR_SYSTEM_INFO);
+ }
+
+ // Adds a relationship to an ESR VNFM; requires a related-link in the payload so the
+ // cache provider can create the reverse link against the target base URL.
+ @PutMapping(value = "/esr-vnfm/{vnfm-id}" + RELATIONSHIP_LIST_RELATIONSHIP_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putEsrVnfmRelationShip(@RequestBody final Relationship relationship,
+ @PathVariable("vnfm-id") final String vnfmId, final HttpServletRequest request) {
+ LOGGER.info("Will put RelationShip for 'vnfm-id': {} ...", vnfmId);
+
+ if (relationship.getRelatedLink() != null) {
+ final String targetBaseUrl = HttpServiceUtils.getBaseUrl(request).toString();
+ final HttpHeaders incomingHeader = getHeaders(request);
+ final boolean result = cacheServiceProvider.addRelationShip(incomingHeader, targetBaseUrl,
+ request.getRequestURI(), vnfmId, relationship);
+ if (result) {
+ LOGGER.info("added created bi directional relationship with {}", relationship.getRelatedLink());
+ return ResponseEntity.accepted().build();
+ }
+ }
+ LOGGER.error("Unable to add relationship for related link: {}", relationship.getRelatedLink());
+ return RequestErrorResponseUtils.getRequestErrorResponseEntity(request, ESR_VNFM);
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/GenericVnfsController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/GenericVnfsController.java
new file mode 100755
index 000000000..2f922ea88
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/GenericVnfsController.java
@@ -0,0 +1,215 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.APPLICATION_MERGE_PATCH_JSON;
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNFS_URL;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.Constants.X_HTTP_METHOD_OVERRIDE;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getHeaders;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+import java.util.List;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.GenericVnfs;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aaisimulator.service.providers.GenericVnfCacheServiceProvider;
+import org.onap.aaisimulator.utils.HttpServiceUtils;
+import org.onap.aaisimulator.utils.RequestErrorResponseUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.DeleteMapping;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestHeader;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * Simulator endpoint for A&AI generic-vnf resources. All state lives in an
+ * in-memory cache ({@link GenericVnfCacheServiceProvider}); no real A&AI
+ * instance is contacted. Supports PUT/GET/PATCH/DELETE of individual VNFs,
+ * relationship creation, and selflink-based queries.
+ *
+ * NOTE(review): the MediaType constants come from javax.ws.rs, not Spring's
+ * org.springframework.http.MediaType — they are plain String constants, which
+ * Spring's mapping annotations accept.
+ *
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Controller
+@RequestMapping(path = GENERIC_VNFS_URL)
+public class GenericVnfsController {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(GenericVnfsController.class);
+
+ private final GenericVnfCacheServiceProvider cacheServiceProvider;
+
+
+ @Autowired
+ public GenericVnfsController(final GenericVnfCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
+ /**
+ * PUT /generic-vnf/{vnf-id}: stores the given GenericVnf in the cache under
+ * vnf-id, generating a resource-version when the payload carries none.
+ * Always replies 202 Accepted.
+ */
+ @PutMapping(value = "/generic-vnf/{vnf-id}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putGenericVnf(@RequestBody final GenericVnf genericVnf,
+ @PathVariable("vnf-id") final String vnfId, final HttpServletRequest request) {
+ LOGGER.info("Will add GenericVnf to cache with 'vnf-id': {} ...", vnfId);
+
+ if (genericVnf.getResourceVersion() == null || genericVnf.getResourceVersion().isEmpty()) {
+ genericVnf.setResourceVersion(getResourceVersion());
+
+ }
+ cacheServiceProvider.putGenericVnf(vnfId, genericVnf);
+ return ResponseEntity.accepted().build();
+
+ }
+
+ /**
+ * GET /generic-vnf/{vnf-id}: returns the cached GenericVnf, or the standard
+ * A&AI request-error body when absent. The depth/resultIndex/resultSize/format
+ * query parameters are accepted for API compatibility but only logged — they
+ * do not alter the response.
+ */
+ @GetMapping(value = "/generic-vnf/{vnf-id}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getGenericVnf(@PathVariable("vnf-id") final String vnfId,
+ @RequestParam(name = "depth", required = false) final Integer depth,
+ @RequestParam(name = "resultIndex", required = false) final Integer resultIndex,
+ @RequestParam(name = "resultSize", required = false) final Integer resultSize,
+ @RequestParam(name = "format", required = false) final String format, final HttpServletRequest request) {
+ LOGGER.info(
+ "Will get GenericVnf for 'vnf-id': {} with depth: {}, resultIndex: {}, resultSize:{}, format: {} ...",
+ vnfId, depth, resultIndex, resultSize, format);
+
+ final Optional<GenericVnf> optional = cacheServiceProvider.getGenericVnf(vnfId);
+
+ if (optional.isPresent()) {
+ final GenericVnf genericVnf = optional.get();
+ LOGGER.info("found GenericVnf {} in cache", genericVnf);
+ return ResponseEntity.ok(genericVnf);
+ }
+
+ LOGGER.error(
+ "Unable to find GenericVnf in cache for 'vnf-id': {} with depth: {}, resultIndex: {}, resultSize:{}, format:{} ...",
+ vnfId, depth, resultIndex, resultSize, format);
+ return getRequestErrorResponseEntity(request, GENERIC_VNF);
+
+ }
+
+ /**
+ * PUT /generic-vnf/{vnf-id}/relationship-list/relationship: creates a
+ * bi-directional relationship between the cached VNF and the resource named
+ * by the relationship's related-link. 202 Accepted on success.
+ */
+ @PutMapping(value = "/generic-vnf/{vnf-id}" + RELATIONSHIP_LIST_RELATIONSHIP_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putGenericVnfRelationShip(@RequestBody final Relationship relationship,
+ @PathVariable("vnf-id") final String vnfId, final HttpServletRequest request) {
+ LOGGER.info("Will put RelationShip for 'vnf-id': {} ...", vnfId);
+
+ // Without a related-link the provider cannot resolve the peer resource.
+ if (relationship.getRelatedLink() != null) {
+ final String targetBaseUrl = HttpServiceUtils.getBaseUrl(request).toString();
+ final HttpHeaders incomingHeader = getHeaders(request);
+ final boolean result = cacheServiceProvider.addRelationShip(incomingHeader, targetBaseUrl,
+ request.getRequestURI(), vnfId, relationship);
+ if (result) {
+ LOGGER.info("added created bi directional relationship with {}", relationship.getRelatedLink());
+ return ResponseEntity.accepted().build();
+ }
+ }
+ LOGGER.error("Unable to add relationship for related link: {}", relationship.getRelatedLink());
+ return RequestErrorResponseUtils.getRequestErrorResponseEntity(request, GENERIC_VNF);
+ }
+
+ /**
+ * PUT /generic-vnf/{vnf-id}{bi-directional relationship URL}: variant of the
+ * relationship PUT that returns the resultant Relationship object in the
+ * 202 response body instead of an empty body.
+ */
+ @PutMapping(value = "/generic-vnf/{vnf-id}" + BI_DIRECTIONAL_RELATIONSHIP_LIST_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putBiDirectionalRelationShip(@RequestBody final Relationship relationship,
+ @PathVariable("vnf-id") final String vnfId, final HttpServletRequest request) {
+ LOGGER.info("Will put RelationShip for 'vnf-id': {} ...", vnfId);
+
+ final Optional<Relationship> optional =
+ cacheServiceProvider.addRelationShip(vnfId, relationship, request.getRequestURI());
+
+ if (optional.isPresent()) {
+ final Relationship resultantRelationship = optional.get();
+ LOGGER.info("Relationship add, sending resultant relationship: {} in response ...", resultantRelationship);
+ return ResponseEntity.accepted().body(resultantRelationship);
+ }
+
+ LOGGER.error("Unable to add relationship for related link: {}", relationship.getRelatedLink());
+ return RequestErrorResponseUtils.getRequestErrorResponseEntity(request, GENERIC_VNF);
+ }
+
+ /**
+ * POST /generic-vnf/{vnf-id}: A&AI-style merge-patch. Only honoured when the
+ * X-HTTP-Method-Override header equals PATCH (case-insensitive); any other
+ * override value is rejected with the request-error body.
+ */
+ @PostMapping(value = "/generic-vnf/{vnf-id}",
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, APPLICATION_MERGE_PATCH_JSON},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> patchGenericVnf(@RequestBody final GenericVnf genericVnf,
+ @PathVariable("vnf-id") final String vnfId,
+ @RequestHeader(value = X_HTTP_METHOD_OVERRIDE, required = false) final String xHttpHeaderOverride,
+ final HttpServletRequest request) {
+
+ LOGGER.info("Will post GenericVnf to cache with 'vnf-id': {} and '{}': {} ...", vnfId, X_HTTP_METHOD_OVERRIDE,
+ xHttpHeaderOverride);
+
+ if (HttpMethod.PATCH.toString().equalsIgnoreCase(xHttpHeaderOverride)) {
+ if (cacheServiceProvider.patchGenericVnf(vnfId, genericVnf)) {
+ return ResponseEntity.accepted().build();
+ }
+ LOGGER.error("Unable to apply patch to GenericVnf using 'vnf-id': {} ... ", vnfId);
+ return getRequestErrorResponseEntity(request, GENERIC_VNF);
+ }
+ LOGGER.error("{} not supported ... ", xHttpHeaderOverride);
+
+ return getRequestErrorResponseEntity(request, GENERIC_VNF);
+ }
+
+ /**
+ * GET {base}?selflink=...: returns every cached GenericVnf whose selflink
+ * matches, wrapped in a GenericVnfs container; request-error when none match.
+ * NOTE(review): selflink is a required query parameter — requests without it
+ * are rejected by Spring before this method runs.
+ */
+ @GetMapping(produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getGenericVnfs(@RequestParam(name = "selflink") final String selflink,
+ final HttpServletRequest request) {
+ LOGGER.info("will retrieve GenericVnfs using selflink: {}", selflink);
+
+ final List<GenericVnf> genericVnfList = cacheServiceProvider.getGenericVnfs(selflink);
+
+ if (genericVnfList.isEmpty()) {
+ LOGGER.error("No matching generic vnfs found using selflink: {}", selflink);
+ return getRequestErrorResponseEntity(request, GENERIC_VNF);
+ }
+
+ LOGGER.info("found {} GenericVnfs in cache", genericVnfList.size());
+ final GenericVnfs genericVnfs = new GenericVnfs();
+ genericVnfs.getGenericVnf().addAll(genericVnfList);
+ return ResponseEntity.ok(genericVnfs);
+ }
+
+ /**
+ * DELETE /generic-vnf/{vnf-id}?resource-version=...: removes the cached VNF
+ * when the supplied resource-version matches (optimistic-lock style, enforced
+ * by the provider). 204 No Content on success, request-error otherwise.
+ */
+ @DeleteMapping(value = "/generic-vnf/{vnf-id}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> deleteGenericVnf(@PathVariable("vnf-id") final String vnfId,
+ @RequestParam(name = "resource-version") final String resourceVersion, final HttpServletRequest request) {
+ LOGGER.info("Will delete GenericVnf for 'vnf-id': {} and 'resource-version': {}", vnfId, resourceVersion);
+
+ if (cacheServiceProvider.deleteGenericVnf(vnfId, resourceVersion)) {
+ LOGGER.info("Successfully delete GenericVnf from cache for 'vnf-id': {} and 'resource-version': {}", vnfId,
+ resourceVersion);
+ return ResponseEntity.noContent().build();
+ }
+
+ LOGGER.error("Unable to delete GenericVnf for 'vnf-id': {} and 'resource-version': {} ...", vnfId,
+ resourceVersion);
+ return getRequestErrorResponseEntity(request, GENERIC_VNF);
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/LinesOfBusinessController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/LinesOfBusinessController.java
new file mode 100755
index 000000000..de1c5b995
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/LinesOfBusinessController.java
@@ -0,0 +1,138 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.LINES_OF_BUSINESS_URL;
+import static org.onap.aaisimulator.utils.Constants.LINE_OF_BUSINESS;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.LineOfBusiness;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.LinesOfBusinessCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * Simulator endpoint for A&AI line-of-business resources, backed by an
+ * in-memory cache ({@link LinesOfBusinessCacheServiceProvider}).
+ *
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Controller
+@RequestMapping(path = LINES_OF_BUSINESS_URL)
+public class LinesOfBusinessController {
+ private static final Logger LOGGER = LoggerFactory.getLogger(LinesOfBusinessController.class);
+
+ private final LinesOfBusinessCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ public LinesOfBusinessController(final LinesOfBusinessCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
+ /**
+ * PUT {base}/{line-of-business-name}: stores the LineOfBusiness in the cache
+ * under the path-variable name, generating a resource-version when the payload
+ * carries none. Always replies 202 Accepted.
+ */
+ @PutMapping(value = "{line-of-business-name}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putLineOfBusiness(@RequestBody final LineOfBusiness lineOfBusiness,
+ @PathVariable("line-of-business-name") final String lineOfBusinessName, final HttpServletRequest request) {
+
+ // Bug fix: log the path variable, which is the actual cache key used below —
+ // the name inside the request body is not guaranteed to match it.
+ LOGGER.info("Will add LineOfBusiness to cache with key 'line-of-business-name': {} ...",
+ lineOfBusinessName);
+
+ if (lineOfBusiness.getResourceVersion() == null || lineOfBusiness.getResourceVersion().isEmpty()) {
+ lineOfBusiness.setResourceVersion(getResourceVersion());
+
+ }
+ cacheServiceProvider.putLineOfBusiness(lineOfBusinessName, lineOfBusiness);
+ return ResponseEntity.accepted().build();
+ }
+
+
+ /**
+ * GET {base}/{line-of-business-name}: returns the cached LineOfBusiness.
+ * format=RAW returns the object itself; format=COUNT returns a Results map
+ * with a count of 1; any other format yields the request-error body.
+ * NOTE(review): Format.forValue(format) is a project class — behaviour for a
+ * null/unknown format string (param is optional) should be confirmed there.
+ */
+ @GetMapping(value = "{line-of-business-name}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getLineOfBusiness(@PathVariable("line-of-business-name") final String lineOfBusinessName,
+ @RequestParam(name = "depth", required = false) final Integer depth,
+ @RequestParam(name = "resultIndex", required = false) final Integer resultIndex,
+ @RequestParam(name = "resultSize", required = false) final Integer resultSize,
+ @RequestParam(name = "format", required = false) final String format, final HttpServletRequest request) {
+
+ // Bug fix: message previously said "Platform for 'platform-name'" — a
+ // copy-paste from PlatformController; this handler serves line-of-business.
+ LOGGER.info(
+ "retrieving LineOfBusiness for 'line-of-business-name': {} with depth: {}, resultIndex: {}, resultSize:{}, format: {} ...",
+ lineOfBusinessName, depth, resultIndex, resultSize, format);
+
+ final Optional<LineOfBusiness> optional = cacheServiceProvider.getLineOfBusiness(lineOfBusinessName);
+ if (optional.isPresent()) {
+
+ final Format value = Format.forValue(format);
+ switch (value) {
+ case RAW:
+ final LineOfBusiness lineOfBusinessFound = optional.get();
+ LOGGER.info("found LineOfBusiness {} in cache", lineOfBusinessFound);
+ return ResponseEntity.ok(lineOfBusinessFound);
+ case COUNT:
+ final Map<String, Object> map = new HashMap<>();
+ map.put(LINE_OF_BUSINESS, 1);
+ return ResponseEntity.ok(new Results(map));
+ default:
+ break;
+ }
+ LOGGER.error("invalid format type :{}", format);
+ }
+ LOGGER.error("Unable to find LineOfBusiness in cache using {}", lineOfBusinessName);
+ return getRequestErrorResponseEntity(request, LINE_OF_BUSINESS);
+ }
+
+ /**
+ * PUT {base}/{line-of-business-name}{bi-directional relationship URL}: adds a
+ * relationship to the cached line-of-business and returns the resultant
+ * Relationship in a 202 response; request-error when the provider declines.
+ */
+ @PutMapping(value = "/{line-of-business-name}" + BI_DIRECTIONAL_RELATIONSHIP_LIST_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putRelationShip(@PathVariable("line-of-business-name") final String lineOfBusinessName,
+ @RequestBody final Relationship relationship, final HttpServletRequest request) {
+ // Bug fix: original call had two {} placeholders but supplied only one
+ // argument, so the second placeholder was never filled and the
+ // line-of-business name was never logged.
+ LOGGER.info("Will add {} relationship to : {} ...", relationship.getRelatedTo(), lineOfBusinessName);
+
+ final Optional<Relationship> optional =
+ cacheServiceProvider.addRelationShip(lineOfBusinessName, relationship, request.getRequestURI());
+
+ if (optional.isPresent()) {
+ final Relationship resultantRelationship = optional.get();
+ LOGGER.info("Relationship add, sending resultant relationship: {} in response ...", resultantRelationship);
+ return ResponseEntity.accepted().body(resultantRelationship);
+ }
+
+ LOGGER.error("Couldn't add {} relationship for 'line-of-business-name': {} ...", relationship.getRelatedTo(),
+ lineOfBusinessName);
+
+ return getRequestErrorResponseEntity(request, LINE_OF_BUSINESS);
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/NodesController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/NodesController.java
new file mode 100755
index 000000000..101f372c3
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/NodesController.java
@@ -0,0 +1,115 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF;
+import static org.onap.aaisimulator.utils.Constants.NODES_URL;
+import static org.onap.aaisimulator.utils.Constants.RESOURCE_LINK;
+import static org.onap.aaisimulator.utils.Constants.RESOURCE_TYPE;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.GenericVnfs;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.NodeServiceInstance;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.NodesCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * Simulator endpoint for the A&AI /nodes query API, backed by an in-memory
+ * cache ({@link NodesCacheServiceProvider}).
+ *
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Controller
+@RequestMapping(path = NODES_URL)
+public class NodesController {
+
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(NodesController.class);
+
+ private final NodesCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ public NodesController(final NodesCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
+ /**
+ * GET /service-instances/service-instance/{service-instance-id}: looks up a
+ * cached node service instance. format=PATHED returns a Results map holding
+ * the resource-type and resource-link; format=RAW returns the full
+ * ServiceInstance; anything else yields the request-error body.
+ * NOTE(review): the method name getProject is a misnomer (it serves service
+ * instances) but is kept to avoid changing the class's public interface.
+ */
+ @GetMapping(value = "/service-instances/service-instance/{service-instance-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getProject(@PathVariable(name = "service-instance-id") final String serviceInstanceId,
+ @RequestParam(name = "format", required = false) final String format, final HttpServletRequest request) {
+ LOGGER.info("retrieving service instance using 'service-instance-id': {} and format: {}...", serviceInstanceId,
+ format);
+
+ final Optional<NodeServiceInstance> optional = cacheServiceProvider.getNodeServiceInstance(serviceInstanceId);
+ if (!optional.isPresent()) {
+ LOGGER.error("Couldn't find {} in cache", serviceInstanceId);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ final Format value = Format.forValue(format);
+ final NodeServiceInstance nodeServiceInstance = optional.get();
+ switch (value) {
+ case PATHED:
+ // Bug fix: log message said "found project" for a service instance.
+ LOGGER.info("found service instance {} in cache", nodeServiceInstance);
+ final Map<String, Object> map = new LinkedHashMap<>();
+ map.put(RESOURCE_TYPE, nodeServiceInstance.getResourceType());
+ map.put(RESOURCE_LINK, nodeServiceInstance.getResourceLink());
+ return ResponseEntity.ok(new Results(map));
+ case RAW:
+ final Optional<ServiceInstance> serviceInstance =
+ cacheServiceProvider.getServiceInstance(nodeServiceInstance);
+ if (serviceInstance.isPresent()) {
+ return ResponseEntity.ok(serviceInstance.get());
+ }
+ // Bug fix: "cahce" typo corrected.
+ LOGGER.error("Unable to find Service instance in cache using {}", nodeServiceInstance);
+ return getRequestErrorResponseEntity(request);
+ default:
+ break;
+ }
+ LOGGER.error("invalid format type :{}", format);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ /**
+ * GET /generic-vnfs?vnf-name=...: returns the cached GenericVnfs container
+ * matching the given VNF name, or the request-error body when none is found.
+ */
+ @GetMapping(value = "/generic-vnfs", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getGenericVnfs(@RequestParam(name = "vnf-name") final String vnfName,
+ final HttpServletRequest request) {
+ LOGGER.info("will find GenericVnfs for name: {}", vnfName);
+ final Optional<GenericVnfs> optional = cacheServiceProvider.getGenericVnfs(vnfName);
+ if (optional.isPresent()) {
+ LOGGER.info("found matching GenericVnfs for name: {}", vnfName);
+ return ResponseEntity.ok(optional.get());
+ }
+ // Bug fix: "cahce" typo corrected.
+ LOGGER.error("Unable to find GenericVnfs in cache using {}", vnfName);
+ return getRequestErrorResponseEntity(request, GENERIC_VNF);
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/OwningEntityController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/OwningEntityController.java
new file mode 100755
index 000000000..0a08d648c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/OwningEntityController.java
@@ -0,0 +1,137 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.OWNING_ENTITY;
+import static org.onap.aaisimulator.utils.Constants.OWNING_ENTITY_URL;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getHeaders;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.OwningEntity;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.OwnEntityCacheServiceProvider;
+import org.onap.aaisimulator.utils.HttpServiceUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * Simulator endpoint for A&AI owning-entity resources, backed by an in-memory
+ * cache ({@link OwnEntityCacheServiceProvider}).
+ *
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Controller
+@RequestMapping(path = OWNING_ENTITY_URL)
+public class OwningEntityController {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(OwningEntityController.class);
+
+ private final OwnEntityCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ public OwningEntityController(final OwnEntityCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
+ /**
+ * PUT {base}/{owning-entity-id}: stores the OwningEntity in the cache under
+ * the path-variable id, generating a resource-version when the payload
+ * carries none. Always replies 202 Accepted.
+ * NOTE(review): the log prints the id from the request body while the cache
+ * key is the path variable — these could differ; confirm intent.
+ */
+ @PutMapping(value = "{owning-entity-id}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putOwningEntity(@RequestBody final OwningEntity owningEntity,
+ @PathVariable("owning-entity-id") final String owningEntityId, final HttpServletRequest request) {
+ LOGGER.info("Will add OwningEntity to cache with key 'owning-entity-id': {} ...",
+ owningEntity.getOwningEntityId());
+
+ if (owningEntity.getResourceVersion() == null || owningEntity.getResourceVersion().isEmpty()) {
+ owningEntity.setResourceVersion(getResourceVersion());
+
+ }
+ cacheServiceProvider.putOwningEntity(owningEntityId, owningEntity);
+ return ResponseEntity.accepted().build();
+ }
+
+ /**
+ * GET {base}/{owning-entity-id}: returns the cached OwningEntity.
+ * format=RAW returns the object; format=COUNT returns a Results map with a
+ * count of 1; any other format yields the request-error body.
+ * NOTE(review): Format.forValue(format) is a project class — behaviour for a
+ * null/unknown format string (param is optional) should be confirmed there.
+ */
+ @GetMapping(value = "{owning-entity-id}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getOwningEntity(@PathVariable("owning-entity-id") final String owningEntityId,
+ @RequestParam(name = "resultIndex", required = false) final Integer resultIndex,
+ @RequestParam(name = "resultSize", required = false) final Integer resultSize,
+ @RequestParam(name = "format", required = false) final String format, final HttpServletRequest request) {
+ LOGGER.info("retrieving owning entity for 'owning-entity-id': {} ...", owningEntityId);
+
+ final Optional<OwningEntity> optional = cacheServiceProvider.getOwningEntity(owningEntityId);
+ if (!optional.isPresent()) {
+ LOGGER.error("Couldn't find {} in cache", owningEntityId);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ final Format value = Format.forValue(format);
+ switch (value) {
+ case RAW:
+ final OwningEntity owningEntity = optional.get();
+ LOGGER.info("found OwningEntity {} in cache", owningEntity);
+ return ResponseEntity.ok(owningEntity);
+ case COUNT:
+ final Map<String, Object> map = new HashMap<>();
+ map.put(OWNING_ENTITY, 1);
+ return ResponseEntity.ok(new Results(map));
+ default:
+ break;
+ }
+ LOGGER.error("invalid format type :{}", format);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ /**
+ * PUT {base}/{owning-entity-id}/relationship-list/relationship: creates a
+ * bi-directional relationship between the cached owning entity and the
+ * resource named by the relationship's related-link. 202 Accepted on success.
+ */
+ @PutMapping(value = "/{owning-entity-id}/relationship-list/relationship",
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putOwningEntityRelationShip(@RequestBody final Relationship relationship,
+ @PathVariable("owning-entity-id") final String owningEntityId, final HttpServletRequest request) {
+
+ LOGGER.info("adding relationship for owning-entity-id: {} ...", owningEntityId);
+
+ // Without a related-link the provider cannot resolve the peer resource.
+ if (relationship.getRelatedLink() != null) {
+ final String targetBaseUrl = HttpServiceUtils.getBaseUrl(request).toString();
+ final HttpHeaders incomingHeader = getHeaders(request);
+
+ final boolean result = cacheServiceProvider.addRelationShip(incomingHeader, targetBaseUrl,
+ request.getRequestURI(), owningEntityId, relationship);
+ if (result) {
+ LOGGER.info("added created bi directional relationship with {}", relationship.getRelatedLink());
+ return ResponseEntity.accepted().build();
+ }
+ }
+
+ LOGGER.error("Unable to add relationship for related link: {}", relationship.getRelatedLink());
+ return getRequestErrorResponseEntity(request);
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/PlatformController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/PlatformController.java
new file mode 100755
index 000000000..5eef96077
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/PlatformController.java
@@ -0,0 +1,134 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.PLATFORM;
+import static org.onap.aaisimulator.utils.Constants.PLATFORMS_URL;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.Platform;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.PlatformCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Controller
+@RequestMapping(path = PLATFORMS_URL)
+public class PlatformController {
+ private static final Logger LOGGER = LoggerFactory.getLogger(PlatformController.class);
+
+ private final PlatformCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ public PlatformController(final PlatformCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
+ @PutMapping(value = "{platform-name}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putPlatform(@RequestBody final Platform platform,
+ @PathVariable("platform-name") final String platformName, final HttpServletRequest request) {
+ LOGGER.info("Will add Platform to cache with key 'platform-name': {} ...", platform.getPlatformName());
+
+ if (platform.getResourceVersion() == null || platform.getResourceVersion().isEmpty()) {
+ platform.setResourceVersion(getResourceVersion());
+
+ }
+ cacheServiceProvider.putPlatform(platformName, platform);
+ return ResponseEntity.accepted().build();
+ }
+
+ @GetMapping(value = "/{platform-name}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getPlatform(@PathVariable("platform-name") final String platformName,
+ @RequestParam(name = "depth", required = false) final Integer depth,
+ @RequestParam(name = "resultIndex", required = false) final Integer resultIndex,
+ @RequestParam(name = "resultSize", required = false) final Integer resultSize,
+ @RequestParam(name = "format", required = false) final String format, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "retrieving Platform for 'platform-name': {} with depth: {}, resultIndex: {}, resultSize:{}, format: {} ...",
+ platformName, depth, resultIndex, resultSize, format);
+ final Optional<Platform> optional = cacheServiceProvider.getPlatform(platformName);
+ if (optional.isPresent()) {
+
+ final Format value = Format.forValue(format);
+ switch (value) {
+ case RAW:
+ final Platform platform = optional.get();
+ LOGGER.info("found Platform {} in cache", platform);
+ return ResponseEntity.ok(platform);
+ case COUNT:
+ final Map<String, Object> map = new HashMap<>();
+ map.put(PLATFORM, 1);
+ return ResponseEntity.ok(new Results(map));
+ default:
+ break;
+ }
+ LOGGER.error("invalid format type :{}", format);
+ return getRequestErrorResponseEntity(request, PLATFORM);
+ }
+ LOGGER.error("Unable to find Platform in cache using {}", platformName);
+ return getRequestErrorResponseEntity(request, PLATFORM);
+ }
+
+ @PutMapping(value = "/{platform-name}" + BI_DIRECTIONAL_RELATIONSHIP_LIST_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putRelationShip(@PathVariable("platform-name") final String platformName,
+ @RequestBody final Relationship relationship, final HttpServletRequest request) {
+ LOGGER.info("Will add {} relationship to : {} ...", relationship.getRelatedTo(), platformName);
+
+ final Optional<Relationship> optional =
+ cacheServiceProvider.addRelationShip(platformName, relationship, request.getRequestURI());
+
+ if (optional.isPresent()) {
+ final Relationship resultantRelationship = optional.get();
+ LOGGER.info("Relationship add, sending resultant relationship: {} in response ...", resultantRelationship);
+ return ResponseEntity.accepted().body(resultantRelationship);
+ }
+
+ LOGGER.error("Couldn't add {} relationship for 'platform-name': {} ...", relationship.getRelatedTo(),
+ platformName);
+
+ return getRequestErrorResponseEntity(request, PLATFORM);
+
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/PnfsController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/PnfsController.java
new file mode 100755
index 000000000..6311af6e2
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/PnfsController.java
@@ -0,0 +1,159 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2020 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+
+import org.onap.aai.domain.yang.v15.Pnf;
+import org.onap.aai.domain.yang.v15.Pnfs;
+import org.onap.aaisimulator.service.providers.PnfCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.DeleteMapping;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestHeader;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import java.util.List;
+import java.util.Optional;
+
+import static org.onap.aaisimulator.utils.Constants.APPLICATION_MERGE_PATCH_JSON;
+import static org.onap.aaisimulator.utils.Constants.PNF;
+import static org.onap.aaisimulator.utils.Constants.PNFS_URL;
+import static org.onap.aaisimulator.utils.Constants.X_HTTP_METHOD_OVERRIDE;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+
+/**
+ * @author Raj Gumma (raj.gumma@est.tech)
+ */
+@Controller
+@RequestMapping(path = PNFS_URL)
+public class PnfsController {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(PnfsController.class);
+
+ private final PnfCacheServiceProvider cacheServiceProvider;
+
+
+ @Autowired
+ public PnfsController(final PnfCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
+ @PutMapping(value = "/pnf/{pnf-id}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putPnf(@RequestBody final Pnf pnf,
+ @PathVariable("pnf-id") final String pnfId, final HttpServletRequest request) {
+ LOGGER.info("Will add Pnf to cache with 'pnf-id': {} ...", pnfId);
+
+ if (pnf.getResourceVersion() == null || pnf.getResourceVersion().isEmpty()) {
+ pnf.setResourceVersion(getResourceVersion());
+ }
+ cacheServiceProvider.putPnf(pnfId, pnf);
+ return ResponseEntity.accepted().build();
+ }
+
+ @GetMapping(value = "/pnf/{pnf-id}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getPnf(@PathVariable("pnf-id") final String pnfId, final HttpServletRequest request) {
+ LOGGER.info("Will get Pnf for 'pnf-id': {} ", pnfId);
+
+ final Optional<Pnf> optional = cacheServiceProvider.getPnf(pnfId);
+
+ if (optional.isPresent()) {
+ final Pnf pnf = optional.get();
+ LOGGER.info("found Pnf {} in cache", pnf);
+ return ResponseEntity.ok(pnf);
+ }
+
+ LOGGER.error("Unable to find Pnf in cache for 'pnf-id': {}", pnfId);
+ return getRequestErrorResponseEntity(request, PNF);
+
+ }
+
+ @PostMapping(value = "/pnf/{pnf-id}",
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, APPLICATION_MERGE_PATCH_JSON},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> patchPnf(@RequestBody final Pnf pnf,
+ @PathVariable("pnf-id") final String pnfId,
+ @RequestHeader(value = X_HTTP_METHOD_OVERRIDE, required = false) final String xHttpHeaderOverride,
+ final HttpServletRequest request) {
+
+ LOGGER.info("Will post Pnf to cache with 'pnf-id': {} and '{}': {} ...", pnfId, X_HTTP_METHOD_OVERRIDE,
+ xHttpHeaderOverride);
+
+ if (HttpMethod.PATCH.toString().equalsIgnoreCase(xHttpHeaderOverride)) {
+ if (cacheServiceProvider.patchPnf(pnfId, pnf)) {
+ return ResponseEntity.accepted().build();
+ }
+ LOGGER.error("Unable to apply patch to Pnf using 'pnf-id': {} ... ", pnfId);
+ return getRequestErrorResponseEntity(request, PNF);
+ }
+ LOGGER.error("{} not supported ... ", xHttpHeaderOverride);
+
+ return getRequestErrorResponseEntity(request, PNF);
+ }
+
+ @GetMapping(produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getPnfs(@RequestParam(name = "selflink") final String selflink,
+ final HttpServletRequest request) {
+ LOGGER.info("will retrieve Pnfs using selflink: {}", selflink);
+
+ final List<Pnf> pnfList = cacheServiceProvider.getPnfs(selflink);
+
+ if (pnfList.isEmpty()) {
+ LOGGER.error("No matching pnfs found using selflink: {}", selflink);
+ return getRequestErrorResponseEntity(request, PNF);
+ }
+
+ LOGGER.info("found {} Pnfs in cache", pnfList.size());
+ final Pnfs pnfs = new Pnfs();
+ pnfs.getPnf().addAll(pnfList);
+ return ResponseEntity.ok(pnfs);
+ }
+
+ @DeleteMapping(value = "/pnf/{pnf-id}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> deletePnf(@PathVariable("pnf-id") final String pnfId,
+ @RequestParam(name = "resource-version") final String resourceVersion, final HttpServletRequest request) {
+ LOGGER.info("Will delete Pnf for 'pnf-id': {} and 'resource-version': {}", pnfId, resourceVersion);
+
+ if (cacheServiceProvider.deletePnf(pnfId, resourceVersion)) {
+ LOGGER.info("Successfully deleted Pnf from cache for 'pnf-id': {} and 'resource-version': {}", pnfId,
+ resourceVersion);
+ return ResponseEntity.noContent().build();
+ }
+
+ LOGGER.error("Unable to delete Pnf for 'pnf-id': {} and 'resource-version': {} ...", pnfId,
+ resourceVersion);
+ return getRequestErrorResponseEntity(request, PNF);
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ProjectController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ProjectController.java
new file mode 100755
index 000000000..c901aa819
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ProjectController.java
@@ -0,0 +1,137 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.PROJECT;
+import static org.onap.aaisimulator.utils.Constants.PROJECT_URL;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getHeaders;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.Project;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.ProjectCacheServiceProvider;
+import org.onap.aaisimulator.utils.HttpServiceUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Controller
+@RequestMapping(path = PROJECT_URL)
+public class ProjectController {
+ private static final Logger LOGGER = LoggerFactory.getLogger(ProjectController.class);
+
+ private final ProjectCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ public ProjectController(final ProjectCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
+ @PutMapping(value = "/{project-name}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putProject(@RequestBody final Project project,
+ @PathVariable("project-name") final String projectName, final HttpServletRequest request) {
+ LOGGER.info("Will put project for 'project-name': {} ...", project.getProjectName());
+
+ if (project.getResourceVersion() == null || project.getResourceVersion().isEmpty()) {
+ project.setResourceVersion(getResourceVersion());
+
+ }
+ cacheServiceProvider.putProject(projectName, project);
+ return ResponseEntity.accepted().build();
+
+ }
+
+ @GetMapping(value = "/{project-name}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getProject(@PathVariable("project-name") final String projectName,
+ @RequestParam(name = "resultIndex", required = false) final Integer resultIndex,
+ @RequestParam(name = "resultSize", required = false) final Integer resultSize,
+ @RequestParam(name = "format", required = false) final String format, final HttpServletRequest request) {
+ LOGGER.info("retrieving project for 'project-name': {} ...", projectName);
+
+ final Optional<Project> optional = cacheServiceProvider.getProject(projectName);
+ if (!optional.isPresent()) {
+ LOGGER.error("Couldn't find {} in cache", projectName);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ final Format value = Format.forValue(format);
+ switch (value) {
+ case RAW:
+ final Project project = optional.get();
+ LOGGER.info("found project {} in cache", project);
+ return ResponseEntity.ok(project);
+ case COUNT:
+ final Map<String, Object> map = new HashMap<>();
+ map.put(PROJECT, 1);
+ return ResponseEntity.ok(new Results(map));
+ default:
+ break;
+ }
+ LOGGER.error("invalid format type :{}", format);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ @PutMapping(value = "/{project-name}" + RELATIONSHIP_LIST_RELATIONSHIP_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putProjectRelationShip(@RequestBody final Relationship relationship,
+ @PathVariable("project-name") final String projectName, final HttpServletRequest request) {
+
+ LOGGER.info("adding relationship for project-name: {} ...", projectName);
+
+ if (relationship.getRelatedLink() != null) {
+ final String targetBaseUrl = HttpServiceUtils.getBaseUrl(request).toString();
+ final HttpHeaders incomingHeader = getHeaders(request);
+
+ final boolean result = cacheServiceProvider.addRelationShip(incomingHeader, targetBaseUrl,
+ request.getRequestURI(), projectName, relationship);
+ if (result) {
+ LOGGER.info("added created bi directional relationship with {}", relationship.getRelatedLink());
+ return ResponseEntity.accepted().build();
+ }
+ }
+
+ LOGGER.error("Unable to add relationship for related link: {}", relationship.getRelatedLink());
+ return getRequestErrorResponseEntity(request);
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ServiceDesignAndCreationController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ServiceDesignAndCreationController.java
new file mode 100644
index 000000000..7f12341e7
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ServiceDesignAndCreationController.java
@@ -0,0 +1,74 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.MediaType;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+import static org.onap.aaisimulator.utils.Constants.SERVICE_DESIGN_AND_CREATION_URL;
+
+/**
+ * @author Eliezio Oliveira (eliezio.oliveira@est.tech)
+ */
+@RestController
+@RequestMapping(path = SERVICE_DESIGN_AND_CREATION_URL)
+public class ServiceDesignAndCreationController {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(ServiceDesignAndCreationController.class);
+
+ @Value("${SERVICE_DESIGN_AND_CREATION_RESPONSES_LOCATION:./}")
+ private String responsesLocation;
+
+ @GetMapping(path = "/models/model/{model-invariant-id}/model-vers",
+ produces = MediaType.APPLICATION_XML_VALUE)
+ public ResponseEntity<String> getModelVers(@PathVariable("model-invariant-id") String modelInvariantId) {
+ Path responsesPath = Paths.get(responsesLocation).toAbsolutePath();
+ LOGGER.info("Will get ModelVer for 'model-invariant-id': {}, looking under {}",
+ modelInvariantId, responsesPath.toString());
+
+ Path responsePath = responsesPath.resolve(modelInvariantId + ".xml");
+ if (!responsePath.toFile().exists()) {
+ LOGGER.error("{} not found", responsePath.toString());
+ return ResponseEntity.notFound().build();
+ }
+ try {
+ String content = new String(Files.readAllBytes(responsePath), StandardCharsets.UTF_8);
+ LOGGER.info("{} found with {} characters", responsePath.toString(), content.length());
+ return ResponseEntity.ok().body(content);
+ } catch (IOException e) {
+ LOGGER.error("Failed to read response from {}: {}}", responsePath.toString(), e.getMessage());
+ return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
+ }
+ }
+} \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/ValidationException.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/exception/InvalidRestRequestException.java
index a9349174a..f587b9b81 100644..100755
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/ValidationException.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/exception/InvalidRestRequestException.java
@@ -1,8 +1,6 @@
-/*
+/*-
* ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,14 +13,25 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
-package org.onap.pnfsimulator.simulator.validation;
+package org.onap.aaisimulator.exception;
-public class ValidationException extends Exception {
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class InvalidRestRequestException extends RuntimeException {
+ private static final long serialVersionUID = -1158414939006977465L;
- public ValidationException(String message) {
+ public InvalidRestRequestException(final String message) {
super(message);
}
+
+ public InvalidRestRequestException(final String message, final Throwable cause) {
+ super(message, cause);
+ }
}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/NoRopFilesException.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/exception/RestProcessingException.java
index d3765a8c1..11218c8d7 100644..100755
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/NoRopFilesException.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/exception/RestProcessingException.java
@@ -1,8 +1,6 @@
-/*
+/*-
* ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,14 +13,25 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+package org.onap.aaisimulator.exception;
-package org.onap.pnfsimulator.simulator.validation;
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class RestProcessingException extends RuntimeException {
-public class NoRopFilesException extends Exception {
+ private static final long serialVersionUID = 16862313537198441L;
- public NoRopFilesException(String message) {
+ public RestProcessingException(final String message) {
super(message);
}
+
+ public RestProcessingException(final String message, final Throwable cause) {
+ super(message, cause);
+ }
}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/CloudRegionKey.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/CloudRegionKey.java
new file mode 100755
index 000000000..b557434ac
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/CloudRegionKey.java
@@ -0,0 +1,85 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.models;
+
+import java.io.Serializable;
+import org.springframework.util.ObjectUtils;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class CloudRegionKey implements Serializable {
+
+ private static final long serialVersionUID = 6175094050996035737L;
+
+ private final String cloudOwner;
+
+ private final String cloudRegionId;
+
+ public CloudRegionKey(final String cloudOwner, final String cloudRegionId) {
+ this.cloudOwner = cloudOwner;
+ this.cloudRegionId = cloudRegionId;
+ }
+
+ /**
+ * @return the cloudOwner
+ */
+ public String getCloudOwner() {
+ return cloudOwner;
+ }
+
+ /**
+ * @return the cloudRegionId
+ */
+ public String getCloudRegionId() {
+ return cloudRegionId;
+ }
+
+ public boolean isValid() {
+ return cloudOwner != null && !cloudOwner.isEmpty() && cloudRegionId != null && !cloudRegionId.isEmpty();
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + (ObjectUtils.nullSafeHashCode(cloudOwner));
+ result = prime * result + (ObjectUtils.nullSafeHashCode(cloudRegionId));
+
+ return result;
+ }
+
+ @Override
+ public boolean equals(final Object obj) {
+ if (obj instanceof CloudRegionKey) {
+ final CloudRegionKey other = (CloudRegionKey) obj;
+ return ObjectUtils.nullSafeEquals(cloudOwner, other.cloudOwner)
+ && ObjectUtils.nullSafeEquals(cloudRegionId, other.cloudRegionId);
+ }
+ return false;
+ }
+
+ @Override
+ public String toString() {
+ return "CloudRegionKey [cloudOwner=" + cloudOwner + ", cloudRegionId=" + cloudRegionId + "]";
+ }
+
+}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/model/LoadModelResponse.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/Format.java
index a6e292f62..174e4166b 100644..100755
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/model/LoadModelResponse.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/Format.java
@@ -1,40 +1,49 @@
/*-
* ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+package org.onap.aaisimulator.models;
-package org.onap.netconfsimulator.netconfcore.model;
/**
 * The AAI query result formats understood by the simulator, each paired with
 * the wire value used in the "format" query parameter.
 *
 * @author waqas.ikram@ericsson.com
 *
 */
public enum Format {

    COUNT("count"), RAW("raw"), PATHED("pathed");

    // Wire value exactly as it appears in the request's query string.
    private final String value;

    Format(final String value) {
        this.value = value;
    }

    /**
     * @return the wire value of this format
     */
    public String getValue() {
        return value;
    }

    /**
     * Resolves a wire value to its enum constant.
     *
     * @param value the query-parameter value to match
     * @return the matching constant, or {@link #RAW} when nothing matches
     */
    public static Format forValue(final String value) {
        for (final Format candidate : values()) {
            if (candidate.getValue().equals(value)) {
                return candidate;
            }
        }
        return RAW;
    }

}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/NodeServiceInstance.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/NodeServiceInstance.java
new file mode 100755
index 000000000..6b4762f9e
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/NodeServiceInstance.java
@@ -0,0 +1,139 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.models;
+
+import java.io.Serializable;
+
/**
 * Mutable transfer object describing one service instance returned by the AAI
 * simulator's nodes query: the owning customer, the service type, and the
 * resource the instance resolves to.
 *
 * @author waqas.ikram@ericsson.com
 *
 */
public class NodeServiceInstance implements Serializable {

    private static final long serialVersionUID = -3314166327618070948L;

    // Plain string properties with bean-style accessors below.
    private String globalCustomerId;
    private String serviceType;
    private String serviceInstanceId;
    private String resourceType;
    private String resourceLink;

    /** No-arg constructor for (de)serialization frameworks. */
    public NodeServiceInstance() {}

    /** Convenience constructor populating every property at once. */
    public NodeServiceInstance(final String globalCustomerId, final String serviceType, final String serviceInstanceId,
            final String resourceType, final String resourceLink) {
        this.globalCustomerId = globalCustomerId;
        this.serviceType = serviceType;
        this.serviceInstanceId = serviceInstanceId;
        this.resourceType = resourceType;
        this.resourceLink = resourceLink;
    }

    /** @return the globalCustomerId */
    public String getGlobalCustomerId() {
        return globalCustomerId;
    }

    /** @param globalCustomerId the globalCustomerId to set */
    public void setGlobalCustomerId(final String globalCustomerId) {
        this.globalCustomerId = globalCustomerId;
    }

    /** @return the serviceType */
    public String getServiceType() {
        return serviceType;
    }

    /** @param serviceType the serviceType to set */
    public void setServiceType(final String serviceType) {
        this.serviceType = serviceType;
    }

    /** @return the serviceInstanceId */
    public String getServiceInstanceId() {
        return serviceInstanceId;
    }

    /** @param serviceInstanceId the serviceInstanceId to set */
    public void setServiceInstanceId(final String serviceInstanceId) {
        this.serviceInstanceId = serviceInstanceId;
    }

    /** @return the resourceType */
    public String getResourceType() {
        return resourceType;
    }

    /** @param resourceType the resourceType to set */
    public void setResourceType(final String resourceType) {
        this.resourceType = resourceType;
    }

    /** @return the resourceLink */
    public String getResourceLink() {
        return resourceLink;
    }

    /** @param resourceLink the resourceLink to set */
    public void setResourceLink(final String resourceLink) {
        this.resourceLink = resourceLink;
    }

    @Override
    public String toString() {
        // %s renders null as "null", matching plain string concatenation.
        return String.format(
                "NodeServiceInstance [globalCustomerId=%s, serviceType=%s, serviceInstanceId=%s, resourceType=%s, resourceLink=%s]",
                globalCustomerId, serviceType, serviceInstanceId, resourceType, resourceLink);
    }

}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/Results.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/Results.java
new file mode 100755
index 000000000..8954327fd
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/Results.java
@@ -0,0 +1,67 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.models;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class Results implements Serializable {
+
+ private static final long serialVersionUID = 3967660859271162759L;
+
+ @JsonProperty("results")
+ private List<Map<String, Object>> values = new ArrayList<>();
+
+ public Results() {}
+
+ public Results(final Map<String, Object> value) {
+ this.values.add(value);
+ }
+
+ /**
+ * @return the values
+ */
+ public List<Map<String, Object>> getValues() {
+ return values;
+ }
+
+ /**
+ * @param values the values to set
+ */
+ public void setValues(final List<Map<String, Object>> values) {
+ this.values = values;
+ }
+
+
+ @JsonIgnore
+ @Override
+ public String toString() {
+ return "Result [values=" + values + "]";
+ }
+
+}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapter.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/Clearable.java
index e7d113dce..d20d41228 100644..100755
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapter.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/Clearable.java
@@ -1,8 +1,6 @@
-/*
+/*-
* ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,12 +13,16 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+package org.onap.aaisimulator.service.providers;
-package org.onap.pnfsimulator.simulator.client;
-
-public interface HttpClientAdapter {
-
- void send(String content);
+/**
+ * Contract for simulator service providers whose state can be wiped in one
+ * call — implementors in this package back their state with named caches and
+ * reset them here (see e.g. CloudRegionCacheServiceProviderImpl#clearAll).
+ *
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public interface Clearable {
+    /** Removes all state held by this provider. */
+    void clearAll();
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CloudRegionCacheServiceProvider.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CloudRegionCacheServiceProvider.java
new file mode 100755
index 000000000..3f440ec3c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CloudRegionCacheServiceProvider.java
@@ -0,0 +1,69 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import java.util.Optional;
+import org.onap.aai.domain.yang.CloudRegion;
+import org.onap.aai.domain.yang.EsrSystemInfo;
+import org.onap.aai.domain.yang.EsrSystemInfoList;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.Tenant;
+import org.onap.aai.domain.yang.Vserver;
+import org.onap.aaisimulator.models.CloudRegionKey;
+import org.springframework.http.HttpHeaders;
+
+/**
+ * Read/write access to the simulator's cached cloud-region state: cloud
+ * regions keyed by {@link CloudRegionKey}, plus the tenants, vservers and ESR
+ * system info nested inside them. Extends {@link Clearable} so the whole
+ * cache can be reset.
+ *
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public interface CloudRegionCacheServiceProvider extends Clearable {
+
+    /** Stores (or overwrites) the cloud region under the given key. */
+    void putCloudRegion(final CloudRegionKey cloudRegionKey, final CloudRegion cloudRegion);
+
+    /** @return the cached cloud region, or empty when the key is unknown */
+    Optional<CloudRegion> getCloudRegion(final CloudRegionKey cloudRegionKey);
+
+    /**
+     * Adds the incoming relationship to the cached cloud region and returns the
+     * reciprocal cloud-region-side relationship, or empty when the key is unknown.
+     */
+    Optional<Relationship> addRelationShip(final CloudRegionKey key, final Relationship relationship,
+            final String requestUri);
+
+    /** Adds the tenant under the region; false when region is missing or tenant id already exists. */
+    boolean putTenant(final CloudRegionKey key, final String tenantId, Tenant tenant);
+
+    /** @return the tenant with the given id under the region, or empty */
+    Optional<Tenant> getTenant(final CloudRegionKey key, final String tenantId);
+
+    /**
+     * Establishes a two-way relationship for a tenant: pushes the outgoing side to
+     * the related party and stores the returned relationship on the cached tenant.
+     */
+    boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl, final String requestURI,
+            final CloudRegionKey key, final String tenantId, final Relationship relationship);
+
+    /** @return the region's ESR system info list, or empty when absent */
+    Optional<EsrSystemInfoList> getEsrSystemInfoList(final CloudRegionKey key);
+
+    /** Adds the ESR system info entry; false when the region is missing or the id already exists. */
+    boolean putEsrSystemInfo(final CloudRegionKey key, final String esrSystemInfoId, final EsrSystemInfo esrSystemInfo);
+
+    /** Adds the vserver under the tenant; false when tenant is missing or vserver id already exists. */
+    boolean putVserver(final CloudRegionKey key, final String tenantId, final String vServerId, Vserver vServer);
+
+    /** @return the vserver with the given id under the tenant, or empty */
+    Optional<Vserver> getVserver(final CloudRegionKey key, final String tenantId, final String vServerId);
+
+    /** Deletes the vserver only when both its id and resource-version match. */
+    boolean deleteVserver(final CloudRegionKey key, final String tenantId, final String vServerId,
+            final String resourceVersion);
+
+    /**
+     * Adds the incoming relationship to the cached vserver and returns the
+     * reciprocal vserver-side relationship, or empty when the vserver is unknown.
+     */
+    Optional<Relationship> addvServerRelationShip(final CloudRegionKey key, final String tenantId,
+            final String vServerId, final Relationship relationship, final String requestUri);
+
+    /**
+     * Establishes a two-way relationship for a vserver: pushes the outgoing side to
+     * the related party and stores the returned relationship on the cached vserver.
+     */
+    boolean addVServerRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl, final String requestURI, final CloudRegionKey key,
+            final String tenantId, final String vServerId, final Relationship relationship);
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CloudRegionCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CloudRegionCacheServiceProviderImpl.java
new file mode 100755
index 000000000..a26c0eb50
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CloudRegionCacheServiceProviderImpl.java
@@ -0,0 +1,471 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.CLOUD_REGION_CACHE;
+import static org.onap.aaisimulator.utils.Constants.BELONGS_TO;
+import static org.onap.aaisimulator.utils.Constants.CLOUD_REGION;
+import static org.onap.aaisimulator.utils.Constants.CLOUD_REGION_CLOUD_OWNER;
+import static org.onap.aaisimulator.utils.Constants.CLOUD_REGION_CLOUD_REGION_ID;
+import static org.onap.aaisimulator.utils.Constants.CLOUD_REGION_OWNER_DEFINED_TYPE;
+import static org.onap.aaisimulator.utils.Constants.HOSTED_ON;
+import static org.onap.aaisimulator.utils.Constants.LOCATED_IN;
+import static org.onap.aaisimulator.utils.Constants.TENANT;
+import static org.onap.aaisimulator.utils.Constants.TENANT_TENANT_ID;
+import static org.onap.aaisimulator.utils.Constants.TENANT_TENANT_NAME;
+import static org.onap.aaisimulator.utils.Constants.VSERVER;
+import static org.onap.aaisimulator.utils.Constants.VSERVER_VSERVER_ID;
+import static org.onap.aaisimulator.utils.Constants.VSERVER_VSERVER_NAME;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getBiDirectionalRelationShipListRelatedLink;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getRelationShipListRelatedLink;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getTargetUrl;
+import java.util.List;
+import java.util.Optional;
+import org.onap.aai.domain.yang.CloudRegion;
+import org.onap.aai.domain.yang.EsrSystemInfo;
+import org.onap.aai.domain.yang.EsrSystemInfoList;
+import org.onap.aai.domain.yang.RelatedToProperty;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aai.domain.yang.Tenant;
+import org.onap.aai.domain.yang.Tenants;
+import org.onap.aai.domain.yang.Vserver;
+import org.onap.aai.domain.yang.Vservers;
+import org.onap.aaisimulator.models.CloudRegionKey;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.http.HttpHeaders;
+import org.springframework.stereotype.Service;
+
+/**
+ * Cache-backed implementation of {@link CloudRegionCacheServiceProvider}. All
+ * cloud-region state lives in the Spring cache named by CLOUD_REGION_CACHE,
+ * keyed by {@link CloudRegionKey}; tenants, vservers and ESR system info are
+ * mutated in place on the cached {@link CloudRegion} object graph. Two-way
+ * relationship methods additionally PUT the outgoing relationship to the
+ * related party via {@link HttpRestServiceProvider}.
+ *
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Service
+public class CloudRegionCacheServiceProviderImpl extends AbstractCacheServiceProvider
+        implements CloudRegionCacheServiceProvider {
+
+
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(CloudRegionCacheServiceProviderImpl.class);
+
+    // Used to push the outgoing half of two-way relationships to the related party.
+    private final HttpRestServiceProvider httpRestServiceProvider;
+
+    @Autowired
+    public CloudRegionCacheServiceProviderImpl(final CacheManager cacheManager,
+            final HttpRestServiceProvider httpRestServiceProvider) {
+        super(cacheManager);
+        this.httpRestServiceProvider = httpRestServiceProvider;
+    }
+
+    /** Stores (or overwrites) the cloud region under the composite key. */
+    @Override
+    public void putCloudRegion(final CloudRegionKey cloudRegionKey, final CloudRegion cloudRegion) {
+        LOGGER.info("Adding CloudRegion to cache with key: {} ...", cloudRegionKey);
+        final Cache cache = getCache(CLOUD_REGION_CACHE.getName());
+        cache.put(cloudRegionKey, cloudRegion);
+
+    }
+
+    /** Looks up the cloud region; logs an error and returns empty when absent. */
+    @Override
+    public Optional<CloudRegion> getCloudRegion(final CloudRegionKey cloudRegionKey) {
+        LOGGER.info("getting CloudRegion from cache using key: {}", cloudRegionKey);
+        final Cache cache = getCache(CLOUD_REGION_CACHE.getName());
+        final CloudRegion value = cache.get(cloudRegionKey, CloudRegion.class);
+        if (value != null) {
+            return Optional.of(value);
+        }
+        LOGGER.error("Unable to find CloudRegion in cache using key:{} ", cloudRegionKey);
+        return Optional.empty();
+    }
+
+    /**
+     * Appends the incoming relationship to the cached cloud region (creating its
+     * relationship list on first use) and builds the reciprocal cloud-region-side
+     * relationship — related-link derived from the request URI, plus owner /
+     * region-id relationship data and the owner-defined-type property.
+     */
+    @Override
+    public Optional<Relationship> addRelationShip(final CloudRegionKey key, final Relationship relationship,
+            final String requestUri) {
+        final Optional<CloudRegion> optional = getCloudRegion(key);
+        if (optional.isPresent()) {
+            final CloudRegion cloudRegion = optional.get();
+            RelationshipList relationshipList = cloudRegion.getRelationshipList();
+            if (relationshipList == null) {
+                relationshipList = new RelationshipList();
+                cloudRegion.setRelationshipList(relationshipList);
+            }
+            relationshipList.getRelationship().add(relationship);
+
+            LOGGER.info("Successfully added relation to CloudRegion with key: {}", key);
+
+
+            final Relationship resultantRelationship = new Relationship();
+            resultantRelationship.setRelatedTo(CLOUD_REGION);
+            resultantRelationship.setRelationshipLabel(LOCATED_IN);
+            resultantRelationship.setRelatedLink(getBiDirectionalRelationShipListRelatedLink(requestUri));
+
+            final List<RelationshipData> relationshipDataList = resultantRelationship.getRelationshipData();
+            relationshipDataList.add(getRelationshipData(CLOUD_REGION_CLOUD_OWNER, cloudRegion.getCloudOwner()));
+            relationshipDataList.add(getRelationshipData(CLOUD_REGION_CLOUD_REGION_ID, cloudRegion.getCloudRegionId()));
+
+            final List<RelatedToProperty> relatedToPropertyList = resultantRelationship.getRelatedToProperty();
+
+            final RelatedToProperty relatedToProperty = new RelatedToProperty();
+            relatedToProperty.setPropertyKey(CLOUD_REGION_OWNER_DEFINED_TYPE);
+            relatedToProperty.setPropertyValue(cloudRegion.getOwnerDefinedType());
+            relatedToPropertyList.add(relatedToProperty);
+
+            return Optional.of(resultantRelationship);
+
+        }
+        LOGGER.error("Unable to find CloudRegion using key: {} ...", key);
+        return Optional.empty();
+    }
+
+    /**
+     * Adds the tenant under the region's Tenants container (created lazily);
+     * returns false when the region is missing or a tenant with the same id
+     * already exists.
+     */
+    @Override
+    public boolean putTenant(final CloudRegionKey key, final String tenantId, final Tenant tenant) {
+        final Optional<CloudRegion> optional = getCloudRegion(key);
+        if (optional.isPresent()) {
+            final CloudRegion cloudRegion = optional.get();
+            Tenants tenants = cloudRegion.getTenants();
+            if (tenants == null) {
+                tenants = new Tenants();
+                cloudRegion.setTenants(tenants);
+            }
+
+            final Optional<Tenant> existingTenantOptional = tenants.getTenant().stream()
+                    .filter(existing -> existing.getTenantId() != null && existing.getTenantId().equals(tenantId))
+                    .findFirst();
+
+            if (!existingTenantOptional.isPresent()) {
+                return tenants.getTenant().add(tenant);
+            }
+
+            LOGGER.warn("Tenant already exists ...");
+            return false;
+        }
+        LOGGER.error("Unable to add Tenant using key: {} ...", key);
+        return false;
+    }
+
+    /**
+     * Finds the tenant with the given id under the region, or empty.
+     * NOTE(review): unlike putTenant, this filter does not null-check
+     * getTenantId(), so a cached tenant with a null id would throw an NPE
+     * here — confirm whether such tenants can ever be cached.
+     */
+    @Override
+    public Optional<Tenant> getTenant(final CloudRegionKey key, final String tenantId) {
+        final Optional<CloudRegion> optional = getCloudRegion(key);
+        if (optional.isPresent()) {
+            final CloudRegion cloudRegion = optional.get();
+            final Tenants tenants = cloudRegion.getTenants();
+            if (tenants != null) {
+                return tenants.getTenant().stream().filter(existing -> existing.getTenantId().equals(tenantId))
+                        .findFirst();
+            }
+        }
+
+        LOGGER.error("Unable to find Tenant using key: {} and tenantId: {} ...", key, tenantId);
+        return Optional.empty();
+    }
+
+    /**
+     * Establishes a two-way relationship for a tenant: PUTs the tenant-side
+     * relationship to the related party's URL and, on success, stores the
+     * returned relationship on the cached tenant. Any exception is logged and
+     * reported as failure (best-effort semantics).
+     */
+    @Override
+    public boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl,
+            final String requestUriString, final CloudRegionKey key, final String tenantId,
+            final Relationship relationship) {
+        try {
+            final Optional<Tenant> optional = getTenant(key, tenantId);
+            if (optional.isPresent()) {
+                final Tenant tenant = optional.get();
+                final String targetUrl = getTargetUrl(targetBaseUrl, relationship.getRelatedLink());
+
+                final Relationship outGoingRelationShip = getRelationship(requestUriString, key, tenant);
+                final Optional<Relationship> optionalRelationship = httpRestServiceProvider.put(incomingHeader,
+                        outGoingRelationShip, targetUrl, Relationship.class);
+
+                if (optionalRelationship.isPresent()) {
+                    final Relationship resultantRelationship = optionalRelationship.get();
+                    RelationshipList relationshipList = tenant.getRelationshipList();
+                    if (relationshipList == null) {
+                        relationshipList = new RelationshipList();
+                        tenant.setRelationshipList(relationshipList);
+                    }
+
+                    if (relationshipList.getRelationship().add(resultantRelationship)) {
+                        LOGGER.info("added relationship {} in cache successfully", resultantRelationship);
+                        return true;
+                    }
+                }
+
+
+            }
+        } catch (final Exception exception) {
+            LOGGER.error("Unable to add two-way relationship for CloudRegion: {} and tenant: {}", key, tenantId,
+                    exception);
+        }
+        LOGGER.error("Unable to add relationship in cache for CloudRegion: {} and tenant: {}", key, tenantId);
+        return false;
+    }
+
+    /** Returns the region's ESR system info list, or empty (with an error log) when absent. */
+    @Override
+    public Optional<EsrSystemInfoList> getEsrSystemInfoList(final CloudRegionKey key) {
+        final Optional<CloudRegion> optional = getCloudRegion(key);
+        if (optional.isPresent()) {
+            final CloudRegion cloudRegion = optional.get();
+            final EsrSystemInfoList esrSystemInfoList = cloudRegion.getEsrSystemInfoList();
+            if (esrSystemInfoList != null) {
+                return Optional.of(esrSystemInfoList);
+            }
+        }
+        LOGGER.error("Unable to find EsrSystemInfoList in cache for CloudRegion: {} ", key);
+
+        return Optional.empty();
+    }
+
+    /**
+     * Adds the ESR system info entry to the region's list (created lazily);
+     * returns false when the region is missing or the id already exists.
+     */
+    @Override
+    public boolean putEsrSystemInfo(final CloudRegionKey key, final String esrSystemInfoId,
+            final EsrSystemInfo esrSystemInfo) {
+        final Optional<CloudRegion> optional = getCloudRegion(key);
+        if (optional.isPresent()) {
+            final CloudRegion cloudRegion = optional.get();
+            final List<EsrSystemInfo> esrSystemInfoList = getEsrSystemInfoList(cloudRegion);
+
+            final Optional<EsrSystemInfo> existingEsrSystemInfo =
+                    esrSystemInfoList.stream().filter(existing -> existing.getEsrSystemInfoId() != null
+                            && existing.getEsrSystemInfoId().equals(esrSystemInfoId)).findFirst();
+            if (existingEsrSystemInfo.isPresent()) {
+                LOGGER.error("EsrSystemInfo already exists {}", existingEsrSystemInfo.get());
+                return false;
+            }
+
+            return esrSystemInfoList.add(esrSystemInfo);
+
+        }
+        return false;
+    }
+
+    /**
+     * Adds the vserver under the tenant's Vservers container (created lazily);
+     * returns false when the tenant is missing or the vserver id already exists.
+     */
+    @Override
+    public boolean putVserver(final CloudRegionKey key, final String tenantId, final String vServerId,
+            final Vserver vServer) {
+        final Optional<Tenant> optional = getTenant(key, tenantId);
+        if (optional.isPresent()) {
+            final Tenant tenant = optional.get();
+            Vservers vServers = tenant.getVservers();
+            if (vServers == null) {
+                vServers = new Vservers();
+                tenant.setVservers(vServers);
+            }
+            final List<Vserver> vServerList = vServers.getVserver();
+
+            final Optional<Vserver> existingVserver = vServerList.stream()
+                    .filter(existing -> existing.getVserverId() != null && existing.getVserverId().equals(vServerId))
+                    .findFirst();
+
+            if (existingVserver.isPresent()) {
+                LOGGER.error("Vserver already exists {}", existingVserver.get());
+                return false;
+            }
+            return vServerList.add(vServer);
+
+        }
+        return false;
+    }
+
+    /** Finds the vserver with the given id under the tenant, or empty. */
+    @Override
+    public Optional<Vserver> getVserver(final CloudRegionKey key, final String tenantId, final String vServerId) {
+        final Optional<Tenant> optional = getTenant(key, tenantId);
+        if (optional.isPresent()) {
+            final Tenant tenant = optional.get();
+            final Vservers vServers = tenant.getVservers();
+            if (vServers != null) {
+                return vServers.getVserver().stream()
+                        .filter(vServer -> vServer.getVserverId() != null && vServer.getVserverId().equals(vServerId))
+                        .findFirst();
+            }
+        }
+        LOGGER.error("Unable to find vServer in cache ... ");
+        return Optional.empty();
+    }
+
+    /**
+     * Removes the vserver only when BOTH its id and resource-version match the
+     * arguments (optimistic-concurrency style guard); false otherwise.
+     */
+    @Override
+    public boolean deleteVserver(final CloudRegionKey key, final String tenantId, final String vServerId,
+            final String resourceVersion) {
+        final Optional<Vserver> optional = getVserver(key, tenantId, vServerId);
+        if (optional.isPresent()) {
+            final Optional<Tenant> tenantOptional = getTenant(key, tenantId);
+            if (tenantOptional.isPresent()) {
+                final Tenant tenant = tenantOptional.get();
+                final Vservers vServers = tenant.getVservers();
+                if (vServers != null) {
+                    return vServers.getVserver().removeIf(vServer -> {
+                        if (vServer.getVserverId() != null && vServer.getVserverId().equals(vServerId)
+                                && vServer.getResourceVersion() != null
+                                && vServer.getResourceVersion().equals(resourceVersion)) {
+                            LOGGER.info("Will remove Vserver from cache with vServerId: {} and resource-version: {} ",
+                                    vServerId, vServer.getResourceVersion());
+                            return true;
+                        }
+                        return false;
+                    });
+                }
+
+            }
+
+        }
+        LOGGER.error(
+                "Unable to find Vserver for using key: {}, tenant-id: {}, vserver-id: {} and resource-version: {} ...",
+                key, tenantId, vServerId, resourceVersion);
+
+        return false;
+    }
+
+    /**
+     * Appends the incoming relationship to the cached vserver (creating its
+     * relationship list on first use) and returns the reciprocal vserver-side
+     * relationship built by getVserverRelationship.
+     */
+    @Override
+    public Optional<Relationship> addvServerRelationShip(final CloudRegionKey key, final String tenantId,
+            final String vServerId, final Relationship relationship, final String requestUri) {
+        final Optional<Vserver> optional = getVserver(key, tenantId, vServerId);
+        if (optional.isPresent()) {
+            final Vserver vServer = optional.get();
+            RelationshipList relationshipList = vServer.getRelationshipList();
+            if (relationshipList == null) {
+                relationshipList = new RelationshipList();
+                vServer.setRelationshipList(relationshipList);
+            }
+            relationshipList.getRelationship().add(relationship);
+            LOGGER.info("Successfully added relation to Vserver with key: {}, tenantId: {} and vServerId: {}", key,
+                    tenantId, vServerId);
+            final String relatedLink = getBiDirectionalRelationShipListRelatedLink(requestUri);
+
+            final Relationship resultantRelationship = getVserverRelationship(key, tenantId, vServer, relatedLink);
+
+            return Optional.of(resultantRelationship);
+        }
+
+        LOGGER.error("Unable to find Vserver using key: {}, tenantId: {} and vServerId: {}...", key, tenantId,
+                vServerId);
+        return Optional.empty();
+    }
+
+    /**
+     * Builds the vserver-side relationship: HOSTED_ON label, the given related
+     * link, owner/region/tenant/vserver-id relationship data, and the vserver
+     * name as a related-to property.
+     */
+    private Relationship getVserverRelationship(final CloudRegionKey key, final String tenantId, final Vserver vServer,
+            final String relatedLink) {
+        final Relationship resultantRelationship = new Relationship();
+        resultantRelationship.setRelatedTo(VSERVER);
+        resultantRelationship.setRelationshipLabel(HOSTED_ON);
+        resultantRelationship.setRelatedLink(relatedLink);
+
+        final List<RelationshipData> relationshipDataList = resultantRelationship.getRelationshipData();
+        relationshipDataList.add(getRelationshipData(CLOUD_REGION_CLOUD_OWNER, key.getCloudOwner()));
+        relationshipDataList.add(getRelationshipData(CLOUD_REGION_CLOUD_REGION_ID, key.getCloudRegionId()));
+        relationshipDataList.add(getRelationshipData(TENANT_TENANT_ID, tenantId));
+        relationshipDataList.add(getRelationshipData(VSERVER_VSERVER_ID, vServer.getVserverId()));
+
+        final List<RelatedToProperty> relatedToPropertyList = resultantRelationship.getRelatedToProperty();
+
+        final RelatedToProperty relatedToProperty = new RelatedToProperty();
+        relatedToProperty.setPropertyKey(VSERVER_VSERVER_NAME);
+        relatedToProperty.setPropertyValue(vServer.getVserverName());
+        relatedToPropertyList.add(relatedToProperty);
+        return resultantRelationship;
+    }
+
+    /**
+     * Establishes a two-way relationship for a vserver: PUTs the vserver-side
+     * relationship to the related party and stores the returned relationship on
+     * the cached vserver, skipping the cache insert when an equal related-to /
+     * related-link entry already exists. Exceptions are logged and reported as
+     * failure (best-effort semantics).
+     */
+    @Override
+    public boolean addVServerRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl,
+            final String requestUriString, final CloudRegionKey key, final String tenantId, final String vServerId,
+            final Relationship relationship) {
+        try {
+            final Optional<Vserver> optional = getVserver(key, tenantId, vServerId);
+            if (optional.isPresent()) {
+                final Vserver vServer = optional.get();
+                final String targetUrl = getTargetUrl(targetBaseUrl, relationship.getRelatedLink());
+                final Relationship outGoingRelationShip = getVserverRelationship(key, tenantId, vServer,
+                        getRelationShipListRelatedLink(requestUriString));
+                final Optional<Relationship> optionalRelationship = httpRestServiceProvider.put(incomingHeader,
+                        outGoingRelationShip, targetUrl, Relationship.class);
+                if (optionalRelationship.isPresent()) {
+                    final Relationship resultantRelationship = optionalRelationship.get();
+
+                    RelationshipList relationshipList = vServer.getRelationshipList();
+                    if (relationshipList == null) {
+                        relationshipList = new RelationshipList();
+                        vServer.setRelationshipList(relationshipList);
+                    }
+
+                    final Optional<Relationship> relationShipExists = relationshipList.getRelationship().stream()
+                            .filter(relation -> relation.getRelatedTo().equals(resultantRelationship.getRelatedTo())
+                                    && relation.getRelatedLink().equals(resultantRelationship.getRelatedLink()))
+                            .findAny();
+
+                    if (relationShipExists.isPresent()) {
+                        LOGGER.info("relationship {} already exists in cache ", resultantRelationship);
+                        return true;
+                    }
+
+                    LOGGER.info("added relationship {} in cache successfully", resultantRelationship);
+                    return relationshipList.getRelationship().add(resultantRelationship);
+                }
+
+            }
+        } catch (final Exception exception) {
+            LOGGER.error("Unable to add two-way relationship for key: {}, tenantId: {} and vServerId: {}", key,
+                    tenantId, vServerId, exception);
+        }
+        LOGGER.error("Unable to add Vserver relationship for key: {}, tenantId: {} and vServerId: {}...", key, tenantId,
+                vServerId);
+        return false;
+    }
+
+    /** Lazily initializes and returns the region's mutable ESR system info list. */
+    private List<EsrSystemInfo> getEsrSystemInfoList(final CloudRegion cloudRegion) {
+        EsrSystemInfoList esrSystemInfoList = cloudRegion.getEsrSystemInfoList();
+        if (esrSystemInfoList == null) {
+            esrSystemInfoList = new EsrSystemInfoList();
+            cloudRegion.setEsrSystemInfoList(esrSystemInfoList);
+        }
+        return esrSystemInfoList.getEsrSystemInfo();
+    }
+
+    /**
+     * Builds the tenant-side relationship: BELONGS_TO label, related link from
+     * the request URI, owner/region/tenant-id relationship data, and the tenant
+     * name as a related-to property.
+     */
+    private Relationship getRelationship(final String requestUriString, final CloudRegionKey cloudRegionKey,
+            final Tenant tenant) {
+        final Relationship relationShip = new Relationship();
+        relationShip.setRelatedTo(TENANT);
+        relationShip.setRelationshipLabel(BELONGS_TO);
+        relationShip.setRelatedLink(getRelationShipListRelatedLink(requestUriString));
+
+
+        final List<RelationshipData> relationshipDataList = relationShip.getRelationshipData();
+        relationshipDataList.add(getRelationshipData(CLOUD_REGION_CLOUD_OWNER, cloudRegionKey.getCloudOwner()));
+        relationshipDataList.add(getRelationshipData(CLOUD_REGION_CLOUD_REGION_ID, cloudRegionKey.getCloudRegionId()));
+        relationshipDataList.add(getRelationshipData(TENANT_TENANT_ID, tenant.getTenantId()));
+
+
+        final RelatedToProperty relatedToProperty = new RelatedToProperty();
+        relatedToProperty.setPropertyKey(TENANT_TENANT_NAME);
+        relatedToProperty.setPropertyValue(tenant.getTenantName());
+        relationShip.getRelatedToProperty().add(relatedToProperty);
+        return relationShip;
+    }
+
+    /** Small factory for a key/value RelationshipData pair. */
+    private RelationshipData getRelationshipData(final String key, final String value) {
+        final RelationshipData relationshipData = new RelationshipData();
+        relationshipData.setRelationshipKey(key);
+        relationshipData.setRelationshipValue(value);
+        return relationshipData;
+    }
+
+    /** Wipes the entire cloud-region cache. */
+    @Override
+    public void clearAll() {
+        clearCache(CLOUD_REGION_CACHE.getName());
+
+    }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CustomerCacheServiceProvider.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CustomerCacheServiceProvider.java
new file mode 100755
index 000000000..ad225ff0b
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CustomerCacheServiceProvider.java
@@ -0,0 +1,65 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import java.util.Optional;
+import org.onap.aai.domain.yang.Customer;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aai.domain.yang.ServiceInstances;
+import org.onap.aai.domain.yang.ServiceSubscription;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public interface CustomerCacheServiceProvider extends Clearable {
+
+ Optional<Customer> getCustomer(final String globalCustomerId);
+
+ void putCustomer(final String globalCustomerId, final Customer customer);
+
+ Optional<ServiceSubscription> getServiceSubscription(final String globalCustomerId, final String serviceType);
+
+ boolean putServiceSubscription(final String globalCustomerId, final String serviceType,
+ final ServiceSubscription serviceSubscription);
+
+ Optional<ServiceInstances> getServiceInstances(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceName);
+
+ Optional<ServiceInstance> getServiceInstance(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId);
+
+ boolean putServiceInstance(final String globalCustomerId, final String serviceType, final String serviceInstanceId,
+ final ServiceInstance serviceInstance);
+
+ boolean patchServiceInstance(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final ServiceInstance serviceInstance);
+
+ Optional<Relationship> getRelationship(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final String vnfName);
+
+ Optional<Relationship> addRelationShip(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final Relationship relationship, final String requestUri);
+
+ boolean deleteSericeInstance(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final String resourceVersion);
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CustomerCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CustomerCacheServiceProviderImpl.java
new file mode 100755
index 000000000..e755c44a5
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CustomerCacheServiceProviderImpl.java
@@ -0,0 +1,365 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.CUSTOMER_CACHE;
+import static org.onap.aaisimulator.utils.Constants.CUSTOMER_GLOBAL_CUSTOMER_ID;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF_VNF_NAME;
+import static org.onap.aaisimulator.utils.Constants.SERVICE_INSTANCE_SERVICE_INSTANCE_ID;
+import static org.onap.aaisimulator.utils.Constants.SERVICE_INSTANCE_SERVICE_INSTANCE_NAME;
+import static org.onap.aaisimulator.utils.Constants.SERVICE_SUBSCRIPTION_SERVICE_TYPE;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getBiDirectionalRelationShipListRelatedLink;
+import java.util.List;
+import java.util.Optional;
+import java.util.stream.Collectors;
+import org.onap.aai.domain.yang.Customer;
+import org.onap.aai.domain.yang.RelatedToProperty;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aai.domain.yang.ServiceInstances;
+import org.onap.aai.domain.yang.ServiceSubscription;
+import org.onap.aai.domain.yang.ServiceSubscriptions;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Service
+public class CustomerCacheServiceProviderImpl extends AbstractCacheServiceProvider
+ implements CustomerCacheServiceProvider {
+ private static final Logger LOGGER = LoggerFactory.getLogger(CustomerCacheServiceProviderImpl.class);
+
+ @Autowired
+ public CustomerCacheServiceProviderImpl(final CacheManager cacheManager) {
+ super(cacheManager);
+ }
+
+ @Override
+ public Optional<Customer> getCustomer(final String globalCustomerId) {
+ LOGGER.info("getting customer from cache using key: {}", globalCustomerId);
+ final Cache cache = getCache(CUSTOMER_CACHE.getName());
+ final Customer value = cache.get(globalCustomerId, Customer.class);
+ if (value != null) {
+ return Optional.of(value);
+ }
+ return Optional.empty();
+ }
+
+ @Override
+ public void putCustomer(final String globalCustomerId, final Customer customer) {
+ LOGGER.info("Adding customer: {} with key: {} in cache ...", customer, globalCustomerId);
+ final Cache cache = getCache(CUSTOMER_CACHE.getName());
+
+ cache.put(globalCustomerId, customer);
+ }
+
+ @Override
+ public Optional<ServiceSubscription> getServiceSubscription(final String globalCustomerId,
+ final String serviceType) {
+ LOGGER.info("getting service subscription from cache for globalCustomerId: {} and serviceType: {}",
+ globalCustomerId, serviceType);
+
+ final Cache cache = getCache(CUSTOMER_CACHE.getName());
+
+ final Customer value = cache.get(globalCustomerId, Customer.class);
+
+ if (value != null) {
+ return Optional.ofNullable(value.getServiceSubscriptions().getServiceSubscription().stream()
+ .filter(s -> serviceType.equals(s.getServiceType())).findFirst().orElse(null));
+ }
+ return Optional.empty();
+
+ }
+
+ @Override
+ public Optional<ServiceInstances> getServiceInstances(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceName) {
+
+ final Cache cache = getCache(CUSTOMER_CACHE.getName());
+ final Customer value = cache.get(globalCustomerId, Customer.class);
+
+ if (value != null) {
+ final Optional<ServiceSubscription> serviceSubscription = value.getServiceSubscriptions()
+ .getServiceSubscription().stream().filter(s -> serviceType.equals(s.getServiceType())).findFirst();
+
+ if (serviceSubscription.isPresent()) {
+ LOGGER.info("Found service subscription ...");
+ final ServiceInstances serviceInstances = serviceSubscription.get().getServiceInstances();
+ if (serviceInstances != null) {
+ final List<ServiceInstance> serviceInstancesList =
+ serviceInstances.getServiceInstance().stream()
+ .filter(serviceInstance -> serviceInstanceName
+ .equals(serviceInstance.getServiceInstanceName()))
+ .collect(Collectors.toList());
+ if (serviceInstancesList != null && !serviceInstancesList.isEmpty()) {
+ LOGGER.info("Found {} service instances ", serviceInstancesList.size());
+ final ServiceInstances result = new ServiceInstances();
+ result.getServiceInstance().addAll(serviceInstancesList);
+ return Optional.of(result);
+
+ }
+ }
+ }
+ }
+ return Optional.empty();
+ }
+
+ @Override
+ public Optional<ServiceInstance> getServiceInstance(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId) {
+ final Cache cache = getCache(CUSTOMER_CACHE.getName());
+ final Customer value = cache.get(globalCustomerId, Customer.class);
+
+ if (value != null) {
+ final Optional<ServiceSubscription> serviceSubscription = value.getServiceSubscriptions()
+ .getServiceSubscription().stream().filter(s -> serviceType.equals(s.getServiceType())).findFirst();
+
+ if (serviceSubscription.isPresent()) {
+ LOGGER.info("Found service subscription ...");
+ final ServiceInstances serviceInstances = serviceSubscription.get().getServiceInstances();
+ if (serviceInstances != null) {
+ return Optional.ofNullable(serviceInstances.getServiceInstance().stream()
+ .filter(serviceInstance -> serviceInstanceId.equals(serviceInstance.getServiceInstanceId()))
+ .findFirst().orElse(null));
+ }
+
+ }
+ }
+ LOGGER.error(
+ "Unable to find ServiceInstance using globalCustomerId: {}, serviceType: {} and serviceInstanceId: {} ...",
+ globalCustomerId, serviceType, serviceInstanceId);
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean putServiceInstance(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final ServiceInstance serviceInstance) {
+        LOGGER.info("Adding serviceInstance: {} with key: {} in cache ...", serviceInstance, globalCustomerId);
+
+ final Cache cache = getCache(CUSTOMER_CACHE.getName());
+ final Customer value = cache.get(globalCustomerId, Customer.class);
+
+ if (value != null) {
+ final Optional<ServiceSubscription> serviceSubscription = value.getServiceSubscriptions()
+ .getServiceSubscription().stream().filter(s -> serviceType.equals(s.getServiceType())).findFirst();
+
+ if (serviceSubscription.isPresent()) {
+ final ServiceInstances serviceInstances = getServiceInstances(serviceSubscription);
+
+
+ if (!serviceInstances.getServiceInstance().stream()
+ .filter(existing -> serviceInstanceId.equals(existing.getServiceInstanceId())).findFirst()
+ .isPresent()) {
+ return serviceInstances.getServiceInstance().add(serviceInstance);
+ }
+ LOGGER.error("Service {} already exists ....", serviceInstanceId);
+ return false;
+ }
+ LOGGER.error("Couldn't find service subscription with serviceType: {} in cache ", serviceType);
+ return false;
+ }
+ LOGGER.error("Couldn't find Customer with key: {} in cache ", globalCustomerId);
+ return false;
+ }
+
+ @Override
+ public boolean putServiceSubscription(final String globalCustomerId, final String serviceType,
+ final ServiceSubscription serviceSubscription) {
+
+ final Optional<Customer> customerOptional = getCustomer(globalCustomerId);
+
+ if (customerOptional.isPresent()) {
+ final Customer customer = customerOptional.get();
+ if (customer.getServiceSubscriptions() == null) {
+ final ServiceSubscriptions serviceSubscriptions = new ServiceSubscriptions();
+ customer.setServiceSubscriptions(serviceSubscriptions);
+ return serviceSubscriptions.getServiceSubscription().add(serviceSubscription);
+ }
+
+ final Optional<ServiceSubscription> serviceSubscriptionOptional = customer.getServiceSubscriptions()
+ .getServiceSubscription().stream().filter(s -> serviceType.equals(s.getServiceType())).findFirst();
+
+ if (!serviceSubscriptionOptional.isPresent()) {
+ return customer.getServiceSubscriptions().getServiceSubscription().add(serviceSubscription);
+ }
+ LOGGER.error("ServiceSubscription already exists {}", serviceSubscriptionOptional.get().getServiceType());
+ return false;
+ }
+        LOGGER.error("Unable to add ServiceSubscription to cache because customer does not exist ...");
+ return false;
+ }
+
+ @Override
+ public boolean patchServiceInstance(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final ServiceInstance serviceInstance) {
+ final Optional<ServiceInstance> instance = getServiceInstance(globalCustomerId, serviceType, serviceInstanceId);
+ if (instance.isPresent()) {
+ final ServiceInstance cachedServiceInstance = instance.get();
+ LOGGER.info("Changing OrchestrationStatus from {} to {} ", cachedServiceInstance.getOrchestrationStatus(),
+ serviceInstance.getOrchestrationStatus());
+ cachedServiceInstance.setOrchestrationStatus(serviceInstance.getOrchestrationStatus());
+ return true;
+ }
+ LOGGER.error("Unable to find ServiceInstance ...");
+ return false;
+ }
+
+ @Override
+ public boolean deleteSericeInstance(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final String resourceVersion) {
+ final Cache cache = getCache(CUSTOMER_CACHE.getName());
+ final Customer value = cache.get(globalCustomerId, Customer.class);
+
+ if (value != null) {
+ final Optional<ServiceSubscription> serviceSubscription = value.getServiceSubscriptions()
+ .getServiceSubscription().stream().filter(s -> serviceType.equals(s.getServiceType())).findFirst();
+
+ if (serviceSubscription.isPresent()) {
+ LOGGER.info("Found service subscription ...");
+ final ServiceInstances serviceInstances = serviceSubscription.get().getServiceInstances();
+ if (serviceInstances != null) {
+
+ serviceInstances.getServiceInstance().removeIf(serviceInstance -> {
+ final String existingServiceInstanceId = serviceInstance.getServiceInstanceId();
+ final String existingResourceVersion = serviceInstance.getResourceVersion();
+ if (existingServiceInstanceId != null && existingServiceInstanceId.equals(serviceInstanceId)
+ && existingResourceVersion != null && existingResourceVersion.equals(resourceVersion)) {
+ LOGGER.info("Removing ServiceInstance with serviceInstanceId: {} and resourceVersion: {}",
+ existingServiceInstanceId, existingResourceVersion);
+ return true;
+ }
+ return false;
+ });
+
+
+ return true;
+ }
+
+ }
+ }
+ return false;
+ }
+
+ private ServiceInstances getServiceInstances(final Optional<ServiceSubscription> optional) {
+ final ServiceSubscription serviceSubscription = optional.get();
+ final ServiceInstances serviceInstances = serviceSubscription.getServiceInstances();
+ if (serviceInstances == null) {
+ final ServiceInstances instances = new ServiceInstances();
+ serviceSubscription.setServiceInstances(instances);
+ return instances;
+ }
+ return serviceInstances;
+ }
+
+ @Override
+ public Optional<Relationship> getRelationship(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final String vnfName) {
+ final Optional<ServiceInstance> optional = getServiceInstance(globalCustomerId, serviceType, serviceInstanceId);
+
+ if (optional.isPresent()) {
+ LOGGER.info("Found service instance ...");
+ final ServiceInstance serviceInstance = optional.get();
+ final RelationshipList relationshipList = serviceInstance.getRelationshipList();
+
+ if (relationshipList != null) {
+ final List<Relationship> relationship = relationshipList.getRelationship();
+ return relationship.stream().filter(
+ relationShip -> relationShip.getRelatedToProperty().stream().filter(relatedToProperty -> {
+ final String propertyKey = relatedToProperty.getPropertyKey();
+ final String propertyValue = relatedToProperty.getPropertyValue();
+ return GENERIC_VNF_VNF_NAME.equals(propertyKey) && propertyValue != null
+ && propertyValue.equals(vnfName);
+ }).findFirst().isPresent()).findFirst();
+ }
+            LOGGER.warn("Relationship list is null ...");
+ }
+        LOGGER.error("Unable to find RelationShip with property value: {} ...", vnfName);
+
+ return Optional.empty();
+ }
+
+ @Override
+ public Optional<Relationship> addRelationShip(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final Relationship relationship, final String requestUri) {
+ final Optional<ServiceInstance> optional = getServiceInstance(globalCustomerId, serviceType, serviceInstanceId);
+ if (optional.isPresent()) {
+ final ServiceInstance serviceInstance = optional.get();
+ RelationshipList relationshipList = serviceInstance.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ serviceInstance.setRelationshipList(relationshipList);
+ }
+ relationshipList.getRelationship().add(relationship);
+
+ LOGGER.info("Successfully added relation to ServiceInstance");
+
+ final Relationship resultantRelationship = new Relationship();
+ resultantRelationship.setRelatedTo(GENERIC_VNF);
+ resultantRelationship.setRelationshipLabel(relationship.getRelationshipLabel());
+ resultantRelationship.setRelatedLink(getBiDirectionalRelationShipListRelatedLink(requestUri));
+
+ final List<RelationshipData> relationshipDataList = resultantRelationship.getRelationshipData();
+ relationshipDataList.add(getRelationshipData(CUSTOMER_GLOBAL_CUSTOMER_ID, globalCustomerId));
+ relationshipDataList.add(getRelationshipData(SERVICE_SUBSCRIPTION_SERVICE_TYPE, serviceType));
+ relationshipDataList.add(getRelationshipData(SERVICE_INSTANCE_SERVICE_INSTANCE_ID, serviceInstanceId));
+
+ final List<RelatedToProperty> relatedToProperty = resultantRelationship.getRelatedToProperty();
+ relatedToProperty.add(getRelatedToProperty(SERVICE_INSTANCE_SERVICE_INSTANCE_NAME,
+ serviceInstance.getServiceInstanceName()));
+
+ return Optional.of(resultantRelationship);
+
+ }
+ LOGGER.error("Unable to find ServiceInstance ...");
+ return Optional.empty();
+ }
+
+ @Override
+ public void clearAll() {
+ clearCache(CUSTOMER_CACHE.getName());
+ }
+
+ private RelatedToProperty getRelatedToProperty(final String key, final String value) {
+ final RelatedToProperty relatedToProperty = new RelatedToProperty();
+ relatedToProperty.setPropertyKey(key);
+ relatedToProperty.setPropertyValue(value);
+ return relatedToProperty;
+ }
+
+ private RelationshipData getRelationshipData(final String key, final String value) {
+ final RelationshipData relationshipData = new RelationshipData();
+ relationshipData.setRelationshipKey(key);
+ relationshipData.setRelationshipValue(value);
+ return relationshipData;
+ }
+
+
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ExternalSystemCacheServiceProvider.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ExternalSystemCacheServiceProvider.java
new file mode 100755
index 000000000..108a8f287
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ExternalSystemCacheServiceProvider.java
@@ -0,0 +1,48 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import java.util.List;
+import java.util.Optional;
+import org.onap.aai.domain.yang.EsrSystemInfo;
+import org.onap.aai.domain.yang.EsrSystemInfoList;
+import org.onap.aai.domain.yang.EsrVnfm;
+import org.onap.aai.domain.yang.Relationship;
+import org.springframework.http.HttpHeaders;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public interface ExternalSystemCacheServiceProvider extends Clearable {
+
+ void putEsrVnfm(final String vnfmId, final EsrVnfm esrVnfm);
+
+ Optional<EsrVnfm> getEsrVnfm(final String vnfmId);
+
+ List<EsrVnfm> getAllEsrVnfm();
+
+ Optional<EsrSystemInfoList> getEsrSystemInfoList(final String vnfmId);
+
+ boolean putEsrSystemInfo(final String vnfmId, final String esrSystemInfoId, final EsrSystemInfo esrSystemInfo);
+
+ boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl, final String requestURI,
+ final String vnfmId, Relationship relationship);
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ExternalSystemCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ExternalSystemCacheServiceProviderImpl.java
new file mode 100755
index 000000000..a274cc133
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ExternalSystemCacheServiceProviderImpl.java
@@ -0,0 +1,209 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.ESR_VNFM_CACHE;
+import static org.onap.aaisimulator.utils.Constants.DEPENDS_ON;
+import static org.onap.aaisimulator.utils.Constants.ESR_VNFM;
+import static org.onap.aaisimulator.utils.Constants.ESR_VNFM_VNFM_ID;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getRelationShipListRelatedLink;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getTargetUrl;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+import org.onap.aai.domain.yang.EsrSystemInfo;
+import org.onap.aai.domain.yang.EsrSystemInfoList;
+import org.onap.aai.domain.yang.EsrVnfm;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.http.HttpHeaders;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Service
+public class ExternalSystemCacheServiceProviderImpl extends AbstractCacheServiceProvider
+ implements ExternalSystemCacheServiceProvider {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(ExternalSystemCacheServiceProviderImpl.class);
+
+ private final HttpRestServiceProvider httpRestServiceProvider;
+
+ @Autowired
+ public ExternalSystemCacheServiceProviderImpl(final CacheManager cacheManager,
+ final HttpRestServiceProvider httpRestServiceProvider) {
+ super(cacheManager);
+ this.httpRestServiceProvider = httpRestServiceProvider;
+
+ }
+
+ @Override
+ public void putEsrVnfm(final String vnfmId, final EsrVnfm esrVnfm) {
+        LOGGER.info("Adding esrVnfm: {} to cache", esrVnfm);
+ final Cache cache = getCache(ESR_VNFM_CACHE.getName());
+ cache.put(vnfmId, esrVnfm);
+ }
+
+ @Override
+ public Optional<EsrVnfm> getEsrVnfm(final String vnfmId) {
+ LOGGER.info("getting EsrVnfm from cache using key: {}", vnfmId);
+ final Cache cache = getCache(ESR_VNFM_CACHE.getName());
+ final EsrVnfm value = cache.get(vnfmId, EsrVnfm.class);
+ if (value != null) {
+ return Optional.of(value);
+ }
+ LOGGER.error("Unable to find EsrVnfm in cache using vnfmId: {} ", vnfmId);
+ return Optional.empty();
+ }
+
+ @Override
+ public List<EsrVnfm> getAllEsrVnfm() {
+ final Cache cache = getCache(ESR_VNFM_CACHE.getName());
+ if (cache != null) {
+ final Object nativeCache = cache.getNativeCache();
+ if (nativeCache instanceof ConcurrentHashMap) {
+ @SuppressWarnings("unchecked")
+ final ConcurrentHashMap<Object, Object> concurrentHashMap =
+ (ConcurrentHashMap<Object, Object>) nativeCache;
+ final List<EsrVnfm> result = new ArrayList<>();
+ concurrentHashMap.keySet().stream().forEach(key -> {
+ final Optional<EsrVnfm> optional = getEsrVnfm(key.toString());
+ if (optional.isPresent()) {
+ result.add(optional.get());
+ }
+ });
+ return result;
+ }
+ }
+ LOGGER.error("Unable to get all esr vnfms ... ");
+ return Collections.emptyList();
+
+ }
+
+ @Override
+ public Optional<EsrSystemInfoList> getEsrSystemInfoList(final String vnfmId) {
+ final Optional<EsrVnfm> optional = getEsrVnfm(vnfmId);
+ if (optional.isPresent()) {
+ final EsrVnfm esrVnfm = optional.get();
+ if (esrVnfm.getEsrSystemInfoList() != null) {
+ return Optional.of(esrVnfm.getEsrSystemInfoList());
+ }
+ LOGGER.error("EsrSystemInfoList is null for vnfmId: {} ", vnfmId);
+ }
+ LOGGER.error("Unable to find EsrVnfm in cache using vnfmId: {} ", vnfmId);
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean putEsrSystemInfo(final String vnfmId, final String esrSystemInfoId,
+ final EsrSystemInfo esrSystemInfo) {
+ final Optional<EsrVnfm> optional = getEsrVnfm(vnfmId);
+ if (optional.isPresent()) {
+ final EsrVnfm esrVnfm = optional.get();
+ final List<EsrSystemInfo> esrSystemInfoList = getEsrSystemInfoList(esrVnfm);
+
+ final Optional<EsrSystemInfo> existingEsrSystemInfo =
+ esrSystemInfoList.stream().filter(existing -> existing.getEsrSystemInfoId() != null
+ && existing.getEsrSystemInfoId().equals(esrSystemInfoId)).findFirst();
+ if (existingEsrSystemInfo.isPresent()) {
+ LOGGER.error("EsrSystemInfo already exists {}", existingEsrSystemInfo.get());
+ return false;
+ }
+
+ return esrSystemInfoList.add(esrSystemInfo);
+ }
+ LOGGER.error("Unable to add EsrSystemInfo in cache for vnfmId: {} ", vnfmId);
+ return false;
+ }
+
+ @Override
+ public boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl,
+ final String requestUriString, final String vnfmId, final Relationship relationship) {
+ try {
+ final Optional<EsrVnfm> optional = getEsrVnfm(vnfmId);
+ if (optional.isPresent()) {
+ final EsrVnfm esrVnfm = optional.get();
+ final String targetUrl = getTargetUrl(targetBaseUrl, relationship.getRelatedLink());
+ final Relationship outGoingRelationShip =
+ getRelationship(getRelationShipListRelatedLink(requestUriString), esrVnfm);
+ final Optional<Relationship> optionalRelationship = httpRestServiceProvider.put(incomingHeader,
+ outGoingRelationShip, targetUrl, Relationship.class);
+ if (optionalRelationship.isPresent()) {
+ final Relationship resultantRelationship = optionalRelationship.get();
+
+ RelationshipList relationshipList = esrVnfm.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ esrVnfm.setRelationshipList(relationshipList);
+ }
+ if (relationshipList.getRelationship().add(resultantRelationship)) {
+ LOGGER.info("added relationship {} in cache successfully", resultantRelationship);
+ return true;
+ }
+ }
+ }
+ } catch (final Exception exception) {
+ LOGGER.error("Unable to add two-way relationship for vnfmId: {}", vnfmId, exception);
+ }
+ LOGGER.error("Unable to add relationship in cache for vnfmId: {}", vnfmId);
+ return false;
+ }
+
+ private Relationship getRelationship(final String relatedLink, final EsrVnfm esrVnfm) {
+ final Relationship relationShip = new Relationship();
+ relationShip.setRelatedTo(ESR_VNFM);
+ relationShip.setRelationshipLabel(DEPENDS_ON);
+ relationShip.setRelatedLink(relatedLink);
+
+ final RelationshipData relationshipData = new RelationshipData();
+ relationshipData.setRelationshipKey(ESR_VNFM_VNFM_ID);
+ relationshipData.setRelationshipValue(esrVnfm.getVnfmId());
+ relationShip.getRelationshipData().add(relationshipData);
+
+ return relationShip;
+ }
+
+ private List<EsrSystemInfo> getEsrSystemInfoList(final EsrVnfm esrVnfm) {
+ EsrSystemInfoList esrSystemInfoList = esrVnfm.getEsrSystemInfoList();
+ if (esrSystemInfoList == null) {
+ esrSystemInfoList = new EsrSystemInfoList();
+ esrVnfm.setEsrSystemInfoList(esrSystemInfoList);
+ }
+ return esrSystemInfoList.getEsrSystemInfo();
+ }
+
+ @Override
+ public void clearAll() {
+ clearCache(ESR_VNFM_CACHE.getName());
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/GenericVnfCacheServiceProvider.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/GenericVnfCacheServiceProvider.java
new file mode 100755
index 000000000..e2f05c699
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/GenericVnfCacheServiceProvider.java
@@ -0,0 +1,53 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import java.util.List;
+import java.util.Optional;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.Relationship;
+import org.springframework.http.HttpHeaders;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public interface GenericVnfCacheServiceProvider extends Clearable {
+
+ void putGenericVnf(final String vnfId, final GenericVnf genericVnf);
+
+ Optional<GenericVnf> getGenericVnf(final String vnfId);
+
+ Optional<Relationship> addRelationShip(final String vnfId, final Relationship relationship,
+ final String requestURI);
+
+ boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl, final String requestUriString,
+ final String vnfId, final Relationship relationship);
+
+ Optional<String> getGenericVnfId(final String vnfName);
+
+ boolean patchGenericVnf(final String vnfId, final GenericVnf genericVnf);
+
+ List<GenericVnf> getGenericVnfs(final String selflink);
+
+ boolean deleteGenericVnf(final String vnfId, final String resourceVersion);
+
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/GenericVnfCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/GenericVnfCacheServiceProviderImpl.java
new file mode 100755
index 000000000..4fb6a0644
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/GenericVnfCacheServiceProviderImpl.java
@@ -0,0 +1,258 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.GENERIC_VNF_CACHE;
+import static org.onap.aaisimulator.utils.Constants.COMPOSED_OF;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF_VNF_ID;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF_VNF_NAME;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getBiDirectionalRelationShipListRelatedLink;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getRelationShipListRelatedLink;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getTargetUrl;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.RelatedToProperty;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aaisimulator.utils.ShallowBeanCopy;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.http.HttpHeaders;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Service
+public class GenericVnfCacheServiceProviderImpl extends AbstractCacheServiceProvider
+        implements GenericVnfCacheServiceProvider {
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(GenericVnfCacheServiceProviderImpl.class);
+
+    private final HttpRestServiceProvider httpRestServiceProvider;
+
+    @Autowired
+    public GenericVnfCacheServiceProviderImpl(final CacheManager cacheManager,
+            final HttpRestServiceProvider httpRestServiceProvider) {
+        super(cacheManager);
+        this.httpRestServiceProvider = httpRestServiceProvider;
+    }
+
+    @Override
+    public void putGenericVnf(final String vnfId, final GenericVnf genericVnf) {
+        // Bug fix: message previously read "Adding customer" — copy/paste from the customer cache provider.
+        LOGGER.info("Adding GenericVnf: {} with key: {} in cache ...", genericVnf, vnfId);
+        final Cache cache = getCache(GENERIC_VNF_CACHE.getName());
+        cache.put(vnfId, genericVnf);
+    }
+
+    @Override
+    public Optional<GenericVnf> getGenericVnf(final String vnfId) {
+        LOGGER.info("getting GenericVnf from cache using key: {}", vnfId);
+        final Cache cache = getCache(GENERIC_VNF_CACHE.getName());
+        final GenericVnf value = cache.get(vnfId, GenericVnf.class);
+        if (value != null) {
+            return Optional.of(value);
+        }
+        LOGGER.error("Unable to find GenericVnf ...");
+        return Optional.empty();
+    }
+
+    /**
+     * Linear scan of the cache for a vnf whose vnf-name equals {@code vnfName}.
+     * Only scans when the native cache is a ConcurrentHashMap (the default in-memory cache).
+     */
+    @Override
+    public Optional<String> getGenericVnfId(final String vnfName) {
+        final Cache cache = getCache(GENERIC_VNF_CACHE.getName());
+        if (cache != null) {
+            final Object nativeCache = cache.getNativeCache();
+            if (nativeCache instanceof ConcurrentHashMap) {
+                @SuppressWarnings("unchecked")
+                final ConcurrentHashMap<Object, Object> concurrentHashMap =
+                        (ConcurrentHashMap<Object, Object>) nativeCache;
+                for (final Object key : concurrentHashMap.keySet()) {
+                    final Optional<GenericVnf> optional = getGenericVnf(key.toString());
+                    if (optional.isPresent()) {
+                        final GenericVnf value = optional.get();
+                        final String genericVnfName = value.getVnfName();
+                        if (genericVnfName != null && genericVnfName.equals(vnfName)) {
+                            final String genericVnfId = value.getVnfId();
+                            LOGGER.info("Found matching vnf for name: {}, vnf-id: {}", genericVnfName, genericVnfId);
+                            return Optional.of(genericVnfId);
+                        }
+                    }
+                }
+            }
+        }
+        LOGGER.error("No match found for vnf name: {}", vnfName);
+        return Optional.empty();
+    }
+
+    /**
+     * Adds a two-way relationship: PUTs the outgoing relationship to the related object
+     * ({@code targetBaseUrl} + related-link) and, on success, stores the returned relationship
+     * on the cached vnf.
+     *
+     * @return true when both the remote PUT and the local cache update succeeded
+     */
+    @Override
+    public boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl,
+            final String requestUriString, final String vnfId, final Relationship relationship) {
+        try {
+            final Optional<GenericVnf> optional = getGenericVnf(vnfId);
+            if (optional.isPresent()) {
+                final GenericVnf genericVnf = optional.get();
+                final String targetUrl = getTargetUrl(targetBaseUrl, relationship.getRelatedLink());
+                final Relationship outGoingRelationShip =
+                        getRelationship(getRelationShipListRelatedLink(requestUriString), genericVnf, COMPOSED_OF);
+                final Optional<Relationship> optionalRelationship = httpRestServiceProvider.put(incomingHeader,
+                        outGoingRelationShip, targetUrl, Relationship.class);
+                if (optionalRelationship.isPresent()) {
+                    final Relationship resultantRelationship = optionalRelationship.get();
+
+                    RelationshipList relationshipList = genericVnf.getRelationshipList();
+                    if (relationshipList == null) {
+                        relationshipList = new RelationshipList();
+                        genericVnf.setRelationshipList(relationshipList);
+                    }
+                    if (relationshipList.getRelationship().add(resultantRelationship)) {
+                        LOGGER.info("added relationship {} in cache successfully", resultantRelationship);
+                        return true;
+                    }
+                }
+            }
+        } catch (final Exception exception) {
+            LOGGER.error("Unable to add two-way relationship for vnfId: {}", vnfId, exception);
+        }
+        LOGGER.error("Unable to add relationship in cache for vnfId: {}", vnfId);
+        return false;
+    }
+
+    /**
+     * Adds {@code relationship} to the cached vnf and builds the reciprocal relationship to
+     * hand back to the caller, or returns empty when no vnf is cached under {@code vnfId}.
+     */
+    @Override
+    public Optional<Relationship> addRelationShip(final String vnfId, final Relationship relationship,
+            final String requestURI) {
+        final Optional<GenericVnf> optional = getGenericVnf(vnfId);
+        if (optional.isPresent()) {
+            final GenericVnf genericVnf = optional.get();
+            RelationshipList relationshipList = genericVnf.getRelationshipList();
+            if (relationshipList == null) {
+                relationshipList = new RelationshipList();
+                genericVnf.setRelationshipList(relationshipList);
+            }
+            relationshipList.getRelationship().add(relationship);
+            LOGGER.info("Successfully added relation to GenericVnf for vnfId: {}", vnfId);
+
+            final String relatedLink = getBiDirectionalRelationShipListRelatedLink(requestURI);
+            final Relationship resultantRelationship =
+                    getRelationship(relatedLink, genericVnf, relationship.getRelationshipLabel());
+            return Optional.of(resultantRelationship);
+        }
+        return Optional.empty();
+    }
+
+    @Override
+    public boolean patchGenericVnf(final String vnfId, final GenericVnf genericVnf) {
+        final Optional<GenericVnf> optional = getGenericVnf(vnfId);
+        if (optional.isPresent()) {
+            final GenericVnf cachedGenericVnf = optional.get();
+            try {
+                ShallowBeanCopy.copy(genericVnf, cachedGenericVnf);
+                return true;
+            } catch (final Exception exception) {
+                LOGGER.error("Unable to update GenericVnf for vnfId: {}", vnfId, exception);
+            }
+        }
+        LOGGER.error("Unable to find GenericVnf ...");
+        return false;
+    }
+
+    @Override
+    public List<GenericVnf> getGenericVnfs(final String selflink) {
+        final Cache cache = getCache(GENERIC_VNF_CACHE.getName());
+        if (cache != null) {
+            final Object nativeCache = cache.getNativeCache();
+            if (nativeCache instanceof ConcurrentHashMap) {
+                @SuppressWarnings("unchecked")
+                final ConcurrentHashMap<Object, Object> concurrentHashMap =
+                        (ConcurrentHashMap<Object, Object>) nativeCache;
+                final List<GenericVnf> result = new ArrayList<>();
+
+                concurrentHashMap.keySet().stream().forEach(key -> {
+                    final Optional<GenericVnf> optional = getGenericVnf(key.toString());
+                    if (optional.isPresent()) {
+                        final GenericVnf genericVnf = optional.get();
+                        final String genericVnfSelfLink = genericVnf.getSelflink();
+                        // Bug fix: vnf-id was previously read from getSelflink(), so the log line
+                        // printed the self-link twice instead of the vnf-id.
+                        final String genericVnfId = genericVnf.getVnfId();
+
+                        if (genericVnfSelfLink != null && genericVnfSelfLink.equals(selflink)) {
+                            LOGGER.info("Found matching vnf for selflink: {}, vnf-id: {}", genericVnfSelfLink,
+                                    genericVnfId);
+                            result.add(genericVnf);
+                        }
+                    }
+                });
+                return result;
+            }
+        }
+        LOGGER.error("No match found for selflink: {}", selflink);
+        return Collections.emptyList();
+    }
+
+    @Override
+    public boolean deleteGenericVnf(final String vnfId, final String resourceVersion) {
+        final Optional<GenericVnf> optional = getGenericVnf(vnfId);
+        if (optional.isPresent()) {
+            final GenericVnf genericVnf = optional.get();
+            if (genericVnf.getResourceVersion() != null && genericVnf.getResourceVersion().equals(resourceVersion)) {
+                final Cache cache = getCache(GENERIC_VNF_CACHE.getName());
+                LOGGER.info("Will evict GenericVnf from cache with vnfId: {}", genericVnf.getVnfId());
+                cache.evict(vnfId);
+                return true;
+            }
+        }
+        LOGGER.error("Unable to find GenericVnf for vnfId: {} and resourceVersion: {} ...", vnfId, resourceVersion);
+        return false;
+    }
+
+    /**
+     * Builds a generic-vnf relationship carrying the vnf-id as relationship-data and the
+     * vnf-name as a related-to-property.
+     */
+    private Relationship getRelationship(final String relatedLink, final GenericVnf genericVnf,
+            final String relationshipLabel) {
+        final Relationship relationShip = new Relationship();
+        relationShip.setRelatedTo(GENERIC_VNF);
+        relationShip.setRelationshipLabel(relationshipLabel);
+        relationShip.setRelatedLink(relatedLink);
+
+        final RelationshipData relationshipData = new RelationshipData();
+        relationshipData.setRelationshipKey(GENERIC_VNF_VNF_ID);
+        relationshipData.setRelationshipValue(genericVnf.getVnfId());
+        relationShip.getRelationshipData().add(relationshipData);
+
+        final RelatedToProperty relatedToProperty = new RelatedToProperty();
+        relatedToProperty.setPropertyKey(GENERIC_VNF_VNF_NAME);
+        relatedToProperty.setPropertyValue(genericVnf.getVnfName());
+        relationShip.getRelatedToProperty().add(relatedToProperty);
+        return relationShip;
+    }
+
+    @Override
+    public void clearAll() {
+        clearCache(GENERIC_VNF_CACHE.getName());
+    }
+
+}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/viewmodel/KeyValuePair.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/HttpRestServiceProvider.java
index 5e44452d3..2075fa0c2 100644..100755
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/viewmodel/KeyValuePair.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/HttpRestServiceProvider.java
@@ -1,8 +1,6 @@
/*-
* ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,26 +13,24 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+package org.onap.aaisimulator.service.providers;
-package org.onap.pnfsimulator.template.search.viewmodel;
+import java.util.Optional;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.ResponseEntity;
-import lombok.AllArgsConstructor;
-import lombok.Getter;
-import lombok.NoArgsConstructor;
-import lombok.ToString;
-
-@Getter
-@ToString
-@NoArgsConstructor
-@AllArgsConstructor
/**
- * POJO for mongo structure after $objectToArray mapping where object consists of fields: k and v
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
*/
-public class KeyValuePair {
+public interface HttpRestServiceProvider {
- private String k;
- private String v;
+ <T> ResponseEntity<T> invokeHttpPut(final HttpEntity<Object> httpEntity, final String url, final Class<T> clazz);
+ <T> Optional<T> put(final HttpHeaders headers, final Object object, final String url, final Class<T> clazz);
}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/HttpRestServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/HttpRestServiceProviderImpl.java
new file mode 100755
index 000000000..1989c643d
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/HttpRestServiceProviderImpl.java
@@ -0,0 +1,98 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import java.util.Optional;
+import org.onap.aaisimulator.exception.InvalidRestRequestException;
+import org.onap.aaisimulator.exception.RestProcessingException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Service;
+import org.springframework.web.client.HttpClientErrorException;
+import org.springframework.web.client.RestClientException;
+import org.springframework.web.client.RestTemplate;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Service
+public class HttpRestServiceProviderImpl implements HttpRestServiceProvider {
+    private static final Logger LOGGER = LoggerFactory.getLogger(HttpRestServiceProviderImpl.class);
+
+    private final RestTemplate restTemplate;
+
+    @Autowired
+    public HttpRestServiceProviderImpl(final RestTemplate restTemplate) {
+        this.restTemplate = restTemplate;
+    }
+
+    /**
+     * Executes an HTTP PUT for the given entity and deserializes the response body into {@code clazz}.
+     *
+     * @throws InvalidRestRequestException on 400/404 responses
+     * @throws RestProcessingException on any other client or transport failure
+     */
+    @Override
+    public <T> ResponseEntity<T> invokeHttpPut(final HttpEntity<Object> httpEntity, final String url,
+            final Class<T> clazz) {
+
+        final HttpMethod httpMethod = HttpMethod.PUT;
+        LOGGER.trace("Will invoke HTTP {} using URL: {}", httpMethod, url);
+        try {
+            return restTemplate.exchange(url, httpMethod, httpEntity, clazz);
+
+        } catch (final HttpClientErrorException httpClientErrorException) {
+            final String message = "Unable to invoke HTTP " + httpMethod + " using url: " + url + ", Response: "
+                    + httpClientErrorException.getRawStatusCode();
+            LOGGER.error(message, httpClientErrorException);
+            final int rawStatusCode = httpClientErrorException.getRawStatusCode();
+            if (rawStatusCode == HttpStatus.BAD_REQUEST.value() || rawStatusCode == HttpStatus.NOT_FOUND.value()) {
+                throw new InvalidRestRequestException("No result found for given url: " + url);
+            }
+            throw new RestProcessingException("Unable to invoke HTTP " + httpMethod + " using URL: " + url);
+
+        } catch (final RestClientException restClientException) {
+            // Bug fix: the log message previously hard-coded "HTTP POST" although this method
+            // always performs a PUT.
+            LOGGER.error("Unable to invoke HTTP {} using url: {}", httpMethod, url, restClientException);
+            throw new RestProcessingException("Unable to invoke HTTP " + httpMethod + " using URL: " + url,
+                    restClientException);
+        }
+    }
+
+    /**
+     * PUTs {@code object} to {@code url} and returns the response body, or empty when the
+     * response status is not 200/201/202 or the response carries no body.
+     */
+    @Override
+    public <T> Optional<T> put(final HttpHeaders headers, final Object object, final String url, final Class<T> clazz) {
+        final HttpEntity<Object> httpEntity = new HttpEntity<>(object, headers);
+        final ResponseEntity<T> response = invokeHttpPut(httpEntity, url, clazz);
+
+        if (!response.getStatusCode().equals(HttpStatus.OK) && !response.getStatusCode().equals(HttpStatus.CREATED)
+                && !response.getStatusCode().equals(HttpStatus.ACCEPTED)) {
+            final String message = "Unable to invoke HTTP " + HttpMethod.PUT + " using URL: " + url
+                    + ", Response Code: " + response.getStatusCode();
+            LOGGER.error(message);
+            return Optional.empty();
+        }
+
+        if (response.hasBody()) {
+            return Optional.of(response.getBody());
+        }
+        LOGGER.error("Received response without body status code: {}", response.getStatusCode());
+        return Optional.empty();
+    }
+}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/SearchExp.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/LinesOfBusinessCacheServiceProvider.java
index 41d112fd9..391238556 100644..100755
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/SearchExp.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/LinesOfBusinessCacheServiceProvider.java
@@ -1,8 +1,6 @@
/*-
* ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,28 +13,28 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+package org.onap.aaisimulator.service.providers;
+
+import java.util.Optional;
+import org.onap.aai.domain.yang.LineOfBusiness;
+import org.onap.aai.domain.yang.Relationship;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public interface LinesOfBusinessCacheServiceProvider extends Clearable {
-package org.onap.pnfsimulator.rest.model;
+ void putLineOfBusiness(final String lineOfBusinessName, final LineOfBusiness lineOfBusiness);
-import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
-import com.google.gson.JsonObject;
-import lombok.AllArgsConstructor;
-import lombok.Getter;
-import lombok.NoArgsConstructor;
-import lombok.ToString;
-import org.onap.pnfsimulator.rest.util.JsonObjectDeserializer;
+ Optional<LineOfBusiness> getLineOfBusiness(final String lineOfBusinessName);
-import javax.validation.constraints.NotNull;
+ Optional<Relationship> addRelationShip(final String lineOfBusinessName, final Relationship relationship,
+ final String requestURI);
-@Getter
-@NoArgsConstructor
-@AllArgsConstructor
-@ToString
-public class SearchExp {
- @NotNull
- @JsonDeserialize(using = JsonObjectDeserializer.class)
- private JsonObject searchExpr;
}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/LinesOfBusinessCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/LinesOfBusinessCacheServiceProviderImpl.java
new file mode 100755
index 000000000..9c0d4a22f
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/LinesOfBusinessCacheServiceProviderImpl.java
@@ -0,0 +1,111 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.LINES_OF_BUSINESS_CACHE;
+import static org.onap.aaisimulator.utils.Constants.LINE_OF_BUSINESS;
+import static org.onap.aaisimulator.utils.Constants.LINE_OF_BUSINESS_LINE_OF_BUSINESS_NAME;
+import static org.onap.aaisimulator.utils.Constants.USES;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getBiDirectionalRelationShipListRelatedLink;
+import java.util.Optional;
+import org.onap.aai.domain.yang.LineOfBusiness;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Service
+public class LinesOfBusinessCacheServiceProviderImpl extends AbstractCacheServiceProvider
+ implements LinesOfBusinessCacheServiceProvider {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(LinesOfBusinessCacheServiceProviderImpl.class);
+
+ @Autowired
+ public LinesOfBusinessCacheServiceProviderImpl(final CacheManager cacheManager) {
+ super(cacheManager);
+ }
+
+ @Override
+ public void putLineOfBusiness(final String lineOfBusinessName, final LineOfBusiness lineOfBusiness) {
+ LOGGER.info("Adding LineOfBusiness to cache with key: {} ...", lineOfBusinessName);
+ final Cache cache = getCache(LINES_OF_BUSINESS_CACHE.getName());
+ cache.put(lineOfBusinessName, lineOfBusiness);
+
+ }
+
+ @Override
+ public Optional<LineOfBusiness> getLineOfBusiness(final String lineOfBusinessName) {
+ LOGGER.info("getting LineOfBusiness from cache using key: {}", lineOfBusinessName);
+ final Cache cache = getCache(LINES_OF_BUSINESS_CACHE.getName());
+ final LineOfBusiness value = cache.get(lineOfBusinessName, LineOfBusiness.class);
+ if (value != null) {
+ return Optional.of(value);
+ }
+ LOGGER.error("Unable to find LineOfBusiness in cache using key:{} ", lineOfBusinessName);
+ return Optional.empty();
+ }
+
+ @Override
+ public Optional<Relationship> addRelationShip(final String lineOfBusinessName, final Relationship relationship,
+ final String requestUri) {
+ final Optional<LineOfBusiness> optional = getLineOfBusiness(lineOfBusinessName);
+ if (optional.isPresent()) {
+ final LineOfBusiness lineOfBusiness = optional.get();
+ RelationshipList relationshipList = lineOfBusiness.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ lineOfBusiness.setRelationshipList(relationshipList);
+ }
+ relationshipList.getRelationship().add(relationship);
+
+ LOGGER.info("Successfully added relation to LineOfBusiness with name: {}", lineOfBusinessName);
+ final Relationship resultantRelationship = new Relationship();
+ resultantRelationship.setRelatedTo(LINE_OF_BUSINESS);
+ resultantRelationship.setRelationshipLabel(USES);
+ resultantRelationship.setRelatedLink(getBiDirectionalRelationShipListRelatedLink(requestUri));
+
+ final RelationshipData relationshipData = new RelationshipData();
+ relationshipData.setRelationshipKey(LINE_OF_BUSINESS_LINE_OF_BUSINESS_NAME);
+ relationshipData.setRelationshipValue(lineOfBusiness.getLineOfBusinessName());
+ resultantRelationship.getRelationshipData().add(relationshipData);
+
+ return Optional.of(resultantRelationship);
+
+ }
+ LOGGER.error("Unable to find LineOfBusiness using name: {} ...", lineOfBusinessName);
+ return Optional.empty();
+ }
+
+ @Override
+ public void clearAll() {
+ clearCache(LINES_OF_BUSINESS_CACHE.getName());
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/NodesCacheServiceProvider.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/NodesCacheServiceProvider.java
new file mode 100755
index 000000000..113a44f48
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/NodesCacheServiceProvider.java
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import java.util.Optional;
+import org.onap.aai.domain.yang.GenericVnfs;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aaisimulator.models.NodeServiceInstance;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public interface NodesCacheServiceProvider extends Clearable {
+
+    /**
+     * Stores {@code nodeServiceInstance} in the nodes cache under the key {@code serviceInstanceId}.
+     */
+    void putNodeServiceInstance(final String serviceInstanceId, final NodeServiceInstance nodeServiceInstance);
+
+    /**
+     * @return the cached {@link NodeServiceInstance} for {@code serviceInstanceId}, or empty if none is cached.
+     */
+    Optional<NodeServiceInstance> getNodeServiceInstance(final String serviceInstanceId);
+
+    /**
+     * @return a {@link GenericVnfs} wrapper containing the cached vnf whose vnf-name equals
+     *         {@code vnfName}, or empty when no matching vnf exists.
+     */
+    Optional<GenericVnfs> getGenericVnfs(final String vnfName);
+
+    /**
+     * Resolves the service instance referenced by {@code nodeServiceInstance} (customer id,
+     * service type, service instance id) from the customer cache.
+     */
+    Optional<ServiceInstance> getServiceInstance(final NodeServiceInstance nodeServiceInstance);
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/NodesCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/NodesCacheServiceProviderImpl.java
new file mode 100755
index 000000000..120236b15
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/NodesCacheServiceProviderImpl.java
@@ -0,0 +1,104 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.NODES_CACHE;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.GenericVnfs;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aaisimulator.models.NodeServiceInstance;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Service
+public class NodesCacheServiceProviderImpl extends AbstractCacheServiceProvider implements NodesCacheServiceProvider {
+    private static final Logger LOGGER = LoggerFactory.getLogger(NodesCacheServiceProviderImpl.class);
+
+    private final GenericVnfCacheServiceProvider cacheServiceProvider;
+    private final CustomerCacheServiceProvider customerCacheServiceProvider;
+
+    @Autowired
+    public NodesCacheServiceProviderImpl(final CacheManager cacheManager,
+            final GenericVnfCacheServiceProvider cacheServiceProvider,
+            final CustomerCacheServiceProvider customerCacheServiceProvider) {
+        super(cacheManager);
+        this.cacheServiceProvider = cacheServiceProvider;
+        this.customerCacheServiceProvider = customerCacheServiceProvider;
+    }
+
+    @Override
+    public void putNodeServiceInstance(final String serviceInstanceId, final NodeServiceInstance nodeServiceInstance) {
+        final Cache cache = getCache(NODES_CACHE.getName());
+        LOGGER.info("Adding {} to cache with key: {}...", nodeServiceInstance, serviceInstanceId);
+        cache.put(serviceInstanceId, nodeServiceInstance);
+    }
+
+    @Override
+    public Optional<NodeServiceInstance> getNodeServiceInstance(final String serviceInstanceId) {
+        final Cache cache = getCache(NODES_CACHE.getName());
+        final NodeServiceInstance value = cache.get(serviceInstanceId, NodeServiceInstance.class);
+        if (value != null) {
+            return Optional.of(value);
+        }
+        LOGGER.error("Unable to find node service instance in cache using key:{} ", serviceInstanceId);
+        return Optional.empty();
+    }
+
+    @Override
+    public Optional<GenericVnfs> getGenericVnfs(final String vnfName) {
+        // Resolve name -> vnf-id, then vnf-id -> GenericVnf, via the generic-vnf cache provider.
+        final Optional<String> genericVnfId = cacheServiceProvider.getGenericVnfId(vnfName);
+        if (genericVnfId.isPresent()) {
+            final Optional<GenericVnf> genericVnf = cacheServiceProvider.getGenericVnf(genericVnfId.get());
+            if (genericVnf.isPresent()) {
+                final GenericVnfs genericVnfs = new GenericVnfs();
+                genericVnfs.getGenericVnf().add(genericVnf.get());
+                return Optional.of(genericVnfs);
+            }
+        }
+        LOGGER.error("Unable to find GenericVnf for name: {}", vnfName);
+        return Optional.empty();
+    }
+
+    @Override
+    public Optional<ServiceInstance> getServiceInstance(final NodeServiceInstance nodeServiceInstance) {
+        return customerCacheServiceProvider.getServiceInstance(nodeServiceInstance.getGlobalCustomerId(),
+                nodeServiceInstance.getServiceType(), nodeServiceInstance.getServiceInstanceId());
+    }
+
+    @Override
+    public void clearAll() {
+        // Fixed: clear via the Cache abstraction instead of casting getNativeCache() to
+        // ConcurrentHashMap — equivalent for the default ConcurrentMapCache and safe for any
+        // other Cache implementation (the cast would throw ClassCastException otherwise).
+        // Also fixes the "cahce" typo in the log message.
+        final Cache cache = getCache(NODES_CACHE.getName());
+        LOGGER.info("Clear all entries from cache: {}", cache.getName());
+        cache.clear();
+    }
+
+}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/filesystem/WatcherConfig.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/OwnEntityCacheServiceProvider.java
index 3535e3322..d7aee99f7 100644..100755
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/filesystem/WatcherConfig.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/OwnEntityCacheServiceProvider.java
@@ -1,8 +1,6 @@
/*-
* ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,25 +13,28 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+package org.onap.aaisimulator.service.providers;
+
+import java.util.Optional;
+import org.onap.aai.domain.yang.OwningEntity;
+import org.onap.aai.domain.yang.Relationship;
+import org.springframework.http.HttpHeaders;
-package org.onap.pnfsimulator.filesystem;
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public interface OwnEntityCacheServiceProvider extends Clearable {
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.core.task.TaskExecutor;
-import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
+ void putOwningEntity(final String owningEntityId, final OwningEntity owningEntity);
-@Configuration
-public class WatcherConfig {
+ Optional<OwningEntity> getOwningEntity(final String owningEntityId);
- @Bean
- public TaskExecutor watcherTaskExecutor() {
- ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
- executor.setThreadNamePrefix("pnfsimulator_fs_watcher");
- executor.initialize();
- return executor;
- }
+ boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl, final String requestUriString,
+ final String owningEntityId, final Relationship relationship);
}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/OwnEntityCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/OwnEntityCacheServiceProviderImpl.java
new file mode 100755
index 000000000..a592f9b3f
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/OwnEntityCacheServiceProviderImpl.java
@@ -0,0 +1,138 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.OWNING_ENTITY_CACHE;
+import static org.onap.aaisimulator.utils.Constants.BELONGS_TO;
+import static org.onap.aaisimulator.utils.Constants.OWNING_ENTITY;
+import static org.onap.aaisimulator.utils.Constants.OWNING_ENTITY_OWNING_ENTITY_ID;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getRelationShipListRelatedLink;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getTargetUrl;
+import java.util.List;
+import java.util.Optional;
+import org.onap.aai.domain.yang.OwningEntity;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.http.HttpHeaders;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Service
+public class OwnEntityCacheServiceProviderImpl extends AbstractCacheServiceProvider
+ implements OwnEntityCacheServiceProvider {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(OwnEntityCacheServiceProviderImpl.class);
+
+ private final HttpRestServiceProvider httpRestServiceProvider;
+
+
+ @Autowired
+ public OwnEntityCacheServiceProviderImpl(final CacheManager cacheManager,
+ final HttpRestServiceProvider httpRestServiceProvider) {
+ super(cacheManager);
+ this.httpRestServiceProvider = httpRestServiceProvider;
+ }
+
+ @Override
+ public void putOwningEntity(final String owningEntityId, final OwningEntity owningEntity) {
+ LOGGER.info("Adding OwningEntity: {} with value: {} to cache", owningEntityId, owningEntity);
+ final Cache cache = getCache(OWNING_ENTITY_CACHE.getName());
+ cache.put(owningEntityId, owningEntity);
+ }
+
+ @Override
+ public Optional<OwningEntity> getOwningEntity(final String owningEntityId) {
+ LOGGER.info("getting OwningEntity from cache using key: {}", owningEntityId);
+ final Cache cache = getCache(OWNING_ENTITY_CACHE.getName());
+ final OwningEntity value = cache.get(owningEntityId, OwningEntity.class);
+ if (value != null) {
+ return Optional.of(value);
+ }
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl,
+ final String requestUriString, final String owningEntityId, final Relationship relationship) {
+ try {
+ final Optional<OwningEntity> optional = getOwningEntity(owningEntityId);
+ if (optional.isPresent()) {
+ final OwningEntity owningEntity = optional.get();
+ final String targetUrl = getTargetUrl(targetBaseUrl, relationship.getRelatedLink());
+ final Relationship outGoingRelationShip = getRelationship(requestUriString, owningEntity);
+
+ final Optional<Relationship> optionalRelationship = httpRestServiceProvider.put(incomingHeader,
+ outGoingRelationShip, targetUrl, Relationship.class);
+
+ if (optionalRelationship.isPresent()) {
+ final Relationship resultantRelationship = optionalRelationship.get();
+
+ RelationshipList relationshipList = owningEntity.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ owningEntity.setRelationshipList(relationshipList);
+ }
+ if (relationshipList.getRelationship().add(resultantRelationship)) {
+ LOGGER.info("added relationship {} in cache successfully", resultantRelationship);
+ return true;
+ }
+ }
+ }
+
+ } catch (final Exception exception) {
+ LOGGER.error("Unable to add two-way relationship for owning entity id: {}", owningEntityId, exception);
+ }
+ LOGGER.error("Unable to add relationship in cache for owning entity id: {}", owningEntityId);
+ return false;
+ }
+
+ @Override
+ public void clearAll() {
+ clearCache(OWNING_ENTITY_CACHE.getName());
+ }
+
+ private Relationship getRelationship(final String requestUriString, final OwningEntity owningEntity) {
+ final Relationship relationShip = new Relationship();
+ relationShip.setRelatedTo(OWNING_ENTITY);
+ relationShip.setRelationshipLabel(BELONGS_TO);
+ relationShip.setRelatedLink(getRelationShipListRelatedLink(requestUriString));
+
+ final List<RelationshipData> relationshipDataList = relationShip.getRelationshipData();
+
+ final RelationshipData relationshipData = new RelationshipData();
+ relationshipData.setRelationshipKey(OWNING_ENTITY_OWNING_ENTITY_ID);
+ relationshipData.setRelationshipValue(owningEntity.getOwningEntityId());
+
+ relationshipDataList.add(relationshipData);
+
+
+ return relationShip;
+ }
+}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/viewmodel/FlatTemplateContent.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PlatformCacheServiceProvider.java
index 84235f709..0ede3feb2 100644..100755
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/viewmodel/FlatTemplateContent.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PlatformCacheServiceProvider.java
@@ -1,8 +1,6 @@
/*-
* ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,31 +13,27 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+package org.onap.aaisimulator.service.providers;
-package org.onap.pnfsimulator.template.search.viewmodel;
-
-
-import lombok.Getter;
-import lombok.NoArgsConstructor;
-import lombok.ToString;
-import org.onap.pnfsimulator.db.Row;
+import java.util.Optional;
+import org.onap.aai.domain.yang.Platform;
+import org.onap.aai.domain.yang.Relationship;
-import java.util.List;
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public interface PlatformCacheServiceProvider extends Clearable {
-@Getter
-@NoArgsConstructor
-@ToString
-public class FlatTemplateContent extends Row {
+ void putPlatform(final String platformName, final Platform platform);
- private List<KeyValuePair> keyValues;
+ Optional<Platform> getPlatform(final String platformName);
+ Optional<Relationship> addRelationShip(final String platformName, final Relationship relationship,
+ final String requestUri);
- public FlatTemplateContent(String name, List<KeyValuePair> keyValues) {
- this.id = name;
- this.keyValues = keyValues;
- }
}
-
-
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PlatformCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PlatformCacheServiceProviderImpl.java
new file mode 100755
index 000000000..639b883d6
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PlatformCacheServiceProviderImpl.java
@@ -0,0 +1,110 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.PLATFORM_CACHE;
+import static org.onap.aaisimulator.utils.Constants.PLATFORM;
+import static org.onap.aaisimulator.utils.Constants.PLATFORM_PLATFORM_NAME;
+import static org.onap.aaisimulator.utils.Constants.USES;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getBiDirectionalRelationShipListRelatedLink;
+import java.util.Optional;
+import org.onap.aai.domain.yang.Platform;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Service
+public class PlatformCacheServiceProviderImpl extends AbstractCacheServiceProvider
+ implements PlatformCacheServiceProvider {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(PlatformCacheServiceProviderImpl.class);
+
+ @Autowired
+ public PlatformCacheServiceProviderImpl(final CacheManager cacheManager) {
+ super(cacheManager);
+ }
+
+ @Override
+ public void putPlatform(final String platformName, final Platform platform) {
+ LOGGER.info("Adding Platform to cache with key: {} ...", platformName);
+ final Cache cache = getCache(PLATFORM_CACHE.getName());
+ cache.put(platformName, platform);
+ }
+
+ @Override
+ public Optional<Platform> getPlatform(final String platformName) {
+ LOGGER.info("getting Platform from cache using key: {}", platformName);
+ final Cache cache = getCache(PLATFORM_CACHE.getName());
+ final Platform value = cache.get(platformName, Platform.class);
+ if (value != null) {
+ return Optional.of(value);
+ }
+ LOGGER.error("Unable to find Platform in cache using key:{} ", platformName);
+ return Optional.empty();
+ }
+
+ @Override
+ public Optional<Relationship> addRelationShip(final String platformName, final Relationship relationship,
+ final String requestUri) {
+ final Optional<Platform> optional = getPlatform(platformName);
+ if (optional.isPresent()) {
+ final Platform platform = optional.get();
+ RelationshipList relationshipList = platform.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ platform.setRelationshipList(relationshipList);
+ }
+ relationshipList.getRelationship().add(relationship);
+
+ LOGGER.info("Successfully add relation to Platform with name: {}", platformName);
+
+ final Relationship resultantRelationship = new Relationship();
+ resultantRelationship.setRelatedTo(PLATFORM);
+ resultantRelationship.setRelationshipLabel(USES);
+ resultantRelationship.setRelatedLink(getBiDirectionalRelationShipListRelatedLink(requestUri));
+
+ final RelationshipData relationshipData = new RelationshipData();
+ relationshipData.setRelationshipKey(PLATFORM_PLATFORM_NAME);
+ relationshipData.setRelationshipValue(platform.getPlatformName());
+ resultantRelationship.getRelationshipData().add(relationshipData);
+
+ return Optional.of(resultantRelationship);
+ }
+ LOGGER.error("Unable to find Platform ...");
+ return Optional.empty();
+ }
+
+ @Override
+ public void clearAll() {
+ clearCache(PLATFORM_CACHE.getName());
+ }
+
+}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/db/Storage.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PnfCacheServiceProvider.java
index ad98ce0af..c27589e3c 100644..100755
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/db/Storage.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PnfCacheServiceProvider.java
@@ -1,8 +1,6 @@
/*-
* ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
+ * Copyright (C) 2020 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,27 +13,33 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+package org.onap.aaisimulator.service.providers;
-package org.onap.pnfsimulator.db;
-
-import com.google.gson.JsonObject;
+import org.onap.aai.domain.yang.v15.Pnf;
import java.util.List;
import java.util.Optional;
-public interface Storage<T extends Row> {
+/**
+ * @author Raj Gumma (raj.gumma@est.tech)
+ */
+public interface PnfCacheServiceProvider extends Clearable {
+
+ void putPnf(final String pnfId, final Pnf pnf);
+
+ Optional<Pnf> getPnf(final String pnfId);
- List<T> getAll();
+ Optional<String> getPnfId(final String pnfName);
- Optional<T> get(String rowId);
+ boolean patchPnf(final String pnfId, final Pnf pnf);
- void persist(T row);
+ List<Pnf> getPnfs(final String selflink);
- boolean tryPersistOrOverwrite(T row, boolean overwrite);
+ boolean deletePnf(final String pnfId, final String resourceVersion);
- void delete(String rowId);
- List<String> getIdsByContentCriteria(JsonObject queryJson);
}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PnfCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PnfCacheServiceProviderImpl.java
new file mode 100755
index 000000000..3f33883b1
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PnfCacheServiceProviderImpl.java
@@ -0,0 +1,154 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2020 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import org.onap.aai.domain.yang.v15.Pnf;
+import org.onap.aaisimulator.utils.ShallowBeanCopy;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.stereotype.Service;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+
+import static org.onap.aaisimulator.utils.CacheName.PNF_CACHE;
+
+/**
+ * @author Raj Gumma (raj.gumma@est.tech)
+ */
+@Service
+public class PnfCacheServiceProviderImpl extends AbstractCacheServiceProvider implements PnfCacheServiceProvider {
+
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(PnfCacheServiceProvider.class);
+
+ private final Cache cache;
+
+ @Autowired
+ public PnfCacheServiceProviderImpl(final CacheManager cacheManager) {
+ super(cacheManager);
+ cache = getCache(PNF_CACHE.getName());
+ }
+
+ @Override
+ public void putPnf(final String pnfId, final Pnf pnf) {
+ LOGGER.info("Adding pnf: {} with key: {} in cache ...", pnf, pnfId);
+ cache.put(pnfId, pnf);
+ }
+
+ @Override
+ public Optional<Pnf> getPnf(final String pnfId) {
+ LOGGER.info("getting Pnf from cache using key: {}", pnfId);
+ final Pnf value = cache.get(pnfId, Pnf.class);
+ return Optional.ofNullable(value);
+ }
+
+ @Override
+ public Optional<String> getPnfId(final String pnfName) {
+ final Object nativeCache = cache.getNativeCache();
+ if (nativeCache instanceof ConcurrentHashMap) {
+ @SuppressWarnings("unchecked") final ConcurrentHashMap<Object, Object> concurrentHashMap =
+ (ConcurrentHashMap<Object, Object>) nativeCache;
+ for (final Object key : concurrentHashMap.keySet()) {
+ final Optional<Pnf> optional = getPnf(key.toString());
+ if (optional.isPresent()) {
+ final String cachedPnfName = optional.get().getPnfName();
+ if (cachedPnfName != null && cachedPnfName.equals(pnfName)) {
+ final String pnfId = optional.get().getPnfId();
+ LOGGER.info("Found matching pnf for name: {}, pnf-id: {}", cachedPnfName, pnfId);
+ return Optional.of(pnfId);
+ }
+ }
+ }
+ }
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean patchPnf(final String pnfId, final Pnf pnf) {
+ final Optional<Pnf> optional = getPnf(pnfId);
+ if (optional.isPresent()) {
+ final Pnf cachedPnf = optional.get();
+ try {
+ ShallowBeanCopy.copy(pnf, cachedPnf);
+ return true;
+ } catch (final Exception exception) {
+ LOGGER.error("Unable to update Pnf for pnfId: {}", pnfId, exception);
+ }
+ }
+ LOGGER.error("Unable to find Pnf for pnfID : {}", pnfId);
+ return false;
+ }
+
+ @Override
+ public List<Pnf> getPnfs(String selfLink) {
+ final Object nativeCache = cache.getNativeCache();
+ if (nativeCache instanceof ConcurrentHashMap) {
+ @SuppressWarnings("unchecked") final ConcurrentHashMap<Object, Object> concurrentHashMap =
+ (ConcurrentHashMap<Object, Object>) nativeCache;
+ final List<Pnf> result = new ArrayList<>();
+
+ concurrentHashMap.keySet().stream().forEach(key -> {
+ final Optional<Pnf> optional = getPnf(key.toString());
+ if (optional.isPresent()) {
+ final Pnf pnf = optional.get();
+ final String pnfSelfLink = pnf.getSelflink();
+ final String pnfId = pnf.getPnfId();
+
+ if (pnfSelfLink != null && pnfSelfLink.equals(selfLink)) {
+ LOGGER.info("Found matching pnf for selflink: {}, pnf-id: {}", pnfSelfLink,
+ pnfId);
+ result.add(pnf);
+ }
+ }
+ });
+ return result;
+ }
+ LOGGER.error("No match found for selflink: {}", selfLink);
+ return Collections.emptyList();
+ }
+
+ @Override
+ public boolean deletePnf(String pnfId, String resourceVersion) {
+ final Optional<Pnf> optional = getPnf(pnfId);
+ if (optional.isPresent()) {
+ final Pnf pnf = optional.get();
+ if (pnf.getResourceVersion() != null && pnf.getResourceVersion().equals(resourceVersion)) {
+ LOGGER.info("Will evict pnf from cache with pnfId: {}", pnf.getPnfId());
+ cache.evict(pnfId);
+ return true;
+ }
+ }
+ LOGGER.error("Unable to find Pnf for pnfId: {} and resourceVersion: {} ...", pnfId, resourceVersion);
+ return false;
+ }
+
+ @Override
+ public void clearAll() {
+ clearCache(cache.getName());
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ProjectCacheServiceProvider.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ProjectCacheServiceProvider.java
new file mode 100755
index 000000000..f766b1790
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ProjectCacheServiceProvider.java
@@ -0,0 +1,40 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import java.util.Optional;
+import org.onap.aai.domain.yang.Project;
+import org.onap.aai.domain.yang.Relationship;
+import org.springframework.http.HttpHeaders;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public interface ProjectCacheServiceProvider extends Clearable {
+
+ void putProject(final String projectName, final Project project);
+
+ Optional<Project> getProject(final String projectName);
+
+ boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl, final String requestUri,
+ final String projectName, final Relationship relationship);
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ProjectCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ProjectCacheServiceProviderImpl.java
new file mode 100755
index 000000000..6b690aa5c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ProjectCacheServiceProviderImpl.java
@@ -0,0 +1,140 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.PROJECT_CACHE;
+import static org.onap.aaisimulator.utils.Constants.PROJECT;
+import static org.onap.aaisimulator.utils.Constants.PROJECT_PROJECT_NAME;
+import static org.onap.aaisimulator.utils.Constants.USES;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getRelationShipListRelatedLink;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getTargetUrl;
+import java.util.List;
+import java.util.Optional;
+import org.onap.aai.domain.yang.Project;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.http.HttpHeaders;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Service
+public class ProjectCacheServiceProviderImpl extends AbstractCacheServiceProvider
+ implements ProjectCacheServiceProvider {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(ProjectCacheServiceProviderImpl.class);
+
+ private final HttpRestServiceProvider httpRestServiceProvider;
+
+ @Autowired
+ public ProjectCacheServiceProviderImpl(final CacheManager cacheManager,
+ final HttpRestServiceProvider httpRestServiceProvider) {
+ super(cacheManager);
+ this.httpRestServiceProvider = httpRestServiceProvider;
+ }
+
+ @Override
+ public void putProject(final String projectName, final Project project) {
+ LOGGER.info("Adding project: {} with name: {} to cache", project, projectName);
+ final Cache cache = getCache(PROJECT_CACHE.getName());
+ cache.put(projectName, project);
+ }
+
+
+ @Override
+ public Optional<Project> getProject(final String projectName) {
+ LOGGER.info("getting project from cache using key: {}", projectName);
+ final Cache cache = getCache(PROJECT_CACHE.getName());
+ final Project value = cache.get(projectName, Project.class);
+ if (value != null) {
+ return Optional.of(value);
+ }
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl,
+ final String requestUriString, final String projectName, final Relationship relationship) {
+ try {
+ final Optional<Project> optional = getProject(projectName);
+
+ if (optional.isPresent()) {
+ final Project project = optional.get();
+ final String targetUrl = getTargetUrl(targetBaseUrl, relationship.getRelatedLink());
+ final Relationship outGoingRelationShip = getRelationship(requestUriString, project);
+
+ final Optional<Relationship> optionalRelationship = httpRestServiceProvider.put(incomingHeader,
+ outGoingRelationShip, targetUrl, Relationship.class);
+
+ if (optionalRelationship.isPresent()) {
+ final Relationship resultantRelationship = optionalRelationship.get();
+
+ RelationshipList relationshipList = project.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ project.setRelationshipList(relationshipList);
+ }
+ if (relationshipList.getRelationship().add(resultantRelationship)) {
+ LOGGER.info("added relationship {} in cache successfully", resultantRelationship);
+ return true;
+ }
+ }
+ }
+ } catch (final Exception exception) {
+ LOGGER.error("Unable to add two-way relationship for project name: {}", projectName, exception);
+ }
+ LOGGER.error("Unable to add relationship in cache for project name: {}", projectName);
+ return false;
+ }
+
+ @Override
+ public void clearAll() {
+ clearCache(PROJECT_CACHE.getName());
+ }
+
+ private Relationship getRelationship(final String requestUriString, final Project project) {
+
+ final Relationship relationShip = new Relationship();
+ relationShip.setRelatedTo(PROJECT);
+ relationShip.setRelationshipLabel(USES);
+ relationShip.setRelatedLink(getRelationShipListRelatedLink(requestUriString));
+
+ final List<RelationshipData> relationshipDataList = relationShip.getRelationshipData();
+
+ final RelationshipData relationshipData = new RelationshipData();
+ relationshipData.setRelationshipKey(PROJECT_PROJECT_NAME);
+ relationshipData.setRelationshipValue(project.getProjectName());
+
+ relationshipDataList.add(relationshipData);
+
+
+ return relationShip;
+ }
+
+}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/TemplateRequest.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/CacheName.java
index d5a77f055..63e7e2614 100644..100755
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/TemplateRequest.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/CacheName.java
@@ -1,8 +1,6 @@
/*-
* ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,28 +13,36 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+package org.onap.aaisimulator.utils;
-package org.onap.pnfsimulator.rest.model;
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
/**
 * Names of the in-memory caches maintained by the AAI simulator. Each constant
 * carries the string identifier used to look the cache up in the cache manager.
 */
public enum CacheName {

    CUSTOMER_CACHE("customer-cache"),
    PROJECT_CACHE("project-cache"),
    NODES_CACHE("nodes-cache"),
    GENERIC_VNF_CACHE("generic-vnf-cache"),
    PNF_CACHE("pnf-cache"),
    OWNING_ENTITY_CACHE("owning-entity-cache"),
    PLATFORM_CACHE("platform-cache"),
    LINES_OF_BUSINESS_CACHE("lines-of-business-cache"),
    CLOUD_REGION_CACHE("cloud-region-cache"),
    ESR_VNFM_CACHE("esr-vnfm-cache");

    // String identifier of the cache, as registered with the cache manager.
    private final String cacheName;

    CacheName(final String cacheName) {
        this.cacheName = cacheName;
    }

    /**
     * @return the string identifier of this cache
     */
    public String getName() {
        return cacheName;
    }
}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/Constants.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/Constants.java
new file mode 100755
index 000000000..24aaa0656
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/Constants.java
@@ -0,0 +1,153 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
/**
 * Endpoint paths, AAI node types, relationship labels and relationship-data
 * property keys shared by the AAI simulator's controllers and cache providers.
 *
 * Fix: normalised the inconsistent spacing in the PNFS_URL concatenation
 * (was "NETWORK_URL+ \"/pnfs\""); values are unchanged.
 *
 * @author waqas.ikram@ericsson.com
 */
public class Constants {

    // ---- base endpoint paths; the version segment is a Spring path variable ----

    public static final String BASE_URL = "/aai/v{version:\\d+}";

    public static final String NODES_URL = BASE_URL + "/nodes";

    public static final String BUSINESS_URL = BASE_URL + "/business";

    public static final String CLOUD_INFRASTRUCTURE_URL = BASE_URL + "/cloud-infrastructure";

    public static final String CLOUD_REGIONS = CLOUD_INFRASTRUCTURE_URL + "/cloud-regions/cloud-region/";

    public static final String CUSTOMER_URL = BUSINESS_URL + "/customers/customer/";

    public static final String PROJECT_URL = BUSINESS_URL + "/projects/project/";

    public static final String OWNING_ENTITY_URL = BUSINESS_URL + "/owning-entities/owning-entity/";

    public static final String PLATFORMS_URL = BUSINESS_URL + "/platforms/platform/";

    public static final String EXTERNAL_SYSTEM_ESR_VNFM_LIST_URL = BASE_URL + "/external-system/esr-vnfm-list";

    public static final String NETWORK_URL = BASE_URL + "/network";

    public static final String GENERIC_VNFS_URL = NETWORK_URL + "/generic-vnfs";

    public static final String PNFS_URL = NETWORK_URL + "/pnfs";

    public static final String RELATIONSHIP_LIST_RELATIONSHIP_URL = "/relationship-list/relationship";

    public static final String BI_DIRECTIONAL_RELATIONSHIP_LIST_URL =
            RELATIONSHIP_LIST_RELATIONSHIP_URL + "/bi-directional";

    public static final String LINES_OF_BUSINESS_URL = BUSINESS_URL + "/lines-of-business/line-of-business/";

    public static final String SERVICE_DESIGN_AND_CREATION_URL = BASE_URL + "/service-design-and-creation";

    // ---- miscellaneous response values ----

    public static final String HEALTHY = "healthy";

    // ---- AAI node types and relationship-data property keys ----

    public static final String PROJECT = "project";

    public static final String PROJECT_PROJECT_NAME = "project.project-name";

    public static final String OWNING_ENTITY = "owning-entity";

    public static final String OWNING_ENTITY_OWNING_ENTITY_ID = "owning-entity.owning-entity-id";

    // ---- HTTP conventions ----

    public static final String X_HTTP_METHOD_OVERRIDE = "X-HTTP-Method-Override";

    public static final String APPLICATION_MERGE_PATCH_JSON = "application/merge-patch+json";

    // ---- search / resource-lookup keys ----

    public static final String SERVICE_RESOURCE_TYPE = "service-instance";

    public static final String RESOURCE_LINK = "resource-link";

    public static final String RESOURCE_TYPE = "resource-type";

    public static final String GENERIC_VNF_VNF_NAME = "generic-vnf.vnf-name";

    public static final String GENERIC_VNF_VNF_ID = "generic-vnf.vnf-id";

    public static final String SERVICE_INSTANCE_SERVICE_INSTANCE_ID = "service-instance.service-instance-id";

    public static final String SERVICE_SUBSCRIPTION_SERVICE_TYPE = "service-subscription.service-type";

    public static final String CUSTOMER_GLOBAL_CUSTOMER_ID = "customer.global-customer-id";

    // ---- relationship labels (ONAP inventory / TOSCA) and related node types ----

    public static final String COMPOSED_OF = "org.onap.relationships.inventory.ComposedOf";

    public static final String GENERIC_VNF = "generic-vnf";

    public static final String PNF = "pnf";

    public static final String PLATFORM = "platform";

    public static final String USES = "org.onap.relationships.inventory.Uses";

    public static final String PLATFORM_PLATFORM_NAME = "platform.platform-name";

    public static final String LINE_OF_BUSINESS_LINE_OF_BUSINESS_NAME = "line-of-business.line-of-business-name";

    public static final String LINE_OF_BUSINESS = "line-of-business";

    public static final String SERVICE_SUBSCRIPTION = "service-subscription";

    public static final String CUSTOMER_TYPE = "Customer";

    public static final String SERVICE_INSTANCE_SERVICE_INSTANCE_NAME = "service-instance.service-instance-name";

    public static final String CLOUD_REGION_OWNER_DEFINED_TYPE = "cloud-region.owner-defined-type";

    public static final String CLOUD_REGION_CLOUD_REGION_ID = "cloud-region.cloud-region-id";

    public static final String CLOUD_REGION_CLOUD_OWNER = "cloud-region.cloud-owner";

    public static final String LOCATED_IN = "org.onap.relationships.inventory.LocatedIn";

    public static final String CLOUD_REGION = "cloud-region";

    public static final String TENANT_TENANT_NAME = "tenant.tenant-name";

    public static final String TENANT_TENANT_ID = "tenant.tenant-id";

    public static final String BELONGS_TO = "org.onap.relationships.inventory.BelongsTo";

    public static final String TENANT = "tenant";

    public static final String ESR_VNFM = "esr-vnfm";

    public static final String ESR_SYSTEM_INFO = "esr-system-info";

    public static final String ESR_SYSTEM_INFO_LIST = "esr-system-info-list";

    public static final String ESR_VNFM_VNFM_ID = "esr-vnfm.vnfm-id";

    public static final String DEPENDS_ON = "tosca.relationships.DependsOn";

    public static final String VSERVER_VSERVER_NAME = "vserver.vserver-name";

    public static final String VSERVER_VSERVER_ID = "vserver.vserver-id";

    public static final String HOSTED_ON = "tosca.relationships.HostedOn";

    public static final String VSERVER = "vserver";

    // Utility holder — not instantiable.
    private Constants() {}

}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/HttpServiceUtils.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/HttpServiceUtils.java
new file mode 100755
index 000000000..0d83a50f9
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/HttpServiceUtils.java
@@ -0,0 +1,107 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.springframework.http.MediaType.APPLICATION_XML;
+import java.net.URI;
+import java.util.Arrays;
+import java.util.Enumeration;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import javax.servlet.http.HttpServletRequest;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.MediaType;
+import org.springframework.web.util.UriComponentsBuilder;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class HttpServiceUtils {
+
+ // Reluctantly matches from the leading "/" up to and including a version
+ // segment such as "v11/" (e.g. "/aai/" + "v11/" in "/aai/v11/business/...").
+ private static final String START_WITH_FORWARD_SLASH = "(^/.*?)";
+ private static final String ALPHANUMERIC = "((?:v+[a-z0-9]*)/)";
+ private static final String REGEX = START_WITH_FORWARD_SLASH + ALPHANUMERIC;
+ private static final Pattern PATTERN = Pattern.compile(REGEX, Pattern.CASE_INSENSITIVE | Pattern.DOTALL);
+
+ private HttpServiceUtils() {}
+
+ /**
+  * Derives this server's base URL (scheme://host:port plus the "/aai/vNN/"
+  * prefix of the request URI) from an incoming request.
+  */
+ public static URI getBaseUrl(final HttpServletRequest request) {
+ final StringBuffer url = request.getRequestURL();
+ final String uri = request.getRequestURI();
+ return UriComponentsBuilder.fromHttpUrl(url.substring(0, url.indexOf(uri))).path(getBaseUrl(uri)).build()
+ .toUri();
+ }
+
+ // Trims the request URI down to its "/aai/vNN/" prefix; returns the URI
+ // unchanged when the pattern does not match.
+ private static String getBaseUrl(final String uri) {
+ final Matcher matcher = PATTERN.matcher(uri);
+ if (matcher.find()) {
+ final StringBuilder builder = new StringBuilder();
+ // NOTE(review): groupCount() is 2, so this loop runs only for index 0 and
+ // appends group(0) — the entire match. Confirm iterating the capture
+ // groups individually was not the intent.
+ for (int index = 0; index < matcher.groupCount() - 1; index++) {
+ builder.append(matcher.group(index));
+ }
+ return builder.toString();
+ }
+ return uri;
+ }
+
+ /**
+  * @return the scheme://host:port portion of the given request URL (the URI
+  *         suffix is cut off)
+  */
+ public static URI getBaseUrl(final StringBuffer requestUrl, final String requestUri) {
+ return UriComponentsBuilder.fromHttpUrl(requestUrl.substring(0, requestUrl.indexOf(requestUri))).build()
+ .toUri();
+ }
+
+ /**
+  * Joins this server's base URL with the given related-link path.
+  */
+ public static String getBaseServiceInstanceUrl(final HttpServletRequest request, final String relatedLink) {
+ return UriComponentsBuilder.fromUri(getBaseUrl(request)).path(relatedLink).toUriString();
+ }
+
+ /** Copies the request's headers, defaulting the content type to application/xml. */
+ public static HttpHeaders getHeaders(final HttpServletRequest request) {
+ return getHeaders(request, APPLICATION_XML);
+ }
+
+ /**
+  * Copies all headers from the incoming request, then forces the given content
+  * type and an XML Accept header (overriding whatever the request carried).
+  */
+ public static HttpHeaders getHeaders(final HttpServletRequest request, final MediaType mediaType) {
+ final HttpHeaders headers = new HttpHeaders();
+ for (final Enumeration<String> enumeration = request.getHeaderNames(); enumeration.hasMoreElements();) {
+ final String headerName = enumeration.nextElement();
+ headers.add(headerName, request.getHeader(headerName));
+ }
+ headers.setContentType(mediaType);
+ headers.setAccept(Arrays.asList(MediaType.APPLICATION_XML));
+ return headers;
+ }
+
+ /** Builds "targetBaseUrl + relatedLink + /relationship-list/relationship/bi-directional". */
+ public static String getTargetUrl(final String targetBaseUrl, final String relatedLink) {
+ return UriComponentsBuilder.fromUriString(targetBaseUrl).path(relatedLink)
+ .path(BI_DIRECTIONAL_RELATIONSHIP_LIST_URL).toUriString();
+ }
+
+ /** Strips the first "/relationship-list/relationship" occurrence (null-safe). */
+ public static String getRelationShipListRelatedLink(final String requestUriString) {
+ return requestUriString != null ? requestUriString.replaceFirst(RELATIONSHIP_LIST_RELATIONSHIP_URL, "")
+ : requestUriString;
+ }
+
+ /** Strips the first ".../relationship/bi-directional" occurrence (null-safe). */
+ public static String getBiDirectionalRelationShipListRelatedLink(final String requestUriString) {
+ return requestUriString != null ? requestUriString.replaceFirst(BI_DIRECTIONAL_RELATIONSHIP_LIST_URL, "")
+ : requestUriString;
+ }
+
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestError.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestError.java
new file mode 100755
index 000000000..191e9afb3
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestError.java
@@ -0,0 +1,59 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@XmlRootElement(name = "requestError")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class RequestError {
+
+ @XmlElement(name = "serviceException")
+ private ServiceException serviceException;
+
+ /**
+ * @return the serviceException
+ */
+ public ServiceException getServiceException() {
+ return serviceException;
+ }
+
+ /**
+ * @param serviceException the serviceException to set
+ */
+ public void setServiceException(final ServiceException serviceException) {
+ this.serviceException = serviceException;
+ }
+
+ @Override
+ public String toString() {
+ return "RequestError [serviceException=" + serviceException + "]";
+ }
+
+
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestErrorBuilder.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestErrorBuilder.java
new file mode 100755
index 000000000..a67cd9d92
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestErrorBuilder.java
@@ -0,0 +1,59 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class RequestErrorBuilder {
+
+ private final ServiceException serviceException = new ServiceException();
+
+ public RequestErrorBuilder messageId(final String messageId) {
+ this.serviceException.setMessageId(messageId);
+ return this;
+ }
+
+ public RequestErrorBuilder text(final String text) {
+ this.serviceException.setText(text);
+ return this;
+ }
+
+ public RequestErrorBuilder variables(final List<String> variables) {
+ this.serviceException.setVariables(variables);
+ return this;
+ }
+
+ public RequestErrorBuilder variables(final String... variables) {
+ this.serviceException.setVariables(Arrays.asList(variables));
+ return this;
+ }
+
+ public RequestError build() {
+ final RequestError requestError = new RequestError();
+ requestError.setServiceException(serviceException);
+ return requestError;
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestErrorResponseUtils.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestErrorResponseUtils.java
new file mode 100755
index 000000000..bc302405c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestErrorResponseUtils.java
@@ -0,0 +1,57 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+import javax.servlet.http.HttpServletRequest;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class RequestErrorResponseUtils {
+
+ public static final String ERROR_MESSAGE_ID = "SVC3001";
+
+ public static final String ERROR_MESSAGE = "Resource not found for %1 using id %2 (msg=%3) (ec=%4)";
+
+ private static final String EMPTY_STRING = "";
+
+ public static final String getResourceVersion() {
+ return System.currentTimeMillis() + EMPTY_STRING;
+ }
+
+ public static ResponseEntity<?> getRequestErrorResponseEntity(final HttpServletRequest request,
+ final String nodeType) {
+ return new ResponseEntity<>(new RequestErrorBuilder().messageId(ERROR_MESSAGE_ID).text(ERROR_MESSAGE)
+ .variables(request.getMethod(), request.getRequestURI(),
+ "Node Not Found:No Node of " + nodeType + " found at: " + request.getRequestURI(),
+ "ERR.5.4.6114")
+ .build(), HttpStatus.NOT_FOUND);
+ }
+
+ public static ResponseEntity<?> getRequestErrorResponseEntity(final HttpServletRequest request) {
+ return getRequestErrorResponseEntity(request, Constants.SERVICE_RESOURCE_TYPE);
+ }
+
+ private RequestErrorResponseUtils() {}
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/ServiceException.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/ServiceException.java
new file mode 100755
index 000000000..c349b8e4a
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/ServiceException.java
@@ -0,0 +1,88 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+import java.util.List;
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@XmlRootElement(name = "serviceException")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class ServiceException {
+
+ private String messageId;
+
+ private String text;
+
+ private List<String> variables;
+
+ /**
+ * @return the messageId
+ */
+ public String getMessageId() {
+ return messageId;
+ }
+
+ /**
+ * @param messageId the messageId to set
+ */
+ public void setMessageId(final String messageId) {
+ this.messageId = messageId;
+ }
+
+ /**
+ * @return the text
+ */
+ public String getText() {
+ return text;
+ }
+
+ /**
+ * @param text the text to set
+ */
+ public void setText(final String text) {
+ this.text = text;
+ }
+
+ /**
+ * @return the variables
+ */
+ public List<String> getVariables() {
+ return variables;
+ }
+
+ /**
+ * @param variables the variables to set
+ */
+ public void setVariables(final List<String> variables) {
+ this.variables = variables;
+ }
+
+ @Override
+ public String toString() {
+ return "ServiceException [messageId=" + messageId + ", text=" + text + ", variables=" + variables + "]";
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/ShallowBeanCopy.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/ShallowBeanCopy.java
new file mode 100755
index 000000000..64c6efc5e
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/ShallowBeanCopy.java
@@ -0,0 +1,89 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+import java.lang.reflect.Method;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Optional;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class ShallowBeanCopy {
+ private static final Logger LOGGER = LoggerFactory.getLogger(ShallowBeanCopy.class);
+
+ private ShallowBeanCopy() {}
+
+ public static void copy(final Object from, final Object to) throws Exception {
+ final Map<String, Method> fromMethods = getMethods(from);
+ final Map<String, Method> toMethods = getMethods(to);
+
+ for (final Entry<String, Method> entry : fromMethods.entrySet()) {
+ final String methodName = entry.getKey();
+ final Method fromMethod = entry.getValue();
+
+ final Optional<Method> optional = getSetMethod(to, fromMethod);
+ if (optional.isPresent()) {
+ final Method toGetMethod = toMethods.get(methodName);
+ final Method toMethod = optional.get();
+ final Object toValue = fromMethod.invoke(from);
+
+ final Object fromValue = toGetMethod.invoke(to);
+ if (toValue != null && !toValue.equals(fromValue)) {
+ LOGGER.info("Changing {} value from: {} to: {}", methodName, fromValue, toValue);
+ toMethod.invoke(to, toValue);
+ }
+ }
+ }
+ }
+
+
+ private static Optional<Method> getSetMethod(final Object to, final Method fromMethod) {
+ final String name = fromMethod.getName().replaceFirst("get|is", "set");
+ final Class<?> returnType = fromMethod.getReturnType();
+ try {
+ return Optional.of(to.getClass().getMethod(name, returnType));
+ } catch (final NoSuchMethodException noSuchMethodException) {
+ }
+ return Optional.empty();
+ }
+
+ private static Map<String, Method> getMethods(final Object object) {
+ final Map<String, Method> methodsFound = new HashMap<>();
+ final Method[] methods = object.getClass().getMethods();
+
+ for (final Method method : methods) {
+ if (method.getName().startsWith("get") || method.getName().startsWith("is")) {
+ final String name = method.getName().replaceFirst("get|is", "");
+
+ methodsFound.put(name, method);
+ }
+ }
+
+ return methodsFound;
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/resources/application.yaml b/test/mocks/aai-simulator/aai-sim/src/main/resources/application.yaml
new file mode 100755
index 000000000..a59f3ccde
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/resources/application.yaml
@@ -0,0 +1,25 @@
+# NOTE(review): key-store/trust-store passwords are checked in as plaintext and
+# the users' original passwords are left in comments next to their bcrypt
+# hashes. Presumably acceptable for a local test simulator only — confirm this
+# file never ships beyond test environments.
+server:
+ port: 9993
+ tomcat:
+ max-threads: 4
+ ssl:
+ key-store: classpath:keystore/keystore.p12
+ key-store-password: mYHC98!qX}7h?W}jRv}MIXTJ
+ keyStoreType: PKCS12
+ssl-enable: true
+spring:
+ security:
+ users:
+ - username: aai
+ #password: aai.onap.org:demo123456!
+ password: $2a$04$crRntT01fAF4kb48mxlvgu68/silcLg.czC1LxQsKTdWuDBPpO3YO
+ role: VID
+ - username: aai@aai.onap.org
+ #password: demo123456!
+ password: $2a$04$06VCpDvW5ztE7WOvhhvAtOx7JHLghECyZIzOShIbXLWpnshMva8T6
+ role: VID
+http:
+ client:
+ ssl:
+ trust-store: classpath:truststore/truststore.jks
+ trust-store-password: '*TQH?Lnszprs4LmlAj38yds('
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/resources/keystore/keystore.p12 b/test/mocks/aai-simulator/aai-sim/src/main/resources/keystore/keystore.p12
new file mode 100644
index 000000000..6b4340d41
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/resources/keystore/keystore.p12
Binary files differ
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/resources/truststore/truststore.jks b/test/mocks/aai-simulator/aai-sim/src/main/resources/truststore/truststore.jks
new file mode 100644
index 000000000..d219aaf5a
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/resources/truststore/truststore.jks
Binary files differ
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/AaiSimulatorControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/AaiSimulatorControllerTest.java
new file mode 100755
index 000000000..af9b2367d
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/AaiSimulatorControllerTest.java
@@ -0,0 +1,65 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
+import org.springframework.boot.test.web.client.TestRestTemplate;
+import org.springframework.boot.web.server.LocalServerPort;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.http.ResponseEntity;
+import org.springframework.test.context.ActiveProfiles;
+import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@RunWith(SpringJUnit4ClassRunner.class)
+@ActiveProfiles("test")
+@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
+@Configuration
+public class AaiSimulatorControllerTest {
+
+ // Random HTTP port assigned by Spring Boot for this test run.
+ @LocalServerPort
+ private int port;
+
+ // NOTE(review): calls go over https below, so this template must trust the
+ // simulator's self-signed certificate — presumably configured by the "test"
+ // profile; confirm.
+ @Autowired
+ private TestRestTemplate restTemplate;
+
+ /**
+  * Verifies that GET /healthcheck returns the literal "healthy" body.
+  */
+ @Test
+ public void test_healthCheck_matchContent() {
+ final String url = getBaseUrl() + "/healthcheck";
+ final ResponseEntity<String> object = restTemplate.getForEntity(url, String.class);
+
+ assertEquals(Constants.HEALTHY, object.getBody());
+ }
+
+ // Base URL of the embedded server; TestConstants.BASE_URL_V17 is presumably
+ // the "/aai/v17" prefix — confirm against TestConstants.
+ private String getBaseUrl() {
+ return "https://localhost:" + port + TestConstants.BASE_URL_V17;
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/AbstractSpringBootTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/AbstractSpringBootTest.java
new file mode 100755
index 000000000..4f2eab27e
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/AbstractSpringBootTest.java
@@ -0,0 +1,65 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import java.util.List;
+import org.junit.runner.RunWith;
+import org.onap.aai.domain.yang.RelatedToProperty;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aaisimulator.utils.TestRestTemplateService;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
+import org.springframework.boot.web.server.LocalServerPort;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.test.context.ActiveProfiles;
+import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@RunWith(SpringJUnit4ClassRunner.class)
+@ActiveProfiles("test")
+@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
+@Configuration
+public abstract class AbstractSpringBootTest {
+
+ @LocalServerPort
+ private int port;
+
+ @Autowired
+ protected TestRestTemplateService testRestTemplateService;
+
+ public String getUrl(final String... urls) {
+ return TestUtils.getUrl(port, urls);
+ }
+
+ public RelationshipData getRelationshipData(final List<RelationshipData> relationshipData, final String key) {
+ return relationshipData.stream().filter(data -> data.getRelationshipKey().equals(key)).findFirst().orElse(null);
+ }
+
+ public RelatedToProperty getRelatedToProperty(final List<RelatedToProperty> relatedToPropertyList,
+ final String key) {
+ return relatedToPropertyList.stream().filter(data -> data.getPropertyKey().equals(key)).findFirst()
+ .orElse(null);
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/BusinessControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/BusinessControllerTest.java
new file mode 100755
index 000000000..70f5d21bc
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/BusinessControllerTest.java
@@ -0,0 +1,397 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.X_HTTP_METHOD_OVERRIDE;
+import static org.onap.aaisimulator.utils.TestConstants.CUSTOMERS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GLOBAL_CUSTOMER_ID;
+import static org.onap.aaisimulator.utils.TestConstants.RELATED_TO_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCES_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_ID;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_SUBSCRIPTIONS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_TYPE;
+import static org.onap.aaisimulator.utils.TestConstants.VNF_ID;
+import static org.onap.aaisimulator.utils.TestUtils.getCustomer;
+import static org.onap.aaisimulator.utils.TestUtils.getServiceInstance;
+import java.io.IOException;
+import java.util.Optional;
+import java.util.UUID;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.Customer;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.GenericVnfs;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aai.domain.yang.ServiceInstances;
+import org.onap.aai.domain.yang.ServiceSubscription;
+import org.onap.aaisimulator.service.providers.CustomerCacheServiceProvider;
+import org.onap.aaisimulator.utils.RequestError;
+import org.onap.aaisimulator.utils.RequestErrorResponseUtils;
+import org.onap.aaisimulator.utils.ServiceException;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class BusinessControllerTest extends AbstractSpringBootTest {
+
+ private static final String FIREWALL_SERVICE_TYPE = "Firewall";
+
+ private static final String ORCHESTRATION_STATUS = "Active";
+
+ @Autowired
+ private CustomerCacheServiceProvider cacheServiceProvider;
+
+ @After
+ public void after() {
+ cacheServiceProvider.clearAll();
+ }
+
+ @Test
+ public void test_putCustomer_successfullyAddedToCache() throws Exception {
+ invokeCustomerEndPointAndAssertResponse();
+ assertTrue(cacheServiceProvider.getCustomer(GLOBAL_CUSTOMER_ID).isPresent());
+ }
+
+ @Test
+ public void test_getCustomer_ableToRetrieveCustomer() throws Exception {
+ final String url = getUrl(CUSTOMERS_URL);
+
+ final ResponseEntity<Void> response = testRestTemplateService.invokeHttpPut(url, getCustomer(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, response.getStatusCode());
+
+ final ResponseEntity<Customer> actual = testRestTemplateService.invokeHttpGet(url, Customer.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final Customer actualCustomer = actual.getBody();
+ assertEquals(GLOBAL_CUSTOMER_ID, actualCustomer.getGlobalCustomerId());
+ assertNotNull(actualCustomer.getResourceVersion());
+ assertFalse(actualCustomer.getResourceVersion().isEmpty());
+ }
+
+ @Test
+ public void test_getCustomer_returnRequestError_ifCustomerNotInCache() throws Exception {
+ final String url = getUrl(CUSTOMERS_URL);
+
+ final ResponseEntity<RequestError> actual = testRestTemplateService.invokeHttpGet(url, RequestError.class);
+
+ assertEquals(HttpStatus.NOT_FOUND, actual.getStatusCode());
+
+ final RequestError actualError = actual.getBody();
+ final ServiceException serviceException = actualError.getServiceException();
+
+ assertNotNull(serviceException);
+ assertEquals(RequestErrorResponseUtils.ERROR_MESSAGE_ID, serviceException.getMessageId());
+ assertEquals(RequestErrorResponseUtils.ERROR_MESSAGE, serviceException.getText());
+ assertTrue(serviceException.getVariables().contains(HttpMethod.GET.toString()));
+
+ }
+
+ @Test
+ public void test_getServiceSubscription_ableToRetrieveServiceSubscriptionFromCache() throws Exception {
+ final String url = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL);
+
+ invokeCustomerEndPointAndAssertResponse();
+
+ final ResponseEntity<ServiceSubscription> actual =
+ testRestTemplateService.invokeHttpGet(url, ServiceSubscription.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final ServiceSubscription actualServiceSubscription = actual.getBody();
+ assertEquals(SERVICE_TYPE, actualServiceSubscription.getServiceType());
+ assertNotNull(actualServiceSubscription.getRelationshipList());
+ assertFalse(actualServiceSubscription.getRelationshipList().getRelationship().isEmpty());
+ }
+
+ @Test
+ public void test_putSericeInstance_ableToRetrieveServiceInstanceFromCache() throws Exception {
+
+ invokeCustomerEndPointAndAssertResponse();
+ invokeServiceInstanceEndPointAndAssertResponse();
+
+
+ final Optional<ServiceInstance> actual =
+ cacheServiceProvider.getServiceInstance(GLOBAL_CUSTOMER_ID, SERVICE_TYPE, SERVICE_INSTANCE_ID);
+
+ assertTrue(actual.isPresent());
+ final ServiceInstance actualServiceInstance = actual.get();
+
+ assertEquals(SERVICE_INSTANCE_ID, actualServiceInstance.getServiceInstanceId());
+ assertEquals(SERVICE_NAME, actualServiceInstance.getServiceInstanceName());
+
+ }
+
+ @Test
+ public void test_getSericeInstance_usingServiceInstanceName_ableToRetrieveServiceInstanceFromCache()
+ throws Exception {
+
+ invokeCustomerEndPointAndAssertResponse();
+ invokeServiceInstanceEndPointAndAssertResponse();
+
+
+ final String serviceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCES_URL)
+ + "?depth=2&service-instance-name=" + SERVICE_NAME;
+
+ final ResponseEntity<ServiceInstances> actual =
+ testRestTemplateService.invokeHttpGet(serviceInstanceUrl, ServiceInstances.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final ServiceInstances actualServiceInstances = actual.getBody();
+ assertFalse(actualServiceInstances.getServiceInstance().isEmpty());
+
+ assertEquals(SERVICE_NAME, actualServiceInstances.getServiceInstance().get(0).getServiceInstanceName());
+
+ }
+
+ @Test
+ public void test_getSericeInstance_usingServiceInstanceName_returnRequestErrorIfnoServiceInstanceFound()
+ throws Exception {
+
+ invokeCustomerEndPointAndAssertResponse();
+
+ final String serviceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCES_URL)
+ + "?depth=2&service-instance-name=" + SERVICE_NAME;
+
+ final ResponseEntity<RequestError> actual =
+ testRestTemplateService.invokeHttpGet(serviceInstanceUrl, RequestError.class);
+
+ assertEquals(HttpStatus.NOT_FOUND, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ assertNotNull(actual.getBody().getServiceException());
+
+ }
+
+ @Test
+ public void test_getSericeInstance_usingServiceInstanceId_ableToRetrieveServiceInstanceFromCache()
+ throws Exception {
+
+ final String url = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+
+ invokeCustomerEndPointAndAssertResponse();
+ invokeServiceInstanceEndPointAndAssertResponse();
+
+ final ResponseEntity<ServiceInstance> actual =
+ testRestTemplateService.invokeHttpGet(url, ServiceInstance.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final ServiceInstance actualServiceInstance = actual.getBody();
+
+ assertEquals(SERVICE_NAME, actualServiceInstance.getServiceInstanceName());
+ assertEquals(SERVICE_INSTANCE_ID, actualServiceInstance.getServiceInstanceId());
+
+ }
+
+ @Test
+ public void test_getSericeInstance_usinginvalidServiceInstanceId_shouldReturnError() throws Exception {
+
+ invokeCustomerEndPointAndAssertResponse();
+
+ invokeServiceInstanceEndPointAndAssertResponse();
+
+
+ final String invalidServiceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL,
+ SERVICE_INSTANCES_URL + "/service-instance/" + UUID.randomUUID());
+
+ final ResponseEntity<RequestError> actual =
+ testRestTemplateService.invokeHttpGet(invalidServiceInstanceUrl, RequestError.class);
+
+ assertEquals(HttpStatus.NOT_FOUND, actual.getStatusCode());
+
+ final RequestError actualError = actual.getBody();
+ final ServiceException serviceException = actualError.getServiceException();
+
+ assertNotNull(serviceException);
+ assertEquals(RequestErrorResponseUtils.ERROR_MESSAGE_ID, serviceException.getMessageId());
+ assertEquals(RequestErrorResponseUtils.ERROR_MESSAGE, serviceException.getText());
+ assertTrue(serviceException.getVariables().contains(HttpMethod.GET.toString()));
+
+ }
+
+ @Test
+ public void test_getSericeInstance_usingInvalidServiceInstanceName_shouldReturnError() throws Exception {
+
+ invokeCustomerEndPointAndAssertResponse();
+ invokeServiceInstanceEndPointAndAssertResponse();
+
+
+ final String serviceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCES_URL)
+ + "?service-instance-name=Dummy&depth=2";
+
+ final ResponseEntity<RequestError> actual =
+ testRestTemplateService.invokeHttpGet(serviceInstanceUrl, RequestError.class);
+
+ assertEquals(HttpStatus.NOT_FOUND, actual.getStatusCode());
+
+ final RequestError actualError = actual.getBody();
+ final ServiceException serviceException = actualError.getServiceException();
+
+ assertNotNull(serviceException);
+ assertEquals(RequestErrorResponseUtils.ERROR_MESSAGE_ID, serviceException.getMessageId());
+ assertEquals(RequestErrorResponseUtils.ERROR_MESSAGE, serviceException.getText());
+ assertTrue(serviceException.getVariables().contains(HttpMethod.GET.toString()));
+
+ }
+
+ @Test
+ public void test_PathSericeInstance_usingServiceInstanceId_OrchStatusChangedInCache() throws Exception {
+
+ final String url = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+
+ invokeCustomerEndPointAndAssertResponse();
+ invokeServiceInstanceEndPointAndAssertResponse();
+
+ final HttpHeaders httpHeaders = testRestTemplateService.getHttpHeaders();
+ httpHeaders.add(X_HTTP_METHOD_OVERRIDE, HttpMethod.PATCH.toString());
+
+ final ResponseEntity<Void> orchStatuUpdateServiceInstanceResponse = testRestTemplateService
+ .invokeHttpPost(httpHeaders, url, TestUtils.getOrchStatuUpdateServiceInstance(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, orchStatuUpdateServiceInstanceResponse.getStatusCode());
+
+ final ResponseEntity<ServiceInstance> actual =
+ testRestTemplateService.invokeHttpGet(url, ServiceInstance.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final ServiceInstance actualServiceInstance = actual.getBody();
+
+ assertEquals(SERVICE_NAME, actualServiceInstance.getServiceInstanceName());
+ assertEquals(SERVICE_INSTANCE_ID, actualServiceInstance.getServiceInstanceId());
+ assertEquals(ORCHESTRATION_STATUS, actualServiceInstance.getOrchestrationStatus());
+
+ }
+
+ @Test
+ public void test_putServiceSubscription_successfullyAddedToCache() throws Exception {
+ final String serviceSubscriptionurl =
+ getUrl(CUSTOMERS_URL, "/service-subscriptions/service-subscription/", FIREWALL_SERVICE_TYPE);
+
+ invokeCustomerEndPointAndAssertResponse();
+
+ final ResponseEntity<Void> responseEntity = testRestTemplateService.invokeHttpPut(serviceSubscriptionurl,
+ TestUtils.getServiceSubscription(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+
+ final ResponseEntity<ServiceSubscription> actual =
+ testRestTemplateService.invokeHttpGet(serviceSubscriptionurl, ServiceSubscription.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final ServiceSubscription actualServiceSubscription = actual.getBody();
+ assertEquals(FIREWALL_SERVICE_TYPE, actualServiceSubscription.getServiceType());
+
+ }
+
+ @Test
+ public void test_putSericeInstanceRelatedTo_ableToRetrieveServiceInstanceFromCache() throws Exception {
+
+ final String url = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+
+ invokeCustomerEndPointAndAssertResponse();
+
+ invokeServiceInstanceEndPointAndAssertResponse();
+
+ final String relationShipUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL,
+ BI_DIRECTIONAL_RELATIONSHIP_LIST_URL);
+
+ final ResponseEntity<Relationship> responseEntity2 = testRestTemplateService.invokeHttpPut(relationShipUrl,
+ TestUtils.getRelationShipJsonObject(), Relationship.class);
+
+ assertEquals(HttpStatus.ACCEPTED, responseEntity2.getStatusCode());
+
+ final String genericVnfUrl = getUrl(GENERIC_VNF_URL, VNF_ID);
+ final ResponseEntity<Void> genericVnfResponse =
+ testRestTemplateService.invokeHttpPut(genericVnfUrl, TestUtils.getGenericVnf(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, genericVnfResponse.getStatusCode());
+
+ final ResponseEntity<GenericVnfs> actual = testRestTemplateService
+ .invokeHttpGet(url + RELATED_TO_URL + "?vnf-name=" + GENERIC_VNF_NAME, GenericVnfs.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+
+ assertTrue(actual.hasBody());
+ final GenericVnfs genericVnfs = actual.getBody();
+ assertFalse(genericVnfs.getGenericVnf().isEmpty());
+ final GenericVnf genericVnf = genericVnfs.getGenericVnf().get(0);
+ assertEquals(GENERIC_VNF_NAME, genericVnf.getVnfName());
+ }
+
+ @Test
+ public void test_DeleteSericeInstance_ServiceInstanceRemovedFromCache() throws Exception {
+ final String url = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+
+ invokeCustomerEndPointAndAssertResponse();
+
+ invokeServiceInstanceEndPointAndAssertResponse();
+
+ final Optional<ServiceInstance> optional =
+ cacheServiceProvider.getServiceInstance(GLOBAL_CUSTOMER_ID, SERVICE_TYPE, SERVICE_INSTANCE_ID);
+ assertTrue(optional.isPresent());
+ final ServiceInstance serviceInstance = optional.get();
+
+ final ResponseEntity<Void> responseEntity = testRestTemplateService
+ .invokeHttpDelete(url + "?resource-version=" + serviceInstance.getResourceVersion(), Void.class);
+ assertEquals(HttpStatus.NO_CONTENT, responseEntity.getStatusCode());
+ assertFalse(cacheServiceProvider.getServiceInstance(GLOBAL_CUSTOMER_ID, SERVICE_TYPE, SERVICE_INSTANCE_ID)
+ .isPresent());
+ }
+
+ private void invokeServiceInstanceEndPointAndAssertResponse() throws IOException {
+ final String url = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+ final ResponseEntity<Void> responseEntity =
+ testRestTemplateService.invokeHttpPut(url, getServiceInstance(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+ }
+
+ private void invokeCustomerEndPointAndAssertResponse() throws Exception, IOException {
+ final ResponseEntity<Void> response =
+ testRestTemplateService.invokeHttpPut(getUrl(CUSTOMERS_URL), getCustomer(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, response.getStatusCode());
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/CloudRegionsControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/CloudRegionsControllerTest.java
new file mode 100755
index 000000000..29d03ce06
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/CloudRegionsControllerTest.java
@@ -0,0 +1,420 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.Constants.VSERVER;
+import static org.onap.aaisimulator.utils.TestConstants.CLOUD_OWNER_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.CLOUD_REGION_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.CUSTOMERS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_PASSWORD;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_SERVICE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_SYSTEM_INFO_ID;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_SYSTEM_INFO_LIST_URL;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_SYSTEM_TYPE;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_TYEP;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_USERNAME;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_VENDOR;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_SUBSCRIPTIONS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SYSTEM_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.TENANTS_TENANT;
+import static org.onap.aaisimulator.utils.TestConstants.TENANT_ID;
+import static org.onap.aaisimulator.utils.TestConstants.VNF_ID;
+import static org.onap.aaisimulator.utils.TestConstants.VSERVER_ID;
+import static org.onap.aaisimulator.utils.TestConstants.VSERVER_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.VSERVER_URL;
+import java.io.IOException;
+import java.util.List;
+import java.util.Optional;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.CloudRegion;
+import org.onap.aai.domain.yang.EsrSystemInfo;
+import org.onap.aai.domain.yang.EsrSystemInfoList;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.RelatedToProperty;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aai.domain.yang.Tenant;
+import org.onap.aai.domain.yang.Vserver;
+import org.onap.aaisimulator.models.CloudRegionKey;
+import org.onap.aaisimulator.service.providers.CloudRegionCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.CustomerCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.GenericVnfCacheServiceProvider;
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class CloudRegionsControllerTest extends AbstractSpringBootTest {
+
+ private static final CloudRegionKey CLOUD_REGION_KEY = new CloudRegionKey(CLOUD_OWNER_NAME, CLOUD_REGION_NAME);
+
+ @Autowired
+ private CloudRegionCacheServiceProvider cloudRegionCacheServiceProvider;
+
+ @Autowired
+ private CustomerCacheServiceProvider customerCacheServiceProvider;
+
+ @Autowired
+ private GenericVnfCacheServiceProvider genericVnfCacheServiceProvider;
+
+ @After
+ public void after() {
+ cloudRegionCacheServiceProvider.clearAll();
+ customerCacheServiceProvider.clearAll();
+ genericVnfCacheServiceProvider.clearAll();
+ }
+
+ @Test
+ public void test_putCloudRegion_successfullyAddedToCache() throws Exception {
+ final String url = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+
+ invokeCloudRegionHttpPutEndPointAndAssertResponse(url);
+
+ final ResponseEntity<CloudRegion> response = testRestTemplateService.invokeHttpGet(url, CloudRegion.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final CloudRegion cloudRegion = response.getBody();
+ assertEquals(CLOUD_OWNER_NAME, cloudRegion.getCloudOwner());
+ assertEquals(CLOUD_REGION_NAME, cloudRegion.getCloudRegionId());
+
+ assertNotNull("ResourceVersion should not be null", cloudRegion.getResourceVersion());
+
+ }
+
+ @Test
+ public void test_getCloudRegionWithDepthValue_shouldReturnMatchedCloudRegion() throws Exception {
+ final String url = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+
+ invokeCloudRegionHttpPutEndPointAndAssertResponse(url);
+
+ final ResponseEntity<CloudRegion> response =
+ testRestTemplateService.invokeHttpGet(url + "?depth=2", CloudRegion.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final CloudRegion cloudRegion = response.getBody();
+ assertEquals(CLOUD_OWNER_NAME, cloudRegion.getCloudOwner());
+ assertEquals(CLOUD_REGION_NAME, cloudRegion.getCloudRegionId());
+
+ assertNotNull("ResourceVersion should not be null", cloudRegion.getResourceVersion());
+
+ }
+
+ @Test
+ public void test_putGenericVnfRelationShipToPlatform_successfullyAddedToCache() throws Exception {
+
+ final String url = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+
+ invokeCloudRegionHttpPutEndPointAndAssertResponse(url);
+
+ final String relationShipUrl = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME,
+ BI_DIRECTIONAL_RELATIONSHIP_LIST_URL);
+
+ final ResponseEntity<Relationship> responseEntity = testRestTemplateService.invokeHttpPut(relationShipUrl,
+ TestUtils.getGenericVnfRelationShip(), Relationship.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+
+ final Optional<CloudRegion> optional = cloudRegionCacheServiceProvider.getCloudRegion(CLOUD_REGION_KEY);
+ assertTrue(optional.isPresent());
+
+ final CloudRegion actual = optional.get();
+
+ assertNotNull(actual.getRelationshipList());
+ final List<Relationship> relationshipList = actual.getRelationshipList().getRelationship();
+ assertFalse("Relationship list should not be empty", relationshipList.isEmpty());
+ final Relationship relationship = relationshipList.get(0);
+
+ assertEquals(GENERIC_VNF_URL + VNF_ID, relationship.getRelatedLink());
+
+ assertFalse("RelationshipData list should not be empty", relationship.getRelationshipData().isEmpty());
+ assertFalse("RelatedToProperty list should not be empty", relationship.getRelatedToProperty().isEmpty());
+
+ final RelationshipData relationshipData = relationship.getRelationshipData().get(0);
+ assertEquals(Constants.GENERIC_VNF_VNF_ID, relationshipData.getRelationshipKey());
+ assertEquals(TestConstants.VNF_ID, relationshipData.getRelationshipValue());
+
+ final RelatedToProperty relatedToProperty = relationship.getRelatedToProperty().get(0);
+ assertEquals(Constants.GENERIC_VNF_VNF_NAME, relatedToProperty.getPropertyKey());
+ assertEquals(TestConstants.GENERIC_VNF_NAME, relatedToProperty.getPropertyValue());
+
+ }
+
+ @Test
+ public void test_putTenant_successfullyAddedToCache() throws Exception {
+ final String cloudRegionUrl = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+
+ invokeCloudRegionHttpPutEndPointAndAssertResponse(cloudRegionUrl);
+
+ final String tenantUrl = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME,
+ "/" + CLOUD_REGION_NAME + TENANTS_TENANT + TENANT_ID);
+ addTenantAndAssertResponse(tenantUrl);
+
+ final ResponseEntity<Tenant> response = testRestTemplateService.invokeHttpGet(tenantUrl, Tenant.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final Tenant tenant = response.getBody();
+ assertEquals(TENANT_ID, tenant.getTenantId());
+ assertEquals("admin", tenant.getTenantName());
+
+ assertNotNull("ResourceVersion should not be null", tenant.getResourceVersion());
+
+ }
+
+ @Test
+ public void test_putTenantRelationToGenericVnf_successfullyAddedToCache() throws Exception {
+
+ addCustomerServiceAndGenericVnf();
+
+ final String cloudRegionUrl = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+ invokeCloudRegionHttpPutEndPointAndAssertResponse(cloudRegionUrl);
+
+ final String tenantUrl = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME,
+ TENANTS_TENANT + TENANT_ID);
+ addTenantAndAssertResponse(tenantUrl);
+
+ final String tenantRelationShipUrl = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME,
+ "/" + CLOUD_REGION_NAME, TENANTS_TENANT + TENANT_ID, RELATIONSHIP_LIST_RELATIONSHIP_URL);
+
+ final ResponseEntity<Void> tenantRelationShipResponse = testRestTemplateService
+ .invokeHttpPut(tenantRelationShipUrl, TestUtils.getGenericVnfRelatedLink(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, tenantRelationShipResponse.getStatusCode());
+
+ final Optional<Tenant> optional = cloudRegionCacheServiceProvider.getTenant(CLOUD_REGION_KEY, TENANT_ID);
+
+ assertTrue(optional.isPresent());
+ final Tenant actualTenant = optional.get();
+ final RelationshipList relationshipList = actualTenant.getRelationshipList();
+ assertNotNull(relationshipList);
+ assertFalse(relationshipList.getRelationship().isEmpty());
+
+ final Relationship relationship = relationshipList.getRelationship().get(0);
+
+ assertEquals(Constants.BELONGS_TO, relationship.getRelationshipLabel());
+ assertFalse(relationship.getRelationshipData().isEmpty());
+ assertEquals(1, relationship.getRelationshipData().size());
+
+ final List<RelationshipData> relationshipDataList = relationship.getRelationshipData();
+
+ final RelationshipData relationshipData =
+ getRelationshipData(relationshipDataList, Constants.GENERIC_VNF_VNF_ID);
+ assertNotNull(relationshipData);
+ assertEquals(VNF_ID, relationshipData.getRelationshipValue());
+
+ final List<RelatedToProperty> relatedToPropertyList = relationship.getRelatedToProperty();
+
+ final RelatedToProperty property = getRelatedToProperty(relatedToPropertyList, Constants.GENERIC_VNF_VNF_NAME);
+ assertNotNull(property);
+ assertEquals(GENERIC_VNF_NAME, property.getPropertyValue());
+
+ final Optional<GenericVnf> genericVnfOptional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(genericVnfOptional.isPresent());
+ final GenericVnf actualGenericVnf = genericVnfOptional.get();
+ final RelationshipList relationshipListGenericVnf = actualGenericVnf.getRelationshipList();
+ assertNotNull(relationshipListGenericVnf);
+ assertFalse(relationshipListGenericVnf.getRelationship().isEmpty());
+
+ final Relationship relationshipGenericVnf = relationshipListGenericVnf.getRelationship().get(0);
+
+ assertEquals(Constants.BELONGS_TO, relationshipGenericVnf.getRelationshipLabel());
+ assertFalse(relationshipGenericVnf.getRelationshipData().isEmpty());
+ assertEquals(3, relationshipGenericVnf.getRelationshipData().size());
+
+ }
+
+ @Test
+ public void test_putEsrSystemInfo_successfullyAddedToCache() throws Exception {
+ final String url = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+
+ invokeCloudRegionHttpPutEndPointAndAssertResponse(url);
+
+ final String esrSystemInfoListUrl = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME,
+ "/" + CLOUD_REGION_NAME, ESR_SYSTEM_INFO_LIST_URL);
+
+ final String esrSystemInfoUrl = esrSystemInfoListUrl + "/esr-system-info/" + ESR_SYSTEM_INFO_ID;
+ final ResponseEntity<Void> esrSystemInfoResponse =
+ testRestTemplateService.invokeHttpPut(esrSystemInfoUrl, TestUtils.getEsrSystemInfo(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, esrSystemInfoResponse.getStatusCode());
+
+ final ResponseEntity<EsrSystemInfoList> response =
+ testRestTemplateService.invokeHttpGet(esrSystemInfoListUrl, EsrSystemInfoList.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+ final EsrSystemInfoList actualEsrSystemInfoList = response.getBody();
+
+ final List<EsrSystemInfo> esrSystemInfoList = actualEsrSystemInfoList.getEsrSystemInfo();
+ assertNotNull(esrSystemInfoList);
+ assertEquals(1, esrSystemInfoList.size());
+
+ final EsrSystemInfo esrSystemInfo = esrSystemInfoList.get(0);
+ assertEquals(ESR_SYSTEM_INFO_ID, esrSystemInfo.getEsrSystemInfoId());
+ assertEquals(SYSTEM_NAME, esrSystemInfo.getSystemName());
+ assertEquals(ESR_TYEP, esrSystemInfo.getType());
+ assertEquals(ESR_VENDOR, esrSystemInfo.getVendor());
+ assertEquals(ESR_SERVICE_URL, esrSystemInfo.getServiceUrl());
+ assertEquals(ESR_USERNAME, esrSystemInfo.getUserName());
+ assertEquals(ESR_PASSWORD, esrSystemInfo.getPassword());
+ assertEquals(ESR_SYSTEM_TYPE, esrSystemInfo.getSystemType());
+ }
+
+ @Test
+ public void test_putVServer_successfullyAddedToCache() throws Exception {
+ final String url = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+
+ invokeCloudRegionHttpPutEndPointAndAssertResponse(url);
+ addCustomerServiceAndGenericVnf();
+
+ final String tenantUrl = url + TENANTS_TENANT + TENANT_ID;
+ addTenantAndAssertResponse(tenantUrl);
+
+ final String vServerUrl = tenantUrl + VSERVER_URL + VSERVER_ID;
+
+ final ResponseEntity<Void> vServerResponse =
+ testRestTemplateService.invokeHttpPut(vServerUrl, TestUtils.getVserver(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, vServerResponse.getStatusCode());
+
+ final ResponseEntity<Vserver> response = testRestTemplateService.invokeHttpGet(vServerUrl, Vserver.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+ final Vserver actualVserver = response.getBody();
+ assertEquals(VSERVER_NAME, actualVserver.getVserverName());
+ assertEquals(VSERVER_ID, actualVserver.getVserverId());
+ assertEquals("active", actualVserver.getProvStatus());
+ assertNotNull(actualVserver.getRelationshipList());
+ assertFalse(actualVserver.getRelationshipList().getRelationship().isEmpty());
+
+ final Optional<GenericVnf> optional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(optional.isPresent());
+ final GenericVnf genericVnf = optional.get();
+ assertNotNull(genericVnf.getRelationshipList());
+ assertFalse(genericVnf.getRelationshipList().getRelationship().isEmpty());
+
+ final Relationship expectedRelationShip = genericVnf.getRelationshipList().getRelationship().get(0);
+ assertEquals(VSERVER, expectedRelationShip.getRelatedTo());
+ assertNotNull(expectedRelationShip.getRelationshipData());
+ assertEquals(4, expectedRelationShip.getRelationshipData().size());
+
+ final List<RelationshipData> relationshipDataList = expectedRelationShip.getRelationshipData();
+ final RelationshipData vServerrelationshipData =
+ getRelationshipData(relationshipDataList, Constants.VSERVER_VSERVER_ID);
+ assertNotNull(vServerrelationshipData);
+ assertEquals(VSERVER_ID, vServerrelationshipData.getRelationshipValue());
+
+ final RelationshipData cloudOwnerRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.CLOUD_REGION_CLOUD_OWNER);
+ assertNotNull(cloudOwnerRelationshipData);
+ assertEquals(CLOUD_OWNER_NAME, cloudOwnerRelationshipData.getRelationshipValue());
+
+ final RelationshipData cloudRegionIdRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.CLOUD_REGION_CLOUD_REGION_ID);
+ assertNotNull(cloudRegionIdRelationshipData);
+ assertEquals(CLOUD_REGION_NAME, cloudRegionIdRelationshipData.getRelationshipValue());
+
+ final RelationshipData tenantRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.TENANT_TENANT_ID);
+ assertNotNull(tenantRelationshipData);
+ assertEquals(TENANT_ID, tenantRelationshipData.getRelationshipValue());
+
+ }
+
+ @Test
+ public void test_deleteVServer_successfullyRemoveFromCache() throws Exception {
+ final String url = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+
+ invokeCloudRegionHttpPutEndPointAndAssertResponse(url);
+ addCustomerServiceAndGenericVnf();
+
+ final String tenantUrl = url + TENANTS_TENANT + TENANT_ID;
+ addTenantAndAssertResponse(tenantUrl);
+
+ final String vServerAddUrl = tenantUrl + VSERVER_URL + VSERVER_ID;
+
+ final ResponseEntity<Void> vServerAddResponse =
+ testRestTemplateService.invokeHttpPut(vServerAddUrl, TestUtils.getVserver(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, vServerAddResponse.getStatusCode());
+
+ final Optional<Vserver> optional =
+ cloudRegionCacheServiceProvider.getVserver(CLOUD_REGION_KEY, TENANT_ID, VSERVER_ID);
+ assertTrue(optional.isPresent());
+ final Vserver vserver = optional.get();
+
+ final String vServerRemoveUrl = vServerAddUrl + "?resource-version=" + vserver.getResourceVersion();
+
+ final ResponseEntity<Void> responseEntity =
+ testRestTemplateService.invokeHttpDelete(vServerRemoveUrl, Void.class);
+ assertEquals(HttpStatus.NO_CONTENT, responseEntity.getStatusCode());
+ assertFalse(cloudRegionCacheServiceProvider.getVserver(CLOUD_REGION_KEY, TENANT_ID, VSERVER_ID).isPresent());
+
+
+ }
+
+ private void addTenantAndAssertResponse(final String tenantUrl) throws IOException {
+ final ResponseEntity<Void> responseEntity =
+ testRestTemplateService.invokeHttpPut(tenantUrl, TestUtils.getTenant(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+ }
+
+ private void addCustomerServiceAndGenericVnf() throws Exception, IOException {
+ final ResponseEntity<Void> customerResponse =
+ testRestTemplateService.invokeHttpPut(getUrl(CUSTOMERS_URL), TestUtils.getCustomer(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, customerResponse.getStatusCode());
+
+ final String serviceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+ final ResponseEntity<Void> serviceInstanceResponse =
+ testRestTemplateService.invokeHttpPut(serviceInstanceUrl, TestUtils.getServiceInstance(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, serviceInstanceResponse.getStatusCode());
+
+ final String genericVnfUrl = getUrl(GENERIC_VNF_URL, VNF_ID);
+ final ResponseEntity<Void> genericVnfResponse =
+ testRestTemplateService.invokeHttpPut(genericVnfUrl, TestUtils.getGenericVnf(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, genericVnfResponse.getStatusCode());
+
+ }
+
+ private void invokeCloudRegionHttpPutEndPointAndAssertResponse(final String url) throws IOException {
+ final ResponseEntity<Void> responseEntity =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getCloudRegion(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ExternalSystemEsrControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ExternalSystemEsrControllerTest.java
new file mode 100755
index 000000000..fb406ab48
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ExternalSystemEsrControllerTest.java
@@ -0,0 +1,233 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.TestConstants.CUSTOMERS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_PASSWORD;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_SERVICE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_SYSTEM_INFO_ID;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_SYSTEM_INFO_LIST_URL;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_SYSTEM_TYPE;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_TYEP;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_USERNAME;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_VENDOR;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_VIM_ID;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_VNFM_ID;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_VNFM_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_SUBSCRIPTIONS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SYSTEM_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.VNF_ID;
+import java.io.IOException;
+import java.util.List;
+import java.util.Optional;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.EsrSystemInfo;
+import org.onap.aai.domain.yang.EsrSystemInfoList;
+import org.onap.aai.domain.yang.EsrVnfm;
+import org.onap.aai.domain.yang.EsrVnfmList;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aaisimulator.service.providers.ExternalSystemCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.GenericVnfCacheServiceProvider;
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class ExternalSystemEsrControllerTest extends AbstractSpringBootTest {
+
+ @Autowired
+ private ExternalSystemCacheServiceProvider externalSystemCacheServiceProvider;
+
+
+ @Autowired
+ private GenericVnfCacheServiceProvider genericVnfCacheServiceProvider;
+
+ @After
+ public void after() {
+ externalSystemCacheServiceProvider.clearAll();
+ genericVnfCacheServiceProvider.clearAll();
+ }
+
+ @Test
+ public void test_putEsrVnfm_successfullyAddedToCache() throws Exception {
+ final String esrVnfmUrl = getUrl(ESR_VNFM_URL, ESR_VNFM_ID);
+ addEsrVnfmAndAssertResponse(esrVnfmUrl);
+
+ final ResponseEntity<EsrVnfm> response = testRestTemplateService.invokeHttpGet(esrVnfmUrl, EsrVnfm.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final EsrVnfm actualEsrVnfm = response.getBody();
+ assertEquals(ESR_VNFM_ID, actualEsrVnfm.getVnfmId());
+ assertEquals(ESR_VIM_ID, actualEsrVnfm.getVimId());
+
+ }
+
+ @Test
+ public void test_getEsrVnfmList_getAllEsrVnfmsFromCache() throws Exception {
+ final String esrVnfmUrl = getUrl(ESR_VNFM_URL, ESR_VNFM_ID);
+ addEsrVnfmAndAssertResponse(esrVnfmUrl);
+
+ final String esrVnfmListUrl = getUrl(TestConstants.EXTERNAL_SYSTEM_ESR_VNFM_LIST_URL);
+ final ResponseEntity<EsrVnfmList> response =
+ testRestTemplateService.invokeHttpGet(esrVnfmListUrl, EsrVnfmList.class);
+
+ assertTrue(response.hasBody());
+
+ final EsrVnfmList actualEsrVnfmList = response.getBody();
+
+ final List<EsrVnfm> esrVnfmList = actualEsrVnfmList.getEsrVnfm();
+ assertNotNull(esrVnfmList);
+ assertEquals(1, esrVnfmList.size());
+ final EsrVnfm actualEsrVnfm = esrVnfmList.get(0);
+ assertEquals(ESR_VNFM_ID, actualEsrVnfm.getVnfmId());
+ assertEquals(ESR_VIM_ID, actualEsrVnfm.getVimId());
+
+ }
+
+ @Test
+ public void test_putEsrSystemInfo_successfullyAddedToCache() throws Exception {
+ final String esrVnfmUrl = getUrl(ESR_VNFM_URL, ESR_VNFM_ID);
+ addEsrVnfmAndAssertResponse(esrVnfmUrl);
+ final String esrSystemInfoListUrl = getUrl(ESR_VNFM_URL, ESR_VNFM_ID, ESR_SYSTEM_INFO_LIST_URL);
+
+ final String esrSystemInfoUrl = esrSystemInfoListUrl + "/esr-system-info/" + ESR_SYSTEM_INFO_ID;
+ final ResponseEntity<Void> esrSystemInfoResponse =
+ testRestTemplateService.invokeHttpPut(esrSystemInfoUrl, TestUtils.getEsrSystemInfo(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, esrSystemInfoResponse.getStatusCode());
+
+ final ResponseEntity<EsrSystemInfoList> response =
+ testRestTemplateService.invokeHttpGet(esrSystemInfoListUrl, EsrSystemInfoList.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+ final EsrSystemInfoList actualEsrSystemInfoList = response.getBody();
+
+ final List<EsrSystemInfo> esrSystemInfoList = actualEsrSystemInfoList.getEsrSystemInfo();
+ assertNotNull(esrSystemInfoList);
+ assertEquals(1, esrSystemInfoList.size());
+
+ final EsrSystemInfo esrSystemInfo = esrSystemInfoList.get(0);
+ assertEquals(ESR_SYSTEM_INFO_ID, esrSystemInfo.getEsrSystemInfoId());
+ assertEquals(SYSTEM_NAME, esrSystemInfo.getSystemName());
+ assertEquals(ESR_TYEP, esrSystemInfo.getType());
+ assertEquals(ESR_VENDOR, esrSystemInfo.getVendor());
+ assertEquals(ESR_SERVICE_URL, esrSystemInfo.getServiceUrl());
+ assertEquals(ESR_USERNAME, esrSystemInfo.getUserName());
+ assertEquals(ESR_PASSWORD, esrSystemInfo.getPassword());
+ assertEquals(ESR_SYSTEM_TYPE, esrSystemInfo.getSystemType());
+
+
+ }
+
+ @Test
+ public void test_putEsrRelationToGenericVnfm_successfullyAddedToCache() throws Exception {
+ final String esrVnfmUrl = getUrl(ESR_VNFM_URL, ESR_VNFM_ID);
+
+ addEsrVnfmAndAssertResponse(esrVnfmUrl);
+ addCustomerServiceAndGenericVnf();
+
+ final String relationShipUrl = esrVnfmUrl + RELATIONSHIP_LIST_RELATIONSHIP_URL;
+
+ final ResponseEntity<Void> response = testRestTemplateService.invokeHttpPut(relationShipUrl,
+ TestUtils.getGenericVnfRelatedLink(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, response.getStatusCode());
+
+ final Optional<EsrVnfm> optional = externalSystemCacheServiceProvider.getEsrVnfm(ESR_VNFM_ID);
+ assertTrue(optional.isPresent());
+
+ final EsrVnfm actualEsrVnfm = optional.get();
+ final RelationshipList relationshipList = actualEsrVnfm.getRelationshipList();
+ assertNotNull(relationshipList);
+ assertFalse(relationshipList.getRelationship().isEmpty());
+
+ final Relationship relationship = relationshipList.getRelationship().get(0);
+
+ assertEquals(Constants.DEPENDS_ON, relationship.getRelationshipLabel());
+ assertFalse(relationship.getRelationshipData().isEmpty());
+ assertEquals(1, relationship.getRelationshipData().size());
+
+ final RelationshipData relationshipData =
+ getRelationshipData(relationship.getRelationshipData(), Constants.GENERIC_VNF_VNF_ID);
+ assertNotNull(relationshipData);
+ assertEquals(VNF_ID, relationshipData.getRelationshipValue());
+
+ final Optional<GenericVnf> genericVnfOptional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(genericVnfOptional.isPresent());
+ final GenericVnf actualGenericVnf = genericVnfOptional.get();
+ final RelationshipList relationshipListGenericVnf = actualGenericVnf.getRelationshipList();
+ assertNotNull(relationshipListGenericVnf);
+ assertFalse(relationshipListGenericVnf.getRelationship().isEmpty());
+
+ final Relationship relationshipGenericVnf = relationshipListGenericVnf.getRelationship().get(0);
+
+ assertEquals(Constants.DEPENDS_ON, relationshipGenericVnf.getRelationshipLabel());
+ assertFalse(relationshipGenericVnf.getRelationshipData().isEmpty());
+ assertEquals(1, relationshipGenericVnf.getRelationshipData().size());
+
+ final RelationshipData esrRelationshipData =
+ getRelationshipData(relationshipGenericVnf.getRelationshipData(), Constants.ESR_VNFM_VNFM_ID);
+ assertNotNull(esrRelationshipData);
+ assertEquals(ESR_VNFM_ID, esrRelationshipData.getRelationshipValue());
+
+
+ }
+
+ private void addEsrVnfmAndAssertResponse(final String esrVnfmUrl) throws IOException {
+ final ResponseEntity<Void> esrVnfmResponse =
+ testRestTemplateService.invokeHttpPut(esrVnfmUrl, TestUtils.getEsrVnfm(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, esrVnfmResponse.getStatusCode());
+ }
+
+ private void addCustomerServiceAndGenericVnf() throws Exception, IOException {
+ final ResponseEntity<Void> customerResponse =
+ testRestTemplateService.invokeHttpPut(getUrl(CUSTOMERS_URL), TestUtils.getCustomer(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, customerResponse.getStatusCode());
+
+ final String serviceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+ final ResponseEntity<Void> serviceInstanceResponse =
+ testRestTemplateService.invokeHttpPut(serviceInstanceUrl, TestUtils.getServiceInstance(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, serviceInstanceResponse.getStatusCode());
+
+ final String genericVnfUrl = getUrl(GENERIC_VNF_URL, VNF_ID);
+ final ResponseEntity<Void> genericVnfResponse =
+ testRestTemplateService.invokeHttpPut(genericVnfUrl, TestUtils.getGenericVnf(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, genericVnfResponse.getStatusCode());
+
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/GenericVnfsControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/GenericVnfsControllerTest.java
new file mode 100755
index 000000000..8ec26065a
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/GenericVnfsControllerTest.java
@@ -0,0 +1,430 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.Constants.X_HTTP_METHOD_OVERRIDE;
+import static org.onap.aaisimulator.utils.TestConstants.CLOUD_OWNER_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.CLOUD_REGION_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.CUSTOMERS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GLOBAL_CUSTOMER_ID;
+import static org.onap.aaisimulator.utils.TestConstants.LINE_OF_BUSINESS_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.PLATFORM_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_ID;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_SUBSCRIPTIONS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_TYPE;
+import static org.onap.aaisimulator.utils.TestConstants.VNF_ID;
+import java.io.IOException;
+import java.util.List;
+import java.util.Optional;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.GenericVnfs;
+import org.onap.aai.domain.yang.RelatedToProperty;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aaisimulator.service.providers.CustomerCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.GenericVnfCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.LinesOfBusinessCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.PlatformCacheServiceProvider;
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class GenericVnfsControllerTest extends AbstractSpringBootTest {
+
+ @Autowired
+ private CustomerCacheServiceProvider customerCacheServiceProvider;
+
+ @Autowired
+ private GenericVnfCacheServiceProvider genericVnfCacheServiceProvider;
+
+ @Autowired
+ private LinesOfBusinessCacheServiceProvider linesOfBusinessCacheServiceProvider;
+
+ @Autowired
+ private PlatformCacheServiceProvider platformVnfCacheServiceProvider;
+
+ @After
+ public void after() {
+ customerCacheServiceProvider.clearAll();
+ genericVnfCacheServiceProvider.clearAll();
+ platformVnfCacheServiceProvider.clearAll();
+ linesOfBusinessCacheServiceProvider.clearAll();
+ }
+
+ @Test
+ public void test_putGenericVnf_successfullyAddedToCache() throws Exception {
+
+ final String genericVnfUrl = getUrl(GENERIC_VNF_URL, VNF_ID);
+ final ResponseEntity<Void> genericVnfResponse =
+ testRestTemplateService.invokeHttpPut(genericVnfUrl, TestUtils.getGenericVnf(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, genericVnfResponse.getStatusCode());
+
+ final ResponseEntity<GenericVnf> response =
+ testRestTemplateService.invokeHttpGet(genericVnfUrl, GenericVnf.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final GenericVnf actualGenericVnf = response.getBody();
+ assertEquals(GENERIC_VNF_NAME, actualGenericVnf.getVnfName());
+ assertEquals(VNF_ID, actualGenericVnf.getVnfId());
+
+ }
+
+ @Test
+ public void test_putGenericVnfRelation_successfullyAddedToCache() throws Exception {
+
+ addCustomerServiceAndGenericVnf();
+
+ final String genericVnfRelationShipUrl = getUrl(GENERIC_VNF_URL, VNF_ID, RELATIONSHIP_LIST_RELATIONSHIP_URL);
+ final ResponseEntity<Void> genericVnfRelationShipResponse = testRestTemplateService
+ .invokeHttpPut(genericVnfRelationShipUrl, TestUtils.getRelationShip(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, genericVnfRelationShipResponse.getStatusCode());
+
+
+ final Optional<ServiceInstance> optional =
+ customerCacheServiceProvider.getServiceInstance(GLOBAL_CUSTOMER_ID, SERVICE_TYPE, SERVICE_INSTANCE_ID);
+
+ assertTrue(optional.isPresent());
+
+ final ServiceInstance actualServiceInstance = optional.get();
+ final RelationshipList actualRelationshipList = actualServiceInstance.getRelationshipList();
+ assertNotNull(actualRelationshipList);
+ assertFalse(actualRelationshipList.getRelationship().isEmpty());
+ final Relationship actualRelationShip = actualRelationshipList.getRelationship().get(0);
+
+ assertEquals(Constants.COMPOSED_OF, actualRelationShip.getRelationshipLabel());
+ assertEquals(GENERIC_VNF_URL + VNF_ID, actualRelationShip.getRelatedLink());
+
+
+ assertFalse(actualRelationShip.getRelatedToProperty().isEmpty());
+ assertFalse(actualRelationShip.getRelationshipData().isEmpty());
+ final RelatedToProperty actualRelatedToProperty = actualRelationShip.getRelatedToProperty().get(0);
+ final RelationshipData actualRelationshipData = actualRelationShip.getRelationshipData().get(0);
+
+ assertEquals(Constants.GENERIC_VNF_VNF_NAME, actualRelatedToProperty.getPropertyKey());
+ assertEquals(GENERIC_VNF_NAME, actualRelatedToProperty.getPropertyValue());
+ assertEquals(Constants.GENERIC_VNF_VNF_ID, actualRelationshipData.getRelationshipKey());
+ assertEquals(VNF_ID, actualRelationshipData.getRelationshipValue());
+
+ final Optional<GenericVnf> genericVnfOptional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(genericVnfOptional.isPresent());
+ final GenericVnf actualGenericVnf = genericVnfOptional.get();
+ final RelationshipList relationshipList = actualGenericVnf.getRelationshipList();
+ assertNotNull(relationshipList);
+ assertFalse(relationshipList.getRelationship().isEmpty());
+
+ final Relationship relationship = relationshipList.getRelationship().get(0);
+ assertFalse(relationship.getRelatedToProperty().isEmpty());
+ assertEquals(3, relationship.getRelationshipData().size());
+ assertEquals(CUSTOMERS_URL + SERVICE_SUBSCRIPTIONS_URL + SERVICE_INSTANCE_URL, relationship.getRelatedLink());
+
+
+ final List<RelatedToProperty> relatedToProperty = relationship.getRelatedToProperty();
+ final RelatedToProperty firstRelatedToProperty = relatedToProperty.get(0);
+ assertEquals(Constants.SERVICE_INSTANCE_SERVICE_INSTANCE_NAME, firstRelatedToProperty.getPropertyKey());
+ assertEquals(SERVICE_NAME, firstRelatedToProperty.getPropertyValue());
+
+ final List<RelationshipData> relationshipData = relationship.getRelationshipData();
+
+ final RelationshipData globalRelationshipData =
+ getRelationshipData(relationshipData, Constants.CUSTOMER_GLOBAL_CUSTOMER_ID);
+ assertNotNull(globalRelationshipData);
+ assertEquals(GLOBAL_CUSTOMER_ID, globalRelationshipData.getRelationshipValue());
+
+ final RelationshipData serviceSubscriptionRelationshipData =
+ getRelationshipData(relationshipData, Constants.SERVICE_SUBSCRIPTION_SERVICE_TYPE);
+ assertNotNull(serviceSubscriptionRelationshipData);
+ assertEquals(SERVICE_TYPE, serviceSubscriptionRelationshipData.getRelationshipValue());
+
+ final RelationshipData serviceInstanceRelationshipData =
+ getRelationshipData(relationshipData, Constants.SERVICE_INSTANCE_SERVICE_INSTANCE_ID);
+ assertNotNull(serviceInstanceRelationshipData);
+ assertEquals(SERVICE_INSTANCE_ID, serviceInstanceRelationshipData.getRelationshipValue());
+
+ }
+
+ @Test
+ public void test_putGenericVnfRelationToPlatform_successfullyAddedToCache() throws Exception {
+ addCustomerServiceAndGenericVnf();
+
+ final String platformUrl = getUrl(TestConstants.PLATFORMS_URL, PLATFORM_NAME);
+ final ResponseEntity<Void> platformResponse =
+ testRestTemplateService.invokeHttpPut(platformUrl, TestUtils.getPlatform(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, platformResponse.getStatusCode());
+
+ final String genericVnfRelationShipUrl = getUrl(GENERIC_VNF_URL, VNF_ID, RELATIONSHIP_LIST_RELATIONSHIP_URL);
+ final ResponseEntity<Void> genericVnfRelationShipResponse = testRestTemplateService
+ .invokeHttpPut(genericVnfRelationShipUrl, TestUtils.getPlatformRelatedLink(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, genericVnfRelationShipResponse.getStatusCode());
+
+ final Optional<GenericVnf> genericVnfOptional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(genericVnfOptional.isPresent());
+ final GenericVnf actualGenericVnf = genericVnfOptional.get();
+ final RelationshipList relationshipList = actualGenericVnf.getRelationshipList();
+ assertNotNull(relationshipList);
+ assertFalse(relationshipList.getRelationship().isEmpty());
+
+ final Relationship relationship = relationshipList.getRelationship().get(0);
+
+ assertEquals(Constants.USES, relationship.getRelationshipLabel());
+ assertFalse(relationship.getRelationshipData().isEmpty());
+ assertEquals(1, relationship.getRelationshipData().size());
+ assertEquals(TestConstants.PLATFORMS_URL + PLATFORM_NAME, relationship.getRelatedLink());
+
+
+ final List<RelationshipData> relationshipData = relationship.getRelationshipData();
+
+ final RelationshipData platformRelationshipData =
+ getRelationshipData(relationshipData, Constants.PLATFORM_PLATFORM_NAME);
+ assertNotNull(platformRelationshipData);
+ assertEquals(PLATFORM_NAME, platformRelationshipData.getRelationshipValue());
+
+ }
+
+ @Test
+ public void test_putGenericVnfRelationToLineOfBusiness_successfullyAddedToCache() throws Exception {
+ addCustomerServiceAndGenericVnf();
+
+ final String url = getUrl(TestConstants.LINES_OF_BUSINESS_URL, LINE_OF_BUSINESS_NAME);
+ final ResponseEntity<Void> responseEntity =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getLineOfBusiness(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+
+ final String genericVnfRelationShipUrl = getUrl(GENERIC_VNF_URL, VNF_ID, RELATIONSHIP_LIST_RELATIONSHIP_URL);
+ final ResponseEntity<Void> genericVnfRelationShipResponse = testRestTemplateService
+ .invokeHttpPut(genericVnfRelationShipUrl, TestUtils.getLineOfBusinessRelatedLink(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, genericVnfRelationShipResponse.getStatusCode());
+
+ final Optional<GenericVnf> genericVnfOptional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(genericVnfOptional.isPresent());
+ final GenericVnf actualGenericVnf = genericVnfOptional.get();
+ final RelationshipList relationshipList = actualGenericVnf.getRelationshipList();
+ assertNotNull(relationshipList);
+ assertFalse(relationshipList.getRelationship().isEmpty());
+
+ final Relationship relationship = relationshipList.getRelationship().get(0);
+
+ assertEquals(Constants.USES, relationship.getRelationshipLabel());
+ assertEquals(TestConstants.LINES_OF_BUSINESS_URL + LINE_OF_BUSINESS_NAME, relationship.getRelatedLink());
+
+ assertFalse(relationship.getRelationshipData().isEmpty());
+ assertEquals(1, relationship.getRelationshipData().size());
+
+ final List<RelationshipData> relationshipData = relationship.getRelationshipData();
+
+ final RelationshipData lineOfBusinessRelationshipData =
+ getRelationshipData(relationshipData, Constants.LINE_OF_BUSINESS_LINE_OF_BUSINESS_NAME);
+ assertNotNull(lineOfBusinessRelationshipData);
+ assertEquals(LINE_OF_BUSINESS_NAME, lineOfBusinessRelationshipData.getRelationshipValue());
+
+ }
+
+ @Test
+ public void test_putGenericVnfRelationToCloudRegion_successfullyAddedToCache() throws Exception {
+ addCustomerServiceAndGenericVnf();
+
+ final String url = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+
+ final ResponseEntity<Void> responseEntity =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getCloudRegion(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+
+ final String genericVnfRelationShipUrl = getUrl(GENERIC_VNF_URL, VNF_ID, RELATIONSHIP_LIST_RELATIONSHIP_URL);
+ final ResponseEntity<Void> genericVnfRelationShipResponse = testRestTemplateService
+ .invokeHttpPut(genericVnfRelationShipUrl, TestUtils.getCloudRegionRelatedLink(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, genericVnfRelationShipResponse.getStatusCode());
+
+ final Optional<GenericVnf> genericVnfOptional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(genericVnfOptional.isPresent());
+ final GenericVnf actualGenericVnf = genericVnfOptional.get();
+ final RelationshipList relationshipList = actualGenericVnf.getRelationshipList();
+ assertNotNull(relationshipList);
+ assertFalse(relationshipList.getRelationship().isEmpty());
+
+ final Relationship relationship = relationshipList.getRelationship().get(0);
+
+ assertEquals(Constants.LOCATED_IN, relationship.getRelationshipLabel());
+ assertEquals(TestConstants.CLOUD_REGIONS + CLOUD_OWNER_NAME + "/" + CLOUD_REGION_NAME,
+ relationship.getRelatedLink());
+
+ assertFalse(relationship.getRelationshipData().isEmpty());
+ assertEquals(2, relationship.getRelationshipData().size());
+
+ final List<RelationshipData> relationshipDataList = relationship.getRelationshipData();
+
+ final RelationshipData cloudOwnerRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.CLOUD_REGION_CLOUD_OWNER);
+ assertNotNull(cloudOwnerRelationshipData);
+ assertEquals(CLOUD_OWNER_NAME, cloudOwnerRelationshipData.getRelationshipValue());
+
+ final RelationshipData cloudRegionIdRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.CLOUD_REGION_CLOUD_REGION_ID);
+ assertNotNull(cloudRegionIdRelationshipData);
+ assertEquals(CLOUD_REGION_NAME, cloudRegionIdRelationshipData.getRelationshipValue());
+
+ final List<RelatedToProperty> relatedToPropertyList = relationship.getRelatedToProperty();
+
+ final RelatedToProperty cloudRegionOwnerDefinedTypeProperty =
+ getRelatedToProperty(relatedToPropertyList, Constants.CLOUD_REGION_OWNER_DEFINED_TYPE);
+ assertNotNull(cloudRegionOwnerDefinedTypeProperty);
+ assertEquals("OwnerType", cloudRegionOwnerDefinedTypeProperty.getPropertyValue());
+
+ }
+
+ @Test
+ public void test_putBiDirectionalRelationShip_successfullyAddedToCache() throws Exception {
+ addCustomerServiceAndGenericVnf();
+
+ final String relationShipUrl = getUrl(GENERIC_VNF_URL, VNF_ID, BI_DIRECTIONAL_RELATIONSHIP_LIST_URL);
+
+ final ResponseEntity<Relationship> responseEntity = testRestTemplateService.invokeHttpPut(relationShipUrl,
+ TestUtils.getTenantRelationShip(), Relationship.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+
+ final Optional<GenericVnf> optional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(optional.isPresent());
+
+ final GenericVnf actual = optional.get();
+
+ assertNotNull(actual.getRelationshipList());
+ final List<Relationship> relationshipList = actual.getRelationshipList().getRelationship();
+ assertFalse("Relationship list should not be empty", relationshipList.isEmpty());
+ final Relationship relationship = relationshipList.get(0);
+
+ assertFalse("RelationshipData list should not be empty", relationship.getRelationshipData().isEmpty());
+ assertFalse("RelatedToProperty list should not be empty", relationship.getRelatedToProperty().isEmpty());
+ }
+
+ @Test
+ public void test_patchGenericVnf_usingVnfId_OrchStatusChangedInCache() throws Exception {
+ addCustomerServiceAndGenericVnf();
+
+ final HttpHeaders httpHeaders = testRestTemplateService.getHttpHeaders();
+ httpHeaders.add(X_HTTP_METHOD_OVERRIDE, HttpMethod.PATCH.toString());
+ httpHeaders.remove(HttpHeaders.CONTENT_TYPE);
+ httpHeaders.add(HttpHeaders.CONTENT_TYPE, Constants.APPLICATION_MERGE_PATCH_JSON);
+
+ final String genericVnfUrl = getUrl(GENERIC_VNF_URL, VNF_ID);
+ final ResponseEntity<Void> orchStatuUpdateServiceInstanceResponse = testRestTemplateService
+ .invokeHttpPost(httpHeaders, genericVnfUrl, TestUtils.getGenericVnfOrchStatuUpdate(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, orchStatuUpdateServiceInstanceResponse.getStatusCode());
+
+ final ResponseEntity<GenericVnf> response =
+ testRestTemplateService.invokeHttpGet(genericVnfUrl, GenericVnf.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final GenericVnf actualGenericVnf = response.getBody();
+ assertEquals(GENERIC_VNF_NAME, actualGenericVnf.getVnfName());
+ assertEquals(VNF_ID, actualGenericVnf.getVnfId());
+ assertEquals("Assigned", actualGenericVnf.getOrchestrationStatus());
+
+ }
+
+ @Test
+ public void test_getGenericVnfs_usingSelfLink_getAllGenericVnfsInCache() throws Exception {
+
+ addCustomerServiceAndGenericVnf();
+
+ final String selfLink = "http://localhost:9921/generic-vnf/" + VNF_ID;
+ final String url = getUrl(TestConstants.GENERIC_VNFS_URL_1) + "?selflink=" + selfLink;
+ final ResponseEntity<GenericVnfs> response = testRestTemplateService.invokeHttpGet(url, GenericVnfs.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final GenericVnfs actualGenericVnfs = response.getBody();
+ final List<GenericVnf> genericVnfList = actualGenericVnfs.getGenericVnf();
+ assertNotNull(genericVnfList);
+ assertEquals(1, genericVnfList.size());
+ final GenericVnf actualGenericVnf = genericVnfList.get(0);
+ assertEquals(selfLink, actualGenericVnf.getSelflink());
+ assertEquals(GENERIC_VNF_NAME, actualGenericVnf.getVnfName());
+ assertEquals(VNF_ID, actualGenericVnf.getVnfId());
+ }
+
+ @Test
+ public void test_deleteGenericVnf_usingVnfIdAndResourceVersion_removedFromCache() throws Exception {
+
+ addCustomerServiceAndGenericVnf();
+
+ final Optional<GenericVnf> genericVnfOptional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(genericVnfOptional.isPresent());
+ final GenericVnf genericVnf = genericVnfOptional.get();
+
+ final String genericVnfDeleteUrl =
+ getUrl(GENERIC_VNF_URL, genericVnf.getVnfId()) + "?resource-version=" + genericVnf.getResourceVersion();
+
+ final ResponseEntity<Void> responseEntity =
+ testRestTemplateService.invokeHttpDelete(genericVnfDeleteUrl, Void.class);
+ assertEquals(HttpStatus.NO_CONTENT, responseEntity.getStatusCode());
+ assertFalse(genericVnfCacheServiceProvider.getGenericVnf(VNF_ID).isPresent());
+
+ }
+
+ private void addCustomerServiceAndGenericVnf() throws Exception, IOException {
+ final ResponseEntity<Void> customerResponse =
+ testRestTemplateService.invokeHttpPut(getUrl(CUSTOMERS_URL), TestUtils.getCustomer(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, customerResponse.getStatusCode());
+
+ final String serviceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+ final ResponseEntity<Void> serviceInstanceResponse =
+ testRestTemplateService.invokeHttpPut(serviceInstanceUrl, TestUtils.getServiceInstance(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, serviceInstanceResponse.getStatusCode());
+
+ final String genericVnfUrl = getUrl(GENERIC_VNF_URL, VNF_ID);
+ final ResponseEntity<Void> genericVnfResponse =
+ testRestTemplateService.invokeHttpPut(genericVnfUrl, TestUtils.getGenericVnf(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, genericVnfResponse.getStatusCode());
+
+ }
+
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/LinesOfBusinessControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/LinesOfBusinessControllerTest.java
new file mode 100755
index 000000000..0b6cfb50f
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/LinesOfBusinessControllerTest.java
@@ -0,0 +1,143 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_URL;
+import static org.onap.aaisimulator.utils.TestConstants.LINE_OF_BUSINESS_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.VNF_ID;
+import java.util.List;
+import java.util.Optional;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.LineOfBusiness;
+import org.onap.aai.domain.yang.RelatedToProperty;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.LinesOfBusinessCacheServiceProvider;
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class LinesOfBusinessControllerTest extends AbstractSpringBootTest {
+
+ @Autowired
+ private LinesOfBusinessCacheServiceProvider linesOfBusinessCacheServiceProvider;
+
+ @After
+ public void after() {
+ linesOfBusinessCacheServiceProvider.clearAll();
+ }
+
+ @Test
+ public void test_putLineOfBusiness_successfullyAddedToCache() throws Exception {
+
+ final String url = getUrl(TestConstants.LINES_OF_BUSINESS_URL, LINE_OF_BUSINESS_NAME);
+ final ResponseEntity<Void> lineOfBusinessResponse =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getLineOfBusiness(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, lineOfBusinessResponse.getStatusCode());
+
+ final ResponseEntity<LineOfBusiness> response =
+ testRestTemplateService.invokeHttpGet(url, LineOfBusiness.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final LineOfBusiness actualLineOfBusiness = response.getBody();
+ assertEquals(LINE_OF_BUSINESS_NAME, actualLineOfBusiness.getLineOfBusinessName());
+ assertNotNull("resource version should not be null", actualLineOfBusiness.getResourceVersion());
+
+ }
+
+ @Test
+ public void test_getLineOfBusinessWithFormatCount() throws Exception {
+
+ final String url = getUrl(TestConstants.LINES_OF_BUSINESS_URL, LINE_OF_BUSINESS_NAME);
+ final ResponseEntity<Void> lineOfBusinessResponse =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getLineOfBusiness(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, lineOfBusinessResponse.getStatusCode());
+
+ final ResponseEntity<Results> response = testRestTemplateService
+ .invokeHttpGet(url + "?resultIndex=0&resultSize=1&format=" + Format.COUNT.getValue(), Results.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final Results result = response.getBody();
+ assertNotNull(result.getValues());
+ assertFalse(result.getValues().isEmpty());
+ assertEquals(1, result.getValues().get(0).get(Constants.LINE_OF_BUSINESS));
+ }
+
+
+ @Test
+ public void test_putGenericVnfRelationShipToPlatform_successfullyAddedToCache() throws Exception {
+
+ final String url = getUrl(TestConstants.LINES_OF_BUSINESS_URL, LINE_OF_BUSINESS_NAME);
+ final ResponseEntity<Void> response =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getLineOfBusiness(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, response.getStatusCode());
+
+ final String relationShipUrl = getUrl(TestConstants.LINES_OF_BUSINESS_URL, LINE_OF_BUSINESS_NAME,
+ BI_DIRECTIONAL_RELATIONSHIP_LIST_URL);
+
+ final ResponseEntity<Relationship> responseEntity = testRestTemplateService.invokeHttpPut(relationShipUrl,
+ TestUtils.getGenericVnfRelationShip(), Relationship.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+
+ final Optional<LineOfBusiness> optional =
+ linesOfBusinessCacheServiceProvider.getLineOfBusiness(LINE_OF_BUSINESS_NAME);
+ assertTrue(optional.isPresent());
+
+ final LineOfBusiness actual = optional.get();
+
+ assertNotNull(actual.getRelationshipList());
+ final List<Relationship> relationshipList = actual.getRelationshipList().getRelationship();
+ assertFalse("Relationship list should not be empty", relationshipList.isEmpty());
+ final Relationship relationship = relationshipList.get(0);
+
+ assertEquals(GENERIC_VNF_URL + VNF_ID, relationship.getRelatedLink());
+ assertFalse("RelationshipData list should not be empty", relationship.getRelationshipData().isEmpty());
+ assertFalse("RelatedToProperty list should not be empty", relationship.getRelatedToProperty().isEmpty());
+
+ final RelationshipData relationshipData = relationship.getRelationshipData().get(0);
+ assertEquals(Constants.GENERIC_VNF_VNF_ID, relationshipData.getRelationshipKey());
+ assertEquals(TestConstants.VNF_ID, relationshipData.getRelationshipValue());
+
+ final RelatedToProperty relatedToProperty = relationship.getRelatedToProperty().get(0);
+ assertEquals(Constants.GENERIC_VNF_VNF_NAME, relatedToProperty.getPropertyKey());
+ assertEquals(TestConstants.GENERIC_VNF_NAME, relatedToProperty.getPropertyValue());
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/NodesControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/NodesControllerTest.java
new file mode 100755
index 000000000..12412872e
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/NodesControllerTest.java
@@ -0,0 +1,156 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.RESOURCE_LINK;
+import static org.onap.aaisimulator.utils.Constants.RESOURCE_TYPE;
+import static org.onap.aaisimulator.utils.Constants.SERVICE_RESOURCE_TYPE;
+import static org.onap.aaisimulator.utils.TestConstants.CUSTOMERS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNFS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_ID;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_SUBSCRIPTIONS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.VNF_ID;
+import java.io.IOException;
+import java.util.Map;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.GenericVnfs;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.CustomerCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.NodesCacheServiceProvider;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class NodesControllerTest extends AbstractSpringBootTest {
+
+ @Autowired
+ private NodesCacheServiceProvider nodesCacheServiceProvider;
+
+ @Autowired
+ private CustomerCacheServiceProvider customerCacheServiceProvider;
+
+ @After
+ public void after() {
+ nodesCacheServiceProvider.clearAll();
+ customerCacheServiceProvider.clearAll();
+ }
+
+ @Test
+ public void test_getNodesSericeInstance_usingServiceInstanceId_ableToRetrieveServiceInstanceFromCache()
+ throws Exception {
+
+ invokeCustomerandServiceInstanceUrls();
+
+ final ResponseEntity<ServiceInstance> actual = testRestTemplateService
+ .invokeHttpGet(getUrl(TestConstants.NODES_URL, SERVICE_INSTANCE_URL), ServiceInstance.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final ServiceInstance actualServiceInstance = actual.getBody();
+
+ assertEquals(SERVICE_NAME, actualServiceInstance.getServiceInstanceName());
+ assertEquals(SERVICE_INSTANCE_ID, actualServiceInstance.getServiceInstanceId());
+
+ }
+
+ @Test
+ public void test_getNodesSericeInstance_usingServiceInstanceIdAndFormatPathed_ableToRetrieveServiceInstanceFromCache()
+ throws Exception {
+
+ invokeCustomerandServiceInstanceUrls();
+
+ final ResponseEntity<Results> actual = testRestTemplateService.invokeHttpGet(
+ getUrl(TestConstants.NODES_URL, SERVICE_INSTANCE_URL) + "?format=" + Format.PATHED.getValue(),
+ Results.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final Results result = actual.getBody();
+
+ assertNotNull(result.getValues());
+ assertFalse(result.getValues().isEmpty());
+ final Map<String, Object> actualMap = result.getValues().get(0);
+
+ assertEquals(CUSTOMERS_URL + SERVICE_SUBSCRIPTIONS_URL + SERVICE_INSTANCE_URL, actualMap.get(RESOURCE_LINK));
+ assertEquals(SERVICE_RESOURCE_TYPE, actualMap.get(RESOURCE_TYPE));
+
+ }
+
+ @Test
+ public void test_getNodesGenericVnfs_usingVnfName_ableToRetrieveItFromCache() throws Exception {
+ invokeCustomerandServiceInstanceUrls();
+
+ final String genericVnfUrl = getUrl(GENERIC_VNF_URL, VNF_ID);
+ final ResponseEntity<Void> genericVnfResponse =
+ testRestTemplateService.invokeHttpPut(genericVnfUrl, TestUtils.getGenericVnf(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, genericVnfResponse.getStatusCode());
+
+ final String nodeGenericVnfsUrl =
+ getUrl(TestConstants.NODES_URL, GENERIC_VNFS_URL) + "?vnf-name=" + GENERIC_VNF_NAME;
+
+ final ResponseEntity<GenericVnfs> actual =
+ testRestTemplateService.invokeHttpGet(nodeGenericVnfsUrl, GenericVnfs.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final GenericVnfs genericVnfs = actual.getBody();
+ assertEquals(1, genericVnfs.getGenericVnf().size());
+
+ final GenericVnf genericVnf = genericVnfs.getGenericVnf().get(0);
+ assertEquals(GENERIC_VNF_NAME, genericVnf.getVnfName());
+ assertEquals(VNF_ID, genericVnf.getVnfId());
+
+ }
+
+ private void invokeCustomerandServiceInstanceUrls() throws Exception, IOException {
+ final String url = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+
+ final ResponseEntity<Void> response =
+ testRestTemplateService.invokeHttpPut(getUrl(CUSTOMERS_URL), TestUtils.getCustomer(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, response.getStatusCode());
+
+ final ResponseEntity<Void> response2 =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getServiceInstance(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, response2.getStatusCode());
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/OwningEntityControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/OwningEntityControllerTest.java
new file mode 100755
index 000000000..c5baad470
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/OwningEntityControllerTest.java
@@ -0,0 +1,199 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.TestConstants.CUSTOMERS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GLOBAL_CUSTOMER_ID;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_ID;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_SUBSCRIPTIONS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_TYPE;
+import java.io.IOException;
+import java.util.List;
+import java.util.Optional;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.OwningEntity;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.CustomerCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.OwnEntityCacheServiceProvider;
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class OwningEntityControllerTest extends AbstractSpringBootTest {
+
+ private static final String OWN_ENTITY_ID_VALUE = "oe_1";
+ private static final String OWN_ENTITY_NAME_VALUE = "oe_2";
+
+ @Autowired
+ private OwnEntityCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ private CustomerCacheServiceProvider customerCacheServiceProvider;
+
+ @After
+ public void after() {
+ cacheServiceProvider.clearAll();
+ customerCacheServiceProvider.clearAll();
+ }
+
+ @Test
+ public void test_putOwningEntity_successfullyAddedToCache() throws Exception {
+ final String url = getUrl(TestConstants.OWNING_ENTITY_URL, OWN_ENTITY_ID_VALUE);
+ final ResponseEntity<Void> actual =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getOwningEntity(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, actual.getStatusCode());
+
+ final ResponseEntity<OwningEntity> actualResponse =
+ testRestTemplateService.invokeHttpGet(url, OwningEntity.class);
+
+ assertEquals(HttpStatus.OK, actualResponse.getStatusCode());
+ assertTrue(actualResponse.hasBody());
+ final OwningEntity actualOwningEntity = actualResponse.getBody();
+ assertEquals(OWN_ENTITY_ID_VALUE, actualOwningEntity.getOwningEntityId());
+ assertEquals(OWN_ENTITY_NAME_VALUE, actualOwningEntity.getOwningEntityName());
+ assertNotNull(actualOwningEntity.getResourceVersion());
+
+ }
+
+ @Test
+ public void test_getOwningEntityCount_correctResult() throws Exception {
+ final String url = getUrl(TestConstants.OWNING_ENTITY_URL, OWN_ENTITY_ID_VALUE);
+ final ResponseEntity<Void> actual =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getOwningEntity(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, actual.getStatusCode());
+
+ final ResponseEntity<Results> actualResponse = testRestTemplateService
+ .invokeHttpGet(url + "?resultIndex=0&resultSize=1&format=" + Format.COUNT.getValue(), Results.class);
+
+ assertEquals(HttpStatus.OK, actualResponse.getStatusCode());
+ assertTrue(actualResponse.hasBody());
+ final Results result = actualResponse.getBody();
+ assertNotNull(result.getValues());
+ assertFalse(result.getValues().isEmpty());
+ assertEquals(1, result.getValues().get(0).get(Constants.OWNING_ENTITY));
+ }
+
+ @Test
+ public void test_putOwningEntityRelationShip_successfullyAddedToCache() throws Exception {
+ addCustomerAndServiceInstance();
+
+ final String url = getUrl(TestConstants.OWNING_ENTITY_URL, OWN_ENTITY_ID_VALUE);
+ final ResponseEntity<Void> actual =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getOwningEntity(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, actual.getStatusCode());
+
+ final String owningEntityRelationshipUrl = url + RELATIONSHIP_LIST_RELATIONSHIP_URL;
+
+ final ResponseEntity<Void> putResponse = testRestTemplateService.invokeHttpPut(owningEntityRelationshipUrl,
+ TestUtils.getOwningEntityRelationship(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, putResponse.getStatusCode());
+
+ final ResponseEntity<OwningEntity> actualResponse =
+ testRestTemplateService.invokeHttpGet(url, OwningEntity.class);
+
+ assertEquals(HttpStatus.OK, actualResponse.getStatusCode());
+ assertTrue(actualResponse.hasBody());
+ final OwningEntity actualOwningEntity = actualResponse.getBody();
+ assertEquals(OWN_ENTITY_ID_VALUE, actualOwningEntity.getOwningEntityId());
+ assertEquals(OWN_ENTITY_NAME_VALUE, actualOwningEntity.getOwningEntityName());
+ assertNotNull(actualOwningEntity.getRelationshipList());
+ assertFalse(actualOwningEntity.getRelationshipList().getRelationship().isEmpty());
+ assertNotNull(actualOwningEntity.getRelationshipList().getRelationship().get(0));
+
+ final Relationship actualRelationship = actualOwningEntity.getRelationshipList().getRelationship().get(0);
+ final List<RelationshipData> relationshipDataList = actualRelationship.getRelationshipData();
+ assertEquals(Constants.BELONGS_TO, actualRelationship.getRelationshipLabel());
+ assertFalse(relationshipDataList.isEmpty());
+ assertEquals(3, relationshipDataList.size());
+
+ final RelationshipData globalRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.CUSTOMER_GLOBAL_CUSTOMER_ID);
+ assertNotNull(globalRelationshipData);
+ assertEquals(GLOBAL_CUSTOMER_ID, globalRelationshipData.getRelationshipValue());
+
+ final RelationshipData serviceSubscriptionRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.SERVICE_SUBSCRIPTION_SERVICE_TYPE);
+ assertNotNull(serviceSubscriptionRelationshipData);
+ assertEquals(SERVICE_TYPE, serviceSubscriptionRelationshipData.getRelationshipValue());
+
+ final RelationshipData serviceInstanceRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.SERVICE_INSTANCE_SERVICE_INSTANCE_ID);
+ assertNotNull(serviceInstanceRelationshipData);
+ assertEquals(SERVICE_INSTANCE_ID, serviceInstanceRelationshipData.getRelationshipValue());
+
+ final Optional<ServiceInstance> optional =
+ customerCacheServiceProvider.getServiceInstance(GLOBAL_CUSTOMER_ID, SERVICE_TYPE, SERVICE_INSTANCE_ID);
+ assertTrue(optional.isPresent());
+
+ final ServiceInstance serviceInstance = optional.get();
+
+ assertNotNull(serviceInstance.getRelationshipList());
+ final List<Relationship> serviceRelationshipList = serviceInstance.getRelationshipList().getRelationship();
+ assertFalse(serviceRelationshipList.isEmpty());
+ assertEquals(1, serviceRelationshipList.size());
+ final Relationship relationship = serviceRelationshipList.get(0);
+ assertEquals(Constants.BELONGS_TO, relationship.getRelationshipLabel());
+ assertEquals(TestConstants.OWNING_ENTITY_URL + OWN_ENTITY_ID_VALUE, relationship.getRelatedLink());
+
+ final List<RelationshipData> serviceRelationshipDataList = serviceRelationshipList.get(0).getRelationshipData();
+ assertFalse(serviceRelationshipDataList.isEmpty());
+ assertEquals(1, serviceRelationshipDataList.size());
+
+ final RelationshipData owningEntityRelationshipData =
+ getRelationshipData(serviceRelationshipDataList, Constants.OWNING_ENTITY_OWNING_ENTITY_ID);
+ assertNotNull(owningEntityRelationshipData);
+ assertEquals(OWN_ENTITY_ID_VALUE, owningEntityRelationshipData.getRelationshipValue());
+
+ }
+
+ private void addCustomerAndServiceInstance() throws Exception, IOException {
+ final ResponseEntity<Void> customerResponse =
+ testRestTemplateService.invokeHttpPut(getUrl(CUSTOMERS_URL), TestUtils.getCustomer(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, customerResponse.getStatusCode());
+
+ final String serviceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+ final ResponseEntity<Void> serviceInstanceResponse =
+ testRestTemplateService.invokeHttpPut(serviceInstanceUrl, TestUtils.getServiceInstance(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, serviceInstanceResponse.getStatusCode());
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/PlatformControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/PlatformControllerTest.java
new file mode 100755
index 000000000..00c663884
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/PlatformControllerTest.java
@@ -0,0 +1,142 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_URL;
+import static org.onap.aaisimulator.utils.TestConstants.PLATFORM_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.VNF_ID;
+import java.util.List;
+import java.util.Optional;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.Platform;
+import org.onap.aai.domain.yang.RelatedToProperty;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.PlatformCacheServiceProvider;
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
public class PlatformControllerTest extends AbstractSpringBootTest {

    // direct handle on the platform cache so tests can both inspect and reset it
    @Autowired
    private PlatformCacheServiceProvider platformCacheServiceProvider;

    // wipe cached platforms after each test so tests stay independent
    @After
    public void after() {
        platformCacheServiceProvider.clearAll();
    }

    /**
     * PUTs a platform and verifies it can be read back via GET with the expected
     * name and a populated resource-version.
     */
    @Test
    public void test_putPlatform_successfullyAddedToCache() throws Exception {

        final String platformUrl = getUrl(TestConstants.PLATFORMS_URL, PLATFORM_NAME);
        final ResponseEntity<Void> platformResponse =
                testRestTemplateService.invokeHttpPut(platformUrl, TestUtils.getPlatform(), Void.class);
        assertEquals(HttpStatus.ACCEPTED, platformResponse.getStatusCode());

        final ResponseEntity<Platform> response = testRestTemplateService.invokeHttpGet(platformUrl, Platform.class);
        assertEquals(HttpStatus.OK, response.getStatusCode());

        assertTrue(response.hasBody());

        final Platform actualPlatform = response.getBody();
        assertEquals(PLATFORM_NAME, actualPlatform.getPlatformName());
        assertNotNull("resource version should not be null", actualPlatform.getResourceVersion());

    }

    /**
     * GET with {@code format=count} should return a Results body whose first
     * entry maps the platform key to a count of 1.
     */
    @Test
    public void test_getPlatformWithFormatCount() throws Exception {

        final String platformUrl = getUrl(TestConstants.PLATFORMS_URL, PLATFORM_NAME);

        final ResponseEntity<Void> platformResponse =
                testRestTemplateService.invokeHttpPut(platformUrl, TestUtils.getPlatform(), Void.class);
        assertEquals(HttpStatus.ACCEPTED, platformResponse.getStatusCode());

        final ResponseEntity<Results> response = testRestTemplateService.invokeHttpGet(
                platformUrl + "?resultIndex=0&resultSize=1&format=" + Format.COUNT.getValue(), Results.class);
        assertEquals(HttpStatus.OK, response.getStatusCode());

        assertTrue(response.hasBody());

        final Results result = response.getBody();
        assertNotNull(result.getValues());
        assertFalse(result.getValues().isEmpty());
        // exactly one platform was PUT above, so the count must be 1
        assertEquals(1, result.getValues().get(0).get(Constants.PLATFORM));

    }

    /**
     * PUTs a bi-directional relationship from the platform to a generic-vnf and
     * then inspects the platform cache directly: the stored platform must carry
     * the relationship's related-link, relationship-data and related-to-property.
     */
    @Test
    public void test_putGenericVnfRelationShipToPlatform_successfullyAddedToCache() throws Exception {

        final String platformUrl = getUrl(TestConstants.PLATFORMS_URL, PLATFORM_NAME);
        final ResponseEntity<Void> platformResponse =
                testRestTemplateService.invokeHttpPut(platformUrl, TestUtils.getPlatform(), Void.class);
        assertEquals(HttpStatus.ACCEPTED, platformResponse.getStatusCode());

        final String platformRelationShipUrl =
                getUrl(TestConstants.PLATFORMS_URL, PLATFORM_NAME, BI_DIRECTIONAL_RELATIONSHIP_LIST_URL);

        final ResponseEntity<Relationship> responseEntity = testRestTemplateService
                .invokeHttpPut(platformRelationShipUrl, TestUtils.getGenericVnfRelationShip(), Relationship.class);
        assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());

        // verify through the cache service rather than over HTTP
        final Optional<Platform> optional = platformCacheServiceProvider.getPlatform(PLATFORM_NAME);
        assertTrue(optional.isPresent());

        final Platform actual = optional.get();

        assertNotNull(actual.getRelationshipList());
        final List<Relationship> relationshipList = actual.getRelationshipList().getRelationship();
        assertFalse("Relationship list should not be empty", relationshipList.isEmpty());
        final Relationship relationship = relationshipList.get(0);

        assertEquals(GENERIC_VNF_URL + VNF_ID, relationship.getRelatedLink());
        assertFalse("RelationshipData list should not be empty", relationship.getRelationshipData().isEmpty());
        assertFalse("RelatedToProperty list should not be empty", relationship.getRelatedToProperty().isEmpty());

        // relationship-data carries the vnf-id key/value pair
        final RelationshipData relationshipData = relationship.getRelationshipData().get(0);
        assertEquals(Constants.GENERIC_VNF_VNF_ID, relationshipData.getRelationshipKey());
        assertEquals(TestConstants.VNF_ID, relationshipData.getRelationshipValue());

        // related-to-property carries the vnf-name key/value pair
        final RelatedToProperty relatedToProperty = relationship.getRelatedToProperty().get(0);
        assertEquals(Constants.GENERIC_VNF_VNF_NAME, relatedToProperty.getPropertyKey());
        assertEquals(TestConstants.GENERIC_VNF_NAME, relatedToProperty.getPropertyValue());

    }

}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/PnfsControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/PnfsControllerTest.java
new file mode 100755
index 000000000..440c66d69
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/PnfsControllerTest.java
@@ -0,0 +1,72 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2020 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.v15.Pnf;
+import org.onap.aaisimulator.service.providers.PnfCacheServiceProvider;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+
+/**
+ * @author Raj Gumma (raj.gumma@est.tech)
+ *
+ */
+public class PnfsControllerTest extends AbstractSpringBootTest {
+
+ @Autowired
+ private PnfCacheServiceProvider cacheServiceProvider;
+
+ private final String PNF="test-008";
+ private final String PNF_URL= "/aai/v15/network/pnfs/pnf/";
+
+
+ @After
+ public void after() {
+ cacheServiceProvider.clearAll();
+ }
+
+ @Test
+ public void test_pnf_successfullyAddedToCache() throws Exception {
+
+ final String url = getUrl(PNF_URL, PNF);
+ final ResponseEntity<Void> pnfResponse =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getPnf(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, pnfResponse.getStatusCode());
+
+ final ResponseEntity<Pnf> response =
+ testRestTemplateService.invokeHttpGet(url, Pnf.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final Pnf actualPnf = response.getBody();
+ assertEquals("test-008", actualPnf.getPnfName());
+ assertEquals("5f2602dc-f647-4535-8f1d-9ec079e68a49", actualPnf.getPnfId());
+
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ProjectControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ProjectControllerTest.java
new file mode 100755
index 000000000..5478ef7c0
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ProjectControllerTest.java
@@ -0,0 +1,205 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.TestConstants.CUSTOMERS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GLOBAL_CUSTOMER_ID;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_ID;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_SUBSCRIPTIONS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_TYPE;
+import java.io.IOException;
+import java.util.List;
+import java.util.Optional;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.Project;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.CustomerCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.ProjectCacheServiceProvider;
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.onap.aaisimulator.utils.TestRestTemplateService;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.web.server.LocalServerPort;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class ProjectControllerTest extends AbstractSpringBootTest {
+
+ private static final String PROJECT_NAME_VALUE = "PROJECT_NAME_VALUE";
+
+ @LocalServerPort
+ private int port;
+
+ @Autowired
+ private TestRestTemplateService testRestTemplateService;
+
+ @Autowired
+ private ProjectCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ private CustomerCacheServiceProvider customerCacheServiceProvider;
+
+ @After
+ public void after() {
+ cacheServiceProvider.clearAll();
+ customerCacheServiceProvider.clearAll();
+ }
+
+ @Test
+ public void test_putProject_successfullyAddedToCache() throws Exception {
+ final String url = getUrl(TestConstants.PROJECT_URL, PROJECT_NAME_VALUE);
+ final ResponseEntity<Void> actual =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getBusinessProject(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, actual.getStatusCode());
+
+ final ResponseEntity<Project> actualResponse = testRestTemplateService.invokeHttpGet(url, Project.class);
+
+ assertEquals(HttpStatus.OK, actualResponse.getStatusCode());
+ assertTrue(actualResponse.hasBody());
+ final Project actualProject = actualResponse.getBody();
+ assertEquals(PROJECT_NAME_VALUE, actualProject.getProjectName());
+ assertNotNull(actualProject.getResourceVersion());
+
+ }
+
+ @Test
+ public void test_putProjectRelationShip_successfullyAddedToCache() throws Exception {
+ addCustomerAndServiceInstance();
+
+ final String url = getUrl(TestConstants.PROJECT_URL, PROJECT_NAME_VALUE);
+ final ResponseEntity<Void> actual =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getBusinessProject(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, actual.getStatusCode());
+
+ final String projectRelationshipUrl =
+ getUrl(TestConstants.PROJECT_URL, PROJECT_NAME_VALUE, RELATIONSHIP_LIST_RELATIONSHIP_URL);
+
+ final ResponseEntity<Void> putResponse = testRestTemplateService.invokeHttpPut(projectRelationshipUrl,
+ TestUtils.getBusinessProjectRelationship(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, putResponse.getStatusCode());
+
+ final ResponseEntity<Project> actualResponse = testRestTemplateService.invokeHttpGet(url, Project.class);
+
+ assertEquals(HttpStatus.OK, actualResponse.getStatusCode());
+ assertTrue(actualResponse.hasBody());
+ final Project actualProject = actualResponse.getBody();
+ assertEquals(PROJECT_NAME_VALUE, actualProject.getProjectName());
+ assertNotNull(actualProject.getRelationshipList());
+ assertFalse(actualProject.getRelationshipList().getRelationship().isEmpty());
+ assertNotNull(actualProject.getRelationshipList().getRelationship().get(0));
+
+ final Relationship actualRelationship = actualProject.getRelationshipList().getRelationship().get(0);
+ final List<RelationshipData> relationshipDataList = actualRelationship.getRelationshipData();
+ assertEquals(Constants.USES, actualRelationship.getRelationshipLabel());
+
+ assertFalse(relationshipDataList.isEmpty());
+ assertEquals(3, relationshipDataList.size());
+
+ final RelationshipData globalRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.CUSTOMER_GLOBAL_CUSTOMER_ID);
+ assertNotNull(globalRelationshipData);
+ assertEquals(GLOBAL_CUSTOMER_ID, globalRelationshipData.getRelationshipValue());
+
+ final RelationshipData serviceSubscriptionRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.SERVICE_SUBSCRIPTION_SERVICE_TYPE);
+ assertNotNull(serviceSubscriptionRelationshipData);
+ assertEquals(SERVICE_TYPE, serviceSubscriptionRelationshipData.getRelationshipValue());
+
+ final RelationshipData serviceInstanceRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.SERVICE_INSTANCE_SERVICE_INSTANCE_ID);
+ assertNotNull(serviceInstanceRelationshipData);
+ assertEquals(SERVICE_INSTANCE_ID, serviceInstanceRelationshipData.getRelationshipValue());
+
+ final Optional<ServiceInstance> optional =
+ customerCacheServiceProvider.getServiceInstance(GLOBAL_CUSTOMER_ID, SERVICE_TYPE, SERVICE_INSTANCE_ID);
+ assertTrue(optional.isPresent());
+
+ final ServiceInstance serviceInstance = optional.get();
+
+ assertNotNull(serviceInstance.getRelationshipList());
+ final List<Relationship> serviceRelationshipList = serviceInstance.getRelationshipList().getRelationship();
+ assertFalse(serviceRelationshipList.isEmpty());
+ assertEquals(1, serviceRelationshipList.size());
+ final Relationship relationship = serviceRelationshipList.get(0);
+ assertEquals(Constants.USES, relationship.getRelationshipLabel());
+ assertEquals(TestConstants.PROJECT_URL + PROJECT_NAME_VALUE, relationship.getRelatedLink());
+
+
+ final List<RelationshipData> serviceRelationshipDataList = serviceRelationshipList.get(0).getRelationshipData();
+ assertFalse(serviceRelationshipDataList.isEmpty());
+ assertEquals(1, serviceRelationshipDataList.size());
+
+ final RelationshipData projectRelationshipData =
+ getRelationshipData(serviceRelationshipDataList, Constants.PROJECT_PROJECT_NAME);
+ assertNotNull(projectRelationshipData);
+ assertEquals(PROJECT_NAME_VALUE, projectRelationshipData.getRelationshipValue());
+
+ }
+
+ @Test
+ public void test_getProjectCount_correctResult() throws Exception {
+ final String url = getUrl(TestConstants.PROJECT_URL, PROJECT_NAME_VALUE);
+ final ResponseEntity<Void> actual =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getBusinessProject(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, actual.getStatusCode());
+
+ final ResponseEntity<Results> actualResponse =
+ testRestTemplateService.invokeHttpGet(url + "?resultIndex=0&resultSize=1&format=count", Results.class);
+
+ assertEquals(HttpStatus.OK, actualResponse.getStatusCode());
+ assertTrue(actualResponse.hasBody());
+ final Results result = actualResponse.getBody();
+ assertNotNull(result.getValues());
+ assertFalse(result.getValues().isEmpty());
+ assertEquals(1, result.getValues().get(0).get(Constants.PROJECT));
+ }
+
+
+ private void addCustomerAndServiceInstance() throws Exception, IOException {
+ final ResponseEntity<Void> customerResponse =
+ testRestTemplateService.invokeHttpPut(getUrl(CUSTOMERS_URL), TestUtils.getCustomer(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, customerResponse.getStatusCode());
+
+ final String serviceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+ final ResponseEntity<Void> serviceInstanceResponse =
+ testRestTemplateService.invokeHttpPut(serviceInstanceUrl, TestUtils.getServiceInstance(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, serviceInstanceResponse.getStatusCode());
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ServiceDesignAndCreationControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ServiceDesignAndCreationControllerTest.java
new file mode 100644
index 000000000..7a8909559
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ServiceDesignAndCreationControllerTest.java
@@ -0,0 +1,67 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import org.junit.Test;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.MediaType;
+import org.springframework.http.ResponseEntity;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_DESIGN_AND_CREATION_URL;
+
@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT,
        properties = "SERVICE_DESIGN_AND_CREATION_RESPONSES_LOCATION=./src/test/resources/test-data/service-design-and-creation-responses")
public class ServiceDesignAndCreationControllerTest extends AbstractSpringBootTest{

    /**
     * Requests model-vers for a fixed model id and checks that the simulator
     * replies with the canned XML, served with an XML-compatible content type.
     * NOTE(review): expectedXml presumably mirrors the file under the
     * SERVICE_DESIGN_AND_CREATION_RESPONSES_LOCATION directory configured above
     * — confirm against the test resources; the exact-string comparison makes
     * this test sensitive to whitespace changes in that file.
     */
    @Test
    public void should_reply_sample_modelvers_response() {
        final String url = getUrl(SERVICE_DESIGN_AND_CREATION_URL,
                "/models/model/a51e2bef-961c-496f-b235-b4540400e885/model-vers");
        ResponseEntity<String> actual = testRestTemplateService.invokeHttpGet(url, String.class);
        String expectedXml = "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
                "<model-vers xmlns=\"http://org.onap.aai.inventory/v11\">\n" +
                "    <model-ver>\n" +
                "        <model-version-id>c0818142-324d-4a8c-8065-45a61df247a5</model-version-id>\n" +
                "        <model-name>EricService</model-name>\n" +
                "        <model-version>1.0</model-version>\n" +
                "        <model-description>blah</model-description>\n" +
                "        <resource-version>1594657102313</resource-version>\n" +
                "    </model-ver>\n" +
                "    <model-ver>\n" +
                "        <model-version-id>4442dfc1-0d2d-46b4-b0bc-a2ac10448269</model-version-id>\n" +
                "        <model-name>EricService</model-name>\n" +
                "        <model-version>2.0</model-version>\n" +
                "        <model-description>blahhhh</model-description>\n" +
                "        <resource-version>1594707742646</resource-version>\n" +
                "    </model-ver>\n" +
                "</model-vers>";

        assertEquals(HttpStatus.OK, actual.getStatusCode());
        // isCompatibleWith accepts application/xml plus parameterized variants
        MediaType contentType = actual.getHeaders().getContentType();
        assertNotNull(contentType);
        assertTrue(contentType.isCompatibleWith(MediaType.APPLICATION_XML));
        assertEquals(expectedXml, actual.getBody());
    }
}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/configuration/TestRestTemplateConfigration.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/configuration/TestRestTemplateConfigration.java
new file mode 100755
index 000000000..2e50d3d71
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/configuration/TestRestTemplateConfigration.java
@@ -0,0 +1,80 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller.configuration;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.SSLSession;
+import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
+import org.apache.http.conn.ssl.TrustStrategy;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.ssl.SSLContexts;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.boot.test.web.client.TestRestTemplate;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Profile;
+import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
+import org.springframework.web.client.RestTemplate;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Profile("test")
+@Configuration
+public class TestRestTemplateConfigration {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(TestRestTemplateConfigration.class);
+
+ @Bean
+ public TestRestTemplate testRestTemplate() throws Exception {
+ final TestRestTemplate testRestTemplate = new TestRestTemplate();
+ ((HttpComponentsClientHttpRequestFactory) testRestTemplate.getRestTemplate().getRequestFactory())
+ .setHttpClient(httpClient());
+ return testRestTemplate;
+
+ }
+
+ @Bean
+ public RestTemplate restTemplate() throws Exception {
+ final RestTemplate restTemplate = new RestTemplate();
+ restTemplate.setRequestFactory(new HttpComponentsClientHttpRequestFactory(httpClient()));
+ return restTemplate;
+ }
+
+ private CloseableHttpClient httpClient() throws Exception {
+ final TrustStrategy acceptingTrustStrategy = (cert, authType) -> true;
+
+ final SSLConnectionSocketFactory csf = new SSLConnectionSocketFactory(
+ SSLContexts.custom().loadTrustMaterial(null, acceptingTrustStrategy).build(), new HostnameVerifier() {
+ @Override
+ public boolean verify(final String hostname, final SSLSession session) {
+ LOGGER.warn("Skiping hostname verification ... ");
+ return true;
+ }
+
+ });
+
+ return HttpClients.custom().setSSLSocketFactory(csf).build();
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestConstants.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestConstants.java
new file mode 100755
index 000000000..942e8701c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestConstants.java
@@ -0,0 +1,120 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
public class TestConstants {

    // all simulator endpoints exercised by these tests are rooted at AAI v17
    public static final String BASE_URL_V17 = "/aai/v17";

    // --- service instance / customer paths ---

    public static final String SERVICE_INSTANCES_URL = "/service-instances";

    public static final String SERVICE_NAME = "ServiceTest";

    public static final String SERVICE_INSTANCE_ID = "ccece8fe-13da-456a-baf6-41b3a4a2bc2b";

    public static final String SERVICE_INSTANCE_URL =
            SERVICE_INSTANCES_URL + "/service-instance/" + SERVICE_INSTANCE_ID;

    public static final String SERVICE_TYPE = "vCPE";

    public static final String SERVICE_SUBSCRIPTIONS_URL =
            "/service-subscriptions/service-subscription/" + SERVICE_TYPE;

    public static final String GLOBAL_CUSTOMER_ID = "DemoCustomer";

    public static final String CUSTOMERS_URL = BASE_URL_V17 + "/business/customers/customer/" + GLOBAL_CUSTOMER_ID;

    // --- generic-vnf ---

    public static final String VNF_ID = "dfd02fb5-d7fb-4aac-b3c4-cd6b60058701";

    public static final String GENERIC_VNF_NAME = "EsyVnfInstantiationTest2";

    public static final String GENERIC_VNF_URL = BASE_URL_V17 + "/network/generic-vnfs/generic-vnf/";

    public static final String GENERIC_VNFS_URL = "/generic-vnfs";

    public static final String RELATED_TO_URL = "/related-to" + GENERIC_VNFS_URL;

    // --- business objects ---

    public static final String PLATFORM_NAME = "PLATFORM_APP_ID_1";

    public static final String LINE_OF_BUSINESS_NAME = "LINE_OF_BUSINESS_1";

    // --- cloud infrastructure ---

    public static final String CLOUD_OWNER_NAME = "CloudOwner";

    public static final String CLOUD_REGION_NAME = "PnfSwUCloudRegion";

    public static final String TENANT_ID = "693c7729b2364a26a3ca602e6f66187d";

    public static final String TENANTS_TENANT = "/tenants/tenant/";

    // --- ESR (external system registry) ---

    public static final String ESR_VNFM_URL = BASE_URL_V17 + "/external-system/esr-vnfm-list/esr-vnfm/";

    public static final String EXTERNAL_SYSTEM_ESR_VNFM_LIST_URL = BASE_URL_V17 + "/external-system/esr-vnfm-list";

    public static final String ESR_VNFM_ID = "c5e99cee-1996-4606-b697-838d51d4e1a3";

    public static final String ESR_VIM_ID = "PnfSwUVimId";

    public static final String ESR_SYSTEM_INFO_LIST_URL = "/esr-system-info-list";

    public static final String ESR_SYSTEM_INFO_ID = "5c067098-f2e3-40f7-a7ba-155e7c61e916";

    public static final String ESR_SYSTEM_TYPE = "VNFM";

    public static final String ESR_PASSWORD = "123456";

    public static final String ESR_USERNAME = "vnfmadapter";

    public static final String ESR_SERVICE_URL = "https://so-vnfm-simulator.onap:9095/vnflcm/v1";

    public static final String ESR_VENDOR = "EST";

    // NOTE(review): misspelled name kept for backward compatibility with
    // existing callers; prefer ESR_TYPE in new code.
    public static final String ESR_TYEP = "simulator";

    // correctly spelled alias for ESR_TYEP (same value by definition)
    public static final String ESR_TYPE = ESR_TYEP;

    public static final String SYSTEM_NAME = "vnfmSimulator";

    // --- vservers ---

    public static final String VSERVER_URL = "/vservers/vserver/";

    public static final String VSERVER_NAME = "CsitVServer";

    public static final String VSERVER_ID = "f84fdb9b-ad7c-49db-a08f-e443b4cbd033";

    // --- top-level v17 resource collections ---

    public static final String OWNING_ENTITY_URL = BASE_URL_V17 + "/business/owning-entities/owning-entity/";

    public static final String LINES_OF_BUSINESS_URL = BASE_URL_V17 + "/business/lines-of-business/line-of-business/";

    public static final String PLATFORMS_URL = BASE_URL_V17 + "/business/platforms/platform/";

    public static final String CLOUD_REGIONS = BASE_URL_V17 + "/cloud-infrastructure/cloud-regions/cloud-region/";

    public static final String GENERIC_VNFS_URL_1 = BASE_URL_V17 + "/network/generic-vnfs";

    public static final String NODES_URL = BASE_URL_V17 + "/nodes";

    public static final String PROJECT_URL = BASE_URL_V17 + "/business/projects/project/";

    public static final String SERVICE_DESIGN_AND_CREATION_URL = BASE_URL_V17 + "/service-design-and-creation";

    // utility holder: no instances
    private TestConstants() {}

}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestRestTemplateService.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestRestTemplateService.java
new file mode 100755
index 000000000..2e068bcea
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestRestTemplateService.java
@@ -0,0 +1,79 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+import org.onap.aaisimulator.model.UserCredentials;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.web.client.TestRestTemplate;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+
+@Service
+public class TestRestTemplateService {
+
+ @Autowired
+ private TestRestTemplate restTemplate;
+
+ @Autowired
+ private UserCredentials userCredentials;
+
+
+ public <T> ResponseEntity<T> invokeHttpGet(final String url, final Class<T> clazz) {
+ return restTemplate.exchange(url, HttpMethod.GET, new HttpEntity<>(getHttpHeaders()), clazz);
+ }
+
+ public <T> ResponseEntity<T> invokeHttpPut(final String url, final Object obj, final Class<T> clazz) {
+ final HttpEntity<?> httpEntity = getHttpEntity(obj);
+ return restTemplate.exchange(url, HttpMethod.PUT, httpEntity, clazz);
+ }
+
+ public <T> ResponseEntity<T> invokeHttpDelete(final String url, final Class<T> clazz) {
+ final HttpEntity<?> request = new HttpEntity<>(getHttpHeaders());
+ return restTemplate.exchange(url, HttpMethod.DELETE, request, clazz);
+ }
+
+ public <T> ResponseEntity<T> invokeHttpPost(final String url, final Object obj, final Class<T> clazz) {
+ final HttpEntity<?> httpEntity = getHttpEntity(obj);
+ return restTemplate.exchange(url, HttpMethod.POST, httpEntity, clazz);
+ }
+
+ public <T> ResponseEntity<T> invokeHttpPost(final HttpHeaders headers, final String url, final Object obj,
+ final Class<T> clazz) {
+ final HttpEntity<Object> entity = new HttpEntity<>(obj, headers);
+ return restTemplate.exchange(url, HttpMethod.POST, entity, clazz);
+ }
+
+ private HttpEntity<?> getHttpEntity(final Object obj) {
+ return new HttpEntity<>(obj, getHttpHeaders());
+ }
+
+ public HttpHeaders getHttpHeaders() {
+ return TestUtils.getHttpHeaders(userCredentials.getUsers().iterator().next().getUsername());
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestUtils.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestUtils.java
new file mode 100755
index 000000000..e8dc9df22
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestUtils.java
@@ -0,0 +1,186 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.util.Base64;
+import org.springframework.core.io.ClassPathResource;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.MediaType;
+import org.springframework.web.util.UriComponentsBuilder;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.module.jaxb.JaxbAnnotationModule;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class TestUtils {
+
+ private static final String PASSWORD = "aai.onap.org:demo123456!";
+
+ public static HttpHeaders getHttpHeaders(final String username) {
+ final HttpHeaders requestHeaders = new HttpHeaders();
+ requestHeaders.add("Authorization", getBasicAuth(username));
+ requestHeaders.setContentType(MediaType.APPLICATION_JSON);
+ return requestHeaders;
+ }
+
+ public static File getFile(final String file) throws IOException {
+ return new ClassPathResource(file).getFile();
+ }
+
+ public static String getJsonString(final String file) throws IOException {
+ return new String(Files.readAllBytes(getFile(file).toPath()));
+ }
+
+ public static <T> T getObjectFromFile(final File file, final Class<T> clazz) throws Exception {
+ final ObjectMapper mapper = new ObjectMapper();
+ mapper.registerModule(new JaxbAnnotationModule());
+
+ return mapper.readValue(file, clazz);
+ }
+
+ public static String getBasicAuth(final String username) {
+ return "Basic " + new String(Base64.getEncoder().encodeToString((username + ":" + PASSWORD).getBytes()));
+ }
+
+ public static String getBaseUrl(final int port) {
+ return "https://localhost:" + port;
+ }
+
+ public static String getCustomer() throws Exception, IOException {
+ return getJsonString("test-data/business-customer.json");
+ }
+
+ public static String getServiceSubscription() throws IOException {
+ return getJsonString("test-data/service-subscription.json");
+ }
+
+ public static String getServiceInstance() throws IOException {
+ return getJsonString("test-data/service-instance.json");
+ }
+
+ public static String getGenericVnf() throws IOException {
+ return getJsonString("test-data/generic-vnf.json");
+ }
+
+ public static String getPnf() throws IOException {
+ return getJsonString("test-data/pnf.json");
+ }
+
+ public static String getRelationShip() throws IOException {
+ return getJsonString("test-data/relation-ship.json");
+ }
+
+ public static String getPlatformRelatedLink() throws IOException {
+ return getJsonString("test-data/platform-related-link.json");
+ }
+
+ public static String getLineOfBusinessRelatedLink() throws IOException {
+ return getJsonString("test-data/line-of-business-related-link.json");
+ }
+
+ public static String getPlatform() throws IOException {
+ return getJsonString("test-data/platform.json");
+ }
+
+ public static String getGenericVnfRelationShip() throws IOException {
+ return getJsonString("test-data/generic-vnf-relationship.json");
+ }
+
+ public static String getLineOfBusiness() throws IOException {
+ return getJsonString("test-data/line-of-business.json");
+ }
+
+ public static String getBusinessProject() throws IOException {
+ return getJsonString("test-data/business-project.json");
+ }
+
+ public static String getBusinessProjectRelationship() throws IOException {
+ return getJsonString("test-data/business-project-relation-ship.json");
+ }
+
+ public static String getOwningEntityRelationship() throws IOException {
+ return getJsonString("test-data/owning-entity-relation-ship.json");
+ }
+
+ public static String getOwningEntity() throws IOException {
+ return getJsonString("test-data/owning-entity.json");
+ }
+
+ public static String getOrchStatuUpdateServiceInstance() throws IOException {
+ return getJsonString("test-data/service-instance-orch-status-update.json");
+ }
+
+ public static String getRelationShipJsonObject() throws IOException {
+ return getJsonString("test-data/service-Instance-relationShip.json");
+ }
+
+ public static String getCloudRegion() throws IOException {
+ return getJsonString("test-data/cloud-region.json");
+ }
+
+ public static String getTenant() throws IOException {
+ return getJsonString("test-data/tenant.json");
+ }
+
+ public static String getCloudRegionRelatedLink() throws IOException {
+ return getJsonString("test-data/cloud-region-related-link.json");
+ }
+
+ public static String getGenericVnfRelatedLink() throws IOException {
+ return getJsonString("test-data/generic-vnf-related-link.json");
+ }
+
+ public static String getTenantRelationShip() throws IOException {
+ return getJsonString("test-data/tenant-relationship.json");
+ }
+
+ public static String getGenericVnfOrchStatuUpdate() throws IOException {
+ return getJsonString("test-data/generic-vnf-orch-status-update.json");
+ }
+
+ public static String getEsrVnfm() throws IOException {
+ return getJsonString("test-data/esr-vnfm.json");
+ }
+
+ public static String getEsrSystemInfo() throws IOException {
+ return getJsonString("test-data/esr-system-info.json");
+ }
+
+ public static String getVserver() throws IOException {
+ return getJsonString("test-data/vServer.json");
+ }
+
+
+ public static String getUrl(final int port, final String... urls) {
+ final UriComponentsBuilder baseUri = UriComponentsBuilder.fromUriString("https://localhost:" + port);
+ for (final String url : urls) {
+ baseUri.path(url);
+ }
+ return baseUri.toUriString();
+ }
+
+ private TestUtils() {}
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-customer.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-customer.json
new file mode 100755
index 000000000..d64fd4acc
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-customer.json
@@ -0,0 +1,73 @@
+{
+ "global-customer-id": "DemoCustomer",
+ "subscriber-name": "DemoCustomer",
+ "subscriber-type": "INFRA",
+ "service-subscriptions": {
+ "service-subscription": [
+ {
+ "service-type": "vLB",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "tenant",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v14/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/xyzcloud/tenants/tenant/693c7729b2364a26a3ca602e6f66187d",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "CloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "xyzcloud"
+ },
+ {
+ "relationship-key": "tenant.tenant-id",
+ "relationship-value": "693c7729b2364a26a3ca602e6f66187d"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "tenant.tenant-name",
+ "property-value": "admin"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "service-type": "vCPE",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "tenant",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v14/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/xyzcloud/tenants/tenant/693c7729b2364a26a3ca602e6f66187d",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "CloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "xyzcloud"
+ },
+ {
+ "relationship-key": "tenant.tenant-id",
+ "relationship-value": "693c7729b2364a26a3ca602e6f66187d"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "tenant.tenant-name",
+ "property-value": "admin"
+ }
+ ]
+ }
+ ]
+ }
+ }
+ ]
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-project-relation-ship.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-project-relation-ship.json
new file mode 100755
index 000000000..4c96f590c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-project-relation-ship.json
@@ -0,0 +1,3 @@
+{
+ "related-link": "/business/customers/customer/DemoCustomer/service-subscriptions/service-subscription/vCPE/service-instances/service-instance/ccece8fe-13da-456a-baf6-41b3a4a2bc2b"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-project.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-project.json
new file mode 100755
index 000000000..1f0787d79
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-project.json
@@ -0,0 +1,3 @@
+{
+ "project-name": "PROJECT_NAME_VALUE"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/cloud-region-related-link.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/cloud-region-related-link.json
new file mode 100755
index 000000000..3e3371d33
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/cloud-region-related-link.json
@@ -0,0 +1,3 @@
+{
+ "related-link": "/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/PnfSwUCloudRegion"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/cloud-region.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/cloud-region.json
new file mode 100755
index 000000000..98d3127e3
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/cloud-region.json
@@ -0,0 +1,10 @@
+{
+ "cloud-owner": "CloudOwner",
+ "cloud-region-id": "PnfSwUCloudRegion",
+ "cloud-type": "openstack",
+ "owner-defined-type": "OwnerType",
+ "cloud-region-version": "1.0",
+ "cloud-zone": "CloudZone",
+ "complex-name": "clli1",
+ "cloud-extra-info": ""
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/esr-system-info.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/esr-system-info.json
new file mode 100755
index 000000000..449ae1714
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/esr-system-info.json
@@ -0,0 +1,12 @@
+{
+ "esr-system-info-id": "5c067098-f2e3-40f7-a7ba-155e7c61e916",
+ "system-name": "vnfmSimulator",
+ "type": "simulator",
+ "vendor": "EST",
+ "version": "V1.0",
+ "service-url": "https://so-vnfm-simulator.onap:9095/vnflcm/v1",
+ "user-name": "vnfmadapter",
+ "password": "123456",
+ "system-type": "VNFM",
+ "resource-version": "1564774459055"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/esr-vnfm.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/esr-vnfm.json
new file mode 100755
index 000000000..4a117c3e7
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/esr-vnfm.json
@@ -0,0 +1,5 @@
+{
+ "vnfm-id": "c5e99cee-1996-4606-b697-838d51d4e1a3",
+ "vim-id": "PnfSwUVimId",
+ "certificate-url": ""
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-orch-status-update.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-orch-status-update.json
new file mode 100755
index 000000000..022eb9839
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-orch-status-update.json
@@ -0,0 +1,4 @@
+{
+ "vnf-id": "dfd02fb5-d7fb-4aac-b3c4-cd6b60058701",
+ "orchestration-status": "Assigned"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-related-link.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-related-link.json
new file mode 100755
index 000000000..67dc905c9
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-related-link.json
@@ -0,0 +1,3 @@
+{
+ "related-link": "/network/generic-vnfs/generic-vnf/dfd02fb5-d7fb-4aac-b3c4-cd6b60058701"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-relationship.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-relationship.json
new file mode 100755
index 000000000..eafd44d59
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-relationship.json
@@ -0,0 +1,17 @@
+{
+ "related-to": "generic-vnf",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v17/network/generic-vnfs/generic-vnf/dfd02fb5-d7fb-4aac-b3c4-cd6b60058701",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "dfd02fb5-d7fb-4aac-b3c4-cd6b60058701"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "EsyVnfInstantiationTest2"
+ }
+ ]
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf.json
new file mode 100755
index 000000000..c91bbb7ca
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf.json
@@ -0,0 +1,14 @@
+{
+ "vnf-id": "dfd02fb5-d7fb-4aac-b3c4-cd6b60058701",
+ "vnf-name": "EsyVnfInstantiationTest2",
+ "vnf-type": "Sol004Zip4Service/Sol004Zip3VSP 0",
+ "service-id": "f13844f4-dbf8-4d0e-a979-45204f3ddb4e",
+ "prov-status": "PREPROV",
+ "orchestration-status": "Inventoried",
+ "model-invariant-id": "b0f14066-2b65-40d2-b5a4-c8f2116fb5fc",
+ "model-version-id": "84b9649a-4eb9-4967-9abe-e8702f55518b",
+ "model-customization-id": "50a90cd7-a84e-4ee1-b5ba-bfa5a26f5e15",
+ "nf-type": "vnflcm",
+ "nf-role": "vnflcm",
+ "selflink": "http://localhost:9921/generic-vnf/dfd02fb5-d7fb-4aac-b3c4-cd6b60058701"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/line-of-business-related-link.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/line-of-business-related-link.json
new file mode 100755
index 000000000..93c160356
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/line-of-business-related-link.json
@@ -0,0 +1,3 @@
+{
+ "related-link": "/business/lines-of-business/line-of-business/LINE_OF_BUSINESS_1"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/line-of-business.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/line-of-business.json
new file mode 100755
index 000000000..34ab4a5c1
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/line-of-business.json
@@ -0,0 +1,3 @@
+{
+ "line-of-business-name": "LINE_OF_BUSINESS_1"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/owning-entity-relation-ship.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/owning-entity-relation-ship.json
new file mode 100755
index 000000000..4c96f590c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/owning-entity-relation-ship.json
@@ -0,0 +1,3 @@
+{
+ "related-link": "/business/customers/customer/DemoCustomer/service-subscriptions/service-subscription/vCPE/service-instances/service-instance/ccece8fe-13da-456a-baf6-41b3a4a2bc2b"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/owning-entity.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/owning-entity.json
new file mode 100755
index 000000000..13d9e0b24
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/owning-entity.json
@@ -0,0 +1,4 @@
+{
+ "owning-entity-id": "oe_1",
+ "owning-entity-name": "oe_2"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/platform-related-link.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/platform-related-link.json
new file mode 100755
index 000000000..e4baea6c0
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/platform-related-link.json
@@ -0,0 +1,3 @@
+{
+ "related-link": "/business/platforms/platform/PLATFORM_APP_ID_1"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/platform.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/platform.json
new file mode 100755
index 000000000..3ee5c4c69
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/platform.json
@@ -0,0 +1,3 @@
+{
+ "platform-name": "PLATFORM_APP_ID_1"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/pnf.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/pnf.json
new file mode 100755
index 000000000..d0c1f142f
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/pnf.json
@@ -0,0 +1,16 @@
+{
+ "pnf-name2": "example-pnf-name2-val-78244",
+ "pnf-name2-source": "example-pnf-name2-source-val-99275",
+ "equip-type": "example-equip-type-val-20348",
+ "equip-vendor": "example-equip-vendor-val-52182",
+ "equip-model": "example-equip-model-val-8370",
+ "management-option": "example-management-option-val-72881",
+ "ipaddress-v4-oam": "10.12.25.73",
+ "ipaddress-v6-oam": "x:x:x:x:x:X",
+ "target-software-version": "xxxxXXX",
+ "pnf-name": "test-008",
+ "pnf-id": "5f2602dc-f647-4535-8f1d-9ec079e68a49",
+ "in-maint": false,
+ "resource-version": "1570117118905",
+ "selflink": "http://localhost:9921/pnf/test-008"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/relation-ship.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/relation-ship.json
new file mode 100755
index 000000000..4c96f590c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/relation-ship.json
@@ -0,0 +1,3 @@
+{
+ "related-link": "/business/customers/customer/DemoCustomer/service-subscriptions/service-subscription/vCPE/service-instances/service-instance/ccece8fe-13da-456a-baf6-41b3a4a2bc2b"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-Instance-relationShip.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-Instance-relationShip.json
new file mode 100755
index 000000000..c23221e54
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-Instance-relationShip.json
@@ -0,0 +1,17 @@
+{
+ "related-to": "generic-vnf",
+ "relationship-label": "org.onap.relationships.inventory.ComposedOf",
+ "related-link": "/aai/v15/network/generic-vnfs/generic-vnf/dfd02fb5-d7fb-4aac-b3c4-cd6b60058701",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "dfd02fb5-d7fb-4aac-b3c4-cd6b60058701"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "EsyVnfInstantiationTest2"
+ }
+ ]
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-design-and-creation-responses/a51e2bef-961c-496f-b235-b4540400e885.xml b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-design-and-creation-responses/a51e2bef-961c-496f-b235-b4540400e885.xml
new file mode 100644
index 000000000..55247be6f
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-design-and-creation-responses/a51e2bef-961c-496f-b235-b4540400e885.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="utf-8"?>
+<model-vers xmlns="http://org.onap.aai.inventory/v11">
+ <model-ver>
+ <model-version-id>c0818142-324d-4a8c-8065-45a61df247a5</model-version-id>
+ <model-name>EricService</model-name>
+ <model-version>1.0</model-version>
+ <model-description>blah</model-description>
+ <resource-version>1594657102313</resource-version>
+ </model-ver>
+ <model-ver>
+ <model-version-id>4442dfc1-0d2d-46b4-b0bc-a2ac10448269</model-version-id>
+ <model-name>EricService</model-name>
+ <model-version>2.0</model-version>
+ <model-description>blahhhh</model-description>
+ <resource-version>1594707742646</resource-version>
+ </model-ver>
+</model-vers> \ No newline at end of file
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-instance-orch-status-update.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-instance-orch-status-update.json
new file mode 100755
index 000000000..9f845ba21
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-instance-orch-status-update.json
@@ -0,0 +1,5 @@
+{
+ "service-instance-id": "ccece8fe-13da-456a-baf6-41b3a4a2bc2b",
+ "service-instance-name": "ServiceTest",
+ "orchestration-status": "Active"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-instance.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-instance.json
new file mode 100755
index 000000000..8962aa4c8
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-instance.json
@@ -0,0 +1,9 @@
+{
+ "service-instance-id": "ccece8fe-13da-456a-baf6-41b3a4a2bc2b",
+ "service-instance-name": "ServiceTest",
+ "environment-context": "General_Revenue-Bearing",
+ "workload-context": "Production",
+ "model-invariant-id": "e9acd081-9c89-4b4d-bcb3-e0e2b9715b2a",
+ "model-version-id": "c112a499-6148-488b-ba82-3f5938cf26d2",
+ "orchestration-status": "Inventoried"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-subscription.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-subscription.json
new file mode 100755
index 000000000..41627be1f
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-subscription.json
@@ -0,0 +1,3 @@
+{
+ "service-type": "Firewall"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/tenant-relationship.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/tenant-relationship.json
new file mode 100755
index 000000000..3c142fda6
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/tenant-relationship.json
@@ -0,0 +1,25 @@
+{
+ "related-to": "tenant",
+ "relationship-label": "org.onap.relationships.inventory.BelongsTo",
+ "related-link": "/aai/v15/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/PnfSwUCloudRegion/tenants/tenant/693c7729b2364a26a3ca602e6f66187d",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "CloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "PnfSwUCloudRegion"
+ },
+ {
+ "relationship-key": "tenant.tenant-id",
+ "relationship-value": "693c7729b2364a26a3ca602e6f66187d"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "tenant.tenant-name",
+ "property-value": "admin"
+ }
+ ]
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/tenant.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/tenant.json
new file mode 100755
index 000000000..57bdf2e4c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/tenant.json
@@ -0,0 +1,4 @@
+{
+ "tenant-id": "693c7729b2364a26a3ca602e6f66187d",
+ "tenant-name": "admin"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/vServer.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/vServer.json
new file mode 100755
index 000000000..682586599
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/vServer.json
@@ -0,0 +1,26 @@
+{
+ "vserver-id": "f84fdb9b-ad7c-49db-a08f-e443b4cbd033",
+ "vserver-name": "CsitVServer",
+ "prov-status": "active",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v15/network/generic-vnfs/generic-vnf/dfd02fb5-d7fb-4aac-b3c4-cd6b60058701",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "58157d7e-d50d-4a7d-aebe-ae6e41ca1d9f"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "Test"
+ }
+ ]
+ }
+ ]
+ }
+}
diff --git a/test/mocks/aai-simulator/common/pom.xml b/test/mocks/aai-simulator/common/pom.xml
new file mode 100755
index 000000000..ae13363de
--- /dev/null
+++ b/test/mocks/aai-simulator/common/pom.xml
@@ -0,0 +1,38 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.onap.aai-simulator</groupId>
+ <artifactId>aai-simulator</artifactId>
+ <version>1.0-SNAPSHOT</version>
+ </parent>
+ <artifactId>common</artifactId>
+ <properties>
+ <version.equalsverifier>2.5.1</version.equalsverifier>
+ <version.openpojo>0.8.6</version.openpojo>
+ </properties>
+ <dependencies>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-security</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-tomcat</artifactId>
+ </exclusion>
+ </exclusions>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>nl.jqno.equalsverifier</groupId>
+ <artifactId>equalsverifier</artifactId>
+ <version>${version.equalsverifier}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.openpojo</groupId>
+ <artifactId>openpojo</artifactId>
+ <version>${version.openpojo}</version>
+ </dependency>
+ </dependencies>
+</project> \ No newline at end of file
diff --git a/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/cache/provider/AbstractCacheServiceProvider.java b/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/cache/provider/AbstractCacheServiceProvider.java
new file mode 100755
index 000000000..ca50f786b
--- /dev/null
+++ b/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/cache/provider/AbstractCacheServiceProvider.java
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.cache.provider;
+
+import java.util.concurrent.ConcurrentHashMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@ericsson.com)
+ */
+public abstract class AbstractCacheServiceProvider {
+
+ private final Logger LOGGER = LoggerFactory.getLogger(this.getClass());
+
+ private final CacheManager cacheManager;
+
+ public AbstractCacheServiceProvider(final CacheManager cacheManager) {
+ this.cacheManager = cacheManager;
+ }
+
+ protected void clearCache(final String name) {
+ final Cache cache = cacheManager.getCache(name);
+ if (cache != null) {
+ final ConcurrentHashMap<?, ?> nativeCache = (ConcurrentHashMap<?, ?>) cache.getNativeCache();
+            LOGGER.info("Clear all entries from cache: {}", cache.getName());
+ nativeCache.clear();
+ }
+ }
+
+ protected Cache getCache(final String name) {
+ return cacheManager.getCache(name);
+ }
+
+}
diff --git a/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/configuration/SimulatorSecurityConfigurer.java b/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/configuration/SimulatorSecurityConfigurer.java
new file mode 100755
index 000000000..0fcdbae81
--- /dev/null
+++ b/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/configuration/SimulatorSecurityConfigurer.java
@@ -0,0 +1,65 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.configuration;
+
+import java.util.List;
+import org.onap.aaisimulator.model.User;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Bean;
+import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
+import org.springframework.security.config.annotation.authentication.configurers.provisioning.InMemoryUserDetailsManagerConfigurer;
+import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
+import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public abstract class SimulatorSecurityConfigurer extends WebSecurityConfigurerAdapter {
+ private static final Logger LOGGER = LoggerFactory.getLogger(SimulatorSecurityConfigurer.class);
+
+
+ private final List<User> users;
+
+ public SimulatorSecurityConfigurer(final List<User> users) {
+ this.users = users;
+ }
+
+ @Bean
+ public BCryptPasswordEncoder passwordEncoder() {
+ return new BCryptPasswordEncoder();
+ }
+
+ @Autowired
+ public void configureGlobal(final AuthenticationManagerBuilder auth) throws Exception {
+ final InMemoryUserDetailsManagerConfigurer<AuthenticationManagerBuilder> inMemoryAuthentication =
+ auth.inMemoryAuthentication().passwordEncoder(passwordEncoder());
+ for (int index = 0; index < users.size(); index++) {
+ final User user = users.get(index);
+ LOGGER.info("Adding {} to InMemoryUserDetailsManager ...", user);
+ inMemoryAuthentication.withUser(user.getUsername()).password(user.getPassword()).roles(user.getRole());
+ if (index < users.size() - 1) {
+ inMemoryAuthentication.and();
+ }
+ }
+ }
+}
diff --git a/test/mocks/aai-simulator/common/src/main/java/org/onap/aaisimulator/model/User.java b/test/mocks/aai-simulator/common/src/main/java/org/onap/aaisimulator/model/User.java
new file mode 100755
index 000000000..d273570e0
--- /dev/null
+++ b/test/mocks/aai-simulator/common/src/main/java/org/onap/aaisimulator/model/User.java
@@ -0,0 +1,101 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.model;
+
+import static org.springframework.util.ObjectUtils.nullSafeEquals;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class User {
+ private String username;
+ private String password;
+ private String role;
+
+ /**
+ * @return the username
+ */
+ public String getUsername() {
+ return username;
+ }
+
+ /**
+ * @param username the username to set
+ */
+ public void setUsername(final String username) {
+ this.username = username;
+ }
+
+ /**
+ * @return the password
+ */
+ public String getPassword() {
+ return password;
+ }
+
+ /**
+ * @param password the password to set
+ */
+ public void setPassword(final String password) {
+ this.password = password;
+ }
+
+ /**
+ * @return the role
+ */
+ public String getRole() {
+ return role;
+ }
+
+ /**
+ * @param role the role to set
+ */
+ public void setRole(final String role) {
+ this.role = role;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((password == null) ? 0 : password.hashCode());
+ result = prime * result + ((role == null) ? 0 : role.hashCode());
+ result = prime * result + ((username == null) ? 0 : username.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(final Object obj) {
+ if (obj instanceof User) {
+ final User other = (User) obj;
+ return nullSafeEquals(this.username, other.username) && nullSafeEquals(this.password, other.password)
+ && nullSafeEquals(this.role, other.role);
+ }
+ return false;
+ }
+
+ @Override
+ public String toString() {
+ return "User [username=" + username + ", password=" + password + ", role=" + role + "]";
+ }
+
+
+}
diff --git a/test/mocks/aai-simulator/common/src/main/java/org/onap/aaisimulator/model/UserCredentials.java b/test/mocks/aai-simulator/common/src/main/java/org/onap/aaisimulator/model/UserCredentials.java
new file mode 100755
index 000000000..d1c331b74
--- /dev/null
+++ b/test/mocks/aai-simulator/common/src/main/java/org/onap/aaisimulator/model/UserCredentials.java
@@ -0,0 +1,66 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.model;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.stereotype.Component;
+import org.springframework.util.ObjectUtils;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Component
+@ConfigurationProperties(prefix = "spring.security")
+public class UserCredentials {
+
+ private final List<User> users = new ArrayList<>();
+
+ public List<User> getUsers() {
+ return users;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((users == null) ? 0 : users.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(final Object obj) {
+
+ if (obj instanceof UserCredentials) {
+ final UserCredentials other = (UserCredentials) obj;
+ return ObjectUtils.nullSafeEquals(users, other.users);
+ }
+
+ return false;
+ }
+
+ @Override
+ public String toString() {
+ return "UserCredentials [users=" + users + "]";
+ }
+
+}
diff --git a/test/mocks/aai-simulator/common/src/test/java/org/onap/aaisimulator/model/PojoClassesTest.java b/test/mocks/aai-simulator/common/src/test/java/org/onap/aaisimulator/model/PojoClassesTest.java
new file mode 100755
index 000000000..0954047e4
--- /dev/null
+++ b/test/mocks/aai-simulator/common/src/test/java/org/onap/aaisimulator/model/PojoClassesTest.java
@@ -0,0 +1,60 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.model;
+
+import org.junit.Test;
+import org.onap.aaisimulator.model.UserCredentials;
+import com.openpojo.reflection.impl.PojoClassFactory;
+import com.openpojo.validation.Validator;
+import com.openpojo.validation.ValidatorBuilder;
+import com.openpojo.validation.test.impl.GetterTester;
+import com.openpojo.validation.test.impl.SetterTester;
+import nl.jqno.equalsverifier.EqualsVerifier;
+import nl.jqno.equalsverifier.Warning;
+
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class PojoClassesTest {
+
+ @Test
+ public void test_UserCredentials_class() throws ClassNotFoundException {
+ verify(UserCredentials.class);
+ validate(UserCredentials.class);
+ }
+
+ @Test
+ public void test_User_class() throws ClassNotFoundException {
+ verify(User.class);
+ validate(User.class);
+ }
+
+ private void validate(final Class<?> clazz) {
+ final Validator validator = ValidatorBuilder.create().with(new SetterTester()).with(new GetterTester()).build();
+ validator.validate(PojoClassFactory.getPojoClass(clazz));
+ }
+
+ private void verify(final Class<?> clazz) {
+ EqualsVerifier.forClass(clazz).suppress(Warning.STRICT_INHERITANCE, Warning.NONFINAL_FIELDS).verify();
+ }
+
+}
diff --git a/test/mocks/aai-simulator/package/docker/pom.xml b/test/mocks/aai-simulator/package/docker/pom.xml
new file mode 100755
index 000000000..821a95152
--- /dev/null
+++ b/test/mocks/aai-simulator/package/docker/pom.xml
@@ -0,0 +1,87 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <parent>
+ <artifactId>package</artifactId>
+ <groupId>org.onap.aai-simulator</groupId>
+ <version>1.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>docker</artifactId>
+ <packaging>pom</packaging>
+ <name>${project.artifactId}</name>
+ <build>
+ <finalName>${project.artifactId}-${project.version}</finalName>
+ <plugins>
+ <plugin>
+ <groupId>io.fabric8</groupId>
+ <artifactId>docker-maven-plugin</artifactId>
+ <version>0.28.0</version>
+ <configuration>
+ <verbose>true</verbose>
+ <apiVersion>1.23</apiVersion>
+ <pullRegistry>${docker.pull.registry}</pullRegistry>
+ <pushRegistry>${docker.push.registry}</pushRegistry>
+ <images>
+ <image>
+ <name>simulator/aai-simulator</name>
+ <build>
+ <cleanup>try</cleanup>
+ <dockerFileDir>docker-files</dockerFileDir>
+ <dockerFile>Dockerfile.aai-simulator-base-image</dockerFile>
+ <tags>
+ <tag>${project.version}</tag>
+ </tags>
+ <assembly>
+ <inline>
+ <dependencySets>
+ <dependencySet>
+ <includes>
+ <include>${project.parent.groupId}:aai-simulator</include>
+ </includes>
+ <outputFileNameMapping>app.jar</outputFileNameMapping>
+ </dependencySet>
+ </dependencySets>
+ </inline>
+ </assembly>
+ </build>
+ </image>
+ </images>
+ </configuration>
+ <executions>
+ <execution>
+ <id>clean-images</id>
+ <phase>pre-clean</phase>
+ <goals>
+ <goal>remove</goal>
+ </goals>
+ <configuration>
+ <removeAll>true</removeAll>
+ </configuration>
+ </execution>
+ <execution>
+ <id>generate-images</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>build</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-deploy-plugin</artifactId>
+ <version>2.8</version>
+ <configuration>
+ <skip>true</skip>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>${project.parent.groupId}</groupId>
+ <artifactId>aai-simulator</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ </dependencies>
+</project> \ No newline at end of file
diff --git a/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/Dockerfile.aai-simulator-base-image b/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/Dockerfile.aai-simulator-base-image
new file mode 100755
index 000000000..bf570c7d7
--- /dev/null
+++ b/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/Dockerfile.aai-simulator-base-image
@@ -0,0 +1,34 @@
+FROM openjdk:8-jdk-alpine
+
+ARG http_proxy
+ARG https_proxy
+ENV HTTP_PROXY=$http_proxy
+ENV HTTPS_PROXY=$https_proxy
+ENV http_proxy=$HTTP_PROXY
+ENV https_proxy=$HTTPS_PROXY
+
+# Update the package list and upgrade installed packages
+RUN apk update && apk upgrade
+
+# Install commonly needed tools
+RUN apk --no-cache add curl netcat-openbsd sudo nss
+
+# Create 'so' user
+RUN addgroup -g 1000 so && adduser -S -u 1000 -G so -s /bin/sh so
+
+RUN mkdir /app && mkdir /app/config && mkdir /app/logs && mkdir /app/ca-certificates
+
+COPY maven/app.jar /app
+COPY configs/logging/logback-spring.xml /app
+COPY scripts/start-app.sh /app
+
+RUN chown -R so:so /app && chmod 700 /app/*.sh
+
+# Springboot configuration (required)
+VOLUME /app/config
+
+# CA certificates
+VOLUME /app/ca-certificates
+
+WORKDIR /app
+CMD ["/app/start-app.sh"]
diff --git a/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/configs/logging/logback-spring.xml b/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/configs/logging/logback-spring.xml
new file mode 100755
index 000000000..13c918797
--- /dev/null
+++ b/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/configs/logging/logback-spring.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<configuration>
+
+ <property name="LOGS" value="./logs" />
+
+ <appender name="Console"
+ class="ch.qos.logback.core.ConsoleAppender">
+ <layout class="ch.qos.logback.classic.PatternLayout">
+ <Pattern>
+ %black(%d{ISO8601}) %highlight(%-5level) [%blue(%t)] %yellow(%C{1.}): %msg%n%throwable
+ </Pattern>
+ </layout>
+ </appender>
+
+ <appender name="RollingFile"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${logs_dir:-.}/spring-boot-logger.log</file>
+ <encoder
+ class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+ <Pattern>%d %p %C{1.} [%t] %m%n</Pattern>
+ </encoder>
+
+ <rollingPolicy
+ class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <!-- rollover daily and when the file reaches 10 MegaBytes -->
+ <fileNamePattern>${logs_dir:-.}/archived/spring-boot-logger-%d{yyyy-MM-dd}.%i.log
+ </fileNamePattern>
+ <timeBasedFileNamingAndTriggeringPolicy
+ class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+ <maxFileSize>10MB</maxFileSize>
+ </timeBasedFileNamingAndTriggeringPolicy>
+ </rollingPolicy>
+ </appender>
+
+ <!-- LOG everything at INFO level -->
+ <root level="info">
+ <appender-ref ref="RollingFile" />
+ <appender-ref ref="Console" />
+ </root>
+
+ <logger name="org.onap" level="trace" additivity="false">
+ <appender-ref ref="RollingFile" />
+ <appender-ref ref="Console" />
+ </logger>
+
+</configuration>
diff --git a/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/scripts/start-app.sh b/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/scripts/start-app.sh
new file mode 100755
index 000000000..eb8ee2e52
--- /dev/null
+++ b/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/scripts/start-app.sh
@@ -0,0 +1,69 @@
+#!/bin/sh
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2019 Nordix Foundation.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+# @author Waqas Ikram (waqas.ikram@est.tech)
+
+touch /app/app.jar
+
+if [ "$(ls -1 /app/ca-certificates)" ]; then
+ needUpdate=FALSE
+ for certificate in `ls -1 /app/ca-certificates`; do
+ echo "Installing $certificate in /usr/local/share/ca-certificates"
+ cp /app/ca-certificates/$certificate /usr/local/share/ca-certificates/$certificate
+ needUpdate=TRUE
+ done
+ if [ $needUpdate = TRUE ]; then
+ echo "Updating ca-certificates . . ."
+ update-ca-certificates --fresh
+ fi
+fi
+
+if [ -z "$APP" ]; then
+ echo "CONFIG ERROR: APP environment variable not set"
+ exit 1
+fi
+
+echo "Starting $APP simulator ... "
+
+if [ -z "${CONFIG_PATH}" ]; then
+ export CONFIG_PATH=/app/config/override.yaml
+fi
+
+if [ -z "${LOG_PATH}" ]; then
+ export LOG_PATH="logs/${APP}"
+fi
+
+if [ "${SSL_DEBUG}" = "log" ]; then
+ export SSL_DEBUG="-Djavax.net.debug=all"
+else
+ export SSL_DEBUG=
+fi
+
+
+jvmargs="${JVM_ARGS} -Dlogs_dir=${LOG_PATH} -Dlogging.config=/app/logback-spring.xml -Dspring.config.additional-location=$CONFIG_PATH ${SSL_DEBUG} ${DISABLE_SNI}"
+
+echo "JVM Arguments: ${jvmargs}"
+
+java ${jvmargs} -jar app.jar
+rc=$?
+
+echo "Application exiting with status code $rc"
+
+exit $rc
diff --git a/test/mocks/aai-simulator/package/pom.xml b/test/mocks/aai-simulator/package/pom.xml
new file mode 100755
index 000000000..93c46dcdb
--- /dev/null
+++ b/test/mocks/aai-simulator/package/pom.xml
@@ -0,0 +1,15 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <parent>
+ <artifactId>aai-simulator</artifactId>
+ <groupId>org.onap.aai-simulator</groupId>
+ <version>1.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>package</artifactId>
+ <packaging>pom</packaging>
+ <name>${project.artifactId}</name>
+ <modules>
+ <module>docker</module>
+ </modules>
+</project>
diff --git a/test/mocks/aai-simulator/pom.xml b/test/mocks/aai-simulator/pom.xml
new file mode 100755
index 000000000..a11ddeffc
--- /dev/null
+++ b/test/mocks/aai-simulator/pom.xml
@@ -0,0 +1,84 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.onap.aai-simulator</groupId>
+ <artifactId>aai-simulator</artifactId>
+ <packaging>pom</packaging>
+ <name>${project.artifactId}</name>
+ <version>1.0-SNAPSHOT</version>
+ <properties>
+ <jax.ws.rs>2.1</jax.ws.rs>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ <maven.compiler.source>1.8</maven.compiler.source>
+ <maven.compiler.target>1.8</maven.compiler.target>
+ <jaxb.version>2.3.0</jaxb.version>
+ <javax.version>1.1.1</javax.version>
+ <java.version>1.8</java.version>
+ </properties>
+ <modules>
+ <module>common</module>
+ <module>aai-sim</module>
+ <module>package</module>
+ </modules>
+ <dependencyManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-parent</artifactId>
+ <version>2.1.5.RELEASE</version>
+ <type>pom</type>
+ <scope>import</scope>
+ </dependency>
+ <dependency>
+ <groupId>javax.xml.bind</groupId>
+ <artifactId>jaxb-api</artifactId>
+ <version>${jaxb.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.xml.bind</groupId>
+ <artifactId>jaxb-core</artifactId>
+ <version>${jaxb.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.xml.bind</groupId>
+ <artifactId>jaxb-impl</artifactId>
+ <version>${jaxb.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>javax.activation</groupId>
+ <artifactId>activation</artifactId>
+ <version>${javax.version}</version>
+ </dependency>
+ </dependencies>
+ </dependencyManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-web</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-actuator</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-tomcat</artifactId>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-test</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-aop</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>javax.ws.rs</groupId>
+ <artifactId>javax.ws.rs-api</artifactId>
+ <version>${jax.ws.rs}</version>
+ </dependency>
+ </dependencies>
+</project> \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh b/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh
index dd1daea54..09c7f1cf4 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="Simulating a 25h backlog of events for 700 PNF with decreasing number of missing files, then continues with 15 min events from all PNFs using FTPS."
+TC_ONELINE_DESCR="Simulating a 25h backlog of events for 700 PNF with decreasing number of missing files, then continues with 15 min events from all PNFs using FTPES."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="300"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -30,7 +30,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -61,4 +61,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh b/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh
index 15852057f..4265d1b8a 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh
@@ -30,7 +30,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -61,4 +61,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh b/test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh
index 7c7d3543f..719af3c6c 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh
@@ -31,15 +31,15 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM_feed2_CTR.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM_feed2_CTR.yaml"
-consul_config_app 1 "../simulator-group/consul/c13_feed2_CTR.json"
+dfc_config_app 1 "../simulator-group/dfc_configs/c13_feed2_CTR.yaml"
-consul_config_app 2 "../simulator-group/consul/c14_feed3_LOG.json"
+dfc_config_app 2 "../simulator-group/dfc_configs/c14_feed3_LOG.yaml"
-consul_config_app 3 "../simulator-group/consul/c15_feed1_PM_feed4_TEST.json"
+dfc_config_app 3 "../simulator-group/dfc_configs/c15_feed1_PM_feed4_TEST.yaml"
-consul_config_app 2 "../simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json"
+dfc_config_app 2 "../simulator-group/dfc_configs/c16_feed4_TEST_feed5_TEMP.yaml"
mr_print ""
@@ -462,4 +462,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh
index f1d6f093c..30f4aa87d 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh
index c162a2a16..e51f690e1 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC100.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC100.sh
index 9d9665bb2..a7365838f 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC100.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC100.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -45,7 +45,7 @@ start_dfc 0
dr_equal ctr_published_files 5 900
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM.yaml"
mr_equal ctr_events 100 1800
mr_equal ctr_unique_files 100
@@ -62,4 +62,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh
index 18db3b288..ce3674398 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="3500 1MB files from 700 PNFs in 3500 events in 5 polls using FTPS, from poll to publish."
+TC_ONELINE_DESCR="3500 1MB files from 700 PNFs in 3500 events in 5 polls using FTPES, from poll to publish."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="5"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh
index a33f37c22..4cc915e49 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh
index 93e348e12..c776e3c9d 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="3500 1KB files from 700 PNFs in 3500 events in 5 polls using FTPS, from poll to publish."
+TC_ONELINE_DESCR="3500 1KB files from 700 PNFs in 3500 events in 5 polls using FTPES, from poll to publish."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="5"
export NUM_PNFS="700"
export FILE_SIZE="1KB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC14.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC14.sh
index 99646b369..eed03da9a 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC14.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC14.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC15.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC15.sh
index 44238c31d..133f02424 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC15.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC15.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="3500 50MB files from 700 PNFs in 3500 events in 5 polls using FTPS, from poll to publish."
+TC_ONELINE_DESCR="3500 50MB files from 700 PNFs in 3500 events in 5 polls using FTPES, from poll to publish."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="5"
export NUM_PNFS="700"
export FILE_SIZE="50MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh
index cb2f71a25..0eba6f12b 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh
index 9eef5ae95..e3ca92b83 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh
index 0b1828966..407a45256 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh
@@ -30,7 +30,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -107,4 +107,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh
index df9b57d3f..501a49e9c 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="72800 1MB files from 700 PNFs in 3500 events in 100 polls (35 PNFs each 100 files per poll) using FTPS, from poll to publish."
+TC_ONELINE_DESCR="72800 1MB files from 700 PNFs in 3500 events in 100 polls (35 PNFs each 100 files per poll) using FTPES, from poll to publish."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="105"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh
index 5291b6815..36f502267 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -91,4 +91,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh
index 2eb9abc97..cb0610a5e 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 30
@@ -80,4 +80,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh
index 84db1d8c8..208de1d18 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh
index 380d3ed0f..2a642a566 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -103,4 +103,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh
index 2776399c3..dddccc16b 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -108,4 +108,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh
index b1ab48224..f95bfd6d8 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -74,4 +74,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh
index 338a20da0..f17e29493 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="DFC file retention (files with SFTP and then same files over FTPS). 1MB, 5MB and 50MB using first SFTP and thenSFTP with restart of MR between each file."
+TC_ONELINE_DESCR="DFC file retention (files with SFTP and then same files over FTPES). 1MB, 5MB and 50MB using first SFTP and then FTPES with restart of MR between each file."
. ../common/testcase_common.sh $1 $2
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -116,4 +116,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh
index 93dd69c0c..43d3e799e 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="One 1MB file from one PNF in one event using FTPS, from poll to publish"
+TC_ONELINE_DESCR="One 1MB file from one PNF in one event using FTPES, from poll to publish"
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="1:A"
export NUM_FTPFILES="10"
export NUM_PNFS="1"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=1
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh
index f7b67d51b..cb84a8df7 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -116,4 +116,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC400.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC400.sh
new file mode 100755
index 000000000..093e764e9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC400.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+# Copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+TC_ONELINE_DESCR="One 1MB file from one PNF in one event using HTTPS, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc400"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="1MB"
+export HTTP_TYPE="HTTPS"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 1000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC401.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC401.sh
new file mode 100755
index 000000000..4daeb3c02
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC401.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+# Copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+TC_ONELINE_DESCR="One 5MB file from one PNF in one event using HTTPS, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc401"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="5MB"
+export HTTP_TYPE="HTTPS"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 5000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC402.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC402.sh
new file mode 100755
index 000000000..ed76d23b2
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC402.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+# Copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+TC_ONELINE_DESCR="One 50MB file from one PNF in one event using HTTPS, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc402"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="50MB"
+export HTTP_TYPE="HTTPS"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 50000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC403.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC403.sh
new file mode 100755
index 000000000..01bca1311
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC403.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+# Copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+TC_ONELINE_DESCR="One 1MB file from one PNF in one event using HTTPS client certificate authentication, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc403"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="1MB"
+export HTTP_TYPE="HTTPS"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 1000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC404.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC404.sh
new file mode 100755
index 000000000..7370d82d4
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC404.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+# Copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+TC_ONELINE_DESCR="One 1MB file from one PNF in one event using HTTPS no client authentication, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc404"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="1MB"
+export HTTP_TYPE="HTTPS"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 1000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh
index 3de577eee..594fdba82 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="One 5MB file from one PNF in one event using FTPS, from poll to publish"
+TC_ONELINE_DESCR="One 5MB file from one PNF in one event using FTPES, from poll to publish"
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="1:A"
export NUM_FTPFILES="10"
export NUM_PNFS="1"
export FILE_SIZE="5MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=1
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh
index 6efa32244..c41a743c9 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -73,4 +73,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh
index fd3977348..1e7c41e78 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="One 50MB file from one PNF in one event using FTPS, from poll to publish"
+TC_ONELINE_DESCR="One 50MB file from one PNF in one event using FTPES, from poll to publish"
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="1:A"
export NUM_FTPFILES="10"
export NUM_PNFS="1"
export FILE_SIZE="50MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=1
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh
index cc3839bec..637e55860 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -81,4 +81,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh
index f16c442f2..05e735beb 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="Kill FTPS sever for 10+ sec during download"
+TC_ONELINE_DESCR="Kill FTPES server for 10+ sec during download"
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="2"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=1
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -42,9 +42,9 @@ start_dfc 0
dr_greater ctr_published_files 100 200
-stop_ftps 0
+stop_ftpes 0
sleep_wait 30
-start_ftps 0
+start_ftpes 0
dr_equal ctr_published_files 1400 400
@@ -81,4 +81,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC7.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC7.sh
new file mode 100755
index 000000000..6b9bd2f28
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC7.sh
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="One 1MB file from one PNF in one event using HTTP, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc300"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="1MB"
+export HTTP_TYPE="HTTP"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 1000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh
index 0a5b3f1d4..4de28e3b6 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh
index add145492..dd29b7eb0 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC8.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC8.sh
new file mode 100755
index 000000000..547900969
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC8.sh
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="One 5MB file from one PNF in one event using HTTP, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc301"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="5MB"
+export HTTP_TYPE="HTTP"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 5000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC80.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC80.sh
index 960ea9679..9a264fc56 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC80.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC80.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_CTR_feed3_LOG_TEMP.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -96,4 +96,4 @@ print_all
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC81.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC81.sh
index 9734d9714..901f57cfb 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC81.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC81.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed3_PM_CTR.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed3_PM_CTR.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -84,4 +84,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC9.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC9.sh
new file mode 100755
index 000000000..a78b693b3
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC9.sh
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="One 50MB file from one PNF in one event using HTTP, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc302"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="50MB"
+export HTTP_TYPE="HTTP"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 50000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC90.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC90.sh
index 50da063a4..9ecda185f 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC90.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC90.sh
@@ -29,8 +29,8 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM.json"
-consul_config_app 1 "../simulator-group/consul/c13_feed2_CTR.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM.yaml"
+dfc_config_app 1 "../simulator-group/dfc_configs/c13_feed2_CTR.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -95,4 +95,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh
index 08d4d9ea2..fd1b886bc 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="Maximum number of 1MB FTPS files during 24h, 700 PNFs. 100 new files per event."
+TC_ONELINE_DESCR="Maximum number of 1MB FTPES files during 24h, 700 PNFs. 100 new files per event."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="4000"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -102,4 +102,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh
index 1bc88ef95..e902119bc 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -103,4 +103,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/README.md b/test/mocks/datafilecollector-testharness/auto-test/README.md
index b73067dee..f6ccd52cb 100644
--- a/test/mocks/datafilecollector-testharness/auto-test/README.md
+++ b/test/mocks/datafilecollector-testharness/auto-test/README.md
@@ -1,54 +1,61 @@
-## Running automated test case and test suites
+# Running automated test case and test suites
+
Test cases run a single test case and test suites run one or more test cases in a sequence.
The test cases and test suites are possible to run on both Ubuntu and Mac-OS.
-##Overall structure and setup
+## Overall structure and setup
+
Test cases and test suites are written as bash scripts which call predefined functions in two other bash scripts
located in ../common dir.
The functions are described further below.
The integration repo is needed as well as docker.
-If needed setup the ``DFC_LOCAL_IMAGE`` and ``DFC_REMOTE_IMAGE`` env var in test_env.sh to point to the dfc images (local registry image or next registry image) without the image tag.
+If needed, set up the `DFC_LOCAL_IMAGE` and `DFC_REMOTE_IMAGE` env vars in test_env.sh to point to the dfc images (local registry image or next registry image) without the image tag.
The predefined images should be ok for current usage:
-``DFC_REMOTE_IMAGE=nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server``
+`DFC_REMOTE_IMAGE=nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server`
-``DFC_LOCAL_IMAGE=onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server``
+`DFC_LOCAL_IMAGE=onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server`
-If the test cases/suites in this dir are not executed in the auto-test dir in the integration repo, then the ``SIM_GROUP`` env var need to point to the ``simulator-group`` dir.
+If the test cases/suites in this dir are not executed in the auto-test dir in the integration repo, then the `SIM_GROUP` env var needs to point to the `simulator-group` dir.
See instructions in the test_env.sh. The ../common dir is needed as well in the case. That is, it is possible to have auto-test dir (and the common dir) somewhere else
than in the integration repo but the simulator-group and common dir need to be available.
-##Test cases and test suites naming.
-Each file filename should have the format ``<tc-id>.sh`` for test cases and ``<ts-id>.sh`` for test suite. The tc-id and ts-id are the
+## Test cases and test suites naming
+
+Each file filename should have the format `<tc-id>.sh` for test cases and `<ts-id>.sh` for test suite. The tc-id and ts-id are the
identify of the test case or test suite. Example FTC2.sh, FTC2 is the id of the test case. Just the contents of the files determines if
it is a test case or test suite so good to name the file so it is easy to see if it is a test case or a test suite.
-A simple way to list all test cases/suite along with the description is to do ``grep ONELINE_DESCR *.sh`` in the shell.
+A simple way to list all test cases/suite along with the description is to do `grep ONELINE_DESCR *.sh` in the shell.
-##Logs from containers and test cases
-All logs from each test cases are stored under ``logs/<tc-id>/``.
+## Logs from containers and test cases
+
+All logs from each test cases are stored under `logs/<tc-id>/`.
The logs include the application.log and the container log from dfc, the container logs from each simulator and the test case log (same as the screen output).
In the test cases the logs are stored with a prefix so the logs can be stored at different steps during the test. All test cases contains an entry to save all logs with prefix 'END' at the end of each test case.
-##Execution##
-Test cases and test suites are executed by: `` [sudo] ./<tc-id or ts-id>.sh local | remote | remote-remove | manual-container | manual-app``</br>
-**local** - uses the dfc image pointed out by ``DFC_LOCAL_IMAGE`` in the test_env, should be the dfc image built locally in your docker registry.</br>
-**remote** - uses the dfc image pointed out by ``DFC_REMOTE_IMAGE`` in the test_env, should be the dfc nexus image in your docker registry.</br>
-**remote-remove** - uses the dfc image pointed out by ``DFC_REMOTE_IMAGE`` in the test_env, should be the dfc nexus image in your docker registry. Removes the nexus image and pull from remote registry.</br>
-**manual-container** - uses dfc in a manually started container. The script will prompt you for manual starting and stopping of the container.</br>
-**manual-app** - uses dfc app started as an external process (from eclipse etc). The script will prompt you for manual start and stop of the process.</br>
+
+## Execution
+
+Test cases and test suites are executed by: `[sudo] ./<tc-id or ts-id>.sh local | remote | remote-remove | manual-container | manual-app`
+
+- **local** - uses the dfc image pointed out by `DFC_LOCAL_IMAGE` in the test_env, should be the dfc image built locally in your docker registry.
+- **remote** - uses the dfc image pointed out by `DFC_REMOTE_IMAGE` in the test_env, should be the dfc nexus image in your docker registry.
+- **remote-remove** - uses the dfc image pointed out by `DFC_REMOTE_IMAGE` in the test_env, should be the dfc nexus image in your docker registry. Removes the nexus image and pulls from the remote registry.
+- **manual-container** - uses dfc in a manually started container. The script will prompt you for manual starting and stopping of the container.
+- **manual-app** - uses dfc app started as an external process (from eclipse etc). The script will prompt you for manual start and stop of the process.
When running dfc manually, either as a container or an app the ports need to be set to map the instance id of the dfc. Most test cases start dfc with index 0, then the test case expects the ports of dfc to be mapped to the standar port number.
However, if a higher instance id than 0 is used then the mapped ports need add that index to the port number (eg, if index 2 is used the dfc need to map port 8102 and 8435 instead of the standard 8100 and 8433).
-##Test case file##
+## Test case file
+
A test case file contains a number of steps to verify a certain functionality.
-A description of the test case should be given to the ``TC_ONELINE_DESCR`` var. The description will be printed in the test result.
+A description of the test case should be given to the `TC_ONELINE_DESCR` var. The description will be printed in the test result.
The empty template for a test case files looks like this:
-(Only the parts noted with < and > shall be changed.)
+(Only the parts noted with `<` and `>` shall be changed.)
------------------------------------------------------------
```
#!/bin/bash
@@ -69,20 +76,18 @@ store_logs END
print_result
```
------------------------------------------------------------
The ../common/testcase_common.sh contains all functions needed for the test case file. See the README.md file in the ../common dir for a description of all available functions.
+## Test suite files
-##Test suite files##
A test suite file contains one or more test cases to run in sequence.
-A description of the test case should be given to the ``TS_ONELINE_DESCR`` var. The description will be printed in the test result.
+A description of the test case should be given to the `TS_ONELINE_DESCR` var. The description will be printed in the test result.
The empty template for a test suite files looks like this:
-(Only the parts noted with ``<`` and ``>`` shall be changed.)
+(Only the parts noted with `<` and `>` shall be changed.)
------------------------------------------------------------
```
#!/bin/bash
@@ -104,11 +109,11 @@ suite_complete
```
------------------------------------------------------------
The ../common/testsuite_common.sh contains all functions needed for a test suite file. See the README.md file in the ../common dir for a description of all available functions.
-##Known limitations##
+## Known limitations
+
When DFC has polled a new event from the MR simulator, DFC starts to check each file whether it has been already published or not. This check is done per file towards the DR simulator.
If the event contains a large amount of files, there is a risk that DFC will flood the DR simulator with requests for these checks. The timeout in DFC for the response is currently 4 sec and the DR simulator may not be able to answer all request within the timeout.
DR simulator is single threaded. This seem to be a problem only for the first polled event. For subsequent events these requests seem to be spread out in time by DFC so the DR simulator can respond in time.
@@ -117,4 +122,4 @@ A number of the test script will report failure due to this limitation in the DR
The FTP servers may deny connection when too many file download requests are made in a short time from DFC.
This is visible in the DFC application log as WARNINGs for failed downloads. However, DFC always retry the failed download a number of times to
-minimize the risk of giving up download completely for these files. \ No newline at end of file
+minimize the risk of giving up download completely for these files.
diff --git a/test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh b/test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh
index 9e3d59c84..0593c52bb 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh
@@ -1,4 +1,13 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
TS_ONELINE_DESCR="Single file tests suite"
@@ -8,12 +17,20 @@ suite_setup
############# TEST CASES #################
-./FTC1.sh $1 $2
-./FTC2.sh $1 $2
-./FTC3.sh $1 $2
-./FTC4.sh $1 $2
-./FTC5.sh $1 $2
-./FTC6.sh $1 $2
+./FTC1.sh "$1" "$2"
+./FTC2.sh "$1" "$2"
+./FTC3.sh "$1" "$2"
+./FTC4.sh "$1" "$2"
+./FTC5.sh "$1" "$2"
+./FTC6.sh "$1" "$2"
+./FTC7.sh "$1" "$2"
+./FTC8.sh "$1" "$2"
+./FTC9.sh "$1" "$2"
+./FTC400.sh "$1" "$2"
+./FTC401.sh "$1" "$2"
+./FTC402.sh "$1" "$2"
+./FTC403.sh "$1" "$2"
+./FTC404.sh "$1" "$2"
##########################################
diff --git a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh
index 6e3368518..b6fe01430 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="Stabilty over 72hours, 700 PNFs over FTPS. All new files (100) in first event from PNF, then one new 1 new file per event."
+TC_ONELINE_DESCR="Stability over 72 hours, 700 PNFs over FTPES. All new files (100) in first event from PNF, then one new file per event."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="1000"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -106,4 +106,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh
index baafc906d..5584c6304 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -106,4 +106,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/key.pem b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/key.pem
new file mode 100644
index 000000000..b876f2a99
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/key.pem
@@ -0,0 +1,28 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCTVPk0SJYjfGLZ
+ToXsNWVDQTjgsCJ/8YtPl4Z+kT0DJJO6CusCZnsTknr0exzu2WuXpoC4Y7w79civ
+1sOWMqRI+wwNtXGDyoJPFCOAiWk8aeOS1mXM4g+tFZjDjMJbbjbeaQbFLOJw+1ri
+6PI7gQPz4pPGY+Yd8pKo8WubRSWWlM2HSKp0Fmdt5elmjSqBKJMhvv0SzDt7YwNv
+fOVCayGDyIe99trmalv+dpgP8WVSqm/hupDo4LwFcoZDrlphZWjDurRpPFqwyXB7
+VUp12Bu7LeFsxcGz9uVCnh1Ol2rWU9zHgI32r/9JbzWOqF+DdvQxJ9Ce43Slxs+j
+lONjPfSVAgMBAAECggEAIBEGjFKgGukfupxUmayldZXkg0LSv3YbFB+uri1+UgjL
+/loR/FvBvejLyhphmbrIqCEdMwTCG2rLWzorJ62uBBRf8zvikQSqh/uiHn/J39RM
+K9FuJsGtF8bzkNw6ERxT5OSHDLNQNbb/eROqZTkXWyWddDiaBTqeoRaqjQjnvXYf
+2nchun8UfNrNO1Hnxf1shYNMgYpdSIYybh6+BmNgUpm1R22as7oD/o+xtTJhp8/s
+k8PybdV4a3JufZcPZKCCA4+XPtxLejDBIpV8ndoriaz+qcR3pd0VaXVPC8qSGOoX
+IaYAQQx9UvenOIcpPfUMmtO7FilEZDaK7IQXPsTMoQKBgQDmqsTL3aL7a3OufCEo
+3dC5iv5bm3IcTPNA2kINh0k8n3nllfKQwZzyKqtT7uzKxo3XuQMF2NL9q6ZcwpPG
+BZCDBLoOGgnRZF5KzPArHoLUeI1KINGcVBpYZpxpS6ys3CNQFhov8wC/E7dys7+j
+jxZ70BKzKb+OceuVBzT3mrsRRwKBgQCjgzpIO2hidnhd1sRlI8Cd84HZzjy1mXvE
+g/q7f2Dyti6eHaCbrBysb/Dg+OLiJ0qarV+0qx63lgyYDyWLrYeIfaVIlKAwKeJB
+5/6fNZ0vpPgGKUuPSxnxY+0plQzznO6ldwPWox1nj11pQlCCbnLyIsN03N6BT/Hu
+B1uwk+OZQwKBgQDdULvBXsYhtNcpcq/eJfU+EL475sl1nd9HOiJmGIeMQvcR8Ozr
+Ntj/ATGhNny7kgZGFJ1x3reR7D+SgJ6IQI6HJuHc5d7FqSdPXZKRzJR6h7AIj7SN
+6aPdbZZk8NachBrdnFdD6kOtEZ3Rz+TvaTqJUPqgLE4+vc7rDh8j8rHJwQKBgAJ5
+mgg93faflHLXLWHaiK/bX7vMQ178U8NFvCXaZ71ExK/gAu5YTJbPmvXMzqJdteNh
+fHFfpbdhrg8fK5JRrhuCy12t4j7YY3Rb7p66UQbHmHl/ZoVkvZ/Jw209tFR7q6EV
+jBlTnr5SjTdqqY1P3q2LmSnLrhKHA0J3GgwyMN/BAoGAbwJrqrTD5al5GZDmQLpF
+18ncYSp26/l4Na0vzAU+0JzNvbTyDLfWXXfx1+Ap4omI12gKp+hPVzvy4R2Pvw29
+vrSmKIluW/8NhCwyndJqmR3/TfLJNbVoCCP5PoXCJScCNia/4syxBHd+8B/Mbh/Q
+Vh3VsUe1aj3w3k/zgiHM7Ec=
+-----END RSA PRIVATE KEY-----
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/keystore.pem b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/keystore.pem
new file mode 100644
index 000000000..c541ef03a
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/keystore.pem
@@ -0,0 +1,26 @@
+-----BEGIN CERTIFICATE-----
+MIIEcTCCAtmgAwIBAgIUOGJE5uY0d4BxflhwhgzVZnYRZcwwDQYJKoZIhvcNAQEL
+BQAwYTEjMCEGCgmSJomT8ixkAQEME2MtMDRhNzdhNzMxOTYxZjYwMzkxFTATBgNV
+BAMMDE1hbmFnZW1lbnRDQTEjMCEGA1UECgwaRUpCQ0EgQ29udGFpbmVyIFF1aWNr
+c3RhcnQwHhcNMjEwMjEwMTUyMDI5WhcNMjMwMjEwMTUxMjA3WjB3MREwDwYDVQQD
+DAhvbmFwLm9yZzENMAsGA1UECwwET05BUDEZMBcGA1UECgwQTGludXgtRm91bmRh
+dGlvbjEWMBQGA1UEBwwNU2FuLUZyYW5jaXNjbzETMBEGA1UECAwKQ2FsaWZvcm5p
+YTELMAkGA1UEBhMCVVMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCT
+VPk0SJYjfGLZToXsNWVDQTjgsCJ/8YtPl4Z+kT0DJJO6CusCZnsTknr0exzu2WuX
+poC4Y7w79civ1sOWMqRI+wwNtXGDyoJPFCOAiWk8aeOS1mXM4g+tFZjDjMJbbjbe
+aQbFLOJw+1ri6PI7gQPz4pPGY+Yd8pKo8WubRSWWlM2HSKp0Fmdt5elmjSqBKJMh
+vv0SzDt7YwNvfOVCayGDyIe99trmalv+dpgP8WVSqm/hupDo4LwFcoZDrlphZWjD
+urRpPFqwyXB7VUp12Bu7LeFsxcGz9uVCnh1Ol2rWU9zHgI32r/9JbzWOqF+DdvQx
+J9Ce43Slxs+jlONjPfSVAgMBAAGjgYowgYcwDAYDVR0TAQH/BAIwADAfBgNVHSME
+GDAWgBSVNWKlCol8dwbm9DGRVjhySQviKTAnBgNVHSUEIDAeBggrBgEFBQcDAgYI
+KwYBBQUHAwQGCCsGAQUFBwMBMB0GA1UdDgQWBBQft80LFwbI2ltsHHs80O/Rre0Y
+LjAOBgNVHQ8BAf8EBAMCBeAwDQYJKoZIhvcNAQELBQADggGBAAIwbJHtize60YkA
+jW8r93wqcWA6xyTKXiR5JW5TDjboOjwwrpns/cga4rIAN+a1jxhM2dfQUbNiafAG
++4BwAxa3Oe/jgGGaKvB1RFaNZpbQ3zR9A97KB9LMK9jIPPZq4vOUIXmcpoKcW/wI
+Ubn6eXqPM+ikL4+NZyCgf/+WWoYUe57E9D1ftsZBDrxy5jGxngNYPtjOVir05bmd
+mLW0IPYRfrtyBowrK8tMksChvsxaSoODZBl7t2OSg7dZ8c808jQSMBcs2S+6+xDU
+37PwLcmwkq7jtSl5ujmR9WtHUpZApwazSboiGmxAoZBPpp9wTKWgy1xIATqcUCdx
+hkLWtdkOh4Kas5AZR3wDVzOLaLvzcdfZ7MD3+0hF5R4gFv4fgpwUm3rWV1eEu7xj
+nAO1gZNnVVdRpYY2Six9lpOpG81klBnd2DpcrZeP5eGi4ka3mqqSXW51jxUBk1dA
+rrgs3EMb/0h2a1HPJ5Vx7qfPMtUrouDUwtlE4R4QtXI+gPDYBA==
+-----END CERTIFICATE-----
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/truststore.pem b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/truststore.pem
new file mode 100644
index 000000000..bdc921182
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/truststore.pem
@@ -0,0 +1,28 @@
+-----BEGIN CERTIFICATE-----
+MIIEszCCAxugAwIBAgIUXdztVMaxBJq+K0DnVEn21jUaVUUwDQYJKoZIhvcNAQEL
+BQAwYTEjMCEGCgmSJomT8ixkAQEME2MtMDRhNzdhNzMxOTYxZjYwMzkxFTATBgNV
+BAMMDE1hbmFnZW1lbnRDQTEjMCEGA1UECgwaRUpCQ0EgQ29udGFpbmVyIFF1aWNr
+c3RhcnQwHhcNMjEwMjEwMTUyMDI5WhcNMzEwMjEwMTUyMDI5WjBhMSMwIQYKCZIm
+iZPyLGQBAQwTYy0wNGE3N2E3MzE5NjFmNjAzOTEVMBMGA1UEAwwMTWFuYWdlbWVu
+dENBMSMwIQYDVQQKDBpFSkJDQSBDb250YWluZXIgUXVpY2tzdGFydDCCAaIwDQYJ
+KoZIhvcNAQEBBQADggGPADCCAYoCggGBAMm52gFqo3WJliqiCdL9DpfaOgJI+S4e
+lp45i0laGUwCv4c93qECYvauV1W6bR2wMIb1684j7LBpr25TMyKT6ZeZ1qVwB9ri
+6XgdptVxw0ijGtUUKpf2ewbLqOyyiX20EEvfBD00luyhQizwsg8OcbbZcc/7pl/e
+o1GgQV9loF6bV9wBQeDt0KtZMnXeQoFejhqkYRDB61PXefqhHqpSxi1NVJJiSSIB
+udkFqdzhAVCu2QknNYRzt9zn1qchzwFuzZt5ureiVKk7ih7yIuw8eBm9RgFJBZO2
+48ZxlAQXlG5AUQN1sWSg0fPzgwO9AZLUP9b0iLhTQozXGEKhtjzF2EhUL2MvL/JY
+nx+tO88j1EdgmqUsoUUhBQsewju+8a5z3eqdtxqRhn0q2AM3WFdEqzMI43L0/Lwj
+jcPWqn9FmNXwtbNNK8EI3IxFLsooMAWceHpz9BQ9UNcq5jGyjE8ED8bGuorDxncl
+pCEkmjrbdpmk3YmKgDZ8hPY7O3eoEhES+QIDAQABo2MwYTAPBgNVHRMBAf8EBTAD
+AQH/MB8GA1UdIwQYMBaAFJU1YqUKiXx3Bub0MZFWOHJJC+IpMB0GA1UdDgQWBBSV
+NWKlCol8dwbm9DGRVjhySQviKTAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQEL
+BQADggGBAHVm2xjIPtD3qjHgGWBjT+4wwjbc2oAYtQoGzXGztvqtmaLLkMEV+F6p
+p1qQTNXn28fDC1hAhzI921xjIo4uya1mctnRvrcXy/tNq/nFqAGrTOxg0iO2Y+yJ
+Cwi7G3WooHgEsxBTOMIlD9uoUd1sowq6AHA2usKUHtAf7AMf1zHX082/GsD7w5wh
+gcB8pP8EBghYoVZ6NQLyzlBOAyacbWo5q505fDRs3bDeVVLVNN/pgS+uIFHhHhQ8
+PLYukbDJ09hPvPc+k4zTrbvQcOh7ftdKp5W3xRUDjmszMiXu7B7DXK48LGnD/vdg
+HQAII84zpu9JC1xlJAZfFIUvoLBjyYda3B6nxXr32bih1Sjpn72P9QVDmvKtpHUp
+f5pAzL8/y/bEuiaCvzauqC+eoXRi8hlOMzQ0S0xIANlJrQdwj/r/qwzeBW4Vbdo/
+k/VKx1KR8cfSXrXuTz0CITbZAcq5S6kD+z9iFmJrx2wdtTwXog9XLp1UcATUxxki
+w+5qVOtR4w==
+-----END CERTIFICATE-----
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.p12 b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.p12
new file mode 100644
index 000000000..bfe1637e0
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.p12
Binary files differ
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.pass b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.pass
new file mode 100644
index 000000000..1bce434bd
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.pass
@@ -0,0 +1 @@
+3vxKHIIQ9kEDQsZ6PBg4_LNb \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.p12 b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.p12
new file mode 100644
index 000000000..6bd0e2759
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.p12
Binary files differ
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.pass b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.pass
new file mode 100644
index 000000000..a3ecdf21b
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.pass
@@ -0,0 +1 @@
+B9BWYIw8YAHPRcF1lU9rZZUc \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/certservice/merger/merge-certs.env b/test/mocks/datafilecollector-testharness/certservice/merger/merge-certs.env
new file mode 100644
index 000000000..fb3fbf57d
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/merger/merge-certs.env
@@ -0,0 +1,4 @@
+KEYSTORE_SOURCE_PATHS=/opt/app/datafile/etc/keystore.p12:/opt/app/datafile/etc/keystore.pass
+TRUSTSTORES_PASSWORDS_PATHS=/opt/app/datafile/etc/cert/trust.pass:/opt/app/datafile/etc/truststore.pass
+TRUSTSTORES_PATHS=/opt/app/datafile/etc/cert/trust.jks:/opt/app/datafile/etc/truststore.p12
+KEYSTORE_DESTINATION_PATHS=/opt/app/datafile/etc/cert/cert.p12:/opt/app/datafile/etc/cert/p12.pass
diff --git a/test/mocks/datafilecollector-testharness/common/README.md b/test/mocks/datafilecollector-testharness/common/README.md
index bcd345739..13cbd46fd 100644
--- a/test/mocks/datafilecollector-testharness/common/README.md
+++ b/test/mocks/datafilecollector-testharness/common/README.md
@@ -1,220 +1,226 @@
-##Common test scripts and env file for test
+## Common test scripts and env file for test
-**test_env.sh**</br>
-Common env variables for test in the auto-test dir. Used by the auto test cases/suites but could be used for other test script as well.
+**test_env.sh**: Common env variables for test in the auto-test dir.
+Used by the auto test cases/suites but could be used for other test script as well.
-**testcase_common.sh**</br>
-Common functions for auto test cases in the auto-test dir. A subset of the functions could be used in other test scripts as well.
+**testcase_common.sh**: Common functions for auto test cases in the auto-test dir.
+A subset of the functions could be used in other test scripts as well.
-**testsuite_common.sh**</br>
-Common functions for auto test suites in the auto-test dir.
+**testsuite_common.sh**: Common functions for auto test suites in the auto-test dir.
-##Descriptions of functions in testcase_common.sh
+## Descriptions of functions in testcase_common.sh
The following is a list of the available functions in a test case file. Please see some of the defined test cases for examples.
-**log_sim_settings**</br>
+**log_sim_settings**:
Print the env variables needed for the simulators and their setup
-**clean_containers**</br>
+**clean_containers**:
Stop and remove all containers including dfc apps and simulators
-**start_simulators**</br>
+**start_simulators**:
Start all simulators in the simulator group
-**start_dfc <dfc-instance-id>**</br>
-Start the dfc application. The arg shall be an integer from 0 to 5 representing the dfc instance to start. DFC app will get a name like 'dfc_app0' to 'dfc_app4'.
+**start_dfc \<dfc-instance-id>**:
+Start the dfc application. The arg shall be an integer from 0 to 5 representing the
+dfc instance to start. DFC app will get a name like 'dfc_app0' to 'dfc_app4'.
-**kill_dfc <dfc-instance-id> **</br>
+**kill_dfc \<dfc-instance-id>**:
Stop and remove the dfc app container with the instance id.
-**consul_config_app <dfc-instance-id> <json-file-path>**</br>
-Configure consul with json file with app config for a dfc instance using the dfc instance id and the json file.
+**dfc_config_app \<dfc-instance-id> \<yaml-file-path>**:
+Apply app configuration for a dfc instance using the dfc
+instance id and the yaml file.
-**consul_config_dmaap <dfc-instance-id> <json-file-path>**</br>
-Configure consul with json file with dmaap config for a dfc instance using the dfc instance id and the json file.
-
-**kill_dr**</br>
+**kill_dr**:
Stop and remove the DR simulator container
-**kill_drr**</br>
+**kill_drr**:
Stop and remove the DR redir simulator container
-**kill_mr**</br>
+**kill_mr**:
Stop and remove the MR simulator container
-**kill_sftp <sftp-instance-id>**</br>
+**kill_sftp \<sftp-instance-id>**:
Stop and remove a SFTP container with the supplied instance id (0-5).
-**stop_sftp <sftp-instance-id>**</br>
+**stop_sftp \<sftp-instance-id>**:
Stop a SFTP container with the supplied instance id (0-5).
-**start_sftp <sftp-instance-id>**</br>
+**start_sftp \<sftp-instance-id>**:
Start a previously stopped SFTP container with the supplied instance id (0-5).
-**kill_ftps <ftps-instance-id>**</br>
-Stop and remove a FTPS container with the supplied instance id (0-5).
+**kill_ftpes \<ftpes-instance-id>**:
+Stop and remove a FTPES container with the supplied instance id (0-5).
+
+**stop_ftpes \<ftpes-instance-id>**:
+Stop a FTPES container with the supplied instance id (0-5).
+
+**start_ftpes \<ftpes-instance-id>**:
+Start a previously stopped FTPES container with the supplied instance id (0-5).
-**stop_ftps <ftps-instance-id>**</br>
-Stop a FTPS container with the supplied instance id (0-5).
+**kill_http_https \<http-instance-id>**:
+Stop and remove a HTTP/HTTPS container with the supplied instance id (0-5).
-**start_ftps <ftps-instance-id>**</br>
-Start a previously stopped FTPS container with the supplied instance id (0-5).
+**stop_http_https \<http-instance-id>**:
+Stop a HTTP/HTTPS container with the supplied instance id (0-5).
-**mr_print <vaiable-name>**</br>
+**start_http_https \<http-instance-id>**:
+Start a previously stopped HTTP/HTTPS container with the supplied instance id (0-5).
+
+**mr_print \<variable-name>**:
Print a variable value from the MR simulator.
-**dr_print <vaiable-name>**</br>
+**dr_print \<variable-name>**:
Print a variable value from the DR simulator.
-**drr_print <vaiable-name>**</br>
+**drr_print \<variable-name>**:
Print a variable value from the DR redir simulator.
-**dfc_print <dfc-instance-id> <vaiable-name>**</br>
+**dfc_print \<dfc-instance-id> \<variable-name>**:
Print a variable value from an dfc instance with the supplied instance id (0-5).
-**mr_read <vaiable-name>**</br>
+**mr_read \<variable-name>**:
Read a variable value from MR sim and send to stdout
-**dr_read <vaiable-name>**</br>
+**dr_read \<variable-name>**:
Read a variable value from DR sim and send to stdout
-**drr_read <vaiable-name>**</br>
+**drr_read \<variable-name>**:
Read a variable value from DR redir sim and send to stdout
-**sleep_wait <sleep-time-in-sec>**</br>
+**sleep_wait \<sleep-time-in-sec>**:
Sleep for a number of seconds
-**sleep_heartbeat <sleep-time-in-sec>**</br>
+**sleep_heartbeat \<sleep-time-in-sec>**:
Sleep for a number of seconds and prints dfc heartbeat output every 30 sec
-**mr_equal <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**mr_equal \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the MR simulator is equal to a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
equal to the targer or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value becomes equal to the target
value or not.
-**mr_greater <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**mr_greater \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the MR simulator is greater than a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
greater the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is greater than the target
value or not.
-**mr_less <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**mr_less \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the MR simulator is less than a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
less than the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is less than the target
value or not.
-**mr_contain_str <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**mr_contain_str \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the MR simulator contains a substring target and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable contains
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable contains
the target substring or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value contains the target
substring or not.
-**dr_equal <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**dr_equal \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR simulator is equal to a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
equal to the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value becomes equal to the target
value or not.
-**dr_greater <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**dr_greater \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR simulator is greater than a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
greater the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is greater than the target
value or not.
-**dr_less <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**dr_less \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR simulator is less than a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
less than the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is less than the target
value or not.
-**dr_contain_str <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**dr_contain_str \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR simulator contains a substring target and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable contains
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable contains
the target substring or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value contains the target
substring or not.
-**drr_equal <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**drr_equal \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR Redir simulator is equal to a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
equal to the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value becomes equal to the target
value or not.
-**drr_greater <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**drr_greater \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR Redir simulator is greater than a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
greater the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is greater than the target
value or not.
-**drr_less <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**drr_less \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR Redir simulator is less than a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
less than the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is less than the target
value or not.
-**drr_contain_str <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**drr_contain_str \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR Redir simulator contains a substring target and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable contains
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable contains
the target substring or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value contains the target
substring or not.
-**dfc_contain_str <variable-name> <substring-in-quotes>**</br>
+**dfc_contain_str \<variable-name> \<substring-in-quotes>**:
Test if a variable in the DFC contains a substring.
-**store_logs <log-prefix>**</br>
+**store_logs \<log-prefix>**:
Store all dfc app and simulators log to the test case log dir. All logs get a prefix to
separate logs stored at different steps in the test script.
If logs need to be stored in several locations, use different prefix to easily identify the location
when the logs where taken.
-**check_dfc_log**</br>
+**check_dfc_log**:
Check the dfc application log for WARN and ERR messages and print the count.
-**print_result**</br>
+**print_result**:
Print the test result. Only once at the very end of the script.
-**print_all**</br>
+**print_all**:
Print all variables from the simulators and the dfc heartbeat.
In addition, comment in the file can be added using the normal comment sign in bash '#'.
-Comments that shall be visible on the screen as well as in the test case log, use ``echo "<msg>"``.
-
+Comments that shall be visible on the screen as well as in the test case log, use `echo "<msg>"`.
-##Descriptions of functions in testsuite_common.sh
+## Descriptions of functions in testsuite_common.sh
The following is a list of the available functions in a test suite file. Please see a existing test suite for examples.
-**suite_setup**</br>
+**suite_setup**:
Sets up the test suite and print out a heading.
-**run_tc <tc-script> <$1 from test suite script> <$2 from test suite script>**</br>
+**run_tc \<tc-script> <$1 from test suite script> <$2 from test suite script>**:
Execute a test case with arg from test suite script
-**suite_complete**</br>
-Print out the overall result of the executed test cases. \ No newline at end of file
+**suite_complete**:
+Print out the overall result of the executed test cases.
diff --git a/test/mocks/datafilecollector-testharness/common/test_env.sh b/test/mocks/datafilecollector-testharness/common/test_env.sh
index 1a97ffc73..f76af323f 100644
--- a/test/mocks/datafilecollector-testharness/common/test_env.sh
+++ b/test/mocks/datafilecollector-testharness/common/test_env.sh
@@ -1,4 +1,13 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
# This env variable is only needed if the auto test scripts tests are executed in a different folder than 'auto-test' in the integration repo
# Change '<local-path>' to your path to the integration repo. In addition to the auto-test, the 'common' dir is needed if not executed in the
@@ -22,9 +31,8 @@ DFC_PORT=8100 #Up to five dfc apps can be used, dfc_app
DFC_PORT_SECURE=8433 #Up to five dfc apps can be used, dfc_app0 will be mapped to 8433 on local machine for hhtps, dfc_app1 mapped to 8434 etc
DFC_LOGPATH="/var/log/ONAP/application.log" #Path the application log in the dfc container
DOCKER_SIM_NWNAME="dfcnet" #Name of docker private network
-CONSUL_HOST="consul-server" #Host name of consul
-CONSUL_PORT=8500 #Port number of consul
CONFIG_BINDING_SERVICE="config-binding-service" #Host name of CBS
+CONFIG_BINDING_SERVICE_SERVICE_PORT=10000 #CBS port
MR_PORT=2222 #MR simulator port number http
DR_PORT=3906 #DR simulator port number http
DR_PORT_SECURE=3907 #DR simulator port number for https
@@ -34,24 +42,46 @@ DFC_APP_BASE="dfc_app" #Base name of the dfc containers. Instanc
DFC_MAX_NUM=5 #Max number of dfc containers to run in paralell in auto test
DFC_MAX_IDX=$(($DFC_MAX_NUM - 1)) #Max index of the dfc containers
SFTP_BASE="dfc_sftp-server" #Base name of the dfc_sftp-server containers. Instance 0 will be named dfc_sftp-server0, instance 1 will named dfc_sftp-server1 etc
-FTPS_BASE="dfc_ftpes-server-vsftpd" #Base name of the dfc_ftpes-server-vsftpd containers. Instance 0 will be named dfc_ftpes-server-vsftpd0, instance 1 will named dfc_ftpes-server-vsftpd1 etc
-FTP_MAX_NUM=5 #Max number of sftp and ftps containers to run in paralell in auto test
-FTP_MAX_IDX=$(($FTP_MAX_NUM - 1)) #Max index of sftp and ftps containers
+FTPES_BASE="dfc_ftpes-server-vsftpd" #Base name of the dfc_ftpes-server-vsftpd containers. Instance 0 will be named dfc_ftpes-server-vsftpd0, instance 1 will named dfc_ftpes-server-vsftpd1 etc
+HTTP_HTTPS_BASE="dfc_http-https-server" #Base name of the dfc_http-https-server containers. Instance 0 will be named dfc_http-https-server0, instance 1 will named dfc_http-https-server1 etc
+FTP_MAX_NUM=5 #Max number of sftp and ftpes containers to run in parallel in auto test
+HTTP_MAX_NUM=5 #Max number of http/https containers to run in parallel in auto test
+FTP_MAX_IDX=$(($FTP_MAX_NUM - 1)) #Max index of sftp and ftpes containers
+HTTP_MAX_IDX=$(($HTTP_MAX_NUM - 1)) #Max index of http/https containers
#List of sftp server name and port number, used by MR sim to produce file urls. Theses server names and ports are used when running dfc and the simulators in a private docker network
SFTP_SIMS_CONTAINER="sftp-server0:22,sftp-server1:22,sftp-server2:22,sftp-server3:22,sftp-server4:22"
#List of sftp server name and port number, used by MR sim to produce file urls. Theses server names and ports are used when running dfc and the simulators in a private docker network
-FTPS_SIMS_CONTAINER="ftpes-server-vsftpd0:21,ftpes-server-vsftpd1:21,ftpes-server-vsftpd2:21,ftpes-server-vsftpd3:21,ftpes-server-vsftpd4:21"
+FTPES_SIMS_CONTAINER="ftpes-server-vsftpd0:21,ftpes-server-vsftpd1:21,ftpes-server-vsftpd2:21,ftpes-server-vsftpd3:21,ftpes-server-vsftpd4:21"
+
+#List of http/https/https with no authorization/with jwt token server name and port number, used by MR sim to produce file urls. These server names and ports are used when running dfc and the simulators in a private docker network
+HTTP_SIMS_CONTAINER="http-https-server0:80,http-https-server1:80,http-https-server2:80,http-https-server3:80,http-https-server4:80"
+HTTP_JWT_SIMS_CONTAINER="http-https-server0:32000,http-https-server1:32000,http-https-server2:32000,http-https-server3:32000,http-https-server4:32000"
+HTTPS_SIMS_CONTAINER="http-https-server0:443,http-https-server1:443,http-https-server2:443,http-https-server3:443,http-https-server4:443"
+HTTPS_SIMS_NO_AUTH_CONTAINER="http-https-server0:8080,http-https-server1:8080,http-https-server2:8080,http-https-server3:8080,http-https-server4:8080"
+HTTPS_JWT_SIMS_CONTAINER="http-https-server0:32100,http-https-server1:32100,http-https-server2:32100,http-https-server3:32100,http-https-server4:32100"
#List of sftp server name and port number, used by MR sim to produce file urls. Theses server names and ports are used when running dfc as stand along app and the simulators in a private docker network
SFTP_SIMS_LOCALHOST="localhost:1022,localhost:1023,localhost:1024,localhost:1025,localhost:1026"
-#List of ftps server name and port number, used by MR sim to produce file urls. Theses server names and ports are used when running dfc as stand along app and the simulators in a private docker network
-FTPS_SIMS_LOCALHOST="localhost:1032,localhost:1033,localhost:1034,localhost:1035,localhost:1036"
+#List of ftpes server name and port number, used by MR sim to produce file urls. Theses server names and ports are used when running dfc as stand along app and the simulators in a private docker network
+FTPES_SIMS_LOCALHOST="localhost:1032,localhost:1033,localhost:1034,localhost:1035,localhost:1036"
+
+#List of http/https/https with no authorization/with jwt token server name and port number, used by MR sim to produce file urls. These server names and ports are used when running dfc as stand along app and the simulators in a private docker network
+HTTP_SIMS_LOCALHOST="localhost:81,localhost:82,localhost:83,localhost:84,localhost:85"
+HTTP_JWT_SIMS_LOCALHOST="localhost:32001,localhost:32002,localhost:32003,localhost:32004,localhost:32005"
+HTTPS_SIMS_LOCALHOST="localhost:444,localhost:445,localhost:446,localhost:447,localhost:448"
+HTTPS_SIMS_NO_AUTH_LOCALHOST="localhost:8081,localhost:8082,localhost:8083,localhost:8084,localhost:8085"
+HTTPS_JWT_SIMS_LOCALHOST="localhost:32101,localhost:32102,localhost:32103,localhost:32104,localhost:32105"
export SFTP_SIMS=$SFTP_SIMS_CONTAINER #This env will be set to SFTP_SIMS_LOCALHOST if auto test is executed with 'manual-app'
-export FTPS_SIMS=$FTPS_SIMS_CONTAINER #This env will be set to FTPS_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+export FTPES_SIMS=$FTPES_SIMS_CONTAINER #This env will be set to FTPES_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+export HTTP_SIMS=$HTTP_SIMS_CONTAINER #This env will be set to HTTP_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+export HTTP_JWT_SIMS=$HTTP_JWT_SIMS_CONTAINER #This env will be set to HTTP_JWT_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+export HTTPS_SIMS=$HTTPS_SIMS_CONTAINER #This env will be set to HTTPS_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+export HTTPS_SIMS_NO_AUTH=$HTTPS_SIMS_NO_AUTH_CONTAINER #This env will be set to HTTPS_SIMS_NO_AUTH_LOCALHOST if auto test is executed with 'manual-app'
+export HTTPS_JWT_SIMS=$HTTPS_JWT_SIMS_CONTAINER #This env will be set to HTTPS_JWT_SIMS_LOCALHOST if auto test is executed with 'manual-app'
#Host name of the DR redirect simulator
export DR_REDIR_SIM="drsim_redir" #This env will be set to 'localhost' if auto test is executed with arg 'manual-app'
diff --git a/test/mocks/datafilecollector-testharness/common/testcase_common.sh b/test/mocks/datafilecollector-testharness/common/testcase_common.sh
index a1e092157..ba665f655 100755
--- a/test/mocks/datafilecollector-testharness/common/testcase_common.sh
+++ b/test/mocks/datafilecollector-testharness/common/testcase_common.sh
@@ -1,4 +1,13 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
. ../common/test_env.sh
@@ -151,39 +160,44 @@ fi
echo ""
-echo "Building images for the simulators if needed, MR, DR, DR Redir and FTPS simulators"
+echo "Building images for the simulators if needed, MR, DR, DR Redir and FTPES."
+echo "For HTTP simulator prebuilt containers exist in nexus repo."
curdir=$PWD
cd $SIM_GROUP
cd ../dr-sim
docker build -t drsim_common:latest . &> /dev/null
cd ../mr-sim
docker build -t mrsim:latest . &> /dev/null
-cd ../ftps-sftp-server
-docker build -t ftps_vsftpd:latest -f Dockerfile-ftps . &> /dev/null
+cd ../ftpes-sftp-server
+docker build -t ftpes_vsftpd:latest -f Dockerfile-ftpes . &> /dev/null
cd $curdir
echo ""
echo "Local registry images for simulators:"
-echo "MR simulator " $(docker images | grep mrsim)
-echo "DR simulator: " $(docker images | grep drsim_common)
-echo "DR redir simulator: " $(docker images | grep drsim_common)
-echo "SFTP: " $(docker images | grep atmoz/sftp)
-echo "FTPS: " $(docker images | grep ftps_vsftpd)
-echo "Consul: " $(docker images | grep consul)
-echo "CBS: " $(docker images | grep platform.configbinding.app)
+echo "MR simulator " $(docker images | grep mrsim)
+echo "DR simulator: " $(docker images | grep drsim_common)
+echo "DR redir simulator: " $(docker images | grep drsim_common)
+echo "SFTP: " $(docker images | grep atmoz/sftp)
+echo "FTPES: " $(docker images | grep ftpes_vsftpd)
+echo "HTTP/HTTPS/HTTPS no auth: " $(docker images | grep http_https_httpd)
echo ""
#Configure MR sim to use correct host:port for running dfc as an app or as a container
#Configure DR sim with correct address for DR redirect simulator
if [ $START_ARG == "manual-app" ]; then
export SFTP_SIMS=$SFTP_SIMS_LOCALHOST
- export FTPS_SIMS=$FTPS_SIMS_LOCALHOST
+ export FTPES_SIMS=$FTPES_SIMS_LOCALHOST
+ export HTTP_SIMS=$HTTP_SIMS_LOCALHOST
+ export HTTP_JWT_SIMS=$HTTP_JWT_SIMS_LOCALHOST
+ export HTTPS_SIMS=$HTTPS_SIMS_LOCALHOST
+ export HTTPS_SIMS_NO_AUTH=$HTTPS_SIMS_NO_AUTH_LOCALHOST
+ export HTTPS_JWT_SIMS=$HTTPS_JWT_SIMS_LOCALHOST
export DR_REDIR_SIM="localhost"
fi
#else
# export SFTP_SIMS=$SFTP_SIMS_CONTAINER
-# export FTPS_SIMS=$FTPS_SIMS_CONTAINER
+# export FTPES_SIMS=$FTPES_SIMS_CONTAINER
# export DR_REDIR_SIM="drsim_redir"
#fi
@@ -204,7 +218,7 @@ __do_curl() {
echo "<no-response-from-server>"
return 1
else
- if [ $http_code -lt 200 ] && [ $http_code -gt 299]; then
+ if [ $http_code -lt 200 ] || [ $http_code -gt 299 ]; then
echo "<not found, resp:${http_code}>"
return 1
fi
@@ -370,12 +384,12 @@ __start_dfc_image() {
localport=$(($DFC_PORT + $2))
localport_secure=$(($DFC_PORT_SECURE + $2))
- echo "Creating docker network $DOCKER_SIM_NWNAME, if needed"
+ echo "Creating docker network "$DOCKER_SIM_NWNAME", if needed"
- docker network ls| grep $DOCKER_SIM_NWNAME > /dev/null || docker network create $DOCKER_SIM_NWNAME
+ docker network ls| grep "$DOCKER_SIM_NWNAME" > /dev/null || docker network create "$DOCKER_SIM_NWNAME"
echo "Starting DFC: " $appname " with ports mapped to " $localport " and " $localport_secure " in docker network "$DOCKER_SIM_NWNAME
- docker run -d --volume $(pwd)/../simulator-group/tls/:/opt/app/datafile/etc/cert/ -p $localport":8100" -p $localport_secure":8433" --network=$DOCKER_SIM_NWNAME -e CONSUL_HOST=$CONSUL_HOST -e CONSUL_PORT=$CONSUL_PORT -e CONFIG_BINDING_SERVICE=$CONFIG_BINDING_SERVICE -e HOSTNAME=$appname --name $appname $DFC_IMAGE
+ docker run -d --volume $(pwd)/../simulator-group/tls/:/opt/app/datafile/etc/cert/ --volume $(pwd)/../simulator-group/dfc_config_volume/:/app-config/ -p $localport":8100" -p $localport_secure":8433" --network=$DOCKER_SIM_NWNAME -e CONFIG_BINDING_SERVICE=$CONFIG_BINDING_SERVICE -e CONFIG_BINDING_SERVICE_SERVICE_PORT=$CONFIG_BINDING_SERVICE_SERVICE_PORT -e HOSTNAME=$appname --name $appname $DFC_IMAGE
sleep 3
set +x
dfc_started=false
@@ -473,8 +487,6 @@ __wait_for_dfc() {
http=$(($DFC_PORT+$2))
https=$((DFC_PORT_SECURE+$2))
echo "The app is expected to listen to http port ${http} and https port ${https}"
- echo "The app shall use 'localhost' and '8500' for CONSUL_HOST and CONSUL_PORT."
- echo "The app shale use 'config-binding-service-localhost' for CONFIG_BINDING_SERVICE"
echo "The app shall use ${1} for HOSTNAME."
read -p "Press enter to continue when app mapping to ${1} has been manually started"
}
@@ -501,13 +513,22 @@ log_sim_settings() {
echo "DR_REDIR_FEEDS= "$DR_REDIR_FEEDS
echo "NUM_FTPFILES= "$NUM_FTPFILES
+ echo "NUM_HTTPFILES= "$NUM_HTTPFILES
echo "NUM_PNFS= "$NUM_PNFS
echo "FILE_SIZE= "$FILE_SIZE
echo "FTP_TYPE= "$FTP_TYPE
+ echo "HTTP_TYPE= "$HTTP_TYPE
echo "FTP_FILE_PREFIXES= "$FTP_FILE_PREFIXES
+ echo "HTTP_FILE_PREFIXES= "$HTTP_FILE_PREFIXES
echo "NUM_FTP_SERVERS= "$NUM_FTP_SERVERS
+ echo "NUM_HTTP_SERVERS= "$NUM_HTTP_SERVERS
echo "SFTP_SIMS= "$SFTP_SIMS
- echo "FTPS_SIMS= "$FTPS_SIMS
+ echo "FTPES_SIMS= "$FTPES_SIMS
+ echo "HTTP_SIMS= "$HTTP_SIMS
+ echo "HTTP_JWT_SIMS= "$HTTP_JWT_SIMS
+ echo "HTTPS_SIMS= "$HTTPS_SIMS
+ echo "HTTPS_SIMS_NO_AUTH= "$HTTPS_SIMS_NO_AUTH
+ echo "HTTPS_JWT_SIMS= "$HTTPS_JWT_SIMS
echo ""
}
@@ -517,6 +538,7 @@ clean_containers() {
docker stop $(docker ps -q --filter name=dfc_) &> /dev/null
echo "Removing all containers, dfc app and simulators with name prefix 'dfc_'"
docker rm $(docker ps -a -q --filter name=dfc_) &> /dev/null
+ docker rm -f $(docker ps -a -q --filter name=oom-certservice-post-processor) &> /dev/null
echo "Removing unused docker networks with substring 'dfc' in network name"
docker network rm $(docker network ls -q --filter name=dfc)
echo ""
@@ -528,6 +550,7 @@ start_simulators() {
echo "Starting all simulators"
curdir=$PWD
cd $SIM_GROUP
+ export SIM_GROUP=$SIM_GROUP
$SIM_GROUP/simulators-start.sh
cd $curdir
echo ""
@@ -557,12 +580,12 @@ start_dfc() {
fi
}
-# Configure consul with dfc config, args <dfc-instance-id> <json-file-path>
+# Configure volume with dfc config, args <dfc-instance-id> <yaml-file-path>
# Not intended to be called directly by test scripts.
-__consul_config() {
+__dfc_config() {
if [ $# != 2 ]; then
- __print_err "need two args, <dfc-instance-id> <json-file-path>"
+ __print_err "need two args, <dfc-instance-id> <yaml-file-path>"
exit 1
fi
@@ -571,26 +594,27 @@ __consul_config() {
exit 1
fi
if ! [ -f $2 ]; then
- __print_err "json file does not extis: "$2
+ __print_err "yaml file does not exist: "$2
exit 1
fi
appname=$DFC_APP_BASE$1
- echo "Configuring consul for " $appname " from " $2
- curl -s http://127.0.0.1:${CONSUL_PORT}/v1/kv/${appname}?dc=dc1 -X PUT -H 'Accept: application/json' -H 'Content-Type: application/json' -H 'X-Requested-With: XMLHttpRequest' --data-binary "@"$2 >/dev/null
+ echo "Applying configuration for " $appname " from " $2
+ mkdir -p $(pwd)/../simulator-group/dfc_config_volume/
+ cp $2 $(pwd)/../simulator-group/dfc_config_volume/application_config.yaml
}
-# Configure consul with dfc app config, args <dfc-instance-id> <json-file-path>
-consul_config_app() {
+# Configure volume with dfc app config, args <dfc-instance-id> <yaml-file-path>
+dfc_config_app() {
if [ $START_ARG == "manual-app" ]; then
- echo "Replacing 'mrsim' with 'localhost' in json app config for consul"
- sed 's/mrsim/localhost/g' $2 > .tmp_app.json
- echo "Replacing 'drsim' with 'localhost' in json dmaap config for consul"
- sed 's/drsim/localhost/g' .tmp_app.json > .app.json
- __consul_config $1 .app.json
+ echo "Replacing 'mrsim' with 'localhost' in yaml app config"
+ sed 's/mrsim/localhost/g' $2 > .tmp_app.yaml
+ echo "Replacing 'drsim' with 'localhost' in yaml dmaap config"
+ sed 's/drsim/localhost/g' .tmp_app.yaml > .app.yaml
+ __dfc_config $1 .app.yaml
else
- __consul_config $1 $2
+ __dfc_config $1 $2
fi
}
@@ -618,6 +642,8 @@ kill_dfc() {
elif [ $START_ARG == "manual-app" ]; then
__wait_for_dfc_gone $appname
fi
+
+ rm -rf $(pwd)/../simulator-group/dfc_config_volume
}
# Stop and remove the DR simulator container
@@ -699,11 +725,11 @@ start_sftp() {
__docker_start $appname
}
-# Stop and remove the FTPS container, arg: <ftps-instance-id>
-kill_ftps() {
+# Stop and remove the FTPES container, arg: <ftpes-instance-id>
+kill_ftpes() {
if [ $# != 1 ]; then
- __print_err "need one arg, <ftpS-instance-id>"
+ __print_err "need one arg, <ftpes-instance-id>"
exit 1
fi
@@ -711,19 +737,19 @@ kill_ftps() {
__print_err "arg should be 0.."$FTP_MAX_IDX
exit 1
fi
- appname=$FTPS_BASE$1
+ appname=$FTPES_BASE$1
- echo "Killing FTPS, instance id: "$1
+ echo "Killing FTPES, instance id: "$1
__docker_stop $appname
__docker_rm $appname
}
-# Stop FTPS container, arg: <ftps-instance-id>
-stop_ftps() {
+# Stop FTPES container, arg: <ftpes-instance-id>
+stop_ftpes() {
if [ $# != 1 ]; then
- __print_err "need one arg, <ftps-instance-id>"
+ __print_err "need one arg, <ftpes-instance-id>"
exit 1
fi
@@ -731,18 +757,18 @@ stop_ftps() {
__print_err "arg should be 0.."$FTP_MAX_IDX
exit 1
fi
- appname=$FTPS_BASE$1
+ appname=$FTPES_BASE$1
- echo "Stopping FTPS, instance id: "$1
+ echo "Stopping FTPES, instance id: "$1
__docker_stop $appname
}
-# Starts a stopped FTPS container, arg: <ftps-instance-id>
-start_ftps() {
+# Starts a stopped FTPES container, arg: <ftpes-instance-id>
+start_ftpes() {
if [ $# != 1 ]; then
- __print_err "need one arg, <ftps-instance-id>"
+ __print_err "need one arg, <ftpes-instance-id>"
exit 1
fi
@@ -750,9 +776,67 @@ start_ftps() {
__print_err "arg should be 0.."$FTP_MAX_IDX
exit 1
fi
- appname=$FTPS_BASE$1
+ appname=$FTPES_BASE$1
- echo "Starting FTPS, instance id: "$1
+ echo "Starting FTPES, instance id: "$1
+
+ __docker_start $appname
+}
+
+# Stop and remove the HTTP container, arg: <http-instance-id>
+kill_http_https() {
+
+ if [ $# != 1 ]; then
+ __print_err "need one arg, <http-instance-id>"
+ exit 1
+ fi
+
+ if [ $1 -lt 0 ] || [ $1 -gt $HTTP_MAX_IDX ]; then
+ __print_err "arg should be 0.."$HTTP_MAX_IDX
+ exit 1
+ fi
+ appname=$HTTP_HTTPS_BASE$1
+
+ echo "Killing HTTP/HTTPS, instance id: "$1
+
+ __docker_stop $appname
+ __docker_rm $appname
+}
+
+# Stop HTTP container, arg: <http-instance-id>
+stop_http_https() {
+
+ if [ $# != 1 ]; then
+ __print_err "need one arg, <http-instance-id>"
+ exit 1
+ fi
+
+ if [ $1 -lt 0 ] || [ $1 -gt $HTTP_MAX_IDX ]; then
+ __print_err "arg should be 0.."$HTTP_MAX_IDX
+ exit 1
+ fi
+ appname=$HTTP_HTTPS_BASE$1
+
+ echo "Stopping HTTP/HTTPS, instance id: "$1
+
+ __docker_stop $appname
+}
+
+# Starts a stopped HTTP container, arg: <http-instance-id>
+start_http_https() {
+
+ if [ $# != 1 ]; then
+ __print_err "need one arg, <http-instance-id>"
+ exit 1
+ fi
+
+ if [ $1 -lt 0 ] || [ $1 -gt $HTTP_MAX_IDX ]; then
+ __print_err "arg should be 0.."$HTTP_MAX_IDX
+ exit 1
+ fi
+ appname=$HTTP_HTTPS_BASE$1
+
+ echo "Starting HTTP/HTTPS, instance id: "$1
__docker_start $appname
}
@@ -1128,12 +1212,15 @@ store_logs() {
for (( i=0; i<=$FTP_MAX_IDX; i++ )); do
appname=$SFTP_BASE$i
docker logs $appname > $TESTLOGS/$ATC/${1}_${appname}.log 2>&1
- appname=$FTPS_BASE$i
+ appname=$FTPES_BASE$i
+ docker logs $appname > $TESTLOGS/$ATC/${1}_${appname}.log 2>&1
+ done
+
+ for (( i=0; i<=$HTTP_MAX_IDX; i++ )); do
+ appname=$HTTP_HTTPS_BASE$i
docker logs $appname > $TESTLOGS/$ATC/${1}_${appname}.log 2>&1
done
- docker logs dfc_consul > $TESTLOGS/$ATC/$1_consul.log 2>&1
- docker logs dfc_cbs > $TESTLOGS/$ATC/$1_cbs.log 2>&1
}
# Check the dfc application log, for all dfc instances, for WARN and ERR messages and print the count.
check_dfc_logs() {
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile b/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile
index cbe30366c..b429c72fe 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile
+++ b/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile
@@ -1,6 +1,6 @@
#Common image for both dmmapDR and dmaapDR_redir
-FROM node:12
+FROM node:14
WORKDIR /app
@@ -17,4 +17,4 @@ RUN npm install argparse
#Ports for DR redir
#EXPOSE 3908
-#EXPOSE 3909 \ No newline at end of file
+#EXPOSE 3909
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/README.md b/test/mocks/datafilecollector-testharness/dr-sim/README.md
index a258ed46d..4e7273a11 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/README.md
+++ b/test/mocks/datafilecollector-testharness/dr-sim/README.md
@@ -1,77 +1,106 @@
-###Run DR simulators as docker container
-1. Build docker container with ```docker build -t drsim_common:latest .```
-2. Run the container ```docker-compose up```
+# Run DR simulators as docker container
+
+1. Build docker container with `docker build -t drsim_common:latest .`
+2. Run the container `docker-compose up`
3. For specific behavior of of the simulators, add arguments to the `command` entries in the `docker-compose.yml`.
+
For example `command: node dmaapDR.js --tc no_publish` . (No argument will assume '--tc normal'). Run `node dmaapDR.js --printtc`
and `node dmaapDR-redir.js --printtc` for details or see further below for the list of possible arg to the simulator
-###Run DR simulators and all other simulators as one group
+# Run DR simulators and all other simulators as one group
+
See the README in the 'simulator-group' dir.
-###Run DR simulators from cmd line
+# Run DR simulators from cmd line
+
1. install nodejs
2. install npm
+
Make sure that you run these commands in the application directory "dr-sim"
+
3. `npm install express`
4. `npm install argparse`
5. `node dmaapDR.js` #keep it in the foreground, see below for a list for arg to the simulator
6. `node dmaapDR_redir.js` #keep it in the foreground, see below for a list for arg to the simulator
-###Arg to control the behavior of the simulators
+# Arg to control the behavior of the simulators
+
+## DR
+
+\--tc tc_normal Normal case, query response based on published files. Publish respond with ok/redirect depending on if file is published or not.</br>
+
+\--tc tc_none_published Query respond 'ok'. Publish respond with redirect.</br>
+
+\--tc tc_all_published Query respond with filename. Publish respond with 'ok'.</br>
+
+\--tc tc_10p_no_response 10% no response for query and publish. Otherwise normal case.</br>
+
+\--tc tc_10first_no_response 10 first queries and requests gives no response for query and publish. Otherwise normal case.</br>
+
+\--tc tc_100first_no_response 100 first queries and requests gives no response for query and publish. Otherwise normal case.</br>
+
+\--tc tc_all_delay_1s All responses delayed 1s (both query and publish).</br>
+
+\--tc tc_all_delay_10s All responses delayed 10s (both query and publish).</br>
+
+\--tc tc_10p_delay_10s 10% of responses delayed 10s, (both query and publish).</br>
-**DR**
+\--tc tc_10p_error_response 10% error response for query and publish. Otherwise normal case.</br>
- --tc tc_normal Normal case, query response based on published files. Publish respond with ok/redirect depending on if file is published or not.</br>
- --tc tc_none_published Query respond 'ok'. Publish respond with redirect.</br>
- --tc tc_all_published Query respond with filename. Publish respond with 'ok'.</br>
- --tc tc_10p_no_response 10% % no response for query and publish. Otherwise normal case.</br>
- --tc tc_10first_no_response 10 first queries and requests gives no response for query and publish. Otherwise normal case.</br>
- --tc tc_100first_no_response 100 first queries and requests gives no response for query and publish. Otherwise normal case.</br>
- --tc tc_all_delay_1s All responses delayed 1s (both query and publish).</br>
- --tc tc_all_delay_10s All responses delayed 10s (both query and publish).</br>
- --tc tc_10p_delay_10s 10% of responses delayed 10s, (both query and publish).</br>
- --tc tc_10p_error_response 10% error response for query and publish. Otherwise normal case.</br>
- --tc tc_10first_error_response 10 first queries and requests gives no response for query and publish. Otherwise normal case.</br>
- --tc tc_100first_error_response 100 first queries and requests gives no response for query and publish. Otherwise normal case.</br>
+\--tc tc_10first_error_response 10 first queries and requests gives no response for query and publish. Otherwise normal case.</br>
+\--tc tc_100first_error_response 100 first queries and requests gives no response for query and publish. Otherwise normal case.</br>
-**DR Redirect**
+## DR Redirect
- --tc_normal Normal case, all files publish and DR updated.</br>
- --tc_no_publish Ok response but no files published.</br>
- --tc_10p_no_response 10% % no response (file not published).</br>
- --tc_10first_no_response 10 first requests give no response (files not published).</br>
- --tc_100first_no_response 100 first requests give no response (files not published).</br>
- --tc_all_delay_1s All responses delayed 1s, normal publish.</br>
- --tc_all_delay_10s All responses delayed 10s, normal publish.</br>
- --tc_10p_delay_10s 10% of responses delayed 10s, normal publish.</br>
- --tc_10p_error_response 10% error response (file not published).</br>
- --tc_10first_error_response 10 first requests give error response (file not published).</br>
- --tc_100first_error_response 100 first requests give error responses (file not published).</br>
+\--tc_normal Normal case, all files publish and DR updated.</br>
+\--tc_no_publish Ok response but no files published.</br>
-###Needed environment
+\--tc_10p_no_response 10% no response (file not published).</br>
-DR
+\--tc_10first_no_response 10 first requests give no response (files not published).</br>
- DRR_SIM_IP Set to host name of the DR Redirect simulator "drsim_redir" if running the simulators in a docker private network. Otherwise to "localhost"
- DR_FEEDS A comma separated list of configured feednames and filetypes. Example "1:A,2:B:C" - Feed 1 for filenames beginning with A and feed2 for filenames beginning with B or C.
+\--tc_100first_no_response 100 first requests give no response (files not published).</br>
+
+\--tc_all_delay_1s All responses delayed 1s, normal publish.</br>
+
+\--tc_all_delay_10s All responses delayed 10s, normal publish.</br>
+
+\--tc_10p_delay_10s 10% of responses delayed 10s, normal publish.</br>
+
+\--tc_10p_error_response 10% error response (file not published).</br>
+
+\--tc_10first_error_response 10 first requests give error response (file not published).</br>
+
+\--tc_100first_error_response 100 first requests give error responses (file not published).</br>
+
+# Needed environment
+
+## DR
+
+```
+DRR_SIM_IP Set to host name of the DR Redirect simulator "drsim_redir" if running the simulators in a docker private network. Otherwise to "localhost"
+DR_FEEDS A comma separated list of configured feednames and filetypes. Example "1:A,2:B:C" - Feed 1 for filenames beginning with A and feed2 for filenames beginning with B or C.
+```
`DRR_SIM_IP` is needed for the redirected publish request to be redirected to the DR redirect server.
-DR Redirect (DRR for short)
+## DR Redirect (DRR for short)
- DR_SIM_IP Set to host name of the DR simulator "drsim" if running the simulators in a docker private network. Otherwise to "localhost"
- DR_REDIR_FEEDS Same contentd as DR_FEEDS for DR.
+```
+DR_SIM_IP Set to host name of the DR simulator "drsim" if running the simulators in a docker private network. Otherwise to "localhost"
+DR_REDIR_FEEDS Same content as DR_FEEDS for DR.
+```
The DR Redirect server send callback to DR server to update the list of successfully published files.
When running as container (using an ip address from the `dfc_net` docker network) the env shall be set to 'drsim'. . When running the servers from command line, set the env variable `DR_SIM_IP=localhost`
-###APIs for statistic readout
-The simulator can be queried for statistics (use curl from cmd line or open in browser, curl used below):
+# APIs for statistic readout
-DR
+The simulator can be queried for statistics (use curl from cmd line or open in browser, curl used below):
+## DR
`curl localhost:3906/` - returns 'ok'
@@ -135,9 +164,7 @@ DR
`curl localhost:3906/ctr_publish_query_bad_file_prefix/<feed>` - returns a list of the number of publish queries with bad file prefix for a feed
-
-DR Redirect
-
+## DR Redirect
`curl localhost:3908/` - returns 'ok'
@@ -178,6 +205,3 @@ DR Redirect
`curl localhost:3908/feeds/dwl_volume` - returns a list of the number of bytes of the published files for each feed
`curl localhost:3908/dwl_volume/<feed>` - returns the number of bytes of the published files for a feed
-
-
-
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/docker-compose.yml b/test/mocks/datafilecollector-testharness/dr-sim/docker-compose.yml
index 4d98c708e..e60a742c6 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/docker-compose.yml
+++ b/test/mocks/datafilecollector-testharness/dr-sim/docker-compose.yml
@@ -3,15 +3,15 @@ services:
drsim:
image: drsim_common:latest
ports:
- - "3906:3906"
- - "3907:3907"
+ - "3906:3906"
+ - "3907:3907"
container_name: drsim
command: node dmaapDR.js
drsim_redir:
image: drsim_common:latest
ports:
- - "3908:3908"
- - "3909:3909"
+ - "3908:3908"
+ - "3909:3909"
container_name: drsim_redir
- command: node dmaapDR_redir.js \ No newline at end of file
+ command: node dmaapDR_redir.js
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/package.json b/test/mocks/datafilecollector-testharness/dr-sim/package.json
index faebcc929..ad96f0a78 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/package.json
+++ b/test/mocks/datafilecollector-testharness/dr-sim/package.json
@@ -12,9 +12,9 @@
}
},
"argparse": {
- "version": "1.0.10",
- "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
- "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
+ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
"requires": {
"sprintf-js": "~1.0.2"
}
@@ -105,38 +105,38 @@
"integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc="
},
"express": {
- "version": "4.16.4",
- "resolved": "https://registry.npmjs.org/express/-/express-4.16.4.tgz",
- "integrity": "sha512-j12Uuyb4FMrd/qQAm6uCHAkPtO8FDTRJZBDd5D2KOL2eLaz1yUNdUB/NOIyq0iU4q4cFarsUCrnFDPBcnksuOg==",
+ "version": "4.17.1",
+ "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz",
+ "integrity": "sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==",
"requires": {
- "accepts": "~1.3.5",
+ "accepts": "~1.3.7",
"array-flatten": "1.1.1",
- "body-parser": "1.18.3",
- "content-disposition": "0.5.2",
+ "body-parser": "1.19.0",
+ "content-disposition": "0.5.3",
"content-type": "~1.0.4",
- "cookie": "0.3.1",
+ "cookie": "0.4.0",
"cookie-signature": "1.0.6",
"debug": "2.6.9",
"depd": "~1.1.2",
"encodeurl": "~1.0.2",
"escape-html": "~1.0.3",
"etag": "~1.8.1",
- "finalhandler": "1.1.1",
+ "finalhandler": "~1.1.2",
"fresh": "0.5.2",
"merge-descriptors": "1.0.1",
"methods": "~1.1.2",
"on-finished": "~2.3.0",
- "parseurl": "~1.3.2",
+ "parseurl": "~1.3.3",
"path-to-regexp": "0.1.7",
- "proxy-addr": "~2.0.4",
- "qs": "6.5.2",
- "range-parser": "~1.2.0",
+ "proxy-addr": "~2.0.5",
+ "qs": "6.7.0",
+ "range-parser": "~1.2.1",
"safe-buffer": "5.1.2",
- "send": "0.16.2",
- "serve-static": "1.13.2",
- "setprototypeof": "1.1.0",
- "statuses": "~1.4.0",
- "type-is": "~1.6.16",
+ "send": "0.17.1",
+ "serve-static": "1.14.1",
+ "setprototypeof": "1.1.1",
+ "statuses": "~1.5.0",
+ "type-is": "~1.6.18",
"utils-merge": "1.0.1",
"vary": "~1.1.2"
}
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/.gitignore b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/.gitignore
index bd6c5bed8..bd6c5bed8 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/.gitignore
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/.gitignore
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/Dockerfile-ftps b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/Dockerfile-ftpes
index a0d6cfafc..a0d6cfafc 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/Dockerfile-ftps
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/Dockerfile-ftpes
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/README.md b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/README.md
index 3bd67404a..44d329e76 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/README.md
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/README.md
@@ -1,27 +1,29 @@
-###Deployment of certificates: (in case of update)
+# Deployment of certificates: (in case of update)
This folder is prepared with a set of keys matching DfC for test purposes.
Copy from datafile-app-server/config/keys to the ./tls/ the following files:
-* dfc.crt
-* ftp.crt
-* ftp.key
+- dfc.crt
+- ftp.crt
+- ftp.key
-###Docker preparations
-Source: https://docs.docker.com/install/linux/linux-postinstall/
+# Docker preparations
+
+Source: <https://docs.docker.com/install/linux/linux-postinstall/>
`sudo usermod -aG docker $USER`
then logout-login to activate it.
-###Prepare files for the simulator
+# Prepare files for the simulator
+
Run `prepare.sh` with an argument found in `test_cases.yml` (or add a new tc in that file) to create files (1MB, 5MB and 50MB files) and a large number of
symbolic links to these files to simulate PM files. The files names maches the files in
the events produced by the MR simulator. The dirs with the files will be mounted
by the ftp containers, defined in the docker-compse file, when started
-###Starting/stopping the FTPS/SFTP server(s)
+# Starting/stopping the FTPES/SFTP server(s)
Start: `docker-compose up`
@@ -30,6 +32,6 @@ Stop: Ctrl +C, then `docker-compose down` or `docker-compose down --remove-orph
If you experience issues (or port collision), check the currently running other containers
by using 'docker ps' and stop them if necessary.
+# Cleaning docker structure
-###Cleaning docker structure
-Deep cleaning: `docker system prune` \ No newline at end of file
+Deep cleaning: `docker system prune`
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/configuration/vsftpd_ssl.conf b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/configuration/vsftpd_ssl.conf
index 0a24e38a8..0a24e38a8 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/configuration/vsftpd_ssl.conf
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/configuration/vsftpd_ssl.conf
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/docker-compose.yml b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/docker-compose.yml
index 466ca5642..e644f1e62 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/docker-compose.yml
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/docker-compose.yml
@@ -4,7 +4,7 @@ services:
sftp-server1:
container_name: sftp-server1
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1022:22"
volumes:
@@ -30,7 +30,6 @@ services:
- ./tls/dfc.crt:/etc/ssl/private/dfc.crt:ro
- ./configuration/vsftpd_ssl.conf:/etc/vsftpd_ssl.conf:ro
- - ./files/onap/ftps/:/srv/
+ - ./files/onap/ftpes/:/srv/
restart: on-failure
command: vsftpd /etc/vsftpd_ssl.conf
-
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/prepare.sh b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/prepare.sh
index 086d43a49..086d43a49 100755
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/prepare.sh
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/prepare.sh
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/test_cases.yml b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/test_cases.yml
index 61275dfe2..e07e3a0c6 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/test_cases.yml
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/test_cases.yml
@@ -1,16 +1,16 @@
-# EXAMPLE: TC1 generates (i) 10 files of 1 MB in ftps directory,
+# EXAMPLE: TC1 generates (i) 10 files of 1 MB in ftpes directory,
# (ii) 30 files of 5 MB in sftp directory, and (iii) 10 files of 10 MB in sftp directory
TC1:
size_files: 1 5 10
number_files: 10 30 10
- directory_files: ftps sftp sftp
+ directory_files: ftpes sftp sftp
TC2:
size_files: 0.5 1 5
number_files: 2 3 1
- directory_files: ftps ftps sftp
+ directory_files: ftpes ftpes sftp
TC_10000:
size_files: 1 1 5 5 50 50
number_files: 10000 10000 10000 10000 1 1
- directory_files: ftps sftp ftps sftp ftps sftp \ No newline at end of file
+ directory_files: ftpes sftp ftpes sftp ftpes sftp
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/README.md b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/README.md
index 5edfeddec..5edfeddec 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/README.md
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/README.md
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/dfc.crt b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/dfc.crt
index f747f20bb..f747f20bb 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/dfc.crt
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/dfc.crt
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.crt b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.crt
index f412d013c..f412d013c 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.crt
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.crt
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.key b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.key
index f90c781d3..f90c781d3 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.key
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.key
diff --git a/test/mocks/datafilecollector-testharness/http-https-server/.gitignore b/test/mocks/datafilecollector-testharness/http-https-server/.gitignore
new file mode 100644
index 000000000..8605df3ea
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/http-https-server/.gitignore
@@ -0,0 +1,2 @@
+files
+logs
diff --git a/test/mocks/datafilecollector-testharness/http-https-server/README.md b/test/mocks/datafilecollector-testharness/http-https-server/README.md
new file mode 100644
index 000000000..3f2e11492
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/http-https-server/README.md
@@ -0,0 +1,34 @@
+# ejbca certs
+
+Certificates generated using the CMPv2 server are needed to properly run the https server and for dfc to be able to work with
+the https protocol. For that reason, pre-generated certs were prepared and stored in `certservice/generated-certs` directory.
+If HTTP server has to work with standalone ONAP installation, certs has to be obtained directly from CMPv2 server from ONAP
+unit.
+
+# Docker preparations
+
+Source: <https://docs.docker.com/install/linux/linux-postinstall/>
+
+`sudo usermod -aG docker $USER`
+
+then logout-login to activate it.
+
+# Prepare files for the simulator
+
+Run `prepare.sh` with an argument found in `test_cases.yml` (or add a new tc in that file) to create files (1MB,
+5MB and 50MB files) and a large number of symbolic links to these files to simulate PM files. The files names
+matches the files in the events produced by the MR simulator. The dirs with the files will be mounted
+by the http containers, defined in the docker-compose file, when started
+
+# Starting/stopping the HTTP/HTTPS server(s)
+
+Start: `docker-compose up`
+
+Stop: Ctrl +C, then `docker-compose down` or `docker-compose down --remove-orphans`
+
+If you experience issues (or port collision), check the currently running other containers
+by using 'docker ps' and stop them if necessary.
+
+# Cleaning docker structure
+
+Deep cleaning: `docker system prune`
diff --git a/test/mocks/datafilecollector-testharness/http-https-server/docker-compose.yml b/test/mocks/datafilecollector-testharness/http-https-server/docker-compose.yml
new file mode 100644
index 000000000..e64908d96
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/http-https-server/docker-compose.yml
@@ -0,0 +1,24 @@
+version: '3'
+
+services:
+
+ http-https-server:
+ container_name: http-https-server-httpd
+ image: nexus3.onap.org:10001/onap/org.onap.integration.simulators.httpserver:1.0.5
+ environment:
+ APACHE_LOG_DIR: /usr/local/apache2/logs
+ ports:
+ - "80:80"
+ - "443:443"
+ - "8080:8080"
+ - "32000:32000"
+ - "32100:32100"
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs:ro
+ - ./files/onap/http:/usr/local/apache2/htdocs
+ command: bash -c "
+ echo 'Http Server start';
+ touch /usr/local/apache2/htdocs/index.html;
+ /usr/sbin/apache2ctl -D FOREGROUND;
+ "
+ restart: on-failure
diff --git a/test/mocks/datafilecollector-testharness/http-https-server/prepare.sh b/test/mocks/datafilecollector-testharness/http-https-server/prepare.sh
new file mode 100755
index 000000000..937033c90
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/http-https-server/prepare.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+# EXAMPLE: Run test case TC2 using the command "./prepare.sh TC2"
+MAIN_DIRECTORY=./files/onap
+TEST_FILE=./test_cases.yml
+TEST=$1
+echo "Generating files for test case:" "$TEST"
+
+sf=$(sed -n '/'$TEST'/,$p' $TEST_FILE | grep -m 1 'size_files')
+sf=${sf//*size_files: /}
+sf_array=($sf)
+echo "size_files=""$sf"
+
+nf=$(sed -n '/'$TEST'/,$p' $TEST_FILE | grep -m 1 'number_files')
+nf=${nf//*number_files: /}
+nf_array=($nf)
+echo "number_files=""$nf"
+
+df=$(sed -n '/'$TEST'/,$p' $TEST_FILE | grep -m 1 'directory_files')
+df=${df//*directory_files: /}
+df_array=($df)
+echo "directory_files=""$df"
+
+rm -rf $MAIN_DIRECTORY/*
+if [ "${#sf_array[@]}" = "${#nf_array[@]}" ] && [ "${#nf_array[@]}" = "${#df_array[@]}" ];
+then
+ N_ELEMENTS=${#df_array[@]}
+ for ((n=0;n<$N_ELEMENTS;n++))
+ do
+ # Create directory
+ DIRECTORY=$MAIN_DIRECTORY/${df_array[$n]}
+ mkdir -p "$DIRECTORY"
+
+ # Create original file
+ FILE_SIZE=${sf_array[$n]}
+ FILE_NAME=$FILE_SIZE"MB.tar.gz"
+ dd if=/dev/urandom of=$DIRECTORY/$FILE_NAME bs=1k count=$(echo $FILE_SIZE*1000/1 | bc)
+
+ # Create symlinks
+ N_SYMLINKS=${nf_array[$n]}-1
+ for ((l=0;l<=$N_SYMLINKS;l++))
+ do
+ SYMLINK_NAME=$FILE_SIZE"MB_"$l".tar.gz"
+ ln -s ./$FILE_NAME $DIRECTORY/$SYMLINK_NAME
+ done
+ done
+else
+echo "ERROR: The number of parameters in size_files, number_files, and directory_files must be equal!"
+fi
+
diff --git a/test/mocks/datafilecollector-testharness/http-https-server/test_cases.yml b/test/mocks/datafilecollector-testharness/http-https-server/test_cases.yml
new file mode 100644
index 000000000..d27bb9384
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/http-https-server/test_cases.yml
@@ -0,0 +1,16 @@
+# EXAMPLE: TC1 generates (i) 10 files of 1 MB in http directory,
+# (ii) 30 files of 5 MB in sftp directory, and (iii) 10 files of 10 MB in http directory
+TC1:
+ size_files: 1 5 10
+ number_files: 10 30 10
+ directory_files: http http http
+
+TC2:
+ size_files: 0.5 1 5
+ number_files: 2 3 1
+ directory_files: http http http
+
+TC_10000:
+ size_files: 1 1 5 5 50 50
+ number_files: 10000 10000 10000 10000 1 1
+ directory_files: http http http http http http
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/Dockerfile b/test/mocks/datafilecollector-testharness/mr-sim/Dockerfile
index c54713e7f..e0c580ddf 100755
--- a/test/mocks/datafilecollector-testharness/mr-sim/Dockerfile
+++ b/test/mocks/datafilecollector-testharness/mr-sim/Dockerfile
@@ -1,9 +1,9 @@
-FROM python:3.6-alpine
+FROM nexus3.onap.org:10001/onap/integration-python:8.0.0
COPY . /app
WORKDIR /app
-RUN pip install -r requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt
EXPOSE 2222 2223
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/README.md b/test/mocks/datafilecollector-testharness/mr-sim/README.md
index d3ca91c87..11f53df95 100644
--- a/test/mocks/datafilecollector-testharness/mr-sim/README.md
+++ b/test/mocks/datafilecollector-testharness/mr-sim/README.md
@@ -1,45 +1,49 @@
-#MR-simulator
-This readme contains:
+# MR-simulator
-**Introduction**
+This readme contains:
-**Building and running**
+- Introduction
+- Building and running
+- Configuration
-**Configuration**
+## Introduction
-###Introduction###
The MR-sim is a python script delivering batches of events including one or more fileReady for one or more PNFs.
It is possible to configure number of events, PNFs, consumer groups, exising or missing files, file prefixes and change identifier.
-In addition, MR sim can be configured to deliver file url for up to 5 FTP servers (simulating the PNFs).
+In addition, MR sim can be configured to deliver file url for up to 5 FTP and 5 HTTP/HTTPS/HTTPS with no auth servers (simulating the PNFs).
+
+## Building and running
-###Building and running###
It is possible build and run MR-sim manually as a container if needed. In addition MR-sim can be executed as python script, see instuctions further down.
Otherwise it is recommended to use the test scripts in the auto-test dir or run all simulators in one go using scripts in the simulator-group dir.
To build and run manually as a docker container:
-1. Build docker container with ```docker build -t mrsim:latest .```
-2. Run the container ```docker-compose up```
-###Configuration###
+1. Build docker container with `docker build -t mrsim:latest .`
+2. Run the container `docker-compose up`
+
+## Configuration
+
The event pattern, called TC, of the MR-sim is controlled with a arg to python script. See section TC info for available patterns.
All other configuration is done via envrionment variables.
The simulator listens to port 2222.
The following envrionment vaiables are used:
-**FTPS_SIMS** - A comma-separated list of hostname:port for the FTP servers to generate ftps file urls for. If not set MR sim will assume 'localhost:21'. Minimum 1 and maximum 5 host-port pairs can be given.
-
-**SFTP_SIMS** - A comma-separated list of hostname:port for the FTP servers to generate sftp file urls for. If not set MR sim will assume 'localhost:1022'. Minimum 1 and maximum 5 host-port pairs can be given.
-
-**NUM_FTP_SERVERS** - Number of FTP servers to use out of those specified in the envrioment variables above. The number shall be in the range 1-5.
+- **FTPES_SIMS** - A comma-separated list of hostname:port for the FTP servers to generate ftpes file urls for. If not set MR sim will assume 'localhost:21'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **SFTP_SIMS** - A comma-separated list of hostname:port for the FTP servers to generate sftp file urls for. If not set MR sim will assume 'localhost:1022'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **HTTP_SIMS** - A comma-separated list of hostname:port for the HTTP servers to generate http file urls for. If not set MR sim will assume 'localhost:81'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **HTTP_JWT_SIMS** - A comma-separated list of hostname:port for the HTTP servers (using JWT token for authentication) to generate http file urls for. If not set MR sim will assume 'localhost:32000'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **HTTPS_SIMS** - A comma-separated list of hostname:port for the HTTPS servers (configured for client certificate authentication and basic authentication; certificates were obtained using CMPv2 server) to generate http file urls for. If not set MR sim will assume 'localhost:444'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **HTTPS_JWT_SIMS** - A comma-separated list of hostname:port for the HTTPS servers (using JWT token for authentication) to generate http file urls for. If not set MR sim will assume 'localhost:32100'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **HTTPS_SIMS_NO_AUTH** - A comma-separated list of hostname:port for the HTTPS servers with no authorization to generate http file urls for. If not set MR sim will assume 'localhost:8081'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **NUM_FTP_SERVERS** - Number of FTP servers to use out of those specified in the environment variables above. The number shall be in the range 1-5.
+- **NUM_HTTP_SERVERS** - Number of HTTP/HTTPS/HTTPS with no authorization servers to use out of those specified in the environment variables above. The number shall be in the range 1-5.
+- **MR_GROUPS** - A comma-separated list of consumer-group:changeId\[:changeId]\*. Defines which change identifier that should be used for each consumer group. If not set the MR-sim will assume 'OpenDcae-c12:PM_MEAS_FILES'.
+- **MR_FILE_PREFIX_MAPPING** - A comma-separated list of changeId:filePrefix. Defines which file prefix to use for each change identifier, needed to distinguish files for each change identifiers. If not set the MR-sim will assume 'PM_MEAS_FILES:A'.
-**MR_GROUPS** - A comma-separated list of consummer-group:changeId[:changeId]*. Defines which change identifier that should be used for each consumer gropu. If not set the MR-sim will assume 'OpenDcae-c12:PM_MEAS_FILES'.
+## Statistics read-out and commands
-**MR_FILE_PREFIX_MAPPING** - A comma-separated list of changeId:filePrefix. Defines which file prefix to use for each change identifier, needed to distinguish files for each change identifiers. If not set the MR-sim will assume 'PM_MEAS_FILES:A
-
-
-
-###Statistics read-out and commands###
The simulator can be queried for statistics and started/stopped (use curl from cmd line or open in browser, curl used below):
`curl localhost:2222` - Just returns 'Hello World'.
@@ -60,70 +64,63 @@ The simulator can be queried for statistics and started/stopped (use curl from
`curl localhost:2222/fileprefixes` - returns the setting of env var MR_FILE_PREFIX_MAPPING.
-
`curl localhost:2222/ctr_requests` - returns an integer of the number of get requests, for all groups, to the event poll path
`curl localhost:2222/groups/ctr_requests` - returns a list of integers of the number of get requests, for each consumer group, to the event poll path
`curl localhost:2222/ctr_requests/<consumer-group>` - returns an integer of the number of get requests, for the specified consumer group, to the event poll path
-
`curl localhost:2222/ctr_responses` - returns an integer of the number of get responses, for all groups, to the event poll path
`curl localhost:2222/groups/ctr_responses` - returns a list of integers of the number of get responses, for each consumer group, to the event poll path
`curl localhost:2222/ctr_responses/<consumer-group>` - returns an integer of the number of get responses, for the specified consumer group, to the event poll path
-
`curl localhost:2222/ctr_files` - returns an integer of the number generated files for all groups
`curl localhost:2222/groups/ctr_files` - returns a list of integers of the number generated files for each group
`curl localhost:2222/ctr_files/<consumer-group>` - returns an integer or the number generated files for the specified group
-
`curl localhost:2222/ctr_unique_files` - returns an integer of the number generated unique files for all groups
`curl localhost:2222/groups/ctr_unique_files` - returns a list of integers of the number generated unique files for each group
`curl localhost:2222/ctr_unique_files/<consumer-group>` - returns an integer or the number generated unique files for the specified group
-
-
`curl localhost:2222/ctr_events` - returns the total number of events for all groups
`curl localhost:2222/groups/ctr_events` - returns a list the integer of the total number of events for each group
`curl localhost:2222/ctr_events/<consumer-group>` - returns the total number of events for a specified group
-
`curl localhost:2222/exe_time_first_poll` - returns the execution time in mm:ss from the first poll
`curl localhost:2222/groups/exe_time_first_poll` - returns a list of the execution time in mm:ss from the first poll for each group
`curl localhost:2222/exe_time_first_poll/<consumer-group>` - returns the execution time in mm:ss from the first poll for the specified group
-
`curl localhost:2222/ctr_unique_PNFs` - returns the number of unique PNFS in all events.
`curl localhost:2222/groups/ctr_unique_PNFs` - returns a list of the number of unique PNFS in all events for each group.
`curl localhost:2222/ctr_unique_PNFs/<consumer-group>` - returns the number of unique PNFS in all events for the specified group.
+## Alternative to running python (as described below) on your machine, use the docker files
-#Alternative to running python (as described below) on your machine, use the docker files.
-1. Build docker container with ```docker build -t mrsim:latest .```
-2. Run the container ```docker-compose up```
-The behavior can be changed by argument to the python script in the docker-compose.yml
+1. Build docker container with `docker build -t mrsim:latest .`
+2. Run the container `docker-compose up`
+ The behavior can be changed by argument to the python script in the docker-compose.yml
+## Common TC info
-##Common TC info
File names for 1MB, 5MB and 50MB files
-Files in the format: <size-in-mb>MB_<sequence-number>.tar.gz Ex. for 5MB file with sequence number 12: 5MB_12.tar.gz
+Files in the format: <size-in-mb>MB\_<sequence-number>.tar.gz Ex. for 5MB file with sequence number 12: 5MB\_12.tar.gz
The sequence numbers are stepped so that all files have unique names
-Missing files (files that are not expected to be found in the ftp server. Format: MissingFile_<sequence-number>.tar.gz
+Missing files (files that are not expected to be found in the ftp server). Format: MissingFile\_<sequence-number>.tar.gz
+
+When the number of events are exhausted, empty replies are returned '\[]', for the limited test cases. For endless tc no empty replies will be given.
-When the number of events are exhausted, empty replies are returned '[]', for the limited test cases. For endless tc no empty replies will be given.
Test cases are limited unless noted as 'endless'.
TC100 - One ME, SFTP, 1 1MB file, 1 event
@@ -140,7 +137,6 @@ TC112 - One ME, SFTP, 5MB files, 100 files per event, 100 events, 1 event per po
TC113 - One ME, SFTP, 1MB files, 100 files per event, 100 events. All events in one poll.
-
TC120 - One ME, SFTP, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% of replies each: no response, empty message, slow response, 404-error, malformed json
TC121 - One ME, SFTP, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% missing files
@@ -185,46 +181,48 @@ TC1302 - 700 ME, SFTP, 50MB files, 100 files per event, endless number of events
TC1500 - 700 ME, SFTP, 1MB files, 100 files per event, 35 events per poll, simulating 25h backlog of decreasing number of outdated files and then 20 event polls every 15min for 1h
-Changing the first digit in tc number will change the test case to run FTPS instead. Eg. TC201 is FTPS version of TC101.
-
-TC2XX is same as TC1XX but with FTPS
+Changing the first digit in tc number will change the test case to run FTPES or HTTP instead. Eg. TC201 is FTPES version of TC101.
-TC6XX is same as TC5XX but with FTPS
+TC2XX is same as TC1XX but with FTPES, TC3XX is same as TC1XX but with HTTP, TC4XX is same as TC1XX but with HTTPS
+(with basic authorization). Note, in the case of HTTPS, some tests may not have direct correspondence in FTP tests
+(TC303, TC403, TC404, TC405 described in the end of this section).
-TC8XX is same as TC7XX but with FTPS
+TC6XX is same as TC5XX but with FTPES
-TC2XXX is same as TC1XXX but with FTPS
+TC8XX is same as TC7XX but with FTPES
+TC2XXX is same as TC1XXX but with FTPES
-## Developer workflow
-
-1. ```sudo apt install python3-venv```
-2. ```source .env/bin/activate/```
-3. ```pip3 install "anypackage"``` #also include in source code
-4. ```pip3 freeze | grep -v "pkg-resources" > requirements.txt``` #to create a req file
-5. ```FLASK_APP=mr-sim.py flask run```
+TC303 - One ME, HTTP with JWT authentication, 1 1MB file, 1 event
- or
+TC403 - One ME, HTTPS with client certificate authentication, 1 1MB file, 1 event
- ```python3 mr-sim.py ```
+TC404 - One ME, HTTPS with no client authentication, 1 1MB file, 1 event
-6. Check/lint/format the code before commit/amed by ```autopep8 --in-place --aggressive --aggressive mr-sim.py```
+TC405 - One ME, HTTPS with JWT authentication, 1 1MB file, 1 event
+## Developer workflow
-## User workflow on *NIX
+1. `sudo apt install python3-venv`
+2. `source .env/bin/activate`
+3. `pip3 install "anypackage"` #also include in source code
+4. `pip3 freeze | grep -v "pkg-resources" > requirements.txt` #to create a req file
+5. `FLASK_APP=mr-sim.py flask run`
+ or
+ ` python3 mr-sim.py `
+6. Check/lint/format the code before commit/amend by `autopep8 --in-place --aggressive --aggressive mr-sim.py`
+## User workflow on \*NIX
When cloning/fetching from the repository first time:
-1. `git clone`
-2. `cd "..." ` #navigate to this folder
-3. `source setup.sh ` #setting up virtualenv and install requirements
- you'll get a sourced virtualenv shell here, check prompt
+1. `git clone`
+2. ` cd "..." ` #navigate to this folder
+3. ` source setup.sh ` #setting up virtualenv and install requirements
+ you'll get a sourced virtualenv shell here, check prompt
4. `(env) $ python3 mr-sim.py --help`
-
- alternatively
-
- `(env) $ python3 mr-sim.py --tc1`
+ alternatively
+ `(env) $ python3 mr-sim.py --tc1`
Every time you run the script, you'll need to step into the virtualenv by following step 3 first.
@@ -241,4 +239,4 @@ When cloning/fetching from the repository first time:
7. 'pip3 install -r requirements.txt' #this will install in the local environment then
8. 'python3 dfc-sim.py'
-Every time you run the script, you'll need to step into the virtualenv by step 2+6. \ No newline at end of file
+Every time you run the script, you'll need to step into the virtualenv by step 2+6.
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/docker-compose.yml b/test/mocks/datafilecollector-testharness/mr-sim/docker-compose.yml
index a02e6a423..cc7cafdbe 100644
--- a/test/mocks/datafilecollector-testharness/mr-sim/docker-compose.yml
+++ b/test/mocks/datafilecollector-testharness/mr-sim/docker-compose.yml
@@ -4,8 +4,8 @@ services:
mrsim:
image: mrsim:latest
ports:
- - "2222:2222"
- - "2223:2223"
+ - "2222:2222"
+ - "2223:2223"
container_name: mrsim
command: python mr-sim.py --tc100
-# Change -tc100 to other tc number for desired behavior. \ No newline at end of file
+# Change -tc100 to other tc number for desired behavior.
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py b/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py
index 6345ab69f..cdf9bad4a 100644
--- a/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py
+++ b/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py
@@ -1,229 +1,270 @@
+# COPYRIGHT NOTICE STARTS HERE
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# COPYRIGHT NOTICE ENDS HERE
import argparse
-import json
import os
import sys
import time
from time import sleep
-from flask import Flask, render_template, request
-from werkzeug import secure_filename
+from flask import Flask
app = Flask(__name__)
-#Server info
+# Server info
HOST_IP = "0.0.0.0"
HOST_PORT = 2222
HOST_PORT_TLS = 2223
-sftp_hosts=[]
-sftp_ports=[]
-ftps_hosts=[]
-ftps_ports=[]
-num_ftp_servers=1
+sftp_hosts = []
+sftp_ports = []
+ftpes_hosts = []
+ftpes_ports = []
+http_hosts = []
+http_ports = []
+http_jwt_hosts = []
+http_jwt_ports = []
+https_hosts = []
+https_ports = []
+https_jwt_hosts = []
+https_jwt_ports = []
+https_hosts_no_auth = []
+https_ports_no_auth = []
+num_ftp_servers = 1
+num_http_servers = 1
def sumList(ctrArray):
- tmp=0
+ tmp = 0
for i in range(len(ctrArray)):
- tmp=tmp+ctrArray[i];
+ tmp = tmp + ctrArray[i]
+
+ return str(tmp)
- return str(tmp);
def sumListLength(ctrArray):
- tmp=0
+ tmp = 0
for i in range(len(ctrArray)):
- tmp=tmp+len(ctrArray[i]);
+ tmp = tmp + len(ctrArray[i])
+
+ return str(tmp)
- return str(tmp);
-#Test function to check server running
+# Test function to check server running
@app.route('/',
- methods=['GET'])
+ methods=['GET'])
def index():
return 'Hello world'
-#Returns the list of configured groups
+
+# Returns the list of configured groups
@app.route('/groups',
- methods=['GET'])
+ methods=['GET'])
def group_ids():
global configuredGroups
return configuredGroups
-#Returns the list of configured changeids
+
+# Returns the list of configured changeids
@app.route('/changeids',
- methods=['GET'])
+ methods=['GET'])
def change_ids():
global configuredChangeIds
return configuredChangeIds
-#Returns the list of configured fileprefixes
+
+# Returns the list of configured fileprefixes
@app.route('/fileprefixes',
- methods=['GET'])
+ methods=['GET'])
def fileprefixes():
global configuredPrefixes
return configuredPrefixes
-#Returns number of polls
+# Returns number of polls
@app.route('/ctr_requests',
- methods=['GET'])
+ methods=['GET'])
def counter_requests():
global ctr_requests
return sumList(ctr_requests)
-#Returns number of polls for all groups
+
+# Returns number of polls for all groups
@app.route('/groups/ctr_requests',
- methods=['GET'])
+ methods=['GET'])
def group_counter_requests():
global ctr_requests
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(ctr_requests[i])
+ tmp = tmp + ','
+ tmp = tmp + str(ctr_requests[i])
return tmp
-#Returns the total number of polls for a group
+
+# Returns the total number of polls for a group
@app.route('/ctr_requests/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_requests_group(groupId):
global ctr_requests
global groupNameIndexes
return str(ctr_requests[groupNameIndexes[groupId]])
-#Returns number of poll replies
+
+# Returns number of poll replies
@app.route('/ctr_responses',
- methods=['GET'])
+ methods=['GET'])
def counter_responses():
global ctr_responses
return sumList(ctr_responses)
-#Returns number of poll replies for all groups
+
+# Returns number of poll replies for all groups
@app.route('/groups/ctr_responses',
- methods=['GET'])
+ methods=['GET'])
def group_counter_responses():
global ctr_responses
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(ctr_responses[i])
+ tmp = tmp + ','
+ tmp = tmp + str(ctr_responses[i])
return tmp
-#Returns the total number of poll replies for a group
+
+# Returns the total number of poll replies for a group
@app.route('/ctr_responses/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_responses_group(groupId):
global ctr_responses
global groupNameIndexes
return str(ctr_responses[groupNameIndexes[groupId]])
-#Returns the total number of files
+
+# Returns the total number of files
@app.route('/ctr_files',
- methods=['GET'])
+ methods=['GET'])
def counter_files():
global ctr_files
return sumList(ctr_files)
-#Returns the total number of file for all groups
+
+# Returns the total number of file for all groups
@app.route('/groups/ctr_files',
- methods=['GET'])
+ methods=['GET'])
def group_counter_files():
global ctr_files
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(ctr_files[i])
+ tmp = tmp + ','
+ tmp = tmp + str(ctr_files[i])
return tmp
-#Returns the total number of files for a group
+
+# Returns the total number of files for a group
@app.route('/ctr_files/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_files_group(groupId):
global ctr_files
global groupNameIndexes
return str(ctr_files[groupNameIndexes[groupId]])
-#Returns number of unique files
+# Returns number of unique files
@app.route('/ctr_unique_files',
- methods=['GET'])
+ methods=['GET'])
def counter_uniquefiles():
global fileMap
return sumListLength(fileMap)
-#Returns number of unique files for all groups
+
+# Returns number of unique files for all groups
@app.route('/groups/ctr_unique_files',
- methods=['GET'])
+ methods=['GET'])
def group_counter_uniquefiles():
global fileMap
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(len(fileMap[i]))
+ tmp = tmp + ','
+ tmp = tmp + str(len(fileMap[i]))
return tmp
-#Returns the total number of unique files for a group
+
+# Returns the total number of unique files for a group
@app.route('/ctr_unique_files/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_uniquefiles_group(groupId):
global fileMap
global groupNameIndexes
return str(len(fileMap[groupNameIndexes[groupId]]))
-#Returns tc info
+
+# Returns tc info
@app.route('/tc_info',
- methods=['GET'])
+ methods=['GET'])
def testcase_info():
global tc_num
return tc_num
-#Returns number of events
+
+# Returns number of events
@app.route('/ctr_events',
- methods=['GET'])
+ methods=['GET'])
def counter_events():
global ctr_events
return sumList(ctr_events)
-#Returns number of events for all groups
+
+# Returns number of events for all groups
@app.route('/groups/ctr_events',
- methods=['GET'])
+ methods=['GET'])
def group_counter_events():
global ctr_events
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(ctr_events[i])
+ tmp = tmp + ','
+ tmp = tmp + str(ctr_events[i])
return tmp
-#Returns the total number of events for a group
+
+# Returns the total number of events for a group
@app.route('/ctr_events/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_events_group(groupId):
global ctr_events
global groupNameIndexes
return str(ctr_events[groupNameIndexes[groupId]])
-#Returns execution time in mm:ss
+
+# Returns execution time in mm:ss
@app.route('/execution_time',
- methods=['GET'])
+ methods=['GET'])
def exe_time():
global startTime
stopTime = time.time()
- minutes, seconds = divmod(stopTime-startTime, 60)
- return "{:0>2}:{:0>2}".format(int(minutes),int(seconds))
+ minutes, seconds = divmod(stopTime - startTime, 60)
+ return "{:0>2}:{:0>2}".format(int(minutes), int(seconds))
+
-#Returns the timestamp for first poll
+# Returns the timestamp for first poll
@app.route('/exe_time_first_poll',
- methods=['GET'])
+ methods=['GET'])
def exe_time_first_poll():
global firstPollTime
@@ -234,92 +275,100 @@ def exe_time_first_poll():
if (tmp == 0):
return "--:--"
- minutes, seconds = divmod(time.time()-tmp, 60)
- return "{:0>2}:{:0>2}".format(int(minutes),int(seconds))
+ minutes, seconds = divmod(time.time() - tmp, 60)
+ return "{:0>2}:{:0>2}".format(int(minutes), int(seconds))
+
-#Returns the timestamp for first poll for all groups
+# Returns the timestamp for first poll for all groups
@app.route('/groups/exe_time_first_poll',
- methods=['GET'])
+ methods=['GET'])
def group_exe_time_first_poll():
global firstPollTime
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
+ tmp = tmp + ','
if (firstPollTime[i] == 0):
- tmp=tmp+ "--:--"
+ tmp = tmp + "--:--"
else:
- minutes, seconds = divmod(time.time()-firstPollTime[i], 60)
- tmp=tmp+"{:0>2}:{:0>2}".format(int(minutes),int(seconds))
+ minutes, seconds = divmod(time.time() - firstPollTime[i], 60)
+ tmp = tmp + "{:0>2}:{:0>2}".format(int(minutes), int(seconds))
return tmp
-#Returns the timestamp for first poll for a group
+
+# Returns the timestamp for first poll for a group
@app.route('/exe_time_first_poll/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def exe_time_first_poll_group(groupId):
global ctr_requests
global groupNameIndexes
if (firstPollTime[groupNameIndexes[groupId]] == 0):
return "--:--"
- minutes, seconds = divmod(time.time()-firstPollTime[groupNameIndexes[groupId]], 60)
- return "{:0>2}:{:0>2}".format(int(minutes),int(seconds))
+ minutes, seconds = divmod(time.time() - firstPollTime[groupNameIndexes[groupId]], 60)
+ return "{:0>2}:{:0>2}".format(int(minutes), int(seconds))
-#Starts event delivery
+
+# Starts event delivery
@app.route('/start',
- methods=['GET'])
+ methods=['GET'])
def start():
global runningState
- runningState="Started"
+ runningState = "Started"
return runningState
-#Stops event delivery
+
+# Stops event delivery
@app.route('/stop',
- methods=['GET'])
+ methods=['GET'])
def stop():
global runningState
- runningState="Stopped"
+ runningState = "Stopped"
return runningState
-#Returns the running state
+
+# Returns the running state
@app.route('/status',
- methods=['GET'])
+ methods=['GET'])
def status():
global runningState
return runningState
-#Returns number of unique PNFs
+
+# Returns number of unique PNFs
@app.route('/ctr_unique_PNFs',
- methods=['GET'])
+ methods=['GET'])
def counter_uniquePNFs():
global pnfMap
return sumListLength(pnfMap)
-#Returns number of unique PNFs for all groups
+
+# Returns number of unique PNFs for all groups
@app.route('/groups/ctr_unique_PNFs',
- methods=['GET'])
+ methods=['GET'])
def group_counter_uniquePNFs():
global pnfMap
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(len(pnfMap[i]))
+ tmp = tmp + ','
+ tmp = tmp + str(len(pnfMap[i]))
return tmp
-#Returns the unique PNFs for a group
+
+# Returns the unique PNFs for a group
@app.route('/ctr_unique_PNFs/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_uniquePNFs_group(groupId):
global pnfMap
global groupNameIndexes
return str(len(pnfMap[groupNameIndexes[groupId]]))
-#Messages polling function
+# Messages polling function
@app.route(
"/events/unauthenticated.VES_NOTIFICATION_OUTPUT/<consumerGroup>/<consumerId>",
methods=['GET'])
@@ -332,12 +381,14 @@ def MR_reply(consumerGroup, consumerId):
global groupNameIndexes
global changeIds
global filePrefixes
+ print("Received request at /events/unauthenticated.VES_NOTIFICATION_OUTPUT/ for consumerGroup: " + consumerGroup +
+ " with consumerId: " + consumerId)
groupIndex = groupNameIndexes[consumerGroup]
print("Setting groupIndex: " + str(groupIndex))
reqCtr = ctr_requests[groupIndex]
- changeId = changeIds[groupIndex][reqCtr%len(changeIds[groupIndex])]
+ changeId = changeIds[groupIndex][reqCtr % len(changeIds[groupIndex])]
print("Setting changeid: " + changeId)
filePrefix = filePrefixes[changeId]
print("Setting file name prefix: " + filePrefix)
@@ -352,165 +403,193 @@ def MR_reply(consumerGroup, consumerId):
ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
return buildOkResponse("[]")
-
-
if args.tc100:
- return tc100(groupIndex, changeId, filePrefix, "sftp", "1MB")
+ return tc100(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc101:
- return tc100(groupIndex, changeId, filePrefix, "sftp", "5MB")
+ return tc100(groupIndex, changeId, filePrefix, "sftp", "5MB")
elif args.tc102:
- return tc100(groupIndex, changeId, filePrefix, "sftp", "50MB")
+ return tc100(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc110:
- return tc110(groupIndex, changeId, filePrefix, "sftp")
+ return tc110(groupIndex, changeId, filePrefix, "sftp")
elif args.tc111:
- return tc111(groupIndex, changeId, filePrefix, "sftp")
+ return tc111(groupIndex, changeId, filePrefix, "sftp")
elif args.tc112:
- return tc112(groupIndex, changeId, filePrefix, "sftp")
+ return tc112(groupIndex, changeId, filePrefix, "sftp")
elif args.tc113:
- return tc113(groupIndex, changeId, filePrefix, "sftp")
+ return tc113(groupIndex, changeId, filePrefix, "sftp")
elif args.tc120:
- return tc120(groupIndex, changeId, filePrefix, "sftp")
+ return tc120(groupIndex, changeId, filePrefix, "sftp")
elif args.tc121:
- return tc121(groupIndex, changeId, filePrefix, "sftp")
+ return tc121(groupIndex, changeId, filePrefix, "sftp")
elif args.tc122:
- return tc122(groupIndex, changeId, filePrefix, "sftp")
+ return tc122(groupIndex, changeId, filePrefix, "sftp")
elif args.tc1000:
- return tc1000(groupIndex, changeId, filePrefix, "sftp")
+ return tc1000(groupIndex, changeId, filePrefix, "sftp")
elif args.tc1001:
- return tc1001(groupIndex, changeId, filePrefix, "sftp")
+ return tc1001(groupIndex, changeId, filePrefix, "sftp")
elif args.tc1100:
- return tc1100(groupIndex, changeId, filePrefix, "sftp","1MB")
+ return tc1100(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc1101:
- return tc1100(groupIndex, changeId, filePrefix, "sftp","50MB")
+ return tc1100(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc1102:
- return tc1100(groupIndex, changeId, filePrefix, "sftp","50MB")
+ return tc1100(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc1200:
- return tc1200(groupIndex, changeId, filePrefix, "sftp","1MB")
+ return tc1200(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc1201:
- return tc1200(groupIndex, changeId, filePrefix, "sftp","5MB")
+ return tc1200(groupIndex, changeId, filePrefix, "sftp", "5MB")
elif args.tc1202:
- return tc1200(groupIndex, changeId, filePrefix, "sftp","50MB")
+ return tc1200(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc1300:
- return tc1300(groupIndex, changeId, filePrefix, "sftp","1MB")
+ return tc1300(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc1301:
- return tc1300(groupIndex, changeId, filePrefix, "sftp","5MB")
+ return tc1300(groupIndex, changeId, filePrefix, "sftp", "5MB")
elif args.tc1302:
- return tc1300(groupIndex, changeId, filePrefix, "sftp","50MB")
+ return tc1300(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc1500:
- return tc1500(groupIndex, changeId, filePrefix, "sftp","1MB")
+ return tc1500(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc500:
- return tc500(groupIndex, changeId, filePrefix, "sftp","1MB")
+ return tc500(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc501:
- return tc500(groupIndex, changeId, filePrefix, "sftp","5MB")
+ return tc500(groupIndex, changeId, filePrefix, "sftp", "5MB")
elif args.tc502:
- return tc500(groupIndex, changeId, filePrefix, "sftp","50MB")
+ return tc500(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc510:
- return tc510(groupIndex, changeId, filePrefix, "sftp", "1MB")
+ return tc510(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc511:
- return tc511(groupIndex, changeId, filePrefix, "sftp", "1KB")
+ return tc511(groupIndex, changeId, filePrefix, "sftp", "1KB")
elif args.tc550:
- return tc510(groupIndex, changeId, filePrefix, "sftp", "50MB")
+ return tc510(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc710:
- return tc710(groupIndex, changeId, filePrefix, "sftp")
+ return tc710(groupIndex, changeId, filePrefix, "sftp")
elif args.tc200:
- return tc100(groupIndex, changeId, filePrefix, "ftps", "1MB")
+ return tc100(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc201:
- return tc100(groupIndex, changeId, filePrefix, "ftps", "5MB")
+ return tc100(groupIndex, changeId, filePrefix, "ftpes", "5MB")
elif args.tc202:
- return tc100(groupIndex, changeId, filePrefix, "ftps", "50MB")
+ return tc100(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc210:
- return tc110(groupIndex, changeId, filePrefix, "ftps")
+ return tc110(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc211:
- return tc111(groupIndex, changeId, filePrefix, "ftps")
+ return tc111(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc212:
- return tc112(groupIndex, changeId, filePrefix, "ftps")
+ return tc112(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc213:
- return tc113(groupIndex, changeId, filePrefix, "ftps")
+ return tc113(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc220:
- return tc120(groupIndex, changeId, filePrefix, "ftps")
+ return tc120(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc221:
- return tc121(groupIndex, changeId, filePrefix, "ftps")
+ return tc121(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc222:
- return tc122(groupIndex, changeId, filePrefix, "ftps")
+ return tc122(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc2000:
- return tc1000(groupIndex, changeId, filePrefix, "ftps")
+ return tc1000(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc2001:
- return tc1001(groupIndex, changeId, filePrefix, "ftps")
+ return tc1001(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc2100:
- return tc1100(groupIndex, changeId, filePrefix, "ftps","1MB")
+ return tc1100(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc2101:
- return tc1100(groupIndex, changeId, filePrefix, "ftps","50MB")
+ return tc1100(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc2102:
- return tc1100(groupIndex, changeId, filePrefix, "ftps","50MB")
+ return tc1100(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc2200:
- return tc1200(groupIndex, changeId, filePrefix, "ftps","1MB")
+ return tc1200(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc2201:
- return tc1200(groupIndex, changeId, filePrefix, "ftps","5MB")
+ return tc1200(groupIndex, changeId, filePrefix, "ftpes", "5MB")
elif args.tc2202:
- return tc1200(groupIndex, changeId, filePrefix, "ftps","50MB")
+ return tc1200(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc2300:
- return tc1300(groupIndex, changeId, filePrefix, "ftps","1MB")
+ return tc1300(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc2301:
- return tc1300(groupIndex, changeId, filePrefix, "ftps","5MB")
+ return tc1300(groupIndex, changeId, filePrefix, "ftpes", "5MB")
elif args.tc2302:
- return tc1300(groupIndex, changeId, filePrefix, "ftps","50MB")
+ return tc1300(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc2500:
- return tc1500(groupIndex, changeId, filePrefix, "ftps","1MB")
+ return tc1500(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc600:
- return tc500(groupIndex, changeId, filePrefix, "ftps","1MB")
+ return tc500(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc601:
- return tc500(groupIndex, changeId, filePrefix, "ftps","5MB")
+ return tc500(groupIndex, changeId, filePrefix, "ftpes", "5MB")
elif args.tc602:
- return tc500(groupIndex, changeId, filePrefix, "ftps","50MB")
+ return tc500(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc610:
- return tc510(groupIndex, changeId, filePrefix, "ftps", "1MB")
+ return tc510(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc611:
- return tc511(groupIndex, changeId, filePrefix, "ftps", "1KB")
+ return tc511(groupIndex, changeId, filePrefix, "ftpes", "1KB")
elif args.tc650:
- return tc510(groupIndex, changeId, filePrefix, "ftps", "50MB")
+ return tc510(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc810:
- return tc710(groupIndex, changeId, filePrefix, "ftps")
+ return tc710(groupIndex, changeId, filePrefix, "ftpes")
+
+ elif args.tc300:
+ return tc100(groupIndex, changeId, filePrefix, "http", "1MB")
+ elif args.tc301:
+ return tc100(groupIndex, changeId, filePrefix, "http", "5MB")
+ elif args.tc302:
+ return tc100(groupIndex, changeId, filePrefix, "http", "50MB")
+ elif args.tc303:
+ return tc100(groupIndex, changeId, filePrefix, "httpJWT", "1MB")
+
+ elif args.tc400:
+ return tc100(groupIndex, changeId, filePrefix, "https", "1MB")
+ elif args.tc401:
+ return tc100(groupIndex, changeId, filePrefix, "https", "5MB")
+ elif args.tc402:
+ return tc100(groupIndex, changeId, filePrefix, "https", "50MB")
+ elif args.tc403:
+ return tc100(groupIndex, changeId, filePrefix, "httpsCAuth", "1MB")
+ elif args.tc404:
+ return tc100(groupIndex, changeId, filePrefix, "httpsNoAuth", "1MB")
+ elif args.tc405:
+ return tc100(groupIndex, changeId, filePrefix, "httpsJWT", "1MB")
#### Test case functions
-def tc100(groupIndex, changeId, filePrefix, ftpType, fileSize):
- global ctr_responses
- global ctr_events
-
-
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+def tc100(groupIndex, changeId, filePrefix, schemeType, fileSize):
+ global ctr_responses
+ global ctr_events
- if (ctr_responses[groupIndex] > 1):
- return buildOkResponse("[]")
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- seqNr = (ctr_responses[groupIndex]-1)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
- msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName,ftpType,"onap","pano",nodeIndex) + getEventEnd()
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
- return buildOkResponse("["+msg+"]")
+ if (ctr_responses[groupIndex] > 1):
+ return buildOkResponse("[]")
-#def tc101(groupIndex, ftpType):
+ seqNr = (ctr_responses[groupIndex] - 1)
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
+ msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName, schemeType, "onap", "pano",
+ nodeIndex) + getEventEnd()
+ if (schemeType == "http") or (schemeType == "https") \
+ or (schemeType == "httpsCAuth") or (schemeType == "httpsNoAuth"):
+ msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName, schemeType, "demo", "demo123456!",
+ nodeIndex) + getEventEnd()
+ if (schemeType == "httpJWT") or (schemeType == "httpsJWT"):
+ msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName, schemeType, "", "",
+ nodeIndex) + getEventEnd()
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
+ return buildOkResponse("[" + msg + "]")
+
+
+# def tc101(groupIndex, ftpType):
# global ctr_responses
# global ctr_events
#
@@ -527,7 +606,7 @@ def tc100(groupIndex, changeId, filePrefix, ftpType, fileSize):
# ctr_events[groupIndex] = ctr_events[groupIndex]+1
# return buildOkResponse("["+msg+"]")
#
-#def tc102(groupIndex, ftpType):
+# def tc102(groupIndex, ftpType):
# global ctr_responses
# global ctr_events
#
@@ -545,583 +624,580 @@ def tc100(groupIndex, changeId, filePrefix, ftpType, fileSize):
# return buildOkResponse("["+msg+"]")
def tc110(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ seqNr = (ctr_responses[groupIndex] - 1)
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName, ftpType, "onap", "pano",
+ nodeIndex) + getEventEnd()
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
+ return buildOkResponse("[" + msg + "]")
- seqNr = (ctr_responses[groupIndex]-1)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName,ftpType,"onap","pano",nodeIndex) + getEventEnd()
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
- return buildOkResponse("["+msg+"]")
def tc111(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc112(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "5MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "5MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc113(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- if (ctr_responses[groupIndex] > 1):
- return buildOkResponse("[]")
+ if (ctr_responses[groupIndex] > 1):
+ return buildOkResponse("[]")
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = ""
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = ""
- for evts in range(100): # build 100 evts
- if (evts > 0):
- msg = msg + ","
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for i in range(100): # build 100 files
- seqNr = i+evts+100*(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ for evts in range(100): # build 100 evts
+ if (evts > 0):
+ msg = msg + ","
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100): # build 100 files
+ seqNr = i + evts + 100 * (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- return buildOkResponse("["+msg+"]")
+ return buildOkResponse("[" + msg + "]")
def tc120(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] % 10 == 2):
- return # Return nothing
+ if (ctr_responses[groupIndex] % 10 == 2):
+ return # Return nothing
- if (ctr_responses[groupIndex] % 10 == 3):
- return buildOkResponse("") # Return empty message
+ if (ctr_responses[groupIndex] % 10 == 3):
+ return buildOkResponse("") # Return empty message
- if (ctr_responses[groupIndex] % 10 == 4):
- return buildOkResponse(getEventHead(groupIndex, changeId, nodeName)) # Return part of a json event
+ if (ctr_responses[groupIndex] % 10 == 4):
+ return buildOkResponse(getEventHead(groupIndex, changeId, nodeName)) # Return part of a json event
- if (ctr_responses[groupIndex] % 10 == 5):
- return buildEmptyResponse(404) # Return empty message with status code
+ if (ctr_responses[groupIndex] % 10 == 5):
+ return buildEmptyResponse(404) # Return empty message with status code
- if (ctr_responses[groupIndex] % 10 == 6):
- sleep(60)
+ if (ctr_responses[groupIndex] % 10 == 6):
+ sleep(60)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc121(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ fileName = ""
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if (seqNr % 10 == 0): # Every 10th file is "missing"
+ fileName = createMissingFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ else:
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- fileName = ""
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if (seqNr%10 == 0): # Every 10th file is "missing"
- fileName = createMissingFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- else:
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ if i != 0: msg = msg + ","
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
- if i != 0: msg = msg + ","
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc122(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- for i in range(100):
- fileName = createFileName(groupIndex, filePrefix, nodeName, 0, "1MB") # All files identical names
- if i != 0: msg = msg + ","
- msg = msg + getEventName(fileName,ftpType,"onap","pano", nodeIndex)
+ for i in range(100):
+ fileName = createFileName(groupIndex, filePrefix, nodeName, 0, "1MB") # All files identical names
+ if i != 0: msg = msg + ","
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
- fileMap[groupIndex][0] = 0
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ fileMap[groupIndex][0] = 0
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- return buildOkResponse("["+msg+"]")
+ return buildOkResponse("[" + msg + "]")
def tc1000(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc1001(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "5MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "5MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- return buildOkResponse("["+msg+"]")
+ return buildOkResponse("[" + msg + "]")
def tc1100(groupIndex, changeId, filePrefix, ftpType, filesize):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ msg = ""
- msg = ""
+ batch = (ctr_responses[groupIndex] - 1) % 20
- batch = (ctr_responses[groupIndex]-1)%20;
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100): # 100 files per event
+ seqNr = i + int((ctr_responses[groupIndex] - 1) / 20)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100): # 100 files per event
- seqNr = i + int((ctr_responses[groupIndex]-1)/20);
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc1200(groupIndex, changeId, filePrefix, ftpType, filesize):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- msg = ""
+ msg = ""
- batch = (ctr_responses[groupIndex]-1)%20;
+ batch = (ctr_responses[groupIndex] - 1) % 20
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for i in range(100): # 100 files per event, all new files
- seqNr = i+100 * int((ctr_responses[groupIndex]-1)/20);
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ for i in range(100): # 100 files per event, all new files
+ seqNr = i + 100 * int((ctr_responses[groupIndex] - 1) / 20)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- return buildOkResponse("["+msg+"]")
+ return buildOkResponse("[" + msg + "]")
def tc1300(groupIndex, changeId, filePrefix, ftpType, filesize):
- global ctr_responses
- global ctr_events
- global rop_counter
- global rop_timestamp
+ global ctr_responses
+ global ctr_events
+ global rop_counter
+ global rop_timestamp
- if (rop_counter == 0):
- rop_timestamp = time.time()
+ if (rop_counter == 0):
+ rop_timestamp = time.time()
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- #Start a event deliver for all 700 nodes every 15min
- rop = time.time()-rop_timestamp
- if ((rop < 900) & (rop_counter%20 == 0) & (rop_counter != 0)):
- return buildOkResponse("[]")
- else:
- if (rop_counter%20 == 0) & (rop_counter > 0):
- rop_timestamp = rop_timestamp+900
+ # Start a event deliver for all 700 nodes every 15min
+ rop = time.time() - rop_timestamp
+ if ((rop < 900) & (rop_counter % 20 == 0) & (rop_counter != 0)):
+ return buildOkResponse("[]")
+ else:
+ if (rop_counter % 20 == 0) & (rop_counter > 0):
+ rop_timestamp = rop_timestamp + 900
- rop_counter = rop_counter+1
+ rop_counter = rop_counter + 1
- msg = ""
+ msg = ""
- batch = (rop_counter-1)%20;
+ batch = (rop_counter - 1) % 20
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for i in range(100): # 100 files per event
- seqNr = i + int((rop_counter-1)/20);
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ for i in range(100): # 100 files per event
+ seqNr = i + int((rop_counter - 1) / 20)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
+
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc1500(groupIndex, changeId, filePrefix, ftpType, filesize):
- global ctr_responses
- global ctr_events
- global rop_counter
- global rop_timestamp
+ global ctr_responses
+ global ctr_events
+ global rop_counter
+ global rop_timestamp
- if (rop_counter == 0):
- rop_timestamp = time.time()
+ if (rop_counter == 0):
+ rop_timestamp = time.time()
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- if (ctr_responses[groupIndex] <= 2000 ): #first 25h of event doess not care of 15min rop timer
+ if (ctr_responses[groupIndex] <= 2000): # first 25h of event doess not care of 15min rop timer
- msg = ""
+ msg = ""
- batch = (ctr_responses[groupIndex]-1)%20;
+ batch = (ctr_responses[groupIndex] - 1) % 20
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for i in range(100): # 100 files per event
- seqNr = i + int((ctr_responses[groupIndex]-1)/20);
- if i != 0: msg = msg + ","
- if (seqNr < 100):
- fileName = createMissingFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- else:
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
+ for i in range(100): # 100 files per event
+ seqNr = i + int((ctr_responses[groupIndex] - 1) / 20)
+ if i != 0: msg = msg + ","
+ if (seqNr < 100):
+ fileName = createMissingFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ else:
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ rop_counter = rop_counter + 1
+ return buildOkResponse("[" + msg + "]")
- rop_counter = rop_counter+1
- return buildOkResponse("["+msg+"]")
+ # Start an event delivery for all 700 nodes every 15min
+ rop = time.time() - rop_timestamp
+ if ((rop < 900) & (rop_counter % 20 == 0) & (rop_counter != 0)):
+ return buildOkResponse("[]")
+ else:
+ if (rop_counter % 20 == 0):
+ rop_timestamp = time.time()
- #Start an event delivery for all 700 nodes every 15min
- rop = time.time()-rop_timestamp
- if ((rop < 900) & (rop_counter%20 == 0) & (rop_counter != 0)):
- return buildOkResponse("[]")
- else:
- if (rop_counter%20 == 0):
- rop_timestamp = time.time()
+ rop_counter = rop_counter + 1
- rop_counter = rop_counter+1
+ msg = ""
- msg = ""
+ batch = (rop_counter - 1) % 20
- batch = (rop_counter-1)%20;
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100): # 100 files per event
+ seqNr = i + int((rop_counter - 1) / 20)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100): # 100 files per event
- seqNr = i + int((rop_counter-1)/20);
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano", nodeIndex)
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc500(groupIndex, changeId, filePrefix, ftpType, filesize):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- if (ctr_responses[groupIndex] > 1):
- return buildOkResponse("[]")
+ if (ctr_responses[groupIndex] > 1):
+ return buildOkResponse("[]")
- msg = ""
+ msg = ""
+ for pnfs in range(700):
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeName = createNodeName(pnfs)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for pnfs in range(700):
- if (pnfs > 0):
- msg = msg + ","
- nodeName = createNodeName(pnfs)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for i in range(2):
+ seqNr = i
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", pnfs)
+ seqNr = seqNr + pnfs * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(2):
- seqNr = i;
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",pnfs)
- seqNr = seqNr + pnfs*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc510(groupIndex, changeId, filePrefix, ftpType, fileSize):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 5):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 5):
- return buildOkResponse("[]")
+ msg = ""
- msg = ""
+ for pnfs in range(700): # build events for 700 MEs
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeName = createNodeName(pnfs)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ seqNr = (ctr_responses[groupIndex] - 1)
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", pnfs)
+ seqNr = seqNr + pnfs * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- for pnfs in range(700): # build events for 700 MEs
- if (pnfs > 0):
- msg = msg + ","
- nodeName = createNodeName(pnfs)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
- seqNr = (ctr_responses[groupIndex]-1)
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",pnfs)
- seqNr = seqNr + pnfs*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc511(groupIndex, changeId, filePrefix, ftpType, fileSize):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 5):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 5):
- return buildOkResponse("[]")
+ msg = ""
- msg = ""
+ for pnfs in range(700): # build events for 700 MEs
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeName = createNodeName(pnfs)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ seqNr = (ctr_responses[groupIndex] - 1)
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", pnfs)
+ seqNr = seqNr + pnfs * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- for pnfs in range(700): # build events for 700 MEs
- if (pnfs > 0):
- msg = msg + ","
- nodeName = createNodeName(pnfs)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
- seqNr = (ctr_responses[groupIndex]-1)
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",pnfs)
- seqNr = seqNr + pnfs*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc710(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- msg = ""
+ msg = ""
- batch = (ctr_responses[groupIndex]-1)%20;
+ batch = (ctr_responses[groupIndex] - 1) % 20
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for i in range(100): # 100 files per event
- seqNr = i + int((ctr_responses[groupIndex]-1)/20);
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ for i in range(100): # 100 files per event
+ seqNr = i + int((ctr_responses[groupIndex] - 1) / 20)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- return buildOkResponse("["+msg+"]")
+ return buildOkResponse("[" + msg + "]")
#### Functions to build json messages and respones ####
def createNodeName(index):
- return "PNF"+str(index);
+ return "PNF" + str(index)
+
def createFileName(groupIndex, filePrefix, nodeName, index, size):
global ctr_files
ctr_files[groupIndex] = ctr_files[groupIndex] + 1
- return filePrefix+"20000626.2315+0200-2330+0200_" + nodeName + "-" + str(index) + "-" +size + ".tar.gz";
+ return filePrefix + "20000626.2315+0200-2330+0200_" + nodeName + "-" + str(index) + "-" + size + ".tar.gz"
+
def createMissingFileName(groupIndex, filePrefix, nodeName, index, size):
global ctr_files
ctr_files[groupIndex] = ctr_files[groupIndex] + 1
- return filePrefix+"MissingFile_" + nodeName + "-" + str(index) + "-" +size + ".tar.gz";
+ return filePrefix + "MissingFile_" + nodeName + "-" + str(index) + "-" + size + ".tar.gz"
# Function to build fixed beginning of an event
def getEventHead(groupIndex, changeId, nodename):
- global pnfMap
- pnfMap[groupIndex].add(nodename)
- headStr = """
- {
+ global pnfMap
+ pnfMap[groupIndex].add(nodename)
+ headStr = """
+ '{
"event": {
"commonEventHeader": {
"startEpochMicrosec": 8745745764578,
@@ -1146,132 +1222,216 @@ def getEventHead(groupIndex, changeId, nodename):
"changeIdentifier": \"""" + changeId + """",
"arrayOfNamedHashMap": [
"""
- return headStr
+ return headStr
+
# Function to build the variable part of an event
-def getEventName(fn,type,user,passwd, nodeIndex):
- nodeIndex=nodeIndex%num_ftp_servers
+def getEventName(fn, type, user, passwd, nodeIndex):
+ nodeIndex = nodeIndex % num_ftp_servers
port = sftp_ports[nodeIndex]
ip = sftp_hosts[nodeIndex]
- if (type == "ftps"):
- port = ftps_ports[nodeIndex]
- ip = ftps_hosts[nodeIndex]
-
- nameStr = """{
+ location_variant = type + """://""" + user + """:""" + passwd + """@""" + ip + """:""" + str(port)
+ token = ""
+ if type == "ftpes":
+ port = ftpes_ports[nodeIndex]
+ ip = ftpes_hosts[nodeIndex]
+ location_variant = type + """://""" + user + """:""" + passwd + """@""" + ip + """:""" + str(port)
+ elif type == "http":
+ nodeIndex = nodeIndex % num_http_servers
+ port = http_ports[nodeIndex]
+ ip = http_hosts[nodeIndex]
+ location_variant = type + """://""" + user + """:""" + passwd + """@""" + ip + """:""" + str(port)
+ elif type == "httpJWT":
+ alt_type = "http"
+ nodeIndex = nodeIndex % num_http_servers
+ port = http_jwt_ports[nodeIndex]
+ ip = http_jwt_hosts[nodeIndex]
+ location_variant = alt_type + """://""" + ip + """:""" + str(port)
+ token = "?access_token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwidXNlciI6Imp3dFVzZXIiLCJpc3MiOiJvbmFwIiwiaWF0IjoxNTE2MjM5MDIyLCJleHAiOjk5OTk5OTk5OTksIm5iZiI6MTUxNjIzOTAyMn0.dZUtnGlr6Z42MehhZTGHYSVFaAggRjob9GyvnGpEc6o"
+ elif type == "https":
+ nodeIndex = nodeIndex % num_http_servers
+ port = https_ports[nodeIndex]
+ ip = https_hosts[nodeIndex]
+ location_variant = type + """://""" + user + """:""" + passwd + """@""" + ip + """:""" + str(port)
+ elif type == "httpsJWT":
+ alt_type = "https"
+ nodeIndex = nodeIndex % num_http_servers
+ port = https_jwt_ports[nodeIndex]
+ ip = https_jwt_hosts[nodeIndex]
+ location_variant = alt_type + """://""" + ip + """:""" + str(port)
+ token = "?access_token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkZW1vIiwiaWF0IjoxNTE2MjM5MDIyLCJleHAiOjk5OTk5OTk5OTksIm5iZiI6MTUxNjIzOTAyMH0.vyktOJyCMVvJXEfImBuZCTaEifrvH0kXeAPpnHakffA"
+ elif type == "httpsCAuth":
+ alt_type = "https"
+ port = https_ports[nodeIndex]
+ ip = https_hosts[nodeIndex]
+ location_variant = alt_type + """://""" + ip + """:""" + str(port)
+ elif type == "httpsNoAuth":
+ alt_type = "https"
+ port = https_ports_no_auth[nodeIndex]
+ ip = https_hosts_no_auth[nodeIndex]
+ location_variant = alt_type + """://""" + ip + """:""" + str(port)
+
+ nameStr = """{
"name": \"""" + fn + """",
"hashMap": {
"fileFormatType": "org.3GPP.32.435#measCollec",
- "location": \"""" + type + """://""" + user + """:""" + passwd + """@""" + ip + """:""" + str(port) + """/""" + fn + """",
+ "location": \"""" + location_variant + """/""" + fn + token + """",
"fileFormatVersion": "V10",
"compression": "gzip"
}
} """
return nameStr
+
# Function to build fixed end of an event
def getEventEnd():
- endStr = """
+ endStr = """
]
}
}
- }
+ }'
"""
return endStr
+
# Function to build an OK reponse from a message string
def buildOkResponse(msg):
- response = app.response_class(
- response=str.encode(msg),
- status=200,
- mimetype='application/json')
- return response
+ response = app.response_class(
+ response=str.encode(msg),
+ status=200,
+ mimetype='application/json')
+ return response
+
# Function to build an empty message with status
def buildEmptyResponse(status_code):
- response = app.response_class(
- response=str.encode(""),
- status=status_code,
- mimetype='application/json')
- return response
+ response = app.response_class(
+ response=str.encode(""),
+ status=status_code,
+ mimetype='application/json')
+ return response
if __name__ == "__main__":
# IP addresses to use for ftp servers, using localhost if not env var is set
sftp_sims = os.environ.get('SFTP_SIMS', 'localhost:1022')
- ftps_sims = os.environ.get('FTPS_SIMS', 'localhost:21')
+ ftpes_sims = os.environ.get('FTPES_SIMS', 'localhost:21')
+ http_sims = os.environ.get('HTTP_SIMS', 'localhost:81')
+ http_jwt_sims = os.environ.get('HTTP_JWT_SIMS', 'localhost:32000')
+ https_sims = os.environ.get('HTTPS_SIMS', 'localhost:444')
+ https_sims_no_auth = os.environ.get('HTTPS_SIMS_NO_AUTH', 'localhost:8081')
+ https_jwt_sims = os.environ.get('HTTPS_JWT_SIMS', 'localhost:32100')
num_ftp_servers = int(os.environ.get('NUM_FTP_SERVERS', 1))
+ num_http_servers = int(os.environ.get('NUM_HTTP_SERVERS', 1))
print("Configured sftp sims: " + sftp_sims)
- print("Configured ftps sims: " + ftps_sims)
+ print("Configured ftpes sims: " + ftpes_sims)
+ print("Configured http sims: " + http_sims)
+ print("Configured http JWT sims: " + http_jwt_sims)
+ print("Configured https sims: " + https_sims)
+ print("Configured https with no authorization sims: " + https_sims_no_auth)
+ print("Configured https JWT sims: " + https_jwt_sims)
print("Configured number of ftp servers: " + str(num_ftp_servers))
+ print("Configured number of http/https/https with no auth/JWT servers: " + str(num_http_servers) + " each")
- tmp=sftp_sims.split(',')
+ tmp = sftp_sims.split(',')
for i in range(len(tmp)):
- hp=tmp[i].split(':')
+ hp = tmp[i].split(':')
sftp_hosts.append(hp[0])
sftp_ports.append(hp[1])
- tmp=ftps_sims.split(',')
+ tmp = ftpes_sims.split(',')
+ for i in range(len(tmp)):
+ hp = tmp[i].split(':')
+ ftpes_hosts.append(hp[0])
+ ftpes_ports.append(hp[1])
+
+ tmp = http_sims.split(',')
+ for i in range(len(tmp)):
+ hp = tmp[i].split(':')
+ http_hosts.append(hp[0])
+ http_ports.append(hp[1])
+
+ tmp = http_jwt_sims.split(',')
+ for i in range(len(tmp)):
+ hp = tmp[i].split(':')
+ http_jwt_hosts.append(hp[0])
+ http_jwt_ports.append(hp[1])
+
+ tmp = https_sims.split(',')
+ for i in range(len(tmp)):
+ hp = tmp[i].split(':')
+ https_hosts.append(hp[0])
+ https_ports.append(hp[1])
+
+ tmp = https_jwt_sims.split(',')
+ for i in range(len(tmp)):
+ hp = tmp[i].split(':')
+ https_jwt_hosts.append(hp[0])
+ https_jwt_ports.append(hp[1])
+
+ tmp = https_sims_no_auth.split(',')
for i in range(len(tmp)):
- hp=tmp[i].split(':')
- ftps_hosts.append(hp[0])
- ftps_ports.append(hp[1])
+ hp = tmp[i].split(':')
+ https_hosts_no_auth.append(hp[0])
+ https_ports_no_auth.append(hp[1])
+
groups = os.environ.get('MR_GROUPS', 'OpenDcae-c12:PM_MEAS_FILES')
- print("Groups detected: " + groups )
+ print("Groups detected: " + groups)
configuredPrefixes = os.environ.get('MR_FILE_PREFIX_MAPPING', 'PM_MEAS_FILES:A')
- if (len(groups) == 0 ):
- groups='OpenDcae-c12:PM_MEAS_FILES'
+ if not groups:
+ groups = 'OpenDcae-c12:PM_MEAS_FILES'
print("Using default group: " + groups)
else:
print("Configured groups: " + groups)
- if (len(configuredPrefixes) == 0 ):
- configuredPrefixes='PM_MEAS_FILES:A'
+ if not configuredPrefixes:
+ configuredPrefixes = 'PM_MEAS_FILES:A'
print("Using default changeid to file prefix mapping: " + configuredPrefixes)
else:
print("Configured changeid to file prefix mapping: " + configuredPrefixes)
- #Counters
+ # Counters
ctr_responses = []
ctr_requests = []
- ctr_files=[]
+ ctr_files = []
ctr_events = []
startTime = time.time()
firstPollTime = []
runningState = "Started"
- #Keeps all responded file names
+ # Keeps all responded file names
fileMap = []
- #Keeps all responded PNF names
+ # Keeps all responded PNF names
pnfMap = []
- #Handles rop periods for tests that deliveres events every 15 min
+ # Handles rop periods for tests that deliveres events every 15 min
rop_counter = 0
rop_timestamp = time.time()
- #List of configured group names
+ # List of configured group names
groupNames = []
- #Mapping between group name and index in groupNames
+ # Mapping between group name and index in groupNames
groupNameIndexes = {}
- #String of configured groups
+ # String of configured groups
configuredGroups = ""
- #String of configured change identifiers
+ # String of configured change identifiers
configuredChangeIds = ""
- #List of changed identifiers
+ # List of changed identifiers
changeIds = []
- #List of filePrefixes
+ # List of filePrefixes
filePrefixes = {}
- tmp=groups.split(',')
+ tmp = groups.split(',')
for i in range(len(tmp)):
- g=tmp[i].split(':')
+ g = tmp[i].split(':')
for j in range(len(g)):
g[j] = g[j].strip()
if (j == 0):
- if (len(configuredGroups) > 0):
- configuredGroups=configuredGroups+","
- configuredGroups=configuredGroups+g[0]
+ if configuredGroups:
+ configuredGroups = configuredGroups + ","
+ configuredGroups = configuredGroups + g[0]
groupNames.append(g[0])
groupNameIndexes[g[0]] = i
changeIds.append({})
@@ -1282,18 +1442,18 @@ if __name__ == "__main__":
firstPollTime.append(0)
pnfMap.append(set())
fileMap.append({})
- if (len(configuredChangeIds) > 0):
- configuredChangeIds=configuredChangeIds+","
+ if configuredGroups:
+ configuredChangeIds = configuredChangeIds + ","
else:
- changeIds[i][j-1]=g[j]
+ changeIds[i][j - 1] = g[j]
if (j > 1):
- configuredChangeIds=configuredChangeIds+":"
- configuredChangeIds=configuredChangeIds+g[j]
+ configuredChangeIds = configuredChangeIds + ":"
+ configuredChangeIds = configuredChangeIds + g[j]
# Create a map between changeid and file name prefix
- tmp=configuredPrefixes.split(',')
+ tmp = configuredPrefixes.split(',')
for i in range(len(tmp)):
- p=tmp[i].split(':')
+ p = tmp[i].split(':')
filePrefixes[p[0]] = p[1]
tc_num = "Not set"
@@ -1301,7 +1461,7 @@ if __name__ == "__main__":
parser = argparse.ArgumentParser()
-#SFTP TCs with single ME
+ # SFTP TCs with single ME
parser.add_argument(
'--tc100',
action='store_true',
@@ -1354,7 +1514,7 @@ if __name__ == "__main__":
action='store_true',
help='TC1001 - One ME, SFTP, 5MB files, 100 files per event, endless number of events, 1 event per poll')
-# SFTP TCs with multiple MEs
+ # SFTP TCs with multiple MEs
parser.add_argument(
'--tc500',
action='store_true',
@@ -1434,142 +1594,183 @@ if __name__ == "__main__":
action='store_true',
help='TC1500 - 700 ME, SFTP, 1MB files, 100 files per event, 35 events per poll, simulating 25h backlog of decreasing number of outdated files and then 20 event polls every 15min for 1h')
-# FTPS TCs with single ME
+ # FTPES TCs with single ME
parser.add_argument(
'--tc200',
action='store_true',
- help='TC200 - One ME, FTPS, 1 1MB file, 1 event')
+ help='TC200 - One ME, FTPES, 1 1MB file, 1 event')
parser.add_argument(
'--tc201',
action='store_true',
- help='TC201 - One ME, FTPS, 1 5MB file, 1 event')
+ help='TC201 - One ME, FTPES, 1 5MB file, 1 event')
parser.add_argument(
'--tc202',
action='store_true',
- help='TC202 - One ME, FTPS, 1 50MB file, 1 event')
+ help='TC202 - One ME, FTPES, 1 50MB file, 1 event')
parser.add_argument(
'--tc210',
action='store_true',
- help='TC210 - One ME, FTPS, 1MB files, 1 file per event, 100 events, 1 event per poll.')
+ help='TC210 - One ME, FTPES, 1MB files, 1 file per event, 100 events, 1 event per poll.')
parser.add_argument(
'--tc211',
action='store_true',
- help='TC211 - One ME, FTPS, 1MB files, 100 files per event, 100 events, 1 event per poll.')
+ help='TC211 - One ME, FTPES, 1MB files, 100 files per event, 100 events, 1 event per poll.')
parser.add_argument(
'--tc212',
action='store_true',
- help='TC212 - One ME, FTPS, 5MB files, 100 files per event, 100 events, 1 event per poll.')
+ help='TC212 - One ME, FTPES, 5MB files, 100 files per event, 100 events, 1 event per poll.')
parser.add_argument(
'--tc213',
action='store_true',
- help='TC213 - One ME, FTPS, 1MB files, 100 files per event, 100 events. All events in one poll.')
+ help='TC213 - One ME, FTPES, 1MB files, 100 files per event, 100 events. All events in one poll.')
parser.add_argument(
'--tc220',
action='store_true',
- help='TC220 - One ME, FTPS, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% of replies each: no response, empty message, slow response, 404-error, malformed json')
+ help='TC220 - One ME, FTPES, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% of replies each: no response, empty message, slow response, 404-error, malformed json')
parser.add_argument(
'--tc221',
action='store_true',
- help='TC221 - One ME, FTPS, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% missing files')
+ help='TC221 - One ME, FTPES, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% missing files')
parser.add_argument(
'--tc222',
action='store_true',
- help='TC222 - One ME, FTPS, 1MB files, 100 files per event, 100 events. 1 event per poll. All files with identical name. ')
+ help='TC222 - One ME, FTPES, 1MB files, 100 files per event, 100 events. 1 event per poll. All files with identical name. ')
parser.add_argument(
'--tc2000',
action='store_true',
- help='TC2000 - One ME, FTPS, 1MB files, 100 files per event, endless number of events, 1 event per poll')
+ help='TC2000 - One ME, FTPES, 1MB files, 100 files per event, endless number of events, 1 event per poll')
parser.add_argument(
'--tc2001',
action='store_true',
- help='TC2001 - One ME, FTPS, 5MB files, 100 files per event, endless number of events, 1 event per poll')
-
+ help='TC2001 - One ME, FTPES, 5MB files, 100 files per event, endless number of events, 1 event per poll')
parser.add_argument(
'--tc2100',
action='store_true',
- help='TC2100 - 700 ME, FTPS, 1MB files, 100 files per event, endless number of events, 35 event per poll')
+ help='TC2100 - 700 ME, FTPES, 1MB files, 100 files per event, endless number of events, 35 event per poll')
parser.add_argument(
'--tc2101',
action='store_true',
- help='TC2101 - 700 ME, FTPS, 5MB files, 100 files per event, endless number of events, 35 event per poll')
+ help='TC2101 - 700 ME, FTPES, 5MB files, 100 files per event, endless number of events, 35 event per poll')
parser.add_argument(
'--tc2102',
action='store_true',
- help='TC2102 - 700 ME, FTPS, 50MB files, 100 files per event, endless number of events, 35 event per poll')
+ help='TC2102 - 700 ME, FTPES, 50MB files, 100 files per event, endless number of events, 35 event per poll')
parser.add_argument(
'--tc2200',
action='store_true',
- help='TC2200 - 700 ME, FTPS, 1MB files, 100 new files per event, endless number of events, 35 event per poll')
+ help='TC2200 - 700 ME, FTPES, 1MB files, 100 new files per event, endless number of events, 35 event per poll')
parser.add_argument(
'--tc2201',
action='store_true',
- help='TC2201 - 700 ME, FTPS, 5MB files, 100 new files per event, endless number of events, 35 event per poll')
+ help='TC2201 - 700 ME, FTPES, 5MB files, 100 new files per event, endless number of events, 35 event per poll')
parser.add_argument(
'--tc2202',
action='store_true',
- help='TC2202 - 700 ME, FTPS, 50MB files, 100 new files per event, endless number of events, 35 event per poll')
+ help='TC2202 - 700 ME, FTPES, 50MB files, 100 new files per event, endless number of events, 35 event per poll')
parser.add_argument(
'--tc2300',
action='store_true',
- help='TC2300 - 700 ME, FTPS, 1MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
+ help='TC2300 - 700 ME, FTPES, 1MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
parser.add_argument(
'--tc2301',
action='store_true',
- help='TC2301 - 700 ME, FTPS, 5MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
+ help='TC2301 - 700 ME, FTPES, 5MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
parser.add_argument(
'--tc2302',
action='store_true',
- help='TC2302 - 700 ME, FTPS, 50MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
+ help='TC2302 - 700 ME, FTPES, 50MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
parser.add_argument(
'--tc2500',
action='store_true',
- help='TC2500 - 700 ME, FTPS, 1MB files, 100 files per event, 35 events per poll, simulating 25h backlog of decreasing number of outdated files and then 20 event polls every 15min for 1h')
+ help='TC2500 - 700 ME, FTPES, 1MB files, 100 files per event, 35 events per poll, simulating 25h backlog of decreasing number of outdated files and then 20 event polls every 15min for 1h')
parser.add_argument(
'--tc600',
action='store_true',
- help='TC600 - 700 MEs, FTPS, 1MB files, 2 new files per event, 700 events, all event in one poll.')
+ help='TC600 - 700 MEs, FTPES, 1MB files, 2 new files per event, 700 events, all event in one poll.')
parser.add_argument(
'--tc601',
action='store_true',
- help='TC601 - 700 MEs, FTPS, 5MB files, 2 new files per event, 700 events, all event in one poll.')
+ help='TC601 - 700 MEs, FTPES, 5MB files, 2 new files per event, 700 events, all event in one poll.')
parser.add_argument(
'--tc602',
action='store_true',
- help='TC602 - 700 MEs, FTPS, 50MB files, 2 new files per event, 700 events, all event in one poll.')
+ help='TC602 - 700 MEs, FTPES, 50MB files, 2 new files per event, 700 events, all event in one poll.')
parser.add_argument(
'--tc610',
action='store_true',
- help='TC610 - 700 MEs, FTPS, 1MB files, 1 file per event, 3500 events, 700 event per poll.')
+ help='TC610 - 700 MEs, FTPES, 1MB files, 1 file per event, 3500 events, 700 event per poll.')
parser.add_argument(
'--tc611',
action='store_true',
- help='TC611 - 700 MEs, FTPS, 1KB files, 1 file per event, 3500 events, 700 event per poll.')
+ help='TC611 - 700 MEs, FTPES, 1KB files, 1 file per event, 3500 events, 700 event per poll.')
parser.add_argument(
'--tc650',
action='store_true',
- help='TC610 - 700 MEs, FTPS, 50MB files, 1 file per event, 3500 events, 700 event per poll.')
+ help='TC610 - 700 MEs, FTPES, 50MB files, 1 file per event, 3500 events, 700 event per poll.')
parser.add_argument(
'--tc810',
action='store_true',
- help='TC810 - 700 MEs, FTPS, 1MB files, 100 files per event, 3500 events, 35 event per poll.')
+ help='TC810 - 700 MEs, FTPES, 1MB files, 100 files per event, 3500 events, 35 event per poll.')
- args = parser.parse_args()
+ # HTTP TCs with single ME
+ parser.add_argument(
+ '--tc300',
+ action='store_true',
+ help='TC300 - One ME, HTTP, 1 1MB file, 1 event')
+ parser.add_argument(
+ '--tc301',
+ action='store_true',
+ help='TC301 - One ME, HTTP, 1 5MB file, 1 event')
+ parser.add_argument(
+ '--tc302',
+ action='store_true',
+ help='TC302 - One ME, HTTP, 1 50MB file, 1 event')
+ parser.add_argument(
+ '--tc303',
+ action='store_true',
+ help='TC303 - One ME, HTTP JWT, 1 1MB file, 1 event')
+ # HTTPS TCs with single ME
+ parser.add_argument(
+ '--tc400',
+ action='store_true',
+ help='TC400 - One ME, HTTPS, 1 1MB file, 1 event')
+ parser.add_argument(
+ '--tc401',
+ action='store_true',
+ help='TC401 - One ME, HTTPS, 1 5MB file, 1 event')
+ parser.add_argument(
+ '--tc402',
+ action='store_true',
+ help='TC402 - One ME, HTTPS, 1 50MB file, 1 event')
+ parser.add_argument(
+ '--tc403',
+ action='store_true',
+ help='TC403 - One ME, HTTPS client certificare authentication, 1 1MB file, 1 event')
+ parser.add_argument(
+ '--tc404',
+ action='store_true',
+ help='TC404 - One ME, HTTPS no client authentication, 1 1MB file, 1 event')
+ parser.add_argument(
+ '--tc405',
+ action='store_true',
+ help='TC405 - One ME, HTTPS JWT, 1 1MB file, 1 event')
+ args = parser.parse_args()
if args.tc100:
tc_num = "TC# 100"
@@ -1703,6 +1904,28 @@ if __name__ == "__main__":
elif args.tc810:
tc_num = "TC# 810"
+ elif args.tc300:
+ tc_num = "TC# 300"
+ elif args.tc301:
+ tc_num = "TC# 301"
+ elif args.tc302:
+ tc_num = "TC# 302"
+ elif args.tc303:
+ tc_num = "TC# 303"
+
+ elif args.tc400:
+ tc_num = "TC# 400"
+ elif args.tc401:
+ tc_num = "TC# 401"
+ elif args.tc402:
+ tc_num = "TC# 402"
+ elif args.tc403:
+ tc_num = "TC# 403"
+ elif args.tc404:
+ tc_num = "TC# 404"
+ elif args.tc405:
+ tc_num = "TC# 405"
+
else:
print("No TC was defined")
print("use --help for usage info")
@@ -1711,12 +1934,38 @@ if __name__ == "__main__":
print("TC num: " + tc_num)
for i in range(len(sftp_hosts)):
- print("Using " + str(sftp_hosts[i]) + ":" + str(sftp_ports[i]) + " for sftp server with index " + str(i) + " for sftp server address and port in file urls.")
+ print("Using " + str(sftp_hosts[i]) + ":" + str(sftp_ports[i]) + " for sftp server with index " + str(
+ i) + " for sftp server address and port in file urls.")
+
+ for i in range(len(ftpes_hosts)):
+ print("Using " + str(ftpes_hosts[i]) + ":" + str(ftpes_ports[i]) + " for ftpes server with index " + str(
+ i) + " for ftpes server address and port in file urls.")
+
+ for i in range(len(http_hosts)):
+ print("Using " + str(http_hosts[i]) + ":" + str(http_ports[i]) + " for http server with index " + str(
+ i) + " for http server address and port in file urls.")
+
+ for i in range(len(http_jwt_hosts)):
+ print("Using " + str(http_jwt_hosts[i]) + ":" + str(http_jwt_ports[i]) + " for http jwt server with index " + str(
+ i) + " for http jwt server address and port in file urls.")
- for i in range(len(ftps_hosts)):
- print("Using " + str(ftps_hosts[i]) + ":" + str(ftps_ports[i]) + " for ftps server with index " + str(i) + " for ftps server address and port in file urls.")
+ for i in range(len(https_hosts)):
+ print("Using " + str(https_hosts[i]) + ":" + str(https_ports[i]) + " for https server with index " + str(
+ i) + " for https server address and port in file urls.")
+
+ for i in range(len(https_hosts_no_auth)):
+ print("Using " + str(https_hosts_no_auth[i]) + ":" + str(https_ports_no_auth[i])
+ + " for https server with no authentication with index " + str(i)
+ + " for https server address and port in file urls.")
+
+ for i in range(len(https_jwt_hosts)):
+ print("Using " + str(https_jwt_hosts[i]) + ":" + str(https_jwt_ports[i]) + " for https jwt server with index " + str(
+ i) + " for https jwt server address and port in file urls.")
print("Using up to " + str(num_ftp_servers) + " ftp servers, for each protocol for PNFs.")
+ print("Using up to " + str(num_http_servers)
+ + " http/https/https with no auth/jwt servers, for each protocol for PNFs.")
+
def https_app(**kwargs):
import ssl
@@ -1724,10 +1973,11 @@ if __name__ == "__main__":
context.load_cert_chain('cert/cert.pem', 'cert/key.pem')
app.run(ssl_context=context, **kwargs)
+
from multiprocessing import Process
kwargs = dict(host=HOST_IP)
Process(target=https_app, kwargs=dict(kwargs, port=HOST_PORT_TLS),
daemon=True).start()
- app.run(port=HOST_PORT, host=HOST_IP) \ No newline at end of file
+ app.run(port=HOST_PORT, host=HOST_IP)
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/setup.sh b/test/mocks/datafilecollector-testharness/mr-sim/setup.sh
index 6661d0bb8..e6f50b25f 100755
--- a/test/mocks/datafilecollector-testharness/mr-sim/setup.sh
+++ b/test/mocks/datafilecollector-testharness/mr-sim/setup.sh
@@ -7,7 +7,7 @@ pip3 --version > /dev/null || { echo 'python3-pip package is not available, exit
if [ -d ".env" ]; then
echo ".env is prepared"
else
- virtualenv --no-site-packages --distribute -p python3 .env
+ virtualenv -p python3 .env
fi
-source .env/bin/activate && pip3 install -r requirements.txt
+source .env/bin/activate && pip3 install --no-cache-dir -r requirements.txt
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/.gitignore b/test/mocks/datafilecollector-testharness/simulator-group/.gitignore
index 74f16e75d..ce79f6ad6 100644
--- a/test/mocks/datafilecollector-testharness/simulator-group/.gitignore
+++ b/test/mocks/datafilecollector-testharness/simulator-group/.gitignore
@@ -3,4 +3,5 @@ node_modules
package.json
package-lock.json
.tmp*
-/tls \ No newline at end of file
+/tls/*.bak
+/dfc_config_volume
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/README.md b/test/mocks/datafilecollector-testharness/simulator-group/README.md
index 55a2467ae..e13389373 100644..100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/README.md
+++ b/test/mocks/datafilecollector-testharness/simulator-group/README.md
@@ -1,4 +1,5 @@
-###Introduction
+# Introduction
+
The purpose of the "simulator-group" is to run all containers in one go with specified behavior.
Mainly this is needed for CSIT tests and for auto test but can be used also for manual testing of dfc both as an java-app
or as a manually started container. Instead of running the simulators manually as described below the auto-test cases
@@ -8,88 +9,70 @@ In general these steps are needed to run the simulator group and dfc
1. Build the simulator images
2. Edit simulator env variables (to adapt the behavior of simulators)
-3. Configure consul
-4. Start the simulator monitor (to view the simulator stats)
-5. Start the simulators
-6. Start dfc
-
-###Overview of the simulators.
-There are 5 different types of simulators. For futher details, see the README.md in each simulator dir.
-
-1. The MR simulator emits fileready events, upon poll requests, with new and historice file references.
-It is possible to configire the change identifier and file prefixes for these identifiers and for which consumer groups
-these change identifier shall be generated. It is also possible to configure the number of events and files to generate and
-from which ftp servers the files shall be fetched from.
-2. The DR simulator handles the publish queries (to check if a file has previously been published) and the
-actual publish request (which results in a redirect to the DR REDIR simulator. It keeps a 'db' of published files updated by the DR REDIR simulator.
-It is possible to configure 1 or more feeds along with the accepted filename prefixes for each feed. It is also possible
-to configure the responses for the publish queries and publish requests.
-3. The DR REDIR simulator handles the redirect request for publish from the DR simulator. All accepted files will be stored as and empty
-file with a file name concatenated from the published file name + file size + feed id.
-It is possible to configure 1 or more feeds along with the accepted filename prefixes for each feed. It is also possible
-to configure the responses for the publish requests.
-4. The SFTP simulator(s) handles the ftp download requests. 5 of these simulators are always started and in the MR sim it is
-possible to configure the distrubution of files over these 5 servers (from 1 up to 5 severs). At start of the server, the server is
-populated with files to download.
-5. The FTPS simulator(s) is the same as the SFTP except that it using the FTPS protocol.
-
-
-### Build the simulator images
-Run the script `prepare-images.sh` to build the docker images for MR, DR and FTPS servers.
-
-###Edit simulator env variables
-
-
-
-
-###Summary of scripts and files
-`consul_config.sh` - Convert a json config file to work with dfc when manually started as java-app or container and then add that json to Consul.
-
-`dfc-internal-stats.sh` - Periodically extract jvm data and dfc internal data and print to console/file.
+3. Start the simulator monitor (to view the simulator stats)
+4. Start the simulators
+5. Start dfc
-`docker-compose-setup.sh` - Sets environment variables for the simulators and start the simulators with that settings.
-
-`docker-compose-template.yml` - A docker compose template with environment variables setting. Used for producing a docker-compose file to defined the simulator containers.
-
-`prepare-images.sh` - Script to build all needed simulator images.
-
-`setup-ftp-files-for-image.sh` - Script executed in the ftp server to create files for download.
-
-`sim-monitor-start.sh` - Script to install needed packages and start the simulator monitor.
-
-`sim-monitor.js` - The source file the simulator monitor.
-
-`simulators-kill.sh` - Script to kill all the simulators
-
-`simulators-start.sh` - Script to start all the simulators. All env variables need to be set prior to executing the script.
+# Overview of the simulators.
+There are 6 different types of simulators. For futher details, see the README.md in each simulator dir.
+1. The MR simulator emits fileready events, upon poll requests, with new and historice file references
+ It is possible to configire the change identifier and file prefixes for these identifiers and for which consumer groups
+ these change identifier shall be generated. It is also possible to configure the number of events and files to generate and
+ from which ftp servers the files shall be fetched from.
+2. The DR simulator handles the publish queries (to check if a file has previously been published) and the
+ actual publish request (which results in a redirect to the DR REDIR simulator. It keeps a 'db' of published files updated by the DR REDIR simulator.
+ It is possible to configure 1 or more feeds along with the accepted filename prefixes for each feed. It is also possible
+ to configure the responses for the publish queries and publish requests.
+3. The DR REDIR simulator handles the redirect request for publish from the DR simulator. All accepted files will be stored as and empty
+ file with a file name concatenated from the published file name + file size + feed id.
+ It is possible to configure 1 or more feeds along with the accepted filename prefixes for each feed. It is also possible
+ to configure the responses for the publish requests.
+4. The SFTP simulator(s) handles the ftp download requests. 5 of these simulators are always started and in the MR sim it is
+ possible to configure the distrubution of files over these 5 servers (from 1 up to 5 severs). At start of the server, the server is
+ populated with files to download.
+5. The FTPES simulator(s) is the same as the SFTP except that it using the FTPES protocol.
+6. The HTTP simulator(s) is the same as SFTP except that it using the HTTP protocol.
-###Preparation
-Do the manual steps to prepare the simulator images
+# Build the simulator images
-Build the mr-sim image.
+Run the script `prepare-images.sh` to build the docker images for MR, DR and FTPES servers.
-cd ../mr-sim
+# Edit simulator env variables
-Run the docker build command to build the image for the MR simulator: 'docker build -t mrsim:latest .'
+## Summary of scripts and files
-cd ../dr-sim
+- `dfc-internal-stats.sh` - Periodically extract jvm data and dfc internal data and print to console/file.
+- `docker-compose-setup.sh` - Sets environment variables for the simulators and start the simulators with that settings.
+- `docker-compose-template.yml` - A docker compose template with environment variables setting. Used for producing a docker-compose file to defined the simulator containers.
+- `prepare-images.sh` - Script to build all needed simulator images.
+- `setup-ftp-files-for-image.sh` - Script executed in the ftp server to create files for download.
+- `sim-monitor-start.sh` - Script to install needed packages and start the simulator monitor.
+- `sim-monitor.js` - The source file the simulator monitor.
+- `simulators-kill.sh` - Script to kill all the simulators
+- `simulators-start.sh` - Script to start all the simulators. All env variables need to be set prior to executing the script.
-Run the docker build command to build the image for the DR simulators: `docker build -t drsim_common:latest .'
+## Preparation
-cd ../ftps-sftp-server
-Check the README.md in ftps-sftp-server dir in case the cert need to be updated.
-Run the docker build command to build the image for the DR simulators: `docker build -t ftps_vsftpd:latest -f Dockerfile-ftps .'
+Do the manual steps to prepare the simulator images:
+- Build the mr-sim image.
+- cd ../mr-sim
+- Run the docker build command to build the image for the MR simulator: 'docker build -t mrsim:latest .'
+- cd ../dr-sim
+- Run the docker build command to build the image for the DR simulators: \`docker build -t drsim_common:latest .'
+- cd ../ftpes-sftp-server
+- Check the README.md in ftpes-sftp-server dir in case the cert need to be updated.
+- Run the docker build command to build the image for the DR simulators: \`docker build -t ftpes_vsftpd:latest -f Dockerfile-ftpes .'
-###Execution
+## Execution
Edit the `docker-compose-setup.sh` (or create a copy) to setup the env variables to the desired test behavior for each simulators.
See each simulator to find a description of the available settings (DR_TC, DR_REDIR_TC and MR_TC).
The following env variables shall be set (example values).
-Note that NUM_FTPFILES and NUM_PNFS controls the number of ftp files created in the ftp servers.
-A total of NUM_FTPFILES * NUM_PNFS ftp files will be created in each ftp server (4 files in the below example).
+Note that NUM_FTPFILES, NUM_HTTPFILES and NUM_PNFS controls the number of ftp/http files created in the ftp/http servers.
+A total of NUM_FTPFILES \* NUM_PNFS (or NUM_HTTPFILES \* NUM_PNFS) ftp/http files will be created in each ftp/http server (4 files in the below example for ftp server).
Large settings will be time consuming at start of the servers.
Note that the number of files must match the number of file references emitted from the MR sim.
@@ -107,22 +90,24 @@ NUM_PNFS="2" #Two PNFs
To minimize the number of ftp file creation, the following two variables can be configured in the same file.
FILE_SIZE="1MB" #File size for FTP file (1KB, 1MB, 5MB, 50MB or ALL)
-FTP_TYPE="SFTP" #Type of FTP files to generate (SFTP, FTPS or ALL)
+FTP_TYPE="SFTP" #Type of FTP files to generate (SFTP, FTPES or ALL)
-If `FTP_TYPE` is set to `ALL`, both ftp servers will be populated with the same files. If set to `SFTP` or `FTPS` then only the server serving that protocol will be populated with files.
+If `FTP_TYPE` is set to `ALL`, both ftp servers will be populated with the same files. If set to `SFTP` or `FTPES` then only the server serving that protocol will be populated with files.
+`HTTP_TYPE` is prepared for `HTTP` and `HTTPS` protocol. Note, thanks to configuration of http server, single action populates files for all HTTP/HTTPS server type.
Run the script `docker-compose-setup.sh`to create a docker-compose with the desired settings. The desired setting
in the script need to be manually adapted to for each specific simulator behavior according to the above. Check each simulator for available
parameters.
All simulators will be started with the generated docker-compose.yml file
-To generate ftp url with IP different from localhost, set SFTP_SIM_IP and/or FTPS_SIM_IP env variables to the addreses of the ftp servers before starting.
-So farm, this only works when the simulator python script is started from the command line.
+To generate an ftp/http/https url with an IP different from localhost, set the SFTP_SIM_IP and/or FTPES_SIM_IP and/or HTTP_SIM_IP and/or HTTPS_SIM_IP and/or HTTPS_SIM_NO_AUTH_IP and/or HTTP_JWT_SIM_IP and/or HTTPS_JWT_SIM_IP env variables to the address(es) of the ftp/http/https servers before starting.
+So far, this only works when the simulator python script is started from the command line.
Kill all the containers with `simulators-kill.se`
`simulators_start.sh` is for CSIT test and requires the env variables for test setting to be present in the shell.
-`setup-ftp-files.for-image.sh` is for CSIT and executed when the ftp servers are started from the docker-compose-setup.sh`.
+
+`setup-ftp-files.for-image.sh` and `setup-http-files-for-image.sh` are for CSIT and are executed when the ftp/http servers are started from `docker-compose-setup.sh`.
To make DFC to be able to connect to the simulator containers, DFC need to run in host mode.
Start DFC by the following cmd: `docker run -d --network="host" --name dfc_app <dfc-image> `
@@ -130,9 +115,8 @@ Start DFC by the following cmd: `docker run -d --network="host" --name dfc_app <
`<dfc-image>` could be either the locally built image `onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server`
or the one in nexus `nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server`.
+# Start the simulator monitor
-
-###Start the simulator monitor
Start the simulator monitor server with `node sim-monitor.js` on the cmd line and the open a browser with the url `localhost:9999/mon`
to see the statisics page with data from DFC(ss), MR sim, DR sim and DR redir sim.
If needed run 'npm install express' first
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM.json
deleted file mode 100644
index eac26a9ab..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth":"false",
- "streams_publishes":{
- "PM_MEAS_FILES":{
- "type":"data_router",
- "dmaap_info":{
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/1",
- "publish_url": "https://drsim:3907/publish/1",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes":{
- "dmaap_subscriber":{
- "dmaap_info":{
- "topic_url":"http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type":"message_router"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM_feed2_CTR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM_feed2_CTR.json
deleted file mode 100644
index c320eda7a..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM_feed2_CTR.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "PM_MEAS_FILES": {
- "type": "data_router",
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/1",
- "publish_url": "https://drsim:3907/publish/1",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- },
- "CTR_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "feed02": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type": "message_router"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR.json
deleted file mode 100644
index c7e5fc904..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR.json
+++ /dev/null
@@ -1,34 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type": "message_router"
- }
- },
- "streams_publishes": {
- "CTR_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "feed02": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json
deleted file mode 100644
index 3a5280c27..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "CTR_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- },
- "LOG_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/3",
- "publish_url": "https://drsim:3907/publish/3",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- },
- "TEMP_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/3",
- "publish_url": "https://drsim:3907/publish/3",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type": "message_router"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM.json
deleted file mode 100644
index a78849890..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth":"false",
- "streams_publishes":{
- "PM_MEAS_FILES":{
- "type":"data_router",
- "dmaap_info":{
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes":{
- "dmaap_subscriber":{
- "dmaap_info":{
- "topic_url":"http://dradmin:dradmin@dfc_mr-sim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type":"message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_MEAS.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_MEAS.json
deleted file mode 100644
index dbcf08ab5..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_MEAS.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth":"false",
- "streams_publishes":{
- "PM_MEAS_FILES":{
- "type":"data_router",
- "dmaap_info":{
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes":{
- "dmaap_subscriber":{
- "dmaap_info":{
- "topic_url":"http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type":"message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_secureMR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_secureMR.json
deleted file mode 100644
index d8189bd8d..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_secureMR.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth":"false",
- "streams_publishes":{
- "PM_MEAS_FILES":{
- "type":"data_router",
- "dmaap_info":{
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes":{
- "dmaap_subscriber":{
- "dmaap_info":{
- "topic_url":"https://dradmin:dradmin@mrsim:2223/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type":"message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed3_PM_CTR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed3_PM_CTR.json
deleted file mode 100644
index 489580a4f..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed3_PM_CTR.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes":{
- "PM_MEAS_FILES":{
- "type":"data_router",
- "dmaap_info":{
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/3",
- "publish_url": "https://drsim:3907/publish/3",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- },
- "CTR_MEAS_FILES":{
- "type":"data_router",
- "dmaap_info":{
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/3",
- "publish_url": "https://drsim:3907/publish/3",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type": "message_router"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c13_feed2_CTR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c13_feed2_CTR.json
deleted file mode 100644
index a98752b65..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c13_feed2_CTR.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "CTR_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c13/C13"
- },
- "type": "message_router"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c14_feed3_LOG.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c14_feed3_LOG.json
deleted file mode 100644
index 07d3f3e78..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c14_feed3_LOG.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "LOG_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/3",
- "publish_url": "https://drsim:3907/publish/3",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c14/C14"
- },
- "type": "message_router"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c15_feed1_PM_feed4_TEST.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c15_feed1_PM_feed4_TEST.json
deleted file mode 100644
index 55ffa1ba0..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c15_feed1_PM_feed4_TEST.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "PM_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/1",
- "publish_url": "https://drsim:3907/publish/1",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- },
- "TEST_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/4",
- "publish_url": "https://drsim:3907/publish/4",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c15/C15"
- },
- "type": "message_router"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json
deleted file mode 100644
index 5ab297466..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "TEST_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/4",
- "publish_url": "https://drsim:3907/publish/4",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- },
- "TEMP_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/5",
- "publish_url": "https://drsim:3907/publish/4",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c15/C15"
- },
- "type": "message_router"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_config.hcl b/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_config.hcl
deleted file mode 100644
index f5409755a..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_config.hcl
+++ /dev/null
@@ -1,13 +0,0 @@
-#server = true
-#bootstrap = true
-#client_addr = "0.0.0.0"
-
-service {
- # Name for CBS in consul, env var CONFIG_BINDING_SERVICE
- # should be passed to dfc app with this value
- Name = "config-binding-service"
- # Host name where CBS is running
- Address = "config-binding-service"
- # Port number where CBS is running
- Port = 10000
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_localhost_config.hcl b/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_localhost_config.hcl
deleted file mode 100644
index c2d9839ee..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_localhost_config.hcl
+++ /dev/null
@@ -1,11 +0,0 @@
-service {
- # Name for CBS in consul, env var CONFIG_BINDING_SERVICE
- # should be passed to dfc app with this value
- # This is only to be used when contacting cbs via local host
- # (typicall when dfc is executed as an application without a container)
- Name = "config-binding-service-localhost"
- # Host name where CBS is running
- Address = "localhost"
- # Port number where CBS is running
- Port = 10000
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul_config.sh b/test/mocks/datafilecollector-testharness/simulator-group/consul_config.sh
deleted file mode 100755
index 5e8f7e2d4..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul_config.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash
-bash -x
-
-# Script to configure consul with json configuration files with 'localhost' urls. This
-# is needed when running the simulator as as a stand-alone app or via a dfc container in 'host' network mode.
-# Assuming the input json files hostnames for MR and DR simulators are given as 'mrsim'/'drsim'
-# See available consul files in the consul dir
-# The script stores a json config for 'dfc_app'<dfc-instance-id>' if arg 'app' is given.
-# And for 'dfc_app'<dfc-instance-id>':dmaap' if arg 'dmaap' is given.
-# Instance id shall be and integer in the range 0..5
-
-. ../common/test_env.sh
-
-if [ $# != 3 ]; then
- echo "Script needs three args, app|dmaap <dfc-instance-id> <json-file-path>"
- exit 1
-fi
-
-if [ $2 -lt 0 ] || [ $2 -gt $DFC_MAX_IDX ]; then
- __print_err "dfc-instance-id should be 0.."$DFC_MAX_IDX
- exit 1
-fi
-if ! [ -f $3 ]; then
- __print_err "json file does not extis: "$3
- exit 1
-fi
-
-echo "Configuring consul for " $appname " from " $3
-curl -s http://127.0.0.1:${CONSUL_PORT}/v1/kv/${appname}?dc=dc1 -X PUT -H 'Accept: application/json' -H 'Content-Type: application/json' -H 'X-Requested-With: XMLHttpRequest' --data-binary "@"$3
-
-echo "Reading back from consul:"
-curl "http://127.0.0.1:${CONSUL_PORT}/v1/kv/${appname}?dc=dc1&raw=0"
-
-echo "done" \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh b/test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh
index e0d7c33b7..6af42f677 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh
@@ -112,4 +112,4 @@ while [ true ]; do
heading=0
fi
sleep 5
-done \ No newline at end of file
+done
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM.yaml
new file mode 100644
index 000000000..89b1f7f4b
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/1
+ publish_url: https://drsim:3907/publish/1
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM_feed2_CTR.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM_feed2_CTR.yaml
new file mode 100644
index 000000000..cbc79f5bc
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM_feed2_CTR.yaml
@@ -0,0 +1,37 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ username: user
+ log_url: https://drsim:3907/feedlog/1
+ publish_url: https://drsim:3907/publish/1
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ CTR_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ feed02:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR.yaml
new file mode 100644
index 000000000..7e5e3dffa
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR.yaml
@@ -0,0 +1,29 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
+streams_publishes:
+ CTR_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ feed02:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR_feed3_LOG_TEMP.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR_feed3_LOG_TEMP.yaml
new file mode 100644
index 000000000..dbd7641b6
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR_feed3_LOG_TEMP.yaml
@@ -0,0 +1,46 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ CTR_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ LOG_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/3
+ publish_url: https://drsim:3907/publish/3
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ TEMP_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/3
+ publish_url: https://drsim:3907/publish/3
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM.yaml
new file mode 100644
index 000000000..ce3e3a6e9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml
new file mode 100644
index 000000000..50a41be9b
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml
@@ -0,0 +1,29 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.p12
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/p12.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.certificateConfig.httpsHostnameVerify: false
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS.yaml
new file mode 100644
index 000000000..ce3e3a6e9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS_no_strict_host_key_checking.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS_no_strict_host_key_checking.yaml
new file mode 100644
index 000000000..f249f76fd
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS_no_strict_host_key_checking.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "false"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_secureMR.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_secureMR.yaml
new file mode 100644
index 000000000..ce3e3a6e9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_secureMR.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed3_PM_CTR.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed3_PM_CTR.yaml
new file mode 100644
index 000000000..e578430b9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed3_PM_CTR.yaml
@@ -0,0 +1,37 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/3
+ publish_url: https://drsim:3907/publish/3
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ CTR_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/3
+ publish_url: https://drsim:3907/publish/3
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c13_feed2_CTR.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c13_feed2_CTR.yaml
new file mode 100644
index 000000000..8ec155f8f
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c13_feed2_CTR.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c13
+dmaap.dmaapConsumerConfiguration.consumerId: C13
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ CTR_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c14_feed3_LOG.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c14_feed3_LOG.yaml
new file mode 100644
index 000000000..274fdfb8b
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c14_feed3_LOG.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c14
+dmaap.dmaapConsumerConfiguration.consumerId: C14
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ LOG_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/3
+ publish_url: https://drsim:3907/publish/3
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c15_feed1_PM_feed4_TEST.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c15_feed1_PM_feed4_TEST.yaml
new file mode 100644
index 000000000..d72ff44ba
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c15_feed1_PM_feed4_TEST.yaml
@@ -0,0 +1,37 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c15
+dmaap.dmaapConsumerConfiguration.consumerId: C15
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/1
+ publish_url: https://drsim:3907/publish/1
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ TEST_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/4
+ publish_url: https://drsim:3907/publish/4
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c16_feed4_TEST_feed5_TEMP.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c16_feed4_TEST_feed5_TEMP.yaml
new file mode 100644
index 000000000..e4cc8cf1a
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c16_feed4_TEST_feed5_TEMP.yaml
@@ -0,0 +1,37 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c16
+dmaap.dmaapConsumerConfiguration.consumerId: C16
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ TEST_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/4
+ publish_url: https://drsim:3907/publish/4
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ TEMP_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/5
+      publish_url: https://drsim:3907/publish/5
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh
index b212fc26c..e145d2606 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh
@@ -1,7 +1,15 @@
#/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
# Script for manually starting all simulators with test setting below
-# Matching json config is needed in CBS/Consul as well. Use consul_config.sh to add config to consul
export MR_TC="--tc710" # Test behaviour for MR sim
export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES" # Comma-separated list of <consumer-group>:<change-identifier>
@@ -13,15 +21,28 @@ export DR_FEEDS="1:A,2:B,3:C,4:D" # Comma-separated of <fee
export DR_REDIR_TC="--tc normal" # Test behaviour for DR redir sim
export DR_REDIR_FEEDS="1:A,2:B,3:C,4:D" # Comma-separated of <feed-id>:<file-name-prefixes> for DR redir sim
+export NUM_PNFS="700"                                                                    # Number of unique PNFs to generate file for
+export FILE_SIZE="1MB" # File size for file (1KB, 1MB, 5MB, 50MB or ALL)
+
export NUM_FTPFILES="105" # Number of FTP files to generate per PNF
-export NUM_PNFS="700" # Number of unuqie PNFs to generate FTP file for
-export FILE_SIZE="1MB" # File size for FTP file (1KB, 1MB, 5MB, 50MB or ALL)
-export FTP_TYPE="SFTP" # Type of FTP files to generate (SFTP, FTPS or ALL)
+export FTP_TYPE="SFTP" # Type of FTP files to generate (SFTP, FTPES or ALL)
export FTP_FILE_PREFIXES="A,B,C,D" # Comma separated list of file name prefixes for ftp files
export NUM_FTP_SERVERS=1 # Number of FTP server to distribute the PNFs (Max 5)
+export NUM_HTTPFILES="105" # Number of HTTP files to generate per PNF
+export HTTP_TYPE="HTTP" # Type of HTTP files to generate (HTTP, HTTPS or ALL)
+export HTTP_FILE_PREFIXES="A,B,C,D" # Comma separated list of file name prefixes for http files
+export NUM_HTTP_SERVERS=1 # Number of HTTP server to distribute the PNFs (Max 5)
+export BASIC_AUTH_LOGIN=demo
+export BASIC_AUTH_PASSWORD=demo123456!
+
export SFTP_SIMS="localhost:21,localhost:22,localhost:23,localhost:24,localhost:25" # Comma separated list for SFTP servers host:port
-export FTPS_SIMS="localhost:1022,localhost:1023,localhost:1024,localhost:1026,localhost:1026" # Comma separated list for FTPS servers host:port
+export FTPES_SIMS="localhost:1022,localhost:1023,localhost:1024,localhost:1026,localhost:1026" # Comma separated list for FTPES servers host:port
+export HTTP_SIMS="localhost:81,localhost:82,localhost:83,localhost:84,localhost:85" # Comma separated list for HTTP servers host:port
+export HTTP_JWT_SIMS="localhost:32001,localhost:32002,localhost:32003,localhost:32004,localhost:32005" # Comma separated list for HTTP JWT servers host:port
+export HTTPS_SIMS="localhost:444,localhost:445,localhost:446,localhost:447,localhost:448" # Comma separated list for HTTPS (enabling client certificate authorization and basic authorization) servers host:port
+export HTTPS_SIMS_NO_AUTH="localhost:8081,localhost:8082,localhost:8083,localhost:8084,localhost:8085" # Comma separated list for HTTPS (with no authorization) servers host:port
+export HTTPS_JWT_SIMS="localhost:32101,localhost:32102,localhost:32103,localhost:32104,localhost:32105" # Comma separated list for HTTPS JWT servers host:port
export DR_REDIR_SIM="localhost" # Hostname of DR redirect server
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml
index 78fa7cb1b..005a5c022 100644
--- a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml
+++ b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml
@@ -1,85 +1,66 @@
version: '2'
networks:
- dfcnet:
- external:
- name: dfcnet
+ dfcnet:
+ external:
+ name: dfcnet
services:
- consul-server:
- networks:
- - dfcnet
- container_name: dfc_consul
- image: docker.io/consul:1.4.4
- ports:
- - "8500:8500"
+ cmpv2-postprocessor:
+ container_name: dfc_cmpv2-postprocessor
+ image: nexus3.onap.org:10001/onap/org.onap.oom.platform.cert-service.oom-certservice-post-processor:2.3.3
+ env_file:
+ - ../certservice/merger/merge-certs.env
volumes:
- - ./consul/consul/:/consul/config
-
- config-binding-service:
- networks:
- - dfcnet
- container_name: dfc_cbs
- image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.configbinding.app-app:2.3.0
- ports:
- - "10000:10000"
- environment:
- - CONSUL_HOST=consul-server
- depends_on:
- - consul-server
-
- tls-init-container:
- container_name: dfc_tls-init-container
- image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.tls-init-container
- networks:
- - dfcnet
- volumes:
- - ./tls:/opt/tls/shared:rw
+ - ${SIM_GROUP}/tls:/opt/app/datafile/etc/cert
+ - ${SIM_GROUP}/../certservice/generated-certs/dfc-p12:/opt/app/datafile/etc/
drsim:
networks:
- dfcnet
environment:
- DRR_SIM_IP: ${DR_REDIR_SIM}
- DR_FEEDS: ${DR_FEEDS}
+ DRR_SIM_IP: ${DR_REDIR_SIM}
+ DR_FEEDS: ${DR_FEEDS}
image: drsim_common:latest
ports:
- - "3906:3906"
- - "3907:3907"
+ - "3906:3906"
+ - "3907:3907"
container_name: dfc_dr-sim
command: node dmaapDR.js ${DR_TC}
volumes:
- ./tls:/app/cert/:rw
- depends_on:
- - tls-init-container
drsim_redir:
networks:
- dfcnet
environment:
- DR_SIM_IP: drsim
- DR_REDIR_FEEDS: ${DR_REDIR_FEEDS}
+ DR_SIM_IP: drsim
+ DR_REDIR_FEEDS: ${DR_REDIR_FEEDS}
image: drsim_common:latest
ports:
- - "3908:3908"
- - "3909:3909"
+ - "3908:3908"
+ - "3909:3909"
container_name: dfc_dr-redir-sim
command: node dmaapDR_redir.js ${DR_REDIR_TC}
volumes:
- ./tls:/app/cert/:rw
- depends_on:
- - tls-init-container
mrsim:
networks:
- dfcnet
environment:
- SFTP_SIMS: ${SFTP_SIMS}
- FTPS_SIMS: ${FTPS_SIMS}
- NUM_FTP_SERVERS: ${NUM_FTP_SERVERS}
- MR_GROUPS: ${MR_GROUPS}
- MR_FILE_PREFIX_MAPPING: ${MR_FILE_PREFIX_MAPPING}
+ SFTP_SIMS: ${SFTP_SIMS}
+ FTPES_SIMS: ${FTPES_SIMS}
+ HTTP_SIMS: ${HTTP_SIMS}
+ HTTP_JWT_SIMS: ${HTTP_JWT_SIMS}
+ HTTPS_SIMS: ${HTTPS_SIMS}
+ HTTPS_SIMS_NO_AUTH: ${HTTPS_SIMS_NO_AUTH}
+ HTTPS_JWT_SIMS: ${HTTPS_JWT_SIMS}
+ NUM_FTP_SERVERS: ${NUM_FTP_SERVERS}
+ NUM_HTTP_SERVERS: ${NUM_HTTP_SERVERS}
+ MR_GROUPS: ${MR_GROUPS}
+ MR_FILE_PREFIX_MAPPING: ${MR_FILE_PREFIX_MAPPING}
image: mrsim:latest
ports:
- "2222:2222"
@@ -88,14 +69,12 @@ services:
command: python mr-sim.py ${MR_TC}
volumes:
- ./tls:/app/cert/:rw
- depends_on:
- - tls-init-container
sftp-server0:
networks:
- dfcnet
container_name: dfc_sftp-server0
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1022:22"
restart: on-failure
@@ -105,7 +84,7 @@ services:
networks:
- dfcnet
container_name: dfc_sftp-server1
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1023:22"
restart: on-failure
@@ -115,7 +94,7 @@ services:
networks:
- dfcnet
container_name: dfc_sftp-server2
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1024:22"
restart: on-failure
@@ -125,7 +104,7 @@ services:
networks:
- dfcnet
container_name: dfc_sftp-server3
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1025:22"
restart: on-failure
@@ -135,7 +114,7 @@ services:
networks:
- dfcnet
container_name: dfc_sftp-server4
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1026:22"
restart: on-failure
@@ -145,7 +124,7 @@ services:
networks:
- dfcnet
container_name: dfc_ftpes-server-vsftpd0
- image: ftps_vsftpd:latest
+ image: ftpes_vsftpd:latest
ports:
- "1032:21"
environment:
@@ -155,14 +134,12 @@ services:
command: vsftpd /etc/vsftpd_ssl.conf
volumes:
- ./tls:/etc/ssl/private/:rw
- depends_on:
- - tls-init-container
ftpes-server-vsftpd1:
networks:
- dfcnet
container_name: dfc_ftpes-server-vsftpd1
- image: ftps_vsftpd:latest
+ image: ftpes_vsftpd:latest
ports:
- "1033:21"
environment:
@@ -172,14 +149,12 @@ services:
command: vsftpd /etc/vsftpd_ssl.conf
volumes:
- ./tls:/etc/ssl/private/:rw
- depends_on:
- - tls-init-container
ftpes-server-vsftpd2:
networks:
- dfcnet
container_name: dfc_ftpes-server-vsftpd2
- image: ftps_vsftpd:latest
+ image: ftpes_vsftpd:latest
ports:
- "1034:21"
environment:
@@ -189,14 +164,12 @@ services:
command: vsftpd /etc/vsftpd_ssl.conf
volumes:
- ./tls:/etc/ssl/private/:rw
- depends_on:
- - tls-init-container
ftpes-server-vsftpd3:
networks:
- dfcnet
container_name: dfc_ftpes-server-vsftpd3
- image: ftps_vsftpd:latest
+ image: ftpes_vsftpd:latest
ports:
- "1035:21"
environment:
@@ -206,14 +179,12 @@ services:
command: vsftpd /etc/vsftpd_ssl.conf
volumes:
- ./tls:/etc/ssl/private/:rw
- depends_on:
- - tls-init-container
ftpes-server-vsftpd4:
networks:
- dfcnet
container_name: dfc_ftpes-server-vsftpd4
- image: ftps_vsftpd:latest
+ image: ftpes_vsftpd:latest
ports:
- "1036:21"
environment:
@@ -223,5 +194,78 @@ services:
command: vsftpd /etc/vsftpd_ssl.conf
volumes:
- ./tls:/etc/ssl/private/:rw
- depends_on:
- - tls-init-container
+
+ http-https-server0:
+ networks:
+ - dfcnet
+ container_name: dfc_http-https-server0
+ image: nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver:1.0.0
+ ports:
+ - "81:80"
+ - "444:443"
+ - "8081:8080"
+ - "32001:32000"
+ - "32101:32100"
+ restart: on-failure
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs/:rw
+
+ http-https-server1:
+ networks:
+ - dfcnet
+ container_name: dfc_http-https-server1
+ image: nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver:1.0.0
+ ports:
+ - "82:80"
+ - "445:443"
+ - "8082:8080"
+ - "32002:32000"
+ - "32102:32100"
+ restart: on-failure
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs/:rw
+
+ http-https-server2:
+ networks:
+ - dfcnet
+ container_name: dfc_http-https-server2
+ image: nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver:1.0.0
+ ports:
+ - "83:80"
+ - "446:443"
+ - "8083:8080"
+ - "32003:32000"
+ - "32103:32100"
+ restart: on-failure
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs/:rw
+
+ http-https-server3:
+ networks:
+ - dfcnet
+ container_name: dfc_http-https-server3
+ image: nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver:1.0.0
+ ports:
+ - "84:80"
+ - "447:443"
+ - "8084:8080"
+ - "32004:32000"
+ - "32104:32100"
+ restart: on-failure
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs/:rw
+
+ http-https-server4:
+ networks:
+ - dfcnet
+ container_name: dfc_http-https-server4
+ image: nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver:1.0.0
+ ports:
+ - "85:80"
+ - "448:443"
+ - "8085:8080"
+ - "32005:32000"
+ - "32105:32100"
+ restart: on-failure
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs/:rw
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh b/test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh
index 666e14a8e..59ac1c7ac 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh
@@ -1,4 +1,13 @@
#/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
#Script for manually preparing images for mr-sim, dr-sim, dr-redir-sim and sftp server.
@@ -12,8 +21,7 @@ cd ../dr-sim
docker build -t drsim_common:latest .
-#Build image for ftps server
-cd ../ftps-sftp-server
-
-docker build -t ftps_vsftpd:latest -f Dockerfile-ftps .
+#Build image for ftpes server
+cd ../ftpes-sftp-server
+docker build -t ftpes_vsftpd:latest -f Dockerfile-ftpes .
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/setup-http-files-for-image.sh b/test/mocks/datafilecollector-testharness/simulator-group/setup-http-files-for-image.sh
new file mode 100755
index 000000000..1a83dd143
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/setup-http-files-for-image.sh
@@ -0,0 +1,64 @@
+#!/usr/bin/env bash
+
+# Script to create files for the HTTP server to return upon request.
+# The file names matches the files names in the events polled from the MR simulator.
+# Intended for execution in the running http containers in the http-root dir.
+
+NUM=200 #Default number of files
+PNFS=1 #Default number of PNFs
+FSIZE="ALL"
+PREFIXES="A"
+HTTP_SERV_INDEX=0
+NUM_HTTP_SERVERS=1
+
+if [ $# -ge 1 ]; then
+ NUM=$1
+fi
+if [ $# -ge 2 ]; then
+ PNFS=$2
+fi
+if [ $# -ge 3 ]; then
+ FSIZE=$3
+ if [ $3 != "1KB" ] && [ $3 != "1MB" ] && [ $3 != "5MB" ] && [ $3 != "50MB" ] && [ $3 != "ALL" ]; then
+ echo "File size shall be 1KB|1MB|5MB|50MB|ALL"
+ exit
+ fi
+fi
+if [ $# -ge 4 ]; then
+ PREFIXES=$4
+fi
+if [ $# -ge 5 ]; then
+ NUM_HTTP_SERVERS=$5
+fi
+if [ $# -ge 6 ]; then
+ HTTP_SERV_INDEX=$6
+fi
+if [ $# -lt 1 ] || [ $# -gt 6 ]; then
+ echo "Wrong args, usage: setup-http-files-for-image.sh [ <num-files> [ <num-PNFs> [ 1KB|1MB|5MB|50MB [ <comma-separated-file-name-prefixs> [ <number-of-http-servers> <http-server-index> ] ] ] ] ] ]"
+ exit
+fi
+
+echo "Running http file creations. " $PNFS " PNFs and " $NUM " files for each PNF with file size(s) " $FSIZE "and file prefixe(s) " $PREFIXES " in http servers with index " $HTTP_SERV_INDEX
+
+truncate -s 1KB 1KB.tar.gz
+truncate -s 1MB 1MB.tar.gz
+truncate -s 5MB 5MB.tar.gz
+truncate -s 50MB 50MB.tar.gz
+
+for fnp in ${PREFIXES//,/ }
+do
+ p=0
+ while [ $p -lt $PNFS ]; do
+ if [[ $(($p%$NUM_HTTP_SERVERS)) == $HTTP_SERV_INDEX ]]; then
+ i=0
+ while [ $i -lt $NUM ]; do #Problem with for loop and var substituion in curly bracket....so used good old style loop
+ if [ $FSIZE = "ALL" ] || [ $FSIZE = "1KB" ]; then ln -s 1KB.tar.gz $fnp'20000626.2315+0200-2330+0200_PNF'$p'-'$i'-1KB.tar.gz' >& /dev/null; fi
+ if [ $FSIZE = "ALL" ] || [ $FSIZE = "1MB" ]; then ln -s 1MB.tar.gz $fnp'20000626.2315+0200-2330+0200_PNF'$p'-'$i'-1MB.tar.gz' >& /dev/null; fi
+ if [ $FSIZE = "ALL" ] || [ $FSIZE = "5MB" ]; then ln -s 5MB.tar.gz $fnp'20000626.2315+0200-2330+0200_PNF'$p'-'$i'-5MB.tar.gz' >& /dev/null; fi
+ if [ $FSIZE = "ALL" ] || [ $FSIZE = "50MB" ]; then ln -s 50MB.tar.gz $fnp'20000626.2315+0200-2330+0200_PNF'$p'-'$i'-50MB.tar.gz' >& /dev/null; fi
+ let i=i+1
+ done
+ fi
+ let p=p+1
+ done
+done
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh b/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh
index 32045ea56..634450b6d 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh
@@ -1,4 +1,13 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
#Script to kill and remove all simulators
docker logs dfc_mr-sim
@@ -16,8 +25,11 @@ docker kill dfc_ftpes-server-vsftpd1
docker kill dfc_ftpes-server-vsftpd2
docker kill dfc_ftpes-server-vsftpd3
docker kill dfc_ftpes-server-vsftpd4
-docker kill dfc_cbs
-docker kill dfc_consul
+docker kill dfc_http-https-server0
+docker kill dfc_http-https-server1
+docker kill dfc_http-https-server2
+docker kill dfc_http-https-server3
+docker kill dfc_http-https-server4
echo "Removing simulator containers"
docker rm dfc_dr-sim
@@ -33,7 +45,14 @@ docker rm dfc_ftpes-server-vsftpd1
docker rm dfc_ftpes-server-vsftpd2
docker rm dfc_ftpes-server-vsftpd3
docker rm dfc_ftpes-server-vsftpd4
-docker rm dfc_cbs
-docker rm dfc_consul
+docker rm -f dfc_http-https-server0
+docker rm -f dfc_http-https-server1
+docker rm -f dfc_http-https-server2
+docker rm -f dfc_http-https-server3
+docker rm -f dfc_http-https-server4
+if [ "$HTTP_TYPE" = "HTTPS" ]
+ then
+ docker rm -f oom-certservice-post-processor
+fi
-echo "done" \ No newline at end of file
+echo "done"
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh b/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh
index 5c7c32f41..36dd2606d 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh
@@ -1,4 +1,13 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
set -x
server_check() {
for i in {1..10}; do
@@ -12,6 +21,42 @@ server_check() {
echo "Simulator " $1 " on localhost:$2$3 - no response"
}
+http_https_basic_server_check() {
+ for i in {1..10}; do
+ res=$(curl $4 -s -o /dev/null -w "%{http_code}" "$3"://"$BASIC_AUTH_LOGIN":"$BASIC_AUTH_PASSWORD"@localhost:"$2")
+ if [ $res -gt 199 ] && [ $res -lt 300 ]; then
+ echo "Simulator " "$1" " on localhost: ""$2"" responded ok"
+ return
+ fi
+ sleep 1
+ done
+ echo "Simulator " "$1" " on localhost:""$2"" - no response"
+}
+
+http_https_server_check() {
+ for i in {1..10}; do
+ res=$(curl $4 -s -o /dev/null -w "%{http_code}" $3://localhost:$2)
+ if [ $res -gt 199 ] && [ $res -lt 300 ]; then
+ echo "Simulator " $1 " on localhost:$2 responded ok"
+ return
+ fi
+ sleep 1
+ done
+ echo "Simulator " $1 " on localhost:$2 - no response"
+}
+
+http_https_jwt_server_check() {
+ for i in {1..10}; do
+ res=$(curl $4 -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkZW1vIiwiaWF0IjoxNTE2MjM5MDIyLCJleHAiOjk5OTk5OTk5OTksIm5iZiI6MTUxNjIzOTAyMH0.vyktOJyCMVvJXEfImBuZCTaEifrvH0kXeAPpnHakffA' -s -o /dev/null -w "%{http_code}" $3://localhost:$2)
+ if [ $res -gt 199 ] && [ $res -lt 300 ]; then
+ echo "Simulator " $1 " on localhost:$2 responded ok"
+ return
+ fi
+ sleep 1
+ done
+ echo "Simulator " $1 " on localhost:$2 - no response"
+}
+
server_check_https() {
for i in {1..10}; do
res=$(curl -k -s -o /dev/null -w "%{http_code}" https://localhost:$2$3)
@@ -24,9 +69,9 @@ server_check_https() {
echo "Simulator " $1 " on https://localhost:$2$3 - no response"
}
-ftps_server_check() {
+ftpes_server_check() {
for i in {1..10}; do
- res=$(curl --silent --max-time 3 localhost:$2 2>&1 | grep vsFTPd)
+ res=$(curl --silent --max-time 3 ftp://localhost:$2 --ftp-ssl -v -k 2>&1 | grep vsFTPd)
if ! [ -z "$res" ]; then
echo "Simulator " $1 " on localhost:$2 responded ok"
return
@@ -55,15 +100,30 @@ DOCKER_SIM_NWNAME="dfcnet"
echo "Creating docker network $DOCKER_SIM_NWNAME, if needed"
docker network ls| grep $DOCKER_SIM_NWNAME > /dev/null || docker network create $DOCKER_SIM_NWNAME
+if [ -z "$SIM_GROUP" ]
+ then
+ export SIM_GROUP="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+fi
+
+if [ -z "$NUM_FTP_SERVERS" ]
+ then
+ export NUM_FTP_SERVERS=1
+fi
+
+if [ -z "$NUM_HTTP_SERVERS" ]
+ then
+ export NUM_HTTP_SERVERS=1
+fi
+
docker-compose -f docker-compose-template.yml config > docker-compose.yml
docker-compose up -d
-sudo chown $(id -u):$(id -g) consul
-sudo chown $(id -u):$(id -g) consul/consul/
+sudo chown $(id -u):$(id -g) dfc_configs
declare -a SFTP_SIM
-declare -a FTPS_SIM
+declare -a FTPES_SIM
+declare -a HTTP_SIM
DR_SIM="$(docker ps -q --filter='name=dfc_dr-sim')"
DR_RD_SIM="$(docker ps -q --filter='name=dfc_dr-redir-sim')"
@@ -73,13 +133,16 @@ SFTP_SIM[1]="$(docker ps -q --filter='name=dfc_sftp-server1')"
SFTP_SIM[2]="$(docker ps -q --filter='name=dfc_sftp-server2')"
SFTP_SIM[3]="$(docker ps -q --filter='name=dfc_sftp-server3')"
SFTP_SIM[4]="$(docker ps -q --filter='name=dfc_sftp-server4')"
-FTPS_SIM[0]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd0')"
-FTPS_SIM[1]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd1')"
-FTPS_SIM[2]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd2')"
-FTPS_SIM[3]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd3')"
-FTPS_SIM[4]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd4')"
-CBS_SIM="$(docker ps -q --filter='name=dfc_cbs')"
-CONSUL_SIM="$(docker ps -q --filter='name=dfc_consul')"
+FTPES_SIM[0]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd0')"
+FTPES_SIM[1]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd1')"
+FTPES_SIM[2]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd2')"
+FTPES_SIM[3]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd3')"
+FTPES_SIM[4]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd4')"
+HTTP_SIM[0]="$(docker ps -q --filter='name=dfc_http-https-server0')"
+HTTP_SIM[1]="$(docker ps -q --filter='name=dfc_http-https-server1')"
+HTTP_SIM[2]="$(docker ps -q --filter='name=dfc_http-https-server2')"
+HTTP_SIM[3]="$(docker ps -q --filter='name=dfc_http-https-server3')"
+HTTP_SIM[4]="$(docker ps -q --filter='name=dfc_http-https-server4')"
#Wait for initialization of docker containers for all simulators
for i in {1..10}; do
@@ -91,13 +154,16 @@ if [ $(docker inspect --format '{{ .State.Running }}' $DR_SIM) ] && \
[ $(docker inspect --format '{{ .State.Running }}' ${SFTP_SIM[2]}) ] && \
[ $(docker inspect --format '{{ .State.Running }}' ${SFTP_SIM[3]}) ] && \
[ $(docker inspect --format '{{ .State.Running }}' ${SFTP_SIM[4]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[0]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[1]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[2]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[3]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[4]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' $CBS_SIM) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' $CONSUL_SIM) ]
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPES_SIM[0]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPES_SIM[1]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPES_SIM[2]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPES_SIM[3]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPES_SIM[4]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${HTTP_SIM[0]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${HTTP_SIM[1]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${HTTP_SIM[2]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${HTTP_SIM[3]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${HTTP_SIM[4]}) ]
then
echo "All simulators Started"
break
@@ -107,24 +173,62 @@ if [ $(docker inspect --format '{{ .State.Running }}' $DR_SIM) ] && \
fi
done
-server_check "cbs " 10000 "/healthcheck"
-server_check "consul " 8500 "/v1/catalog/service/agent"
+if [ -z "$BASIC_AUTH_LOGIN" ]
+ then
+ BASIC_AUTH_LOGIN=demo
+fi
+
+if [ -z "$BASIC_AUTH_PASSWORD" ]
+ then
+ BASIC_AUTH_PASSWORD=demo123456!
+fi
+
server_check "DR sim " 3906 "/"
server_check "DR redir sim " 3908 "/"
server_check "MR sim " 2222 "/"
server_check_https "DR sim https " 3907 "/"
server_check_https "DR redir sim https" 3909 "/"
server_check_https "MR sim https " 2223 "/"
-ftps_server_check "FTPS server 0" 1032
-ftps_server_check "FTPS server 1" 1033
-ftps_server_check "FTPS server 2" 1034
-ftps_server_check "FTPS server 3" 1035
-ftps_server_check "FTPS server 4" 1036
+ftpes_server_check "FTPES server 0" 1032
+ftpes_server_check "FTPES server 1" 1033
+ftpes_server_check "FTPES server 2" 1034
+ftpes_server_check "FTPES server 3" 1035
+ftpes_server_check "FTPES server 4" 1036
sftp_server_check "SFTP server 0" 1022
sftp_server_check "SFTP server 1" 1023
sftp_server_check "SFTP server 2" 1024
sftp_server_check "SFTP server 3" 1025
sftp_server_check "SFTP server 4" 1026
+http_https_basic_server_check "HTTP basic auth server 0" 81 http
+http_https_basic_server_check "HTTP basic auth server 1" 82 http
+http_https_basic_server_check "HTTP basic auth server 2" 83 http
+http_https_basic_server_check "HTTP basic auth server 3" 84 http
+http_https_basic_server_check "HTTP basic auth server 4" 85 http
+http_https_jwt_server_check "HTTP JWT server 0" 32001 http
+http_https_jwt_server_check "HTTP JWT server 1" 32002 http
+http_https_jwt_server_check "HTTP JWT server 2" 32003 http
+http_https_jwt_server_check "HTTP JWT server 3" 32004 http
+http_https_jwt_server_check "HTTP JWT server 4" 32005 http
+http_https_basic_server_check "HTTPS basic auth server 0" 444 https -k
+http_https_basic_server_check "HTTPS basic auth server 1" 445 https -k
+http_https_basic_server_check "HTTPS basic auth server 2" 446 https -k
+http_https_basic_server_check "HTTPS basic auth server 3" 447 https -k
+http_https_basic_server_check "HTTPS basic auth server 4" 448 https -k
+http_https_server_check "HTTPS client certificate authentication server 0" 444 https "-k --cert ../certservice/generated-certs/apache-pem/keystore.pem --key ../certservice/generated-certs/apache-pem/key.pem"
+http_https_server_check "HTTPS client certificate authentication server 1" 445 https "-k --cert ../certservice/generated-certs/apache-pem/keystore.pem --key ../certservice/generated-certs/apache-pem/key.pem"
+http_https_server_check "HTTPS client certificate authentication server 2" 446 https "-k --cert ../certservice/generated-certs/apache-pem/keystore.pem --key ../certservice/generated-certs/apache-pem/key.pem"
+http_https_server_check "HTTPS client certificate authentication server 3" 447 https "-k --cert ../certservice/generated-certs/apache-pem/keystore.pem --key ../certservice/generated-certs/apache-pem/key.pem"
+http_https_server_check "HTTPS client certificate authentication server 4" 448 https "-k --cert ../certservice/generated-certs/apache-pem/keystore.pem --key ../certservice/generated-certs/apache-pem/key.pem"
+http_https_server_check "HTTPS no auth server 0" 8081 https -k
+http_https_server_check "HTTPS no auth server 1" 8082 https -k
+http_https_server_check "HTTPS no auth server 2" 8083 https -k
+http_https_server_check "HTTPS no auth server 3" 8084 https -k
+http_https_server_check "HTTPS no auth server 4" 8085 https -k
+http_https_jwt_server_check "HTTPS JWT server 0" 32101 https -k
+http_https_jwt_server_check "HTTPS JWT server 1" 32102 https -k
+http_https_jwt_server_check "HTTPS JWT server 2" 32103 https -k
+http_https_jwt_server_check "HTTPS JWT server 3" 32104 https -k
+http_https_jwt_server_check "HTTPS JWT server 4" 32105 https -k
echo ""
@@ -150,10 +254,6 @@ if [ -z "$FTP_FILE_PREFIXES" ]
FTP_FILE_PREFIXES="A"
fi
-if [ -z "$NUM_FTP_SERVERS" ]
- then
- NUM_FTP_SERVERS=1
-fi
if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "SFTP" ]; then
@@ -166,13 +266,38 @@ if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "SFTP" ]; then
let p=p+1
done
fi
-if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "FTPS" ]; then
- echo "Creating files for FTPS server, may take time...."
+if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "FTPES" ]; then
+ echo "Creating files for FTPES server, may take time...."
p=0
while [ $p -lt $NUM_FTP_SERVERS ]; do
- docker cp setup-ftp-files-for-image.sh ${FTPS_SIM[$p]}:/tmp/setup-ftp-files-for-image.sh
+ docker cp setup-ftp-files-for-image.sh ${FTPES_SIM[$p]}:/tmp/setup-ftp-files-for-image.sh
+ #Double slash needed for docker on win...
+ docker exec -w //srv ${FTPES_SIM[$p]} //tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES $NUM_PNFS $FILE_SIZE $FTP_FILE_PREFIXES $NUM_FTP_SERVERS $p #>/dev/null 2>&1
+ let p=p+1
+ done
+fi
+
+#Populate the http server with files. Note some common variables with ftp files!
+if [ -z "$NUM_HTTPFILES" ]
+ then
+ NUM_HTTPFILES=200
+fi
+if [ -z "$HTTP_TYPE" ]
+ then
+ HTTP_TYPE="ALL"
+fi
+if [ -z "$HTTP_FILE_PREFIXES" ]
+ then
+ HTTP_FILE_PREFIXES="A"
+fi
+
+if [ $HTTP_TYPE = "ALL" ] || [ $HTTP_TYPE = "HTTP" ] || [ $HTTP_TYPE = "HTTPS" ]; then
+ echo "Creating files for HTTP server, may take time...."
+ p=0
+ while [ $p -lt $NUM_HTTP_SERVERS ]; do
+ docker cp setup-http-files-for-image.sh ${HTTP_SIM[$p]}:/tmp/setup-http-files-for-image.sh
#Double slash needed for docker on win...
- docker exec -w //srv ${FTPS_SIM[$p]} //tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES $NUM_PNFS $FILE_SIZE $FTP_FILE_PREFIXES $NUM_FTP_SERVERS $p #>/dev/null 2>&1
+ docker exec -w //usr//local//apache2//htdocs ${HTTP_SIM[$p]} //tmp/setup-http-files-for-image.sh $NUM_HTTPFILES $NUM_PNFS $FILE_SIZE $HTTP_FILE_PREFIXES $NUM_HTTP_SERVERS $p #>/dev/null 2>&1
let p=p+1
done
fi
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/cacert.pem b/test/mocks/datafilecollector-testharness/simulator-group/tls/cacert.pem
new file mode 100644
index 000000000..fed038b16
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/cacert.pem
@@ -0,0 +1,40 @@
+Bag Attributes
+ friendlyName: root
+ 2.16.840.1.113894.746875.1.1: <Unsupported tag 6>
+subject=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+issuer=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+-----BEGIN CERTIFICATE-----
+MIIFnjCCA4agAwIBAgIEWPvGXDANBgkqhkiG9w0BAQwFADB3MQswCQYDVQQGEwJV
+UzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZyYW5jaXNjbzEZ
+MBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05BUDERMA8GA1UE
+AxMIb25hcC5vcmcwHhcNMjEwOTA3MTQwMzM5WhcNMzEwOTA1MTQwMzM5WjB3MQsw
+CQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZy
+YW5jaXNjbzEZMBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05B
+UDERMA8GA1UEAxMIb25hcC5vcmcwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCH91cKzg7YBOtRII1mlNRQjDrPutN9qJsaRa8qU4F9Q/f9OKep2DuEIdlC
+dWN+CIQ0Gz1PFhzxXGtT7o+VtVtPDz7C1gOn0w++PAWx0qJ8lfIxUQ1qm8SHtKAq
+IHhxchnX9ylTx9G3uuXSQHJI5dGLJcrm/SAzGQ6PEh9ORSUaeDqooJBJt5T1ME1r
+y8VKw4ruMIjMZgDfRDrgDJ+G/l3JqQ1U/SSC/A7/GMFat+lwDrjdy2LXenT+hvYy
+Y8qgbqHpA3cEXAlFrnKcDm1E3sw/8Z0LT0hNiFNgIESRViTji872JJHz0D63gMGk
+48Ie6855tNiNVEgShL/T3cS313mX43e2Vm48QY7TF+65I77SzFYlN7m5EIW0Wu5B
+9eT3wqyvX62V6I6iewO7aaWWJ7JHoCmqbLER4vdJQe7xzTDLeNP2JlwI6NsgLMiH
+BPkX2utNqIMDyYu+PHDFlHUvyrzWDP5sT9kOf3t7N7d7YRqE6A7dQEGP14UyTad/
+Tnw2PcLtGDY3E31XQG0JiU01XrdR46UqJYxOVB+E7BKIEtHbi8vJKcVfuFeZqSqM
+maVqQanROP+wJ/vFISqT0vYiDv+vrFhmCoK+GRXFWjP+yFrJaVWsQ8cFosFEHhfN
+xe0LCn0r0wfv6uYdFy3OiWTZ0eqFujTuWL7JhtLBaVp3C1Xb0QIDAQABozIwMDAd
+BgNVHQ4EFgQUOoJ3M9mK7Nhb/bfJoAS0obDaIaAwDwYDVR0TAQH/BAUwAwEB/zAN
+BgkqhkiG9w0BAQwFAAOCAgEAY6fcrijwn8MhEIhk3y5BWbrCh0hXKo83Tmm/7w+v
+AU1HG02Z3+N6FjxBDaIUh6IbLA/jjkuObSB9CFpljZ4ahrOtCdS1P7EkHfrG5XMX
+uO5zWzE7038CGAP2TX4/5EjDajUnNs6WxX+REREMXKZQ2CBpm738HM7sqhkBVgI4
+RUvGxrLYO7TFRmv1VlVepRVOltWOXI3FVaDpbo1iTYLI2E2LpUgV6tvghYvJAIcg
+a6MtbsfM5eh0vItjdIb23bVYLo4W2aTtLiRO8y+N75gXEN2aJ1pdtxTB1+Da0HDi
+rx0JpyHCs3ZsAHHTeezwyg286fhZSTzA9ewamxaLrR7VOGhMuD+E5aIvNOLwfRoA
+E6pTD31HC2mb8aY9W6rRBzIt5Jn+Fede6FK3dLDFHkAds+JSjDjavubohotn2i2L
+gg883fosEgbipAqU4emJp882uwV3KYH7RBo9PVJ3AipM24xMPgoDCydJjmJlNk7w
+/sl9a85UGTAiCEAhOqxGf/RUvCt6fNXJlWrKzx2UH/gxkQoTrkdNNuE2LmH8bJsT
+b2rR4H9IjMNq2hebTUWwyNWp36SLZ2q/RT0Yx0dt8upCGvnSrVtSK4S+r+0oz9g/
+6be4Atmc9YZSsL5NUBJJREyyp9fyrpYZ49Xv82GekamfHr620jOHJE/t5YG2rOJf
+I9c=
+-----END CERTIFICATE-----
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.jks b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.jks
new file mode 100644
index 000000000..616aa2e78
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.jks
Binary files differ
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.p12 b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.p12
new file mode 100644
index 000000000..bfe1637e0
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.p12
Binary files differ
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.pem b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.pem
new file mode 100644
index 000000000..40ac5fb0b
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.pem
@@ -0,0 +1,103 @@
+Bag Attributes
+ friendlyName: dfc
+ localKeyID: 54 69 6D 65 20 31 36 33 31 30 32 33 34 32 34 39 30 35
+subject=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+issuer=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+-----BEGIN CERTIFICATE-----
+MIIKITCCCAmgAwIBAgIETsPoKjANBgkqhkiG9w0BAQwFADB3MQswCQYDVQQGEwJV
+UzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZyYW5jaXNjbzEZ
+MBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05BUDERMA8GA1UE
+AxMIb25hcC5vcmcwHhcNMjEwOTA3MTQwMzQ0WhcNMzEwOTA1MTQwMzQ0WjB3MQsw
+CQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZy
+YW5jaXNjbzEZMBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05B
+UDERMA8GA1UEAxMIb25hcC5vcmcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQCd2w1w/JuC9F1jUnSbV0ITpNtOULKDFD6eVG4Xz1ImErHSomw9+FOXkLUz
+/m5HJAnNmi6p8OyC7pbP+CLkis7XVgKURV8trjJFzh0D4zvy9isIc6+Xkg2gYLcE
+s8IW3opnXFW6mf7riwOK5paCU6vu/74/6eUM2H4AXg0PLFQKJ86hfnU44BpJn2wI
+hDwivK6nnB2dZjwDLCEQoVVIuzT81sIk0amQQWQz5h6Q4D6Y62N14paqhvP7RXYo
+SK3Kx+iOY6/ko0fV6KN3yg6qasYK/Du31El+ZGC7rOzqEqDoRzvizf3Zml9tVvIJ
+2+mcXNKTk6/KNKdQsG1Eg1gidvAVAgMBAAGjggWzMIIFrzAdBgNVHQ4EFgQUdEQF
+qgm+k7X7VdgcRwtAjKExUjowggU7BgNVHREBAf8EggUvMIIFK4IEZGNhZYITYmJz
+LWV2ZW50LXByb2Nlc3NvcoIYYmJzLWV2ZW50LXByb2Nlc3Nvci5vbmFwgipiYnMt
+ZXZlbnQtcHJvY2Vzc29yLm9uYXAuc3ZjLmNsdXN0ZXIubG9jYWyCFmNvbmZpZy1i
+aW5kaW5nLXNlcnZpY2WCG2NvbmZpZy1iaW5kaW5nLXNlcnZpY2Uub25hcIItY29u
+ZmlnLWJpbmRpbmctc2VydmljZS5vbmFwLnN2Yy5jbHVzdGVyLmxvY2FsgglkYXNo
+Ym9hcmSCDmRhc2hib2FyZC5vbmFwgiBkYXNoYm9hcmQub25hcC5zdmMuY2x1c3Rl
+ci5sb2NhbIIVZGNhZS1jbG91ZGlmeS1tYW5hZ2VyghpkY2FlLWNsb3VkaWZ5LW1h
+bmFnZXIub25hcIIsZGNhZS1jbG91ZGlmeS1tYW5hZ2VyLm9uYXAuc3ZjLmNsdXN0
+ZXIubG9jYWyCF2RjYWUtZGF0YWZpbGUtY29sbGVjdG9yghxkY2FlLWRhdGFmaWxl
+LWNvbGxlY3Rvci5vbmFwgi5kY2FlLWRhdGFmaWxlLWNvbGxlY3Rvci5vbmFwLnN2
+Yy5jbHVzdGVyLmxvY2FsghVkY2FlLWh2LXZlcy1jb2xsZWN0b3KCGmRjYWUtaHYt
+dmVzLWNvbGxlY3Rvci5vbmFwgixkY2FlLWh2LXZlcy1jb2xsZWN0b3Iub25hcC5z
+dmMuY2x1c3Rlci5sb2NhbIIOZGNhZS1wbS1tYXBwZXKCE2RjYWUtcG0tbWFwcGVy
+Lm9uYXCCJWRjYWUtcG0tbWFwcGVyLm9uYXAuc3ZjLmNsdXN0ZXIubG9jYWyCCWRj
+YWUtcG1zaIIOZGNhZS1wbXNoLm9uYXCCIGRjYWUtcG1zaC5vbmFwLnN2Yy5jbHVz
+dGVyLmxvY2FsgghkY2FlLXByaIINZGNhZS1wcmgub25hcIIfZGNhZS1wcmgub25h
+cC5zdmMuY2x1c3Rlci5sb2NhbIISZGNhZS10Y2EtYW5hbHl0aWNzghdkY2FlLXRj
+YS1hbmFseXRpY3Mub25hcIIpZGNhZS10Y2EtYW5hbHl0aWNzLm9uYXAuc3ZjLmNs
+dXN0ZXIubG9jYWyCEmRjYWUtdmVzLWNvbGxlY3RvcoIXZGNhZS12ZXMtY29sbGVj
+dG9yLm9uYXCCKWRjYWUtdmVzLWNvbGxlY3Rvci5vbmFwLnN2Yy5jbHVzdGVyLmxv
+Y2FsghJkZXBsb3ltZW50LWhhbmRsZXKCF2RlcGxveW1lbnQtaGFuZGxlci5vbmFw
+gilkZXBsb3ltZW50LWhhbmRsZXIub25hcC5zdmMuY2x1c3Rlci5sb2NhbIISaG9s
+bWVzLWVuZ2luZS1tZ210ghdob2xtZXMtZW5naW5lLW1nbXQub25hcIIpaG9sbWVz
+LWVuZ2luZS1tZ210Lm9uYXAuc3ZjLmNsdXN0ZXIubG9jYWyCEGhvbG1lcy1ydWxl
+LW1nbXSCFmhvbG1lcy1ydWxlcy1tZ210Lm9uYXCCKGhvbG1lcy1ydWxlcy1tZ210
+Lm9uYXAuc3ZjLmNsdXN0ZXIubG9jYWyCCWludmVudG9yeYIOaW52ZW50b3J5Lm9u
+YXCCIGludmVudG9yeS5vbmFwLnN2Yy5jbHVzdGVyLmxvY2Fsgg5wb2xpY3ktaGFu
+ZGxlcoITcG9saWN5LWhhbmRsZXIub25hcIIlcG9saWN5LWhhbmRsZXIub25hcC5z
+dmMuY2x1c3Rlci5sb2NhbDAPBgNVHRMECDAGAQH/AgEAMB8GA1UdIwQYMBaAFDqC
+dzPZiuzYW/23yaAEtKGw2iGgMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcD
+AjANBgkqhkiG9w0BAQwFAAOCAgEAawsOZQi0SN3N2USsgH0+byGv888MtxlHS26Q
+b4klYwwopBteIyO97aQQhbcnvyqAgRg5Ka/EUSOB6E4saBQhwnW5WyStxtmDfBYG
+FKsOW09ouPkCjDjJWrgNmvAeT+34b2JTJ+Li0hQKGb8K5mWyxakwFz4sYbrphmEC
+MEDci0Ev5NAluM5H+XKejEB/FqUrV4v+Mv4WGfR/HlNPnIJZm3W7IvQyjxiMkvl+
+XP3MNi9XfYxmFCGpNxYVBxkpgCutIyaJI+gT1dVlJaD1C8H+nrgHIpEFCJlzcYRc
+eJHo/dH1xRynDE8zcnO5/tXnYGQFrEAQ8pApH+QzF5IvdExUuH9146MPHGthZ0gy
+xXd7gJFhHTDoU5YN1NtqxVKW99Y1denvBbY7wMvJXoa5+sYN6ZFAdK+WbJ3D8GcV
+Sl4sSysa9AW9RSJiOPfcXOBOP1W9Sw6OBjlNgqXY/q1gF2r4eCEn3dyySAV6BKtq
+WLE4wTuIh+HXz/uZU3CYYs4S2BptKDHaPT35hfN9pAyotwfjUjMwlE0XbtdE378y
++eXEdWGASf4NjZLZ+e5XbS9Ay8HJMxFvvuk/2zg6nOW1gaZQMvDsw2J+m8j+rQMs
+9PiO53LxBxhV4d9AVjDaicwCh5WgQSe8Ukih0eMMSIcsT1MUXx4l/tM/ZbFqj8X/
+TBymHVQ=
+-----END CERTIFICATE-----
+Bag Attributes
+ friendlyName: CN=onap.org,OU=ONAP,O=Linux-Foundation,L=San-Francisco,ST=California,C=US
+subject=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+issuer=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+-----BEGIN CERTIFICATE-----
+MIIFnjCCA4agAwIBAgIEWPvGXDANBgkqhkiG9w0BAQwFADB3MQswCQYDVQQGEwJV
+UzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZyYW5jaXNjbzEZ
+MBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05BUDERMA8GA1UE
+AxMIb25hcC5vcmcwHhcNMjEwOTA3MTQwMzM5WhcNMzEwOTA1MTQwMzM5WjB3MQsw
+CQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZy
+YW5jaXNjbzEZMBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05B
+UDERMA8GA1UEAxMIb25hcC5vcmcwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCH91cKzg7YBOtRII1mlNRQjDrPutN9qJsaRa8qU4F9Q/f9OKep2DuEIdlC
+dWN+CIQ0Gz1PFhzxXGtT7o+VtVtPDz7C1gOn0w++PAWx0qJ8lfIxUQ1qm8SHtKAq
+IHhxchnX9ylTx9G3uuXSQHJI5dGLJcrm/SAzGQ6PEh9ORSUaeDqooJBJt5T1ME1r
+y8VKw4ruMIjMZgDfRDrgDJ+G/l3JqQ1U/SSC/A7/GMFat+lwDrjdy2LXenT+hvYy
+Y8qgbqHpA3cEXAlFrnKcDm1E3sw/8Z0LT0hNiFNgIESRViTji872JJHz0D63gMGk
+48Ie6855tNiNVEgShL/T3cS313mX43e2Vm48QY7TF+65I77SzFYlN7m5EIW0Wu5B
+9eT3wqyvX62V6I6iewO7aaWWJ7JHoCmqbLER4vdJQe7xzTDLeNP2JlwI6NsgLMiH
+BPkX2utNqIMDyYu+PHDFlHUvyrzWDP5sT9kOf3t7N7d7YRqE6A7dQEGP14UyTad/
+Tnw2PcLtGDY3E31XQG0JiU01XrdR46UqJYxOVB+E7BKIEtHbi8vJKcVfuFeZqSqM
+maVqQanROP+wJ/vFISqT0vYiDv+vrFhmCoK+GRXFWjP+yFrJaVWsQ8cFosFEHhfN
+xe0LCn0r0wfv6uYdFy3OiWTZ0eqFujTuWL7JhtLBaVp3C1Xb0QIDAQABozIwMDAd
+BgNVHQ4EFgQUOoJ3M9mK7Nhb/bfJoAS0obDaIaAwDwYDVR0TAQH/BAUwAwEB/zAN
+BgkqhkiG9w0BAQwFAAOCAgEAY6fcrijwn8MhEIhk3y5BWbrCh0hXKo83Tmm/7w+v
+AU1HG02Z3+N6FjxBDaIUh6IbLA/jjkuObSB9CFpljZ4ahrOtCdS1P7EkHfrG5XMX
+uO5zWzE7038CGAP2TX4/5EjDajUnNs6WxX+REREMXKZQ2CBpm738HM7sqhkBVgI4
+RUvGxrLYO7TFRmv1VlVepRVOltWOXI3FVaDpbo1iTYLI2E2LpUgV6tvghYvJAIcg
+a6MtbsfM5eh0vItjdIb23bVYLo4W2aTtLiRO8y+N75gXEN2aJ1pdtxTB1+Da0HDi
+rx0JpyHCs3ZsAHHTeezwyg286fhZSTzA9ewamxaLrR7VOGhMuD+E5aIvNOLwfRoA
+E6pTD31HC2mb8aY9W6rRBzIt5Jn+Fede6FK3dLDFHkAds+JSjDjavubohotn2i2L
+gg883fosEgbipAqU4emJp882uwV3KYH7RBo9PVJ3AipM24xMPgoDCydJjmJlNk7w
+/sl9a85UGTAiCEAhOqxGf/RUvCt6fNXJlWrKzx2UH/gxkQoTrkdNNuE2LmH8bJsT
+b2rR4H9IjMNq2hebTUWwyNWp36SLZ2q/RT0Yx0dt8upCGvnSrVtSK4S+r+0oz9g/
+6be4Atmc9YZSsL5NUBJJREyyp9fyrpYZ49Xv82GekamfHr620jOHJE/t5YG2rOJf
+I9c=
+-----END CERTIFICATE-----
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/jks.pass b/test/mocks/datafilecollector-testharness/simulator-group/tls/jks.pass
new file mode 100644
index 000000000..1bce434bd
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/jks.pass
@@ -0,0 +1 @@
+3vxKHIIQ9kEDQsZ6PBg4_LNb \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/key.pem b/test/mocks/datafilecollector-testharness/simulator-group/tls/key.pem
new file mode 100644
index 000000000..d486121d1
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/key.pem
@@ -0,0 +1,32 @@
+Bag Attributes
+ friendlyName: dfc
+ localKeyID: 54 69 6D 65 20 31 36 33 31 30 32 33 34 32 34 39 30 35
+Key Attributes: <No Attributes>
+-----BEGIN PRIVATE KEY-----
+MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCd2w1w/JuC9F1j
+UnSbV0ITpNtOULKDFD6eVG4Xz1ImErHSomw9+FOXkLUz/m5HJAnNmi6p8OyC7pbP
++CLkis7XVgKURV8trjJFzh0D4zvy9isIc6+Xkg2gYLcEs8IW3opnXFW6mf7riwOK
+5paCU6vu/74/6eUM2H4AXg0PLFQKJ86hfnU44BpJn2wIhDwivK6nnB2dZjwDLCEQ
+oVVIuzT81sIk0amQQWQz5h6Q4D6Y62N14paqhvP7RXYoSK3Kx+iOY6/ko0fV6KN3
+yg6qasYK/Du31El+ZGC7rOzqEqDoRzvizf3Zml9tVvIJ2+mcXNKTk6/KNKdQsG1E
+g1gidvAVAgMBAAECggEAXcB6pC8zATy2tkWDgbr6QIZ5xXKCAv2kDw7f7/4usPQL
+bqkOlmOE6hCEviOPRWsu81BuBHpVTZH8OfoKZyfVnuizUXI+C/hYXUMD0opD0ZHI
+jxV+JQwWUbj/GajVThXPp4GcXN4XG7eNXBKFM1QUWjbDvFvisJVniDpTkT5njzuS
+bFzu5H6U5TpOOmX19pJJ1u+9+q5U2XAIq+GmpNG+neV9RVZNQtGq/rFcq0tSHMiC
+4eJh8euWqwVjQ/D5QpRBJUajJkr30nQCnAxefMXzjN/cVvggmHiWZu4XG0Doh6ju
+JXJp6MIHqKX2ECFdPE+17xB5W9o7GFSvlhgvbgaexQKBgQDkdghIGVOc9MOjpqwy
++ufbAmfcAiyLqgc7IIryiu3RA8MjzBNgHrAVvXVmwL4vumH3wW6MYnPqN134ssU9
+D1FPqKoecr1GQ7MV1CLridyW651VCHKfKvsxeq3G7wc7GYGfKXOCEywTYuGGgsrr
+XdShP59WuCGXMIzIyBAafrkHUwKBgQCw4j4+NtrYsECeof7dUNgr+WFlN++CTAYL
+Wv7ytfW5lSuDEpacJlOAuO6sZ260bVPudG4fNTUwaICJetN+z2h/bxhp3U0xfTCe
+u5SZdhFcqdeOb1FN7UzluagdD1JTkNG9219/3Wy8S0xQrDlfwiBxr60F8M29ptiU
+KcpzE7rF9wKBgQConuF/7YmEGDfpqtQAEKZeRElJ3iTotIb/wgYr/BSJJ6C45CAM
+2rmWYi6rt2joK0Wxqoggf24Umeb272OarJqUE+Xz8TX4DXG5k8huVmOE1MRcBY8s
+IXojS+vFH5kTqsC8K8kAYYwvhtT0BcclryyIE0BUrjTEtWXDr74LACsq1wKBgH+/
+pnyAWaZOBR2Mmel1TerUfUcBEvXjuinRWngoeoPB/F75udSkyYIaDiZtvUKKAygg
+5rebUgLYNp0UHFNlsG746YTr06h+ZfL+PuBmqTtnXsr8EphZXkQ7xfLW8fpwiUq5
+eUt7u+Bx8XgCKp3CMnRpEGlN9QmXyquXUyOxiB8ZAoGBAODW0aHrw99vHB0mc0Dt
+/GVaUdSbr98OczIXxeRtdqRu+NDUn1BtlF0UJV5JgNy+KAYEKP6pqJlUh2G2L3TC
+JTaG2iwJHz3h/IhnoHcr/cLScBlCfPsiwtuXDJwWQlD1gKj8YIjH3/40WQ5gOFZS
+LogmLTcbhYXRdwZuhBwZQwW1
+-----END PRIVATE KEY-----
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/p12.pass b/test/mocks/datafilecollector-testharness/simulator-group/tls/p12.pass
new file mode 100644
index 000000000..1bce434bd
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/p12.pass
@@ -0,0 +1 @@
+3vxKHIIQ9kEDQsZ6PBg4_LNb \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.jks b/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.jks
new file mode 100644
index 000000000..140f67904
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.jks
Binary files differ
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.pass b/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.pass
new file mode 100644
index 000000000..1bce434bd
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.pass
@@ -0,0 +1 @@
+3vxKHIIQ9kEDQsZ6PBg4_LNb \ No newline at end of file
diff --git a/test/mocks/emssimulator/install.sh b/test/mocks/emssimulator/install.sh
index e3069aae2..64982d9a1 100755
--- a/test/mocks/emssimulator/install.sh
+++ b/test/mocks/emssimulator/install.sh
@@ -4,13 +4,13 @@ readonly old_pwd=$PWD
cd swm/sw_server_simulator
for i in `ls -1`; do
- if [ -d $i ]; then
- cd $i
- echo $i
- zip -r ${i}.zip *
- mv ${i}.zip ..
- cd $OLDPWD
- fi
+ if [ -d $i ]; then
+ cd $i
+ echo $i
+ zip -r ${i}.zip *
+ mv ${i}.zip ..
+ cd $OLDPWD
+ fi
done
cd $old_pwd
diff --git a/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/build-cba-for-pnf-sw-upgrade-with-em.sh b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/build-cba-for-pnf-sw-upgrade-with-em.sh
new file mode 100755
index 000000000..76102c3b1
--- /dev/null
+++ b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/build-cba-for-pnf-sw-upgrade-with-em.sh
@@ -0,0 +1,77 @@
+#!/bin/bash
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+set -euo pipefail
+
+readonly SOURCE_CBA="PNF_CDS_RESTCONF"
+readonly TARGET_CBA="PNF_SW_UPGRADE_WITH_EM"
+readonly CDS_CODE_DIR="cds-codes-for-pnf-sw-upgrade"
+
+function use_specific_commit()
+{
+ local commit_id="$1"
+ cd ${CDS_CODE_DIR}
+ local tmp_branch="get-cba-for-pnf-sw-upgrade"
+ if ! git branch | grep -q "${tmp_branch}"; then
+ git checkout -b ${tmp_branch} ${commit_id}
+ else
+ git checkout -q ${tmp_branch}
+ fi
+ cd ${OLDPWD}
+}
+
+if [ ! -e ${CDS_CODE_DIR} ]; then
+ git clone https://gerrit.onap.org/r/ccsdk/cds ${CDS_CODE_DIR}
+else
+ cd ${CDS_CODE_DIR}
+ code_status=`git status -s`
+ if [ -n "${code_status}" ]; then
+ echo "Please keep the CDS codes are not modified."
+ exit 1
+ fi
+ cd ${OLDPWD}
+fi
+
+# Lock the version of source CBA files
+use_specific_commit f4ac359d80d043a2d0e6eaf1730813b81f2c837f
+
+if [ -e ${TARGET_CBA} -o -e ${TARGET_CBA}.zip ]; then
+ echo "${TARGET_CBA} or ${TARGET_CBA}.zip has existed, please rename or delete them."
+ exit 1
+fi
+
+cp -ir ${CDS_CODE_DIR}/components/model-catalog/blueprint-model/uat-blueprints/${SOURCE_CBA} ${TARGET_CBA}
+cp -ir patches ${TARGET_CBA}
+
+cd ${TARGET_CBA}
+
+mv Definitions/PNF_CDS_RESTCONF.json Definitions/PNF_SW_UPGRADE_WITH_EM.json
+mv Templates/pnf-swug-config-template.vtl Templates/pnf-swug-activate-ne-sw-template.vtl
+
+for p in patches/*.patch; do
+ patch -p1 -i $p
+done
+
+rm -rf patches
+
+zip -r ${TARGET_CBA}.zip .
+
+cd ${OLDPWD}
+
+mv -i ${TARGET_CBA}/${TARGET_CBA}.zip .
+
diff --git a/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/PNF_SW_UPGRADE_WITH_EM.json.patch b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/PNF_SW_UPGRADE_WITH_EM.json.patch
new file mode 100644
index 000000000..f4c80624e
--- /dev/null
+++ b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/PNF_SW_UPGRADE_WITH_EM.json.patch
@@ -0,0 +1,148 @@
+--- PNF_CDS_RESTCONF/Definitions/PNF_CDS_RESTCONF.json 2020-03-14 20:54:46.677546900 +0800
++++ PNF_SW_UPGRADE_WITH_EM/Definitions/PNF_SW_UPGRADE_WITH_EM.json 2020-03-14 22:08:47.225398400 +0800
+@@ -4,7 +4,7 @@
+ "template_author" : "Raj Gumma",
+ "author-email" : "raj.gumma@est.tech",
+ "user-groups" : "ADMIN, OPERATION",
+- "template_name" : "PNF_CDS_RESTCONF",
++ "template_name" : "PNF_SW_UPGRADE_WITH_EM",
+ "template_version" : "1.0.0",
+ "template_tags" : "PNF, Restconf, config, configuration, software upgrade"
+ },
+@@ -74,28 +74,6 @@
+ }
+ }
+ },
+- "config-deploy" : {
+- "steps" : {
+- "activate-process" : {
+- "description" : "Send a configlet to the pnf",
+- "target" : "config-deploy",
+- "activities" : [ {
+- "call_operation" : ""
+- } ]
+- }
+- },
+- "inputs" : {
+- "resolution-key" : {
+- "required" : true,
+- "type" : "string"
+- },
+- "config-deploy-properties" : {
+- "description" : "Dynamic PropertyDefinition for workflow(config-deploy).",
+- "required" : true,
+- "type" : "dt-config-deploy-properties"
+- }
+- }
+- },
+ "preCheck" : {
+ "steps" : {
+ "activate-process" : {
+@@ -220,41 +198,6 @@
+ }
+ }
+ },
+- "config-deploy" : {
+- "type" : "component-script-executor",
+- "interfaces" : {
+- "ComponentScriptExecutor" : {
+- "operations" : {
+- "process" : {
+- "implementation" : {
+- "primary" : "component-script",
+- "timeout" : 180,
+- "operation_host" : "SELF"
+- },
+- "inputs" : {
+- "script-type" : "kotlin",
+- "script-class-reference" : "cba.pnf.config.aai.RestconfConfigDeploy",
+- "dynamic-properties" : "*config-deploy-properties"
+- },
+- "outputs" : {
+- "response-data" : "",
+- "status" : "success"
+- }
+- }
+- }
+- }
+- },
+- "artifacts" : {
+- "config-deploy-template" : {
+- "type" : "artifact-template-velocity",
+- "file" : "Templates/restconf-mount-template.vtl"
+- },
+- "config-deploy-mapping" : {
+- "type" : "artifact-mapping-resource",
+- "file" : "Definitions/config-deploy-pnf-mapping.json"
+- }
+- }
+- },
+ "preCheck" : {
+ "type" : "component-script-executor",
+ "interfaces" : {
+@@ -268,7 +211,7 @@
+ },
+ "inputs" : {
+ "script-type" : "kotlin",
+- "script-class-reference" : "cba.pnf.swug.RestconfSoftwareUpgrade",
++ "script-class-reference" : "cba.pnf.swm.RestconfSoftwareUpgrade",
+ "dynamic-properties" : "*preCheck-properties"
+ },
+ "outputs" : {
+@@ -303,7 +246,7 @@
+ },
+ "inputs" : {
+ "script-type" : "kotlin",
+- "script-class-reference" : "cba.pnf.swug.RestconfSoftwareUpgrade",
++ "script-class-reference" : "cba.pnf.swm.RestconfSoftwareUpgrade",
+ "dynamic-properties" : "*downloadNESw-properties"
+ },
+ "outputs" : {
+@@ -323,14 +266,6 @@
+ "type" : "artifact-mapping-resource",
+ "file" : "Definitions/pnf-software-upgrade-mapping.json"
+ },
+- "configure-template" : {
+- "type" : "artifact-template-velocity",
+- "file" : "Templates/pnf-swug-config-template.vtl"
+- },
+- "configure-mapping" : {
+- "type" : "artifact-mapping-resource",
+- "file" : "Definitions/pnf-software-upgrade-mapping.json"
+- },
+ "download-ne-sw-template" : {
+ "type" : "artifact-template-velocity",
+ "file" : "Templates/pnf-swug-download-ne-sw-template.vtl"
+@@ -354,7 +289,7 @@
+ },
+ "inputs" : {
+ "script-type" : "kotlin",
+- "script-class-reference" : "cba.pnf.swug.RestconfSoftwareUpgrade",
++ "script-class-reference" : "cba.pnf.swm.RestconfSoftwareUpgrade",
+ "dynamic-properties" : "*activateNESw-properties"
+ },
+ "outputs" : {
+@@ -374,11 +309,11 @@
+ "type" : "artifact-mapping-resource",
+ "file" : "Definitions/pnf-software-upgrade-mapping.json"
+ },
+- "configure-template" : {
++ "activate-ne-sw-template" : {
+ "type" : "artifact-template-velocity",
+- "file" : "Templates/pnf-swug-config-template.vtl"
++ "file" : "Templates/pnf-swug-activate-ne-sw-template.vtl"
+ },
+- "configure-mapping" : {
++ "activate-ne-sw-mapping" : {
+ "type" : "artifact-mapping-resource",
+ "file" : "Definitions/pnf-software-upgrade-mapping.json"
+ }
+@@ -397,7 +332,7 @@
+ },
+ "inputs" : {
+ "script-type" : "kotlin",
+- "script-class-reference" : "cba.pnf.swug.RestconfSoftwareUpgrade",
++ "script-class-reference" : "cba.pnf.swm.RestconfSoftwareUpgrade",
+ "dynamic-properties" : "*postCheck-properties"
+ },
+ "outputs" : {
diff --git a/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/RestconfSoftwareUpgrade.kt.patch b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/RestconfSoftwareUpgrade.kt.patch
new file mode 100644
index 000000000..01473f4ad
--- /dev/null
+++ b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/RestconfSoftwareUpgrade.kt.patch
@@ -0,0 +1,192 @@
+--- PNF_CDS_RESTCONF/Scripts/kotlin/RestconfSoftwareUpgrade.kt 2020-03-12 15:16:34.617000000 +0800
++++ PNF_SW_UPGRADE_WITH_EM/Scripts/kotlin/RestconfSoftwareUpgrade.kt 2020-03-12 23:12:50.012507800 +0800
+@@ -1,6 +1,7 @@
+ /*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2020 Nordix Foundation.
++* Modifications Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+@@ -17,7 +18,7 @@
+ */
+
+
+-package cba.pnf.swug
++package cba.pnf.swm
+
+ import com.fasterxml.jackson.databind.node.ObjectNode
+ import org.onap.ccsdk.cds.blueprintsprocessor.core.api.data.ExecutionServiceInput
+@@ -38,20 +39,27 @@
+ class RestconfSoftwareUpgrade : AbstractScriptComponentFunction() {
+
+ private val RESTCONF_SERVER_IDENTIFIER = "sdnc"
+- private val CONFIGLET_RESOURCE_PATH = "yang-ext:mount/pnf-sw-upgrade:software-upgrade"
++ private val CONFIGLET_RESOURCE_PATH = "yang-ext:mount/pnf-swm:software-management"
+ private val log = logger(AbstractScriptComponentFunction::class.java)
+- private val TARGET_SOFTWARE_PATH = "$CONFIGLET_RESOURCE_PATH/upgrade-package/"
++ private val TARGET_SOFTWARE_PATH = "$CONFIGLET_RESOURCE_PATH/pnf-software-package/"
+
+ override suspend fun processNB(executionRequest: ExecutionServiceInput) {
+
+ // Extract request properties
+- val model= validatedPayload(executionRequest)
++ var model: SoftwareManagementModel = validatedPayload(executionRequest)
+
+ log.info("Blueprint invoked for ${model.resolutionKey} for SW Upgrade : " +
+- "${model.action} for sw version ${model.targetSwVersion} on pnf: ${model.deviceId}")
++ "${model.action} for sw version ${model.targetSwVersion} on pnf: ${model.neIdentifier}")
+
+ try {
+- val mountPayload = contentFromResolvedArtifactNB("mount-node")
++ var mountPayload: String = contentFromResolvedArtifactNB("mount-node")
++
++ val mountPayloadObject = JacksonUtils.jsonNode(mountPayload) as ObjectNode
++ val emsIp = mountPayloadObject.get("node")?.get(0)?.get("netconf-node-topology:host")?.asText()
++
++ model.deviceId = "ems-" + emsIp?.replace(".", "-")
++ mountPayload = mountPayload.replace("%ems-id%", model.deviceId)
++
+ log.debug("Mount Payload : $mountPayload")
+ restconfMountDevice(model.client, model.deviceId, mountPayload, mutableMapOf("Content-Type" to "application/json"))
+
+@@ -70,40 +78,34 @@
+ }
+ }
+
+- private fun validatedPayload(executionRequest: ExecutionServiceInput): SoftwareUpgradeModel {
++ private fun validatedPayload(executionRequest: ExecutionServiceInput): SoftwareManagementModel {
+ val properties = requestPayloadActionProperty(executionRequest.actionIdentifiers.actionName + "-properties")!!.get(0)
+ if(!properties?.get("pnf-id")?.textValue().isNullOrEmpty() &&
+ !properties?.get("target-software-version")?.textValue().isNullOrEmpty()) {
+- return SoftwareUpgradeModel(getDynamicProperties("resolution-key").asText(),
++ return SoftwareManagementModel(getDynamicProperties("resolution-key").asText(),
+ BluePrintDependencyService.restClientService(RESTCONF_SERVER_IDENTIFIER),
+- properties.get("pnf-id").textValue(), properties.get("target-software-version").textValue(),
++ "",
++ properties.get("pnf-id").textValue(),
++ properties.get("target-software-version").textValue(),
+ Action.getEnumFromActionName(executionRequest.actionIdentifiers.actionName))
+ }else{
+ throw BluePrintException("Invalid parameters sent to CDS. Request parameters pnf-id or target-software-version missing")
+ }
+ }
+
+- private suspend fun processPreCheck(model: SoftwareUpgradeModel) {
++ private suspend fun processPreCheck(model: SoftwareManagementModel) {
+ log.debug("In PNF SW upgrade : processPreCheck")
+ //Log the current configuration for the subtree
+ val payloadObject = getCurrentConfig(model)
+- log.debug("Current sw version on pnf : ${payloadObject.get("software-upgrade")?.get("upgrade-package")?.get(0)?.get("software-version")?.asText()}")
++ log.debug("Current sw version on pnf : ${payloadObject.get("software-management")?.get("pnf-software-package")?.get(0)?.get("software-version")?.asText()}")
+ log.info("PNF is Healthy!")
+ }
+
+- private suspend fun processDownloadNESw(model: SoftwareUpgradeModel) {
++ private suspend fun processDownloadNESw(model: SoftwareManagementModel) {
+ log.debug("In PNF SW upgrade : processDownloadNESw")
+- //Check if there is existing config for the targeted software version
+
+- var downloadConfigPayload: String
+- if (checkIfSwReadyToPerformAction(Action.PRE_CHECK, model)) {
+- downloadConfigPayload = contentFromResolvedArtifactNB("configure")
+- downloadConfigPayload =downloadConfigPayload.replace("%id%", model.yangId)
+- }
+- else {
+- downloadConfigPayload = contentFromResolvedArtifactNB("download-ne-sw")
+- model.yangId=model.targetSwVersion
+- }
++ var downloadConfigPayload: String = contentFromResolvedArtifactNB("download-ne-sw")
++ model.yangId = model.neIdentifier
+ downloadConfigPayload = downloadConfigPayload.replace("%actionName%", Action.DOWNLOAD_NE_SW.name)
+ log.info("Config Payload to start download : $downloadConfigPayload")
+
+@@ -115,14 +117,15 @@
+ checkExecution(model)
+ }
+
+- private suspend fun processActivateNESw(model: SoftwareUpgradeModel) {
++ private suspend fun processActivateNESw(model: SoftwareManagementModel) {
+ log.debug("In PNF SW upgrade : processActivateNESw")
++
+ //Check if the software is downloaded and ready to be activated
+ if (checkIfSwReadyToPerformAction(Action.DOWNLOAD_NE_SW, model)) {
+- var activateConfigPayload: String = contentFromResolvedArtifactNB("configure")
++ var activateConfigPayload: String = contentFromResolvedArtifactNB("activate-ne-sw")
+ activateConfigPayload = activateConfigPayload.replace("%actionName%", Action.ACTIVATE_NE_SW.name)
+- activateConfigPayload = activateConfigPayload.replace("%id%", model.yangId)
+ log.info("Config Payload to start activate : $activateConfigPayload")
++
+ //Apply configlet
+ restconfApplyDeviceConfig(model.client, model.deviceId, CONFIGLET_RESOURCE_PATH, activateConfigPayload,
+ mutableMapOf("Content-Type" to "application/yang.patch+json"))
+@@ -134,7 +137,7 @@
+ }
+ }
+
+- private suspend fun processPostCheck(model: SoftwareUpgradeModel) {
++ private suspend fun processPostCheck(model: SoftwareManagementModel) {
+ log.info("In PNF SW upgrade : processPostCheck")
+ //Log the current configuration for the subtree
+ if (checkIfSwReadyToPerformAction(Action.POST_CHECK, model)) {
+@@ -142,35 +145,36 @@
+ }
+ }
+
+- private fun processCancel(model :SoftwareUpgradeModel) {
++ private fun processCancel(model :SoftwareManagementModel) {
+ //This is for future implementation of cancel step during software upgrade
+ log.info("In PNF SW upgrade : processCancel")
+ }
+
+- private suspend fun getCurrentConfig(model: SoftwareUpgradeModel) : ObjectNode{
++ private suspend fun getCurrentConfig(model: SoftwareManagementModel) : ObjectNode{
+ val currentConfig: BlueprintWebClientService.WebClientResponse<String> = restconfDeviceConfig(model.client, model.deviceId, CONFIGLET_RESOURCE_PATH)
+ return JacksonUtils.jsonNode(currentConfig.body) as ObjectNode
+ }
+- private suspend fun checkExecution(model: SoftwareUpgradeModel) {
++
++ private suspend fun checkExecution(model: SoftwareManagementModel) {
+ val checkExecutionBlock: suspend (Int) -> String = {
+ val result = restconfDeviceConfig(model.client, model.deviceId, TARGET_SOFTWARE_PATH.plus(model.yangId))
+ if (result.body.contains(model.action.completionStatus)) {
+ log.info("${model.action.name} is complete")
+ result.body
+ } else {
+- throw BluePrintRetryException("Waiting for device(${model.deviceId}) to activate sw version ${model.targetSwVersion}")
++ throw BluePrintRetryException("Waiting for device(${model.deviceId}) to complete ${model.action.name}")
+ }
+ }
+ model.client.retry<String>(10, 0, 1000, checkExecutionBlock)
+
+ }
+
+- private suspend fun checkIfSwReadyToPerformAction(action : Action, model: SoftwareUpgradeModel): Boolean {
++ private suspend fun checkIfSwReadyToPerformAction(action : Action, model: SoftwareManagementModel): Boolean {
+ val configBody = getCurrentConfig(model)
+- configBody.get("software-upgrade")?.get("upgrade-package")?.iterator()?.forEach { item ->
++ configBody.get("software-management")?.get("pnf-software-package")?.iterator()?.forEach { item ->
+ if (model.targetSwVersion == item.get("software-version")?.asText() &&
+ action.completionStatus == item?.get("current-status")?.asText()) {
+- model.yangId= item.get("id").textValue()
++ model.yangId= item.get("neIdentifier").textValue()
+ return true
+ }
+ }
+@@ -201,5 +205,12 @@
+ }
+ }
+
+-data class SoftwareUpgradeModel(val resolutionKey: String, val client: BlueprintWebClientService, val deviceId: String,
+- val targetSwVersion: String, val action: Action, var yangId: String = "")
+\ No newline at end of file
++data class SoftwareManagementModel(
++ val resolutionKey: String,
++ val client: BlueprintWebClientService,
++ var deviceId: String,
++ val neIdentifier: String,
++ val targetSwVersion: String,
++ val action: Action,
++ var yangId: String = ""
++)
+\ No newline at end of file
diff --git a/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/TOSCA.meta.patch b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/TOSCA.meta.patch
new file mode 100644
index 000000000..29ce13c10
--- /dev/null
+++ b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/TOSCA.meta.patch
@@ -0,0 +1,13 @@
+--- PNF_CDS_RESTCONF/TOSCA-Metadata/TOSCA.meta 2020-03-12 12:13:38.654109400 +0800
++++ PNF_SW_UPGRADE_WITH_EM/TOSCA-Metadata/TOSCA.meta 2020-03-13 00:05:30.186335900 +0800
+@@ -1,7 +1,7 @@
+ TOSCA-Meta-File-Version: 1.0.0
+ CSAR-Version: 1.0
+ Created-By: Raj Gumma <raj.gumma@est.tech>
+-Entry-Definitions: Definitions/PNF_CDS_RESTCONF.json
+-Template-Name: PNF_CDS_RESTCONF
++Entry-Definitions: Definitions/PNF_SW_UPGRADE_WITH_EM.json
++Template-Name: PNF_SW_UPGRADE_WITH_EM
+ Template-Version: 1.0.0
+-Template-Tags: PNF_CDS_RESTCONF
++Template-Tags: PNF_SW_UPGRADE_WITH_EM
diff --git a/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/pnf-software-upgrade-mapping.json.patch b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/pnf-software-upgrade-mapping.json.patch
new file mode 100644
index 000000000..74c5d184e
--- /dev/null
+++ b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/pnf-software-upgrade-mapping.json.patch
@@ -0,0 +1,17 @@
+--- PNF_CDS_RESTCONF/Definitions/pnf-software-upgrade-mapping.json 2020-03-12 12:13:38.642390600 +0800
++++ PNF_SW_UPGRADE_WITH_EM/Definitions/pnf-software-upgrade-mapping.json 2020-03-12 23:59:44.111140600 +0800
+@@ -22,12 +22,12 @@
+ ]
+ },
+ {
+- "name": "pnf-ipv4-address",
++ "name": "ems-ipv4-address",
+ "input-param": false,
+ "property": {
+ "type": "string"
+ },
+- "dictionary-name": "pnf-ipaddress-aai",
++ "dictionary-name": "ems-ipaddress-aai",
+ "dictionary-source": "aai-data",
+ "dependencies": [
+ "pnf-id"
diff --git a/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/pnf-swug-activate-ne-sw-template.vtl.patch b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/pnf-swug-activate-ne-sw-template.vtl.patch
new file mode 100644
index 000000000..7d8d7efe7
--- /dev/null
+++ b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/pnf-swug-activate-ne-sw-template.vtl.patch
@@ -0,0 +1,19 @@
+--- PNF_CDS_RESTCONF/Templates/pnf-swug-config-template.vtl 2020-03-12 19:06:30.108210900 +0800
++++ PNF_SW_UPGRADE_WITH_EM/Templates/pnf-swug-activate-ne-sw-template.vtl 2020-03-12 23:29:51.565242200 +0800
+@@ -7,11 +7,12 @@
+ "operation": "merge",
+ "target": "/",
+ "value": {
+- "software-upgrade": {
+- "upgrade-package": [
++ "software-management": {
++ "pnf-software-package": [
+ {
+- "id": "%id%",
+- "action": "%actionName%"
++ "neIdentifier": "${pnf-id}",
++ "action": "%actionName%",
++ "swVersionToBeActivated": "${target-software-version}"
+ }
+ ]
+ }
diff --git a/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/pnf-swug-download-ne-sw-template.vtl.patch b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/pnf-swug-download-ne-sw-template.vtl.patch
new file mode 100644
index 000000000..f74ad838a
--- /dev/null
+++ b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/pnf-swug-download-ne-sw-template.vtl.patch
@@ -0,0 +1,34 @@
+--- PNF_CDS_RESTCONF/Templates/pnf-swug-download-ne-sw-template.vtl 2020-03-12 12:13:38.660945300 +0800
++++ PNF_SW_UPGRADE_WITH_EM/Templates/pnf-swug-download-ne-sw-template.vtl 2020-03-12 23:12:49.980281200 +0800
+@@ -7,17 +7,21 @@
+ "operation": "merge",
+ "target": "/",
+ "value": {
+- "software-upgrade": {
+- "upgrade-package": [
++ "software-management": {
++ "pnf-software-package": [
+ {
+- "id": "${target-software-version}",
+- "current-status": "INITIALIZED",
+- "action": "%actionName%",
+- "user-label": "trial software update",
+- "uri": "sftp://127.0.0.1/test_software_2.img",
+- "software-version": "${target-software-version}",
+- "user": "test_user",
+- "password": "test_password"
++ "neIdentifier": "${pnf-id}",
++ "current-status": "INITIALIZED",
++ "action": "%actionName%",
++ "software-version": "${target-software-version}",
++ "swToBeDownloaded": [
++ {
++ "swLocation": "http://192.168.35.96:10080/ran_du_pkg1-v2.zip",
++ "swFileSize": "12345678",
++ "swFileCompression": "ZIP",
++ "swFileFormat": "binary"
++ }
++ ]
+ }
+ ]
+ }
diff --git a/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/resources_definition_types.json.patch b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/resources_definition_types.json.patch
new file mode 100644
index 000000000..74f3fb0fb
--- /dev/null
+++ b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/resources_definition_types.json.patch
@@ -0,0 +1,14 @@
+--- PNF_CDS_RESTCONF/Definitions/resources_definition_types.json 2020-03-12 12:13:38.643367200 +0800
++++ PNF_SW_UPGRADE_WITH_EM/Definitions/resources_definition_types.json 2020-03-12 23:55:20.636531200 +0800
+@@ -14,9 +14,9 @@
+ }
+ }
+ },
+- "pnf-ipaddress-aai" : {
++ "ems-ipaddress-aai" : {
+ "tags" : "aai-get",
+- "name" : "pnf-ipaddress-aai",
++ "name" : "ems-ipaddress-aai",
+ "property" : {
+ "description" : "primary aai data to get resource",
+ "type" : "string"
diff --git a/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/restconf-mount-template.vtl.patch b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/restconf-mount-template.vtl.patch
new file mode 100644
index 000000000..b5a9c5bb2
--- /dev/null
+++ b/test/mocks/emssimulator/pnf-sw-upgrade-cba-builder/patches/restconf-mount-template.vtl.patch
@@ -0,0 +1,16 @@
+--- PNF_CDS_RESTCONF/Templates/restconf-mount-template.vtl 2020-03-12 12:13:38.664851500 +0800
++++ PNF_SW_UPGRADE_WITH_EM/Templates/restconf-mount-template.vtl 2020-03-12 23:36:52.209773400 +0800
+@@ -1,11 +1,11 @@
+ {
+ "node": [
+ {
+- "node-id": "${pnf-id}",
++ "node-id": "%ems-id%",
+ "netconf-node-topology:protocol": {
+ "name": "TLS"
+ },
+- "netconf-node-topology:host": "${pnf-ipv4-address}",
++ "netconf-node-topology:host": "${ems-ipv4-address}",
+ "netconf-node-topology:key-based": {
+ "username": "netconf",
+ "key-id": "ODL_private_key_0"
diff --git a/test/mocks/emssimulator/swm-netconf/docker-compose.yml b/test/mocks/emssimulator/swm-netconf/docker-compose.yml
new file mode 100644
index 000000000..b72668566
--- /dev/null
+++ b/test/mocks/emssimulator/swm-netconf/docker-compose.yml
@@ -0,0 +1,13 @@
+version: '3'
+
+services:
+ ems-netconf-swm:
+ image: nexus3.onap.org:10001/onap/integration/simulators/netconf-pnp-simulator:2.8.2
+ #image: netconf-pnp-simulator:latest
+ container_name: ems-netconf-swm
+ restart: always
+ ports:
+ - "830:830"
+ - "6513:6513"
+ volumes:
+ - ./pnf-swm:/config/modules/pnf-swm/
diff --git a/test/mocks/emssimulator/swm-netconf/pnf-swm/LICENSE b/test/mocks/emssimulator/swm-netconf/pnf-swm/LICENSE
new file mode 100644
index 000000000..3eface2c7
--- /dev/null
+++ b/test/mocks/emssimulator/swm-netconf/pnf-swm/LICENSE
@@ -0,0 +1,13 @@
+Copyright (C) 2019 Nordix Foundation
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/IncrementProvider.java b/test/mocks/emssimulator/swm-netconf/pnf-swm/LICENSE-2
index ea87ae6b3..24d86c289 100644
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/IncrementProvider.java
+++ b/test/mocks/emssimulator/swm-netconf/pnf-swm/LICENSE-2
@@ -1,8 +1,6 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
+/*-
* ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,12 +13,4 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
- * ============LICENSE_END=========================================================
*/
-
-package org.onap.pnfsimulator.simulator;
-
-@FunctionalInterface
-public interface IncrementProvider {
- int getAndIncrement(String id);
-}
diff --git a/test/mocks/emssimulator/swm-netconf/pnf-swm/README b/test/mocks/emssimulator/swm-netconf/pnf-swm/README
new file mode 100644
index 000000000..303511647
--- /dev/null
+++ b/test/mocks/emssimulator/swm-netconf/pnf-swm/README
@@ -0,0 +1,9 @@
+1. Introduction
+ The directory /pnf-swm shall be located in ems-netconf-swm container for integrtion test for PNF software upgrade with EM with Netconf.
+
+2. Directory structure
+ a. model.yang: YANG model for donwnloadNESw and activateNESw operations of software management.
+ b. data.xml: Initial data or device configuration info stored in ems-netconf-swm container.
+ c. subscribe.py: Inherited from /integration/test/mocks/netconf-pnp-simulator/modules/pnf-sw/upgrade/ and mofication for the model.yang
+ d. LICENSE: original license from /integration/test/mocks/netconf-pnp-simulator/modules/pnf-sw/upgrade/
+ e. LICENSE-2: new license info for enhancement
diff --git a/test/mocks/emssimulator/swm-netconf/pnf-swm/data.xml b/test/mocks/emssimulator/swm-netconf/pnf-swm/data.xml
new file mode 100644
index 000000000..3ef512550
--- /dev/null
+++ b/test/mocks/emssimulator/swm-netconf/pnf-swm/data.xml
@@ -0,0 +1,13 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<software-management xmlns="http://onap.org/pnf-swm">
+ <pnf-software-package>
+ <neIdentifier>5gDU0001</neIdentifier>
+ <current-status>CREATED</current-status>
+ <software-version>v1</software-version>
+ </pnf-software-package>
+ <pnf-software-package>
+ <neIdentifier>5gDU0002</neIdentifier>
+ <current-status>CREATED</current-status>
+ <software-version>v1</software-version>
+ </pnf-software-package>
+</software-management>
diff --git a/test/mocks/emssimulator/swm-netconf/pnf-swm/model.yang b/test/mocks/emssimulator/swm-netconf/pnf-swm/model.yang
new file mode 100644
index 000000000..de1daefac
--- /dev/null
+++ b/test/mocks/emssimulator/swm-netconf/pnf-swm/model.yang
@@ -0,0 +1,86 @@
+module pnf-swm {
+ namespace "http://onap.org/pnf-swm";
+ prefix swm;
+
+ import ietf-yang-types {
+ prefix yang;
+ }
+
+ revision "2020-03-10" {
+ description
+ "initial version, Ref. 3GPP 32.532-f00";
+ }
+
+ container software-management {
+ config true;
+ list pnf-software-package {
+ key "neIdentifier";
+ leaf neIdentifier {
+ type string;
+ description
+ "NE identifier.";
+ }
+ leaf current-status {
+ type enumeration {
+ enum CREATED;
+ enum INITIALIZED;
+ enum DOWNLOAD_IN_PROGRESS;
+ enum DOWNLOAD_COMPLETED;
+ enum ACTIVATION_IN_PROGRESS;
+ enum ACTIVATION_COMPLETED;
+ }
+ description
+ "List of possible states of the upgrade";
+ }
+ leaf state-change-time {
+ mandatory false;
+ description
+ "Date and time of the last state change.";
+ type yang:date-and-time;
+ }
+ leaf action {
+ mandatory false;
+ type enumeration {
+ enum NONE;
+ enum PRE_CHECK;
+ enum DOWNLOAD_NE_SW;
+ enum ACTIVATE_NE_SW;
+ enum POST_CHECK;
+ enum CANCEL;
+ }
+ description
+ "List of possible actions for the upgrade";
+ }
+ leaf software-version {
+ type string;
+ description
+ "Possible name or release version of the UP";
+ }
+ list swToBeDownloaded {
+ key "swLocation";
+ leaf swLocation {
+ type string;
+ description
+ "Software location to be downloaded.";
+ }
+ leaf swFileSize {
+ type uint64;
+ description "Software file size.";
+ }
+ leaf swFileCompression {
+ type string;
+ description "Software file compression algorithm.";
+ }
+ leaf swFileFormat {
+ type string;
+ description "Software file format.";
+ }
+ }
+ leaf swVersionToBeActivated {
+ type string;
+ description
+ "Software version to be activated.";
+ }
+ }
+ }
+}
diff --git a/test/mocks/emssimulator/swm-netconf/pnf-swm/subscriber.py b/test/mocks/emssimulator/swm-netconf/pnf-swm/subscriber.py
new file mode 100755
index 000000000..56d061906
--- /dev/null
+++ b/test/mocks/emssimulator/swm-netconf/pnf-swm/subscriber.py
@@ -0,0 +1,214 @@
+#!/usr/bin/env python3
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Nordix Foundation.
+# ================================================================================
+# Modification Copyright 2020 Huawei Technologies Co., Ltd
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+__author__ = "Eliezio Oliveira <eliezio.oliveira@est.tech>"
+__copyright__ = "Copyright (C) 2020 Nordix Foundation, and Huawei"
+__license__ = "Apache 2.0"
+
+import time
+from concurrent.futures import ThreadPoolExecutor
+from threading import Timer
+
+import sysrepo as sr
+
+YANG_MODULE_NAME = 'pnf-swm'
+
+#
+# ----- BEGIN Finite State Machine definitions -----
+#
+
+# Actions
+ACT_PRE_CHECK = 'PRE_CHECK'
+ACT_DOWNLOAD_NE_SW = 'DOWNLOAD_NE_SW'
+ACT_ACTIVATE_NE_SW = 'ACTIVATE_NE_SW'
+ACT_CANCEL = 'CANCEL'
+
+# States
+ST_CREATED = 'CREATED'
+ST_INITIALIZED = 'INITIALIZED'
+ST_DOWNLOAD_IN_PROGRESS = 'DOWNLOAD_IN_PROGRESS'
+ST_DOWNLOAD_COMPLETED = 'DOWNLOAD_COMPLETED'
+ST_ACTIVATION_IN_PROGRESS = 'ACTIVATION_IN_PROGRESS'
+ST_ACTIVATION_COMPLETED = 'ACTIVATION_COMPLETED'
+
+# Timeout used for timed transitions
+TO_DOWNLOAD = 7
+TO_ACTIVATION = 7
+
+
+def timestamper(sess, key_id):
+ xpath = xpath_of(key_id, 'state-change-time')
+ now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
+ state = sr.Val(now, sr.SR_STRING_T)
+ sess.set_item(xpath, state)
+
+
+def xpath_of(key_id, leaf_id):
+ selector = "[neIdentifier='{0}']".format(key_id) if key_id else ''
+ return "/%s:software-management/pnf-software-package%s/%s" % (YANG_MODULE_NAME, selector, leaf_id)
+
+
+"""
+The finite state machine (FSM) is represented as a dictionary where the current state is the key, and its value is
+an object (also represented as a dictionary) with the following optional attributes:
+
+- on_enter: a function called when FSM enters this state;
+- transitions: a dictionary mapping every acceptable action to the target state;
+- timed_transition: a pair for a timed transition that will automatically occur after a given interval.
+"""
+STATE_MACHINE = {
+ ST_CREATED: {
+ 'transitions': {ACT_PRE_CHECK: ST_INITIALIZED}
+ },
+ ST_INITIALIZED: {
+ 'on_enter': timestamper,
+ 'transitions': {ACT_DOWNLOAD_NE_SW: ST_DOWNLOAD_IN_PROGRESS}
+ },
+ ST_DOWNLOAD_IN_PROGRESS: {
+ 'on_enter': timestamper,
+ 'timed_transition': (TO_DOWNLOAD, ST_DOWNLOAD_COMPLETED),
+ 'transitions': {ACT_CANCEL: ST_INITIALIZED}
+ },
+ ST_DOWNLOAD_COMPLETED: {
+ 'on_enter': timestamper,
+ 'transitions': {ACT_ACTIVATE_NE_SW: ST_ACTIVATION_IN_PROGRESS}
+ },
+ ST_ACTIVATION_IN_PROGRESS: {
+ 'on_enter': timestamper,
+ 'timed_transition': (TO_ACTIVATION, ST_ACTIVATION_COMPLETED),
+ 'transitions': {ACT_CANCEL: ST_DOWNLOAD_COMPLETED}
+ },
+ ST_ACTIVATION_COMPLETED: {
+ 'on_enter': timestamper,
+ 'transitions': {ACT_ACTIVATE_NE_SW: ST_ACTIVATION_IN_PROGRESS}
+ }
+}
+
+#
+# ----- END Finite State Machine definitions -----
+#
+
+
+def main():
+ try:
+ conn = sr.Connection(YANG_MODULE_NAME)
+ sess = sr.Session(conn)
+ subscribe = sr.Subscribe(sess)
+
+ subscribe.module_change_subscribe(YANG_MODULE_NAME, module_change_cb, conn)
+
+ try:
+ print_current_config(sess, YANG_MODULE_NAME)
+ except Exception as e:
+ print(e)
+
+ sr.global_loop()
+
+ print("Application exit requested, exiting.")
+ except Exception as e:
+ print(e)
+
+
+# Callback invoked for the subscribed client of the given session whenever the configuration changes.
+def module_change_cb(sess, module_name, event, private_ctx):
+ try:
+ conn = private_ctx
+ change_path = xpath_of(None, 'action')
+ it = sess.get_changes_iter(change_path)
+ while True:
+ change = sess.get_change_next(it)
+ if change is None:
+ break
+ handle_change(conn, change.oper(), change.old_val(), change.new_val())
+ except Exception as e:
+ print(e)
+ return sr.SR_ERR_OK
+
+
+# Function to print current configuration state.
+# It does so by loading all the items of a session and printing them out.
+def print_current_config(session, module_name):
+ select_xpath = "/" + module_name + ":*//*"
+
+ values = session.get_items(select_xpath)
+
+ if values is not None:
+ print("========== BEGIN CONFIG ==========")
+ for i in range(values.val_cnt()):
+ print(values.val(i).to_string(), end='')
+ print("=========== END CONFIG ===========")
+
+
+def handle_change(conn, op, old_val, new_val):
+ """
+ Handle individual changes on the model.
+ """
+ if op == sr.SR_OP_CREATED:
+ print("CREATED: %s" % new_val.to_string())
+ xpath = new_val.xpath()
+ last_node = xpath_ctx.last_node(xpath)
+ # Warning: 'key_value' modifies 'xpath'!
+ key_id = xpath_ctx.key_value(xpath, 'pnf-software-package', 'neIdentifier')
+ if key_id and last_node == 'action':
+ executor.submit(execute_action, conn, key_id, new_val.data().get_enum())
+ elif op == sr.SR_OP_DELETED:
+ print("DELETED: %s" % old_val.to_string())
+ elif op == sr.SR_OP_MODIFIED:
+ print("MODIFIED: %s to %s" % (old_val.to_string(), new_val.to_string()))
+ elif op == sr.SR_OP_MOVED:
+ print("MOVED: %s after %s" % (new_val.xpath(), old_val.xpath()))
+
+
+def execute_action(conn, key_id, action):
+ sess = sr.Session(conn)
+ try:
+ cur_state = sess.get_item(xpath_of(key_id, 'current-status')).data().get_enum()
+ next_state_str = STATE_MACHINE[cur_state]['transitions'].get(action, None)
+ if next_state_str:
+ handle_set_state(conn, key_id, next_state_str)
+ sess.delete_item(xpath_of(key_id, 'action'))
+ sess.commit()
+ finally:
+ sess.session_stop()
+
+
+def handle_set_state(conn, key_id, state_str):
+ sess = sr.Session(conn)
+ try:
+ state = sr.Val(state_str, sr.SR_ENUM_T)
+ sess.set_item(xpath_of(key_id, 'current-status'), state)
+ on_enter = STATE_MACHINE[state_str].get('on_enter', None)
+ if on_enter:
+ # noinspection PyCallingNonCallable
+ on_enter(sess, key_id)
+ sess.commit()
+ delay, next_state_str = STATE_MACHINE[state_str].get('timed_transition', [0, None])
+ if delay:
+ Timer(delay, handle_set_state, (conn, key_id, next_state_str)).start()
+ finally:
+ sess.session_stop()
+
+
+if __name__ == '__main__':
+ xpath_ctx = sr.Xpath_Ctx()
+ executor = ThreadPoolExecutor(max_workers=2)
+ main()
diff --git a/test/mocks/emssimulator/swm/activateNESw b/test/mocks/emssimulator/swm/activateNESw
index 67d233e24..3531f1634 100755
--- a/test/mocks/emssimulator/swm/activateNESw
+++ b/test/mocks/emssimulator/swm/activateNESw
@@ -1,117 +1,28 @@
-#!/usr/bin/python
+#!/usr/bin/python3
+# ============LICENSE_START=======================================================
+# ONAP - SO
+# ================================================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
import sys
import argparse
import json
-import os
-import shutil
-import random
-import time
import conf
-import ems_util
-
-
-def do_activate_sw(sw_version_to_be_activated, ne_info):
- """
- return err, reason
- """
-
- installed_sw = ne_info.get("installedSw", {})
- if sw_version_to_be_activated in installed_sw:
- target_sw_version = installed_sw[sw_version_to_be_activated]["version"]
- else:
- target_sw_version = sw_version_to_be_activated
-
- sw_install_dir_in_ne = conf.PNF_SIMULATORS_DIR + '/' + ne_info['omIP'] + conf.PNF_SW_INSTALL_DIR
- target_sw_dir = sw_install_dir_in_ne + '/' + target_sw_version
- if not os.path.isdir(target_sw_dir):
- return True, "SW to be activated does not install"
-
- if "targetSwVersion" in ne_info:
- if ne_info["targetSwVersion"] != target_sw_version:
- return True, "Conflicted targetVersion with to be activated %s" % target_sw_version
- del ne_info["targetSwVersion"]
-
- old_sw_version = ne_info.get("oldSwVersion", "")
-
- if target_sw_version != ne_info["currentSwVersion"]:
- ne_info["oldSwVersion"] = ne_info["currentSwVersion"]
- ne_info["currentSwVersion"] = target_sw_version
- ne_info["status"] = conf.STATUS_ACTIVATING
- ems_util.update_ne_info(ne_info)
-
- if target_sw_version != old_sw_version:
- old_sw_dir = sw_install_dir_in_ne + '/' + old_sw_version
- if old_sw_version and os.path.isdir(old_sw_dir):
- shutil.rmtree(old_sw_dir, ignore_errors=True)
-
- old_cwd = os.getcwd()
- os.chdir(sw_install_dir_in_ne)
- if os.path.islink(conf.CURRENT_VERSION_DIR):
- os.remove(conf.CURRENT_VERSION_DIR)
- os.symlink(target_sw_version, conf.CURRENT_VERSION_DIR)
- os.chdir(old_cwd)
-
- if "downloadedSwLocation" in ne_info:
- if os.path.isdir(ne_info["downloadedSwLocation"]):
- shutil.rmtree(ne_info["downloadedSwLocation"], ignore_errors=True)
- del ne_info["downloadedSwLocation"]
-
- return False, None
-
-
-def generate_notification(activate_process_id, activate_status, sw_version, failure_reason):
- notification = {
- "objectClass": "EMSClass",
- "objectInstance": "EMSInstance",
- "notificationId": random.randint(1, conf.MAX_INT),
- "eventTime": time.asctime(),
- "systemDN": "emssimulator",
- "notificationType": "notifyActivateNESwStatusChanged",
- "activateProcessId": activate_process_id,
- "activateOperationStatus": activate_status,
- "swVersion": sw_version
- }
-
- if failure_reason:
- notification["failureReason"] = failure_reason
-
- return notification
-
-
-def activate_ne_sw(sw_version_to_be_activated, ne_id):
- ne_info = ems_util.get_ne_info_from_db_by_id(ne_id)
-
- activate_process_id = random.randint(1, conf.MAX_INT)
- result = conf.REQ_SUCCESS
- ret_value = {
- "activateProcessId": activate_process_id,
- "result": result
- }
-
- if not ne_info:
- ret_value["result"] = conf.REQ_FAILURE
- ret_value["reason"] = "Can not find NE %s" % ne_id
- return ret_value
-
- err, reason = do_activate_sw(sw_version_to_be_activated, ne_info)
-
- if not err:
- ne_info["status"] = conf.STATUS_ACTIVATED
- ems_util.update_ne_info(ne_info)
- activate_status = "NE_SWACTIVATION_SUCCESSFUL"
- else:
- ret_value["result"] = conf.REQ_FAILURE
- ret_value["reason"] = reason
-
- activate_status = "NE_SWACTIVATION_FAILED"
-
- notification = generate_notification(activate_process_id, activate_status, sw_version_to_be_activated, reason)
- ems_util.send_notification(notification, activate_process_id)
-
- # for automated software management, there is no listOfStepNumbersAndDurations
- return ret_value
+import activate_n_e_sw
def main():
@@ -122,8 +33,8 @@ def main():
args = parser.parse_args()
- ret_value = activate_ne_sw(args.swVersionToBeActivated, args.neIdentifier)
- print json.dumps(ret_value)
+ _, ret_value = activate_n_e_sw.activate(args.swVersionToBeActivated, args.neIdentifier)
+ print(json.dumps(ret_value))
if ret_value["result"] == conf.REQ_SUCCESS:
sys.exit(conf.RET_CODE_SUCCESS)
diff --git a/test/mocks/emssimulator/swm/activate_n_e_sw.py b/test/mocks/emssimulator/swm/activate_n_e_sw.py
new file mode 100644
index 000000000..26214fa53
--- /dev/null
+++ b/test/mocks/emssimulator/swm/activate_n_e_sw.py
@@ -0,0 +1,141 @@
+# ============LICENSE_START=======================================================
+# ONAP - SO
+# ================================================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+import os
+import logging
+import shutil
+import random
+import time
+
+import conf
+import ems_util
+
+
+OPERATION_NAME = "activateNESw"
+logging.basicConfig(level=logging.INFO, format=conf.LOGGER_FORMAT, filename=ems_util.get_log_file(OPERATION_NAME))
+logger = logging.getLogger(OPERATION_NAME)
+
+
+def do_activate(sw_version_to_be_activated, ne_info):
+ """
+ return err, reason
+ """
+
+ logger.info("swVersionToBeActivated: %s" % sw_version_to_be_activated)
+
+ installed_sw = ne_info.get("installedSw", {})
+ if sw_version_to_be_activated in installed_sw:
+ target_sw_version = installed_sw[sw_version_to_be_activated]["version"]
+ else:
+ target_sw_version = sw_version_to_be_activated
+
+ sw_install_dir_in_ne = ems_util.get_install_dir(ne_info['omIP'])
+ logger.info("SW has been installed at %s" % sw_install_dir_in_ne)
+
+ if "targetSwVersion" in ne_info:
+ if ne_info["targetSwVersion"] != target_sw_version:
+ msg = "Conflicted targetVersion with to be activated %s" % target_sw_version
+ logger.error(msg)
+ return True, msg
+ del ne_info["targetSwVersion"]
+
+ old_sw_version = ne_info.get("oldSwVersion", "")
+
+ if target_sw_version != ne_info["currentSwVersion"]:
+ ne_info["oldSwVersion"] = ne_info["currentSwVersion"]
+ ne_info["currentSwVersion"] = target_sw_version
+ ne_info["status"] = conf.STATUS_ACTIVATING
+ ems_util.update_ne_info(ne_info)
+
+ if target_sw_version != old_sw_version:
+ old_sw_dir = os.path.join(sw_install_dir_in_ne, old_sw_version)
+ if old_sw_version and os.path.isdir(old_sw_dir):
+ shutil.rmtree(old_sw_dir, ignore_errors=True)
+
+ old_cwd = os.getcwd()
+ os.chdir(sw_install_dir_in_ne)
+ if os.path.islink(conf.CURRENT_VERSION_DIR):
+ os.remove(conf.CURRENT_VERSION_DIR)
+ os.symlink(target_sw_version, conf.CURRENT_VERSION_DIR)
+ os.chdir(old_cwd)
+
+ if "downloadedSwLocation" in ne_info:
+ if os.path.isdir(ne_info["downloadedSwLocation"]):
+ shutil.rmtree(ne_info["downloadedSwLocation"], ignore_errors=True)
+ del ne_info["downloadedSwLocation"]
+
+ return False, None
+
+
+def generate_notification(activate_process_id, activate_status, sw_version, failure_reason):
+ notification = {
+ "objectClass": conf.OBJECT_CLASS,
+ "objectInstance": conf.OBJECT_INSTANCE,
+ "notificationId": random.randint(1, conf.MAX_INT),
+ "eventTime": time.asctime(),
+ "systemDN": conf.SYSTEM_DN,
+ "notificationType": "notifyActivateNESwStatusChanged",
+ "activateProcessId": activate_process_id,
+ "activateOperationStatus": activate_status,
+ "swVersion": sw_version
+ }
+
+ if failure_reason:
+ logger.error(failure_reason)
+ notification["failureReason"] = failure_reason
+
+ return notification
+
+
+def activate(sw_version_to_be_activated, ne_id):
+ ne_info = ems_util.get_ne_info_from_db_by_id(ne_id)
+
+ activate_process_id = random.randint(1, conf.MAX_INT)
+ result = conf.REQ_SUCCESS
+ ret_value = {
+ "activateProcessId": activate_process_id,
+ "result": result
+ }
+
+ if not ne_info:
+ ret_value["result"] = conf.REQ_FAILURE
+ ret_value["reason"] = "Can not find NE %s" % ne_id
+
+ logger.error(ret_value["reason"])
+ return ret_value
+
+ err, reason = do_activate(sw_version_to_be_activated, ne_info)
+
+ if not err:
+ ne_info["status"] = conf.STATUS_ACTIVATED
+ ems_util.update_ne_info(ne_info)
+
+ logger.info("Activate SW success")
+ activate_status = "NE_SWACTIVATION_SUCCESSFUL"
+ else:
+ ret_value["result"] = conf.REQ_FAILURE
+ ret_value["reason"] = reason
+
+ logger.error("Activate SW failure, reason: %s" % ret_value["reason"])
+ activate_status = "NE_SWACTIVATION_FAILED"
+
+ notification = generate_notification(activate_process_id, activate_status, sw_version_to_be_activated, reason)
+ ems_util.send_notification(notification, activate_process_id)
+
+ # for automated software management, there is no listOfStepNumbersAndDurations
+ return notification, ret_value
diff --git a/test/mocks/emssimulator/swm/conf.py b/test/mocks/emssimulator/swm/conf.py
index d7ba5b4d5..5aac383d9 100644
--- a/test/mocks/emssimulator/swm/conf.py
+++ b/test/mocks/emssimulator/swm/conf.py
@@ -1,22 +1,46 @@
-#!/usr/bin/python
+# ============LICENSE_START=======================================================
+# ONAP - SO
+# ================================================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
import sys
+import os
SWM_DIR = sys.path[0]
-NE_INFO_TABLE = SWM_DIR + "/ems_db/ne_info_table.json"
-SW_SERVER_SIMULATOR = SWM_DIR + "/sw_server_simulator"
-PNF_SIMULATORS_DIR = SWM_DIR + "/pnf_simulators"
-PNF_SW_DOWNLOAD_DIR = "/opt/download"
-PNF_SW_INSTALL_DIR = "/opt/install"
-PNF_SW_FALLBACK_DIR = "/opt/fallback"
+LOGGER_FORMAT = "[%(asctime)-15s] %(levelname)s [%(name)s]: %(message)s"
+LOGGER_FILE_DIR = os.path.join(SWM_DIR, "log")
+
+NE_INFO_TABLE = os.path.join(SWM_DIR, "ems_db", "ne_info_table.json")
+SW_SERVER_SIMULATOR = os.path.join(SWM_DIR, "sw_server_simulator")
+PNF_SIMULATORS_DIR = os.path.join(SWM_DIR, "pnf_simulators")
+COMMON_PATH = "opt"
+PNF_SW_DOWNLOAD_DIR = "download"
+PNF_SW_INSTALL_DIR = "install"
MANIFEST_FILE = "manifest.json"
-INSTALLED_SW = "installed_sw.json"
+INSTALLED_SW_FILE = "installed_sw.json"
CURRENT_VERSION_DIR = "current"
NOTIFICATION_DIR = "/tmp"
MAX_INT = (2**32) - 1
+OBJECT_CLASS = "NRCellDU"
+OBJECT_INSTANCE = "DC=com, SubNetwork=1, ManagedElement=123, GNBDUFunction=1, NRCellDU=1"
+SYSTEM_DN = "DC=com, SubNetwork=1, ManagedElement=123"
+
STATUS_DOWNLOADING = "Downloading"
STATUS_INSTALLING = "Installing"
STATUS_ACTIVATING = "Activating"
@@ -34,4 +58,3 @@ RET_CODE_FAILURE = 1
RESULT_SUCCESS = "Success"
RESULT_FAILURE = "Failure"
RESULT_PARTLY = "Partly successful"
-
diff --git a/test/mocks/emssimulator/swm/downloadNESw b/test/mocks/emssimulator/swm/downloadNESw
index 06a8d6b37..90794488d 100755
--- a/test/mocks/emssimulator/swm/downloadNESw
+++ b/test/mocks/emssimulator/swm/downloadNESw
@@ -1,150 +1,70 @@
-#!/usr/bin/python
+#!/usr/bin/python3
+# ============LICENSE_START=======================================================
+# ONAP - SO
+# ================================================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
import sys
import argparse
import json
-import os
-import shutil
-import random
-import time
-import tempfile
import conf
-import ems_util
+import download_n_e_sw
+import install_n_e_sw
-def do_download_sw(sw_info, sw_download_dir):
- """
- return err, reason, file_location_in_nes
- """
-
- sw_location = sw_info['swLocation']
-
- # Use copy file from SW_SERVER_SIMULATOR to simulate download file
- sw_file_name = sw_location.split('/')[-1]
-
- file_location_in_server = conf.SW_SERVER_SIMULATOR + '/' + sw_file_name
- file_location_in_ne = sw_download_dir + '/' + sw_file_name
-
- try:
- shutil.copy(file_location_in_server, sw_download_dir)
- except IOError as e:
- return True, "Download %s to %s error: %s" % (sw_file_name, sw_download_dir, str(e)), file_location_in_ne
-
- return False, None, file_location_in_ne
-
-
-def generate_notification(download_process_id, download_status, downloaded_ne_sw_info, failed_sw_info):
- notification = {
- "objectClass": "EMSClass",
- "objectInstance": "EMSInstance",
- "notificationId": random.randint(1, conf.MAX_INT),
- "eventTime": time.asctime(),
- "systemDN": "emssimulator",
- "notificationType": "notifyDownloadNESwStatusChanged",
- "downloadProcessId": download_process_id,
- "downloadOperationStatus": download_status
- }
+def main():
+ parser = argparse.ArgumentParser()
- if downloaded_ne_sw_info:
- notification["downloadedNESwInfo"] = downloaded_ne_sw_info
+ parser.add_argument("--swToBeDownloaded", help="The NE software to be downloaded", required=True)
+ parser.add_argument("--neIdentifier", help="The NE where the software can be downloaded", required=True)
- if failed_sw_info:
- notification["failedSwInfo"] = failed_sw_info
+ args = parser.parse_args()
- return notification
+ sw_to_be_downloaded = json.loads(args.swToBeDownloaded)
+ all_installed_ne_sw_list = []
+ all_failed_sw_info = []
-def download_ne_sw(sw_to_be_downloaded, ne_id):
- ne_info = ems_util.get_ne_info_from_db_by_id(ne_id)
+ download_notification, download_ret_value = download_n_e_sw.download(sw_to_be_downloaded, args.neIdentifier)
- download_process_id = random.randint(1, conf.MAX_INT)
- result = conf.REQ_SUCCESS
- ret_value = {
- "downloadProcessId": download_process_id,
- "result": result
- }
+ downloaded_ne_sw_list = download_notification.get("downloadedNESwInfo", [])
+ failed_downloaded_sw_list = download_notification.get("failedSwInfo", [])
+ all_failed_sw_info.extend(failed_downloaded_sw_list)
- if not ne_info:
- ret_value["result"] = conf.REQ_FAILURE
- ret_value["reason"] = "Can not find NE %s" % ne_id
- return ret_value
+ for downloaded_ne_sw in downloaded_ne_sw_list:
+ install_notification, _ = install_n_e_sw.install(downloaded_ne_sw, args.neIdentifier)
+ installed_ne_sw_list = install_notification.get("installedNESwInfo", [])
+ failed_installed_sw_list = install_notification.get("failedSwInfo", [])
- ne_info["status"] = conf.STATUS_DOWNLOADING
- ems_util.update_ne_info(ne_info)
+ all_installed_ne_sw_list.extend(installed_ne_sw_list)
+ all_failed_sw_info.extend(failed_installed_sw_list)
+ num_all_installed_ne_sw_list = len(all_installed_ne_sw_list)
num_sw_to_be_downloaded = len(sw_to_be_downloaded)
- downloaded_ne_sw_info = []
- failed_sw_info = []
-
- sw_download_parent_dir = conf.PNF_SIMULATORS_DIR + '/' + ne_info['omIP'] + conf.PNF_SW_DOWNLOAD_DIR
- sw_download_dir = ne_info.get("downloadedSwLocation", "")
- try:
- if not os.path.isdir(sw_download_parent_dir):
- os.makedirs(sw_download_parent_dir)
-
- if sw_download_dir and not os.path.isdir(sw_download_dir):
- os.makedirs(sw_download_dir)
- except OSError as e:
- ret_value["result"] = conf.REQ_FAILURE
- ret_value["reason"] = str(e)
- return ret_value
-
- if not sw_download_dir:
- sw_download_dir = tempfile.mkdtemp(dir=sw_download_parent_dir)
-
- for sw_info in sw_to_be_downloaded:
- err, reason, file_location = do_download_sw(sw_info, sw_download_dir)
- if not err:
- downloaded_ne_sw_info.append(file_location)
- else:
- result = conf.REQ_FAILURE
- failed_sw_entry = {
- "failedSw": file_location,
- "failureReason": reason
- }
- failed_sw_info.append(failed_sw_entry)
-
- num_downloaded_ne_sw = len(downloaded_ne_sw_info)
-
- if num_downloaded_ne_sw == num_sw_to_be_downloaded:
- download_status = "NE_SWDOWNLOAD_SUCCESSFUL"
- elif num_downloaded_ne_sw == 0:
- download_status = "NE_SWDOWNLOAD_FAILED"
+ if num_all_installed_ne_sw_list == num_sw_to_be_downloaded:
+ download_ret_value["result"] = conf.REQ_SUCCESS
else:
- download_status = "NE_SWDOWNLOAD_PARTIALLY_SUCCESSFUL"
-
- notification = generate_notification(download_process_id, download_status, downloaded_ne_sw_info, failed_sw_info)
- ems_util.send_notification(notification, download_process_id)
-
- if result == conf.REQ_SUCCESS:
- ne_info["downloadedSwLocation"] = sw_download_dir
- ems_util.update_ne_info(ne_info)
- else:
- shutil.rmtree(sw_download_dir, ignore_errors=True)
-
- ret_value["result"] = result
- ret_value["reason"] = json.dumps(failed_sw_info)
-
- # for automated software management, there is no listOfStepNumbersAndDurations
- return ret_value
-
-
-def main():
- parser = argparse.ArgumentParser()
-
- parser.add_argument("--swToBeDownloaded", help="The NE software to be downloaded", required=True)
- parser.add_argument("--neIdentifier", help="The NE where the software can be downloaded", required=True)
-
- args = parser.parse_args()
-
- sw_to_be_downloaded = json.loads(args.swToBeDownloaded)
+ download_ret_value["result"] = conf.REQ_FAILURE
+ download_ret_value["reason"] = "Failed downloaded or installed SW: %s" % json.dumps(all_failed_sw_info)
- ret_value = download_ne_sw(sw_to_be_downloaded, args.neIdentifier)
- print json.dumps(ret_value)
+ print(json.dumps(download_ret_value))
- if ret_value["result"] == conf.REQ_SUCCESS:
+ if download_ret_value["result"] == conf.REQ_SUCCESS:
sys.exit(conf.RET_CODE_SUCCESS)
else:
sys.exit(conf.RET_CODE_FAILURE)
diff --git a/test/mocks/emssimulator/swm/download_n_e_sw.py b/test/mocks/emssimulator/swm/download_n_e_sw.py
new file mode 100644
index 000000000..05ae4bb6a
--- /dev/null
+++ b/test/mocks/emssimulator/swm/download_n_e_sw.py
@@ -0,0 +1,169 @@
+# ============LICENSE_START=======================================================
+# ONAP - SO
+# ================================================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+import os
+import logging
+import json
+import shutil
+import random
+import time
+import tempfile
+
+import conf
+import ems_util
+
+
+OPERATION_NAME = "downloadNESw"
+logging.basicConfig(level=logging.INFO, format=conf.LOGGER_FORMAT, filename=ems_util.get_log_file(OPERATION_NAME))
+logger = logging.getLogger(OPERATION_NAME)
+
+
+def do_download(sw_info, sw_download_dir):
+ """
+ return err, reason, file_location_in_nes
+ """
+
+ sw_location = sw_info['swLocation']
+
+ logger.info("swLocationToBeDownloaded: %s" % sw_location)
+
+ # Use copy file from SW_SERVER_SIMULATOR to simulate download file
+ sw_file_name = sw_location.split('/')[-1]
+
+ file_location_in_server = os.path.join(conf.SW_SERVER_SIMULATOR, sw_file_name)
+ file_location_in_ne = os.path.join(sw_download_dir, sw_file_name)
+
+ try:
+ shutil.copy(file_location_in_server, sw_download_dir)
+ except IOError as e:
+ msg = "Download %s to %s error: %s" % (sw_file_name, sw_download_dir, str(e))
+ logger.error(msg)
+ return True, msg, file_location_in_ne
+
+ return False, None, file_location_in_ne
+
+
+def generate_notification(download_process_id, download_status, downloaded_ne_sw_info, failed_sw_info):
+ notification = {
+ "objectClass": conf.OBJECT_CLASS,
+ "objectInstance": conf.OBJECT_INSTANCE,
+ "notificationId": random.randint(1, conf.MAX_INT),
+ "eventTime": time.asctime(),
+ "systemDN": conf.SYSTEM_DN,
+ "notificationType": "notifyDownloadNESwStatusChanged",
+ "downloadProcessId": download_process_id,
+ "downloadOperationStatus": download_status
+ }
+
+ if downloaded_ne_sw_info:
+ notification["downloadedNESwInfo"] = downloaded_ne_sw_info
+
+ if failed_sw_info:
+ notification["failedSwInfo"] = failed_sw_info
+
+ return notification
+
+
+def download(sw_to_be_downloaded, ne_id):
+ ne_info = ems_util.get_ne_info_from_db_by_id(ne_id)
+
+ download_process_id = random.randint(1, conf.MAX_INT)
+ result = conf.REQ_SUCCESS
+ ret_value = {
+ "downloadProcessId": download_process_id,
+ "result": result
+ }
+
+ if not ne_info:
+ ret_value["result"] = conf.REQ_FAILURE
+ ret_value["reason"] = "Can not find NE %s" % ne_id
+
+ logger.error(ret_value["reason"])
+ return ret_value
+
+ ne_info["status"] = conf.STATUS_DOWNLOADING
+ ems_util.update_ne_info(ne_info)
+
+ num_sw_to_be_downloaded = len(sw_to_be_downloaded)
+
+ downloaded_ne_sw_info = []
+ failed_sw_info = []
+
+ sw_download_parent_dir = ems_util.get_download_dir(ne_info['omIP'])
+ logger.info("SW will be downloaded to %s" % sw_download_parent_dir)
+
+ sw_download_dir = ne_info.get("downloadedSwLocation", "")
+ try:
+ if not os.path.isdir(sw_download_parent_dir):
+ os.makedirs(sw_download_parent_dir)
+
+ if sw_download_dir and not os.path.isdir(sw_download_dir):
+ os.makedirs(sw_download_dir)
+ except OSError as e:
+ ret_value["result"] = conf.REQ_FAILURE
+ ret_value["reason"] = str(e)
+
+ logger.error(ret_value["reason"])
+ return ret_value
+
+ if not sw_download_dir:
+ sw_download_dir = tempfile.mkdtemp(dir=sw_download_parent_dir)
+
+ for sw_info in sw_to_be_downloaded:
+ err, reason, file_location = do_download(sw_info, sw_download_dir)
+ if not err:
+ logger.info("Downloaded SW file location: %s" % file_location)
+ downloaded_ne_sw_info.append(file_location)
+ else:
+ result = conf.REQ_FAILURE
+ failed_sw_entry = {
+ "failedSw": file_location,
+ "failureReason": reason
+ }
+
+ logger.error("Failed downloaded SW: %s" % str(failed_sw_entry))
+ failed_sw_info.append(failed_sw_entry)
+
+ num_downloaded_ne_sw = len(downloaded_ne_sw_info)
+
+ if num_downloaded_ne_sw == num_sw_to_be_downloaded:
+ download_status = "NE_SWDOWNLOAD_SUCCESSFUL"
+ elif num_downloaded_ne_sw == 0:
+ download_status = "NE_SWDOWNLOAD_FAILED"
+ else:
+ download_status = "NE_SWDOWNLOAD_PARTIALLY_SUCCESSFUL"
+ logger.info("Download SW status: %s" % download_status)
+
+ notification = generate_notification(download_process_id, download_status, downloaded_ne_sw_info, failed_sw_info)
+ ems_util.send_notification(notification, download_process_id)
+
+ if result == conf.REQ_SUCCESS:
+ ne_info["downloadedSwLocation"] = sw_download_dir
+ ems_util.update_ne_info(ne_info)
+
+ logger.info("Download SW success")
+ else:
+ shutil.rmtree(sw_download_dir, ignore_errors=True)
+
+ ret_value["result"] = result
+ ret_value["reason"] = json.dumps(failed_sw_info)
+
+ logger.info("Download SW failure, reason: %s" % ret_value["reason"])
+
+ # for automated software management, there is no listOfStepNumbersAndDurations
+ return notification, ret_value
diff --git a/test/mocks/emssimulator/swm/ems_db/ne_info_table.json b/test/mocks/emssimulator/swm/ems_db/ne_info_table.json
index 90aaeb255..5bf014f99 100644
--- a/test/mocks/emssimulator/swm/ems_db/ne_info_table.json
+++ b/test/mocks/emssimulator/swm/ems_db/ne_info_table.json
@@ -1,39 +1,39 @@
[
{
- "status": "Activated",
- "nEIdentification": "5gDU0001",
- "updateTime": "Tue Apr 23 13:08:43 2019",
- "omIP": "192.168.1.1",
- "currentSwVersion": "v1",
+ "status": "Activated",
+ "nEIdentification": "5gDU0001",
+ "updateTime": "Tue Apr 23 13:08:43 2019",
+ "omIP": "192.168.1.1",
+ "currentSwVersion": "v1",
"installedSw": {
"ran_du_pkg2-v1": {
- "installedLocation": "/home/ubuntu/swm/pnf_simulators/192.168.1.1/opt/install/v1/ran_du_pkg2",
- "version": "v1",
+ "installedLocation": "/home/admin/swm/pnf_simulators/192.168.1.1/opt/install/v1/ran_du_pkg2",
+ "version": "v1",
"name": "ran_du_pkg2"
- },
+ },
"ran_du_pkg1-v1": {
- "installedLocation": "/home/ubuntu/swm/pnf_simulators/192.168.1.1/opt/install/v1/ran_du_pkg1",
- "version": "v1",
+ "installedLocation": "/home/admin/swm/pnf_simulators/192.168.1.1/opt/install/v1/ran_du_pkg1",
+ "version": "v1",
"name": "ran_du_pkg1"
}
}
- },
+ },
{
- "status": "Activated",
- "nEIdentification": "5gDU0002",
- "updateTime": "Tue Apr 23 11:06:36 2019",
- "omIP": "192.168.1.2",
- "oldSwVersion": "v1",
- "currentSwVersion": "v2",
+ "status": "Activated",
+ "nEIdentification": "5gDU0002",
+ "updateTime": "Tue Apr 23 11:06:36 2019",
+ "omIP": "192.168.1.2",
+ "oldSwVersion": "v1",
+ "currentSwVersion": "v2",
"installedSw": {
"ran_du_pkg1-v2": {
- "installedLocation": "/home/ubuntu/swm/pnf_simulators/192.168.1.2/opt/install/v2/ran_du_pkg1",
- "version": "v2",
+ "installedLocation": "/home/admin/swm/pnf_simulators/192.168.1.2/opt/install/v2/ran_du_pkg1",
+ "version": "v2",
"name": "ran_du_pkg1"
- },
+ },
"ran_du_pkg2-v2": {
- "installedLocation": "/home/ubuntu/swm/pnf_simulators/192.168.1.2/opt/install/v2/ran_du_pkg2",
- "version": "v2",
+ "installedLocation": "/home/admin/swm/pnf_simulators/192.168.1.2/opt/install/v2/ran_du_pkg2",
+ "version": "v2",
"name": "ran_du_pkg2"
}
}
diff --git a/test/mocks/emssimulator/swm/ems_util.py b/test/mocks/emssimulator/swm/ems_util.py
index 6d0d3102b..9bfb2cd06 100644
--- a/test/mocks/emssimulator/swm/ems_util.py
+++ b/test/mocks/emssimulator/swm/ems_util.py
@@ -1,5 +1,22 @@
-#!/usr/bin/python
-
+# ============LICENSE_START=======================================================
+# ONAP - SO
+# ================================================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+import os
import time
import json
import jsonpath
@@ -7,6 +24,18 @@ import jsonpath
import conf
+def get_log_file(operation):
+ return os.path.join(conf.LOGGER_FILE_DIR, "%s.txt" % operation)
+
+
+def get_download_dir(om_ip):
+ return os.path.join(conf.PNF_SIMULATORS_DIR, om_ip, conf.COMMON_PATH, conf.PNF_SW_DOWNLOAD_DIR)
+
+
+def get_install_dir(om_ip):
+ return os.path.join(conf.PNF_SIMULATORS_DIR, om_ip, conf.COMMON_PATH, conf.PNF_SW_INSTALL_DIR)
+
+
def get_ne_info_list_from_db(ne_filter):
with open(conf.NE_INFO_TABLE) as f_ne_info:
ne_info_table = json.load(f_ne_info)
@@ -47,7 +76,7 @@ def update_ne_info(ne_info):
def send_notification(notification, process_id):
- notification_file = conf.NOTIFICATION_DIR + '/%s-%d' % (notification['notificationType'], process_id)
+ notification_file = os.path.join(conf.NOTIFICATION_DIR, '%s-%d' % (notification['notificationType'], process_id))
with open(notification_file, 'w') as f_notification:
f_notification.write(json.dumps(notification))
diff --git a/test/mocks/emssimulator/swm/installNESw b/test/mocks/emssimulator/swm/installNESw
index 84e2fb9ae..e56f799f7 100755
--- a/test/mocks/emssimulator/swm/installNESw
+++ b/test/mocks/emssimulator/swm/installNESw
@@ -1,188 +1,28 @@
-#!/usr/bin/python
+#!/usr/bin/python3
+# ============LICENSE_START=======================================================
+# ONAP - SO
+# ================================================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
import sys
import argparse
import json
-import os
-import shutil
-import random
-import time
-import tempfile
-import zipfile
import conf
-import ems_util
-
-
-def do_install_sw(sw_to_be_installed, ne_info):
- """
- return err, reason, installed_ne_sw
- """
-
- sw_install_dir_in_ne = conf.PNF_SIMULATORS_DIR + '/' + ne_info['omIP'] + conf.PNF_SW_INSTALL_DIR
-
- if sw_to_be_installed.startswith('/'):
- file_location = sw_to_be_installed
- else:
- sw_download_dir_in_ne = ne_info.get("downloadedSwLocation", "")
- file_location = sw_download_dir_in_ne + '/' + sw_to_be_installed
-
- if not os.access(file_location, os.R_OK):
- return True, "Missing to be installed SW file %s" % file_location, None
-
- try:
- if not os.path.isdir(sw_install_dir_in_ne):
- os.makedirs(sw_install_dir_in_ne)
- except OSError as e:
- return True, str(e), None
-
- temp_dir = tempfile.mkdtemp(dir=sw_install_dir_in_ne)
- if file_location.endswith(".zip"):
- with zipfile.ZipFile(file_location) as sw_zip:
- sw_zip.extractall(temp_dir)
- else:
- return True, "Only support zip file", None
-
- manifest_location = temp_dir + '/' + conf.MANIFEST_FILE
- if os.access(manifest_location, os.R_OK):
- with open(manifest_location) as f_manifest:
- manifest = json.load(f_manifest)
- else:
- shutil.rmtree(temp_dir, ignore_errors=True)
- return True, "Missing manifest file in %s" % file_location, None
-
- try:
- target_sw_name = manifest["name"]
- target_sw_version = manifest["version"]
- except KeyError as e:
- shutil.rmtree(temp_dir, ignore_errors=True)
- return True, "Missing key %s in %s of %s" % (str(e), conf.MANIFEST_FILE, file_location), None
-
- if "targetSwVersion" in ne_info and ne_info["targetSwVersion"] != target_sw_version:
- shutil.rmtree(temp_dir, ignore_errors=True)
- return True, "Conflicted targetVersion for %s" % file_location, None
-
- ne_info["targetSwVersion"] = target_sw_version
- ems_util.update_ne_info(ne_info)
-
- target_sw_parent_dir = sw_install_dir_in_ne + '/' + target_sw_version
- try:
- if not os.path.isdir(target_sw_parent_dir):
- os.makedirs(target_sw_parent_dir)
- except OSError as e:
- shutil.rmtree(temp_dir, ignore_errors=True)
- return True, str(e), None
-
- target_sw_dir = target_sw_parent_dir + '/' + target_sw_name
- if os.path.isdir(target_sw_dir):
- shutil.rmtree(target_sw_dir, ignore_errors=True)
-
- try:
- shutil.move(temp_dir, target_sw_dir)
- except shutil.Error as e:
- shutil.rmtree(temp_dir, ignore_errors=True)
- return True, str(e), None
-
- installed_ne_sw = target_sw_name + '-' + target_sw_version
-
- installed_sw_db = target_sw_parent_dir + '/' + conf.INSTALLED_SW
- if os.path.isfile(installed_sw_db):
- with open(installed_sw_db) as f_installed_sw:
- installed_sw_table = json.load(f_installed_sw)
- if not installed_sw_table:
- installed_sw_table = {}
- else:
- installed_sw_table = {}
-
- target_sw_info = {
- "name": target_sw_name,
- "version": target_sw_version,
- "installedLocation": target_sw_dir
- }
- installed_sw_table[installed_ne_sw] = target_sw_info
-
- with open(installed_sw_db, 'w') as f_installed_sw:
- json.dump(installed_sw_table, f_installed_sw, indent=2)
-
- ne_info["installedSw"] = installed_sw_table
-
- return False, None, installed_ne_sw
-
-
-def generate_notification(install_process_id, install_status, installed_ne_sw_info, failed_sw_info):
- notification = {
- "objectClass": "EMSClass",
- "objectInstance": "EMSInstance",
- "notificationId": random.randint(1, conf.MAX_INT),
- "eventTime": time.asctime(),
- "systemDN": "emssimulator",
- "notificationType": "notifyInstallNESwStatusChanged",
- "installProcessId": install_process_id,
- "installOperationStatus": install_status
- }
-
- if installed_ne_sw_info:
- notification["installedNESwInfo"] = installed_ne_sw_info
-
- if failed_sw_info:
- notification["failedSwInfo"] = failed_sw_info
-
- return notification
-
-
-def install_ne_sw(sw_to_be_installed, ne_id):
- ne_info = ems_util.get_ne_info_from_db_by_id(ne_id)
-
- install_process_id = random.randint(1, conf.MAX_INT)
- result = conf.REQ_SUCCESS
- ret_value = {
- "installProcessId": install_process_id,
- "result": result
- }
-
- if not ne_info:
- ret_value["result"] = conf.REQ_FAILURE
- ret_value["reason"] = "Can not find NE %s" % ne_id
- return ret_value
-
- ne_info["status"] = conf.STATUS_INSTALLING
- ems_util.update_ne_info(ne_info)
-
- installed_ne_sw_info = []
- failed_sw_info = []
-
- err, reason, installed_ne_sw = do_install_sw(sw_to_be_installed, ne_info)
-
- if not err:
- installed_ne_sw_info.append(installed_ne_sw)
- else:
- result = conf.REQ_FAILURE
- failed_sw_entry = {
- "failedSw": installed_ne_sw,
- "failureReason": reason
- }
- failed_sw_info.append(failed_sw_entry)
-
- num_installed_ne_sw = len(installed_ne_sw_info)
-
- if num_installed_ne_sw == 1:
- install_status = "NE_SWINSTALLATION_SUCCESSFUL"
- elif num_installed_ne_sw == 0:
- install_status = "NE_SWINSTALLATION_FAILED"
- else:
- install_status = "NE_SWINSTALLATION_PARTIALLY_SUCCESSFUL"
-
- notification = generate_notification(install_process_id, install_status, installed_ne_sw_info, failed_sw_info)
- ems_util.send_notification(notification, install_process_id)
-
- if result == conf.REQ_SUCCESS:
- ems_util.update_ne_info(ne_info)
- else:
- ret_value["result"] = result
- ret_value["reason"] = json.dumps(failed_sw_info)
-
- # for automated software management, there is no listOfStepNumbersAndDurations
- return ret_value
+import install_n_e_sw
def main():
@@ -193,8 +33,8 @@ def main():
args = parser.parse_args()
- ret_value = install_ne_sw(args.swToBeInstalled, args.neIdentifier)
- print json.dumps(ret_value)
+ _, ret_value = install_n_e_sw.install(args.swToBeInstalled, args.neIdentifier)
+ print(json.dumps(ret_value))
if ret_value["result"] == conf.REQ_SUCCESS:
sys.exit(conf.RET_CODE_SUCCESS)
diff --git a/test/mocks/emssimulator/swm/install_n_e_sw.py b/test/mocks/emssimulator/swm/install_n_e_sw.py
new file mode 100644
index 000000000..57c59a389
--- /dev/null
+++ b/test/mocks/emssimulator/swm/install_n_e_sw.py
@@ -0,0 +1,234 @@
+# ============LICENSE_START=======================================================
+# ONAP - SO
+# ================================================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+import os
+import logging
+import json
+import shutil
+import random
+import time
+import tempfile
+import zipfile
+
+import conf
+import ems_util
+
+
+OPERATION_NAME = "installNESw"
+logging.basicConfig(level=logging.INFO, format=conf.LOGGER_FORMAT, filename=ems_util.get_log_file(OPERATION_NAME))
+logger = logging.getLogger(OPERATION_NAME)
+
+
+def do_install(sw_to_be_installed, ne_info):
+ """
+ return err, reason, installed_ne_sw
+ """
+
+ logger.info("swToBeInstalled: %s" % sw_to_be_installed)
+
+ sw_install_dir_in_ne = ems_util.get_install_dir(ne_info['omIP'])
+
+ if sw_to_be_installed.startswith('/'):
+ file_location = sw_to_be_installed
+ else:
+ sw_download_dir_in_ne = ne_info.get("downloadedSwLocation", "")
+ file_location = os.path.join(sw_download_dir_in_ne, sw_to_be_installed)
+
+ if not os.access(file_location, os.R_OK):
+ msg = "Missing to be installed SW file %s" % file_location
+ logger.error(msg)
+ return True, msg, None
+
+ try:
+ if not os.path.isdir(sw_install_dir_in_ne):
+ os.makedirs(sw_install_dir_in_ne)
+ except OSError as e:
+ msg = str(e)
+ logger.error(msg)
+ return True, msg, None
+
+ temp_dir = tempfile.mkdtemp(dir=sw_install_dir_in_ne)
+ if file_location.endswith(".zip"):
+ with zipfile.ZipFile(file_location) as sw_zip:
+ sw_zip.extractall(temp_dir)
+ else:
+ msg = "Only support zip file"
+ logger.error(msg)
+ return True, msg, None
+
+ manifest_location = os.path.join(temp_dir, conf.MANIFEST_FILE)
+ if os.access(manifest_location, os.R_OK):
+ with open(manifest_location) as f_manifest:
+ manifest = json.load(f_manifest)
+ else:
+ shutil.rmtree(temp_dir, ignore_errors=True)
+ msg = "Missing manifest file in %s" % file_location
+ logger.error(msg)
+ return True, msg, None
+
+ try:
+ target_sw_name = manifest["name"]
+ target_sw_version = manifest["version"]
+ except KeyError as e:
+ shutil.rmtree(temp_dir, ignore_errors=True)
+ msg = "Missing key %s in %s of %s" % (str(e), conf.MANIFEST_FILE, file_location)
+ logger.error(msg)
+ return True, msg, None
+
+ if "targetSwVersion" in ne_info and ne_info["targetSwVersion"] != target_sw_version:
+ shutil.rmtree(temp_dir, ignore_errors=True)
+ msg = "Conflicted targetVersion for %s" % file_location
+ logger.error(msg)
+ return True, msg, None
+
+ ne_info["targetSwVersion"] = target_sw_version
+ ems_util.update_ne_info(ne_info)
+
+ target_sw_parent_dir = os.path.join(sw_install_dir_in_ne, target_sw_version)
+ try:
+ if not os.path.isdir(target_sw_parent_dir):
+ os.makedirs(target_sw_parent_dir)
+ except OSError as e:
+ shutil.rmtree(temp_dir, ignore_errors=True)
+ msg = str(e)
+ logger.error(msg)
+ return True, msg, None
+
+ target_sw_dir = os.path.join(target_sw_parent_dir, target_sw_name)
+ if os.path.isdir(target_sw_dir):
+ shutil.rmtree(target_sw_dir, ignore_errors=True)
+
+ try:
+ shutil.move(temp_dir, target_sw_dir)
+ except shutil.Error as e:
+ shutil.rmtree(temp_dir, ignore_errors=True)
+ msg = str(e)
+ logger.error(msg)
+ return True, msg, None
+ logger.info("Install SW to %s" % target_sw_dir)
+
+ installed_ne_sw = target_sw_name + '-' + target_sw_version
+ logger.info("Installed SW: %s" % installed_ne_sw)
+
+ installed_sw_db = os.path.join(target_sw_parent_dir, conf.INSTALLED_SW_FILE)
+ if os.path.isfile(installed_sw_db):
+ with open(installed_sw_db) as f_installed_sw:
+ installed_sw_table = json.load(f_installed_sw)
+ if not installed_sw_table:
+ installed_sw_table = {}
+ else:
+ installed_sw_table = {}
+
+ target_sw_info = {
+ "name": target_sw_name,
+ "version": target_sw_version,
+ "installedLocation": target_sw_dir
+ }
+ installed_sw_table[installed_ne_sw] = target_sw_info
+
+ with open(installed_sw_db, 'w') as f_installed_sw:
+ json.dump(installed_sw_table, f_installed_sw, indent=2)
+
+ ne_info["installedSw"] = installed_sw_table
+
+ return False, None, installed_ne_sw
+
+
+def generate_notification(install_process_id, install_status, installed_ne_sw_info, failed_sw_info):
+ notification = {
+ "objectClass": conf.OBJECT_CLASS,
+ "objectInstance": conf.OBJECT_INSTANCE,
+ "notificationId": random.randint(1, conf.MAX_INT),
+ "eventTime": time.asctime(),
+ "systemDN": conf.SYSTEM_DN,
+ "notificationType": "notifyInstallNESwStatusChanged",
+ "installProcessId": install_process_id,
+ "installOperationStatus": install_status
+ }
+
+ if installed_ne_sw_info:
+ notification["installedNESwInfo"] = installed_ne_sw_info
+
+ if failed_sw_info:
+ notification["failedSwInfo"] = failed_sw_info
+
+ return notification
+
+
+def install(sw_to_be_installed, ne_id):
+ ne_info = ems_util.get_ne_info_from_db_by_id(ne_id)
+
+ install_process_id = random.randint(1, conf.MAX_INT)
+ result = conf.REQ_SUCCESS
+ ret_value = {
+ "installProcessId": install_process_id,
+ "result": result
+ }
+
+ if not ne_info:
+ ret_value["result"] = conf.REQ_FAILURE
+ ret_value["reason"] = "Can not find NE %s" % ne_id
+
+ logger.error(ret_value["reason"])
+ return ret_value
+
+ ne_info["status"] = conf.STATUS_INSTALLING
+ ems_util.update_ne_info(ne_info)
+
+ installed_ne_sw_info = []
+ failed_sw_info = []
+
+ err, reason, installed_ne_sw = do_install(sw_to_be_installed, ne_info)
+
+ if not err:
+ installed_ne_sw_info.append(installed_ne_sw)
+ else:
+ result = conf.REQ_FAILURE
+ failed_sw_entry = {
+ "failedSw": installed_ne_sw,
+ "failureReason": reason
+ }
+
+ logger.error("Failed to install SW: %s" % str(failed_sw_entry))
+ failed_sw_info.append(failed_sw_entry)
+
+ num_installed_ne_sw = len(installed_ne_sw_info)
+
+ if num_installed_ne_sw == 1:
+ install_status = "NE_SWINSTALLATION_SUCCESSFUL"
+ elif num_installed_ne_sw == 0:
+ install_status = "NE_SWINSTALLATION_FAILED"
+ else:
+ install_status = "NE_SWINSTALLATION_PARTIALLY_SUCCESSFUL"
+ logger.info("Install SW status: %s" % install_status)
+
+ notification = generate_notification(install_process_id, install_status, installed_ne_sw_info, failed_sw_info)
+ ems_util.send_notification(notification, install_process_id)
+
+ if result == conf.REQ_SUCCESS:
+ ems_util.update_ne_info(ne_info)
+
+ logger.info("Install SW success")
+ else:
+ ret_value["result"] = result
+ ret_value["reason"] = json.dumps(failed_sw_info)
+
+ logger.info("Install SW failure, reason: %s" % ret_value["reason"])
+
+ # for automated software management, there is no listOfStepNumbersAndDurations
+ return notification, ret_value
diff --git a/test/mocks/emssimulator/swm/pnf_simulators/192.168.1.1/opt/install/v1/installed_sw.json b/test/mocks/emssimulator/swm/pnf_simulators/192.168.1.1/opt/install/v1/installed_sw.json
index 9c1921521..e7f13a5dd 100644
--- a/test/mocks/emssimulator/swm/pnf_simulators/192.168.1.1/opt/install/v1/installed_sw.json
+++ b/test/mocks/emssimulator/swm/pnf_simulators/192.168.1.1/opt/install/v1/installed_sw.json
@@ -1,12 +1,12 @@
{
"ran_du_pkg2-v1": {
- "installedLocation": "/home/ubuntu/swm/pnf_simulators/192.168.1.1/opt/install/v1/ran_du_pkg2",
- "version": "v1",
+ "installedLocation": "/home/admin/swm/pnf_simulators/192.168.1.1/opt/install/v1/ran_du_pkg2",
+ "version": "v1",
"name": "ran_du_pkg2"
- },
+ },
"ran_du_pkg1-v1": {
- "installedLocation": "/home/ubuntu/swm/pnf_simulators/192.168.1.1/opt/install/v1/ran_du_pkg1",
- "version": "v1",
+ "installedLocation": "/home/admin/swm/pnf_simulators/192.168.1.1/opt/install/v1/ran_du_pkg1",
+ "version": "v1",
"name": "ran_du_pkg1"
}
}
diff --git a/test/mocks/emssimulator/swm/pnf_simulators/192.168.1.2/opt/install/v1/installed_sw.json b/test/mocks/emssimulator/swm/pnf_simulators/192.168.1.2/opt/install/v1/installed_sw.json
index 31cddf9a1..bd63618c2 100644
--- a/test/mocks/emssimulator/swm/pnf_simulators/192.168.1.2/opt/install/v1/installed_sw.json
+++ b/test/mocks/emssimulator/swm/pnf_simulators/192.168.1.2/opt/install/v1/installed_sw.json
@@ -1,12 +1,12 @@
{
"ran_du_pkg2-v1": {
- "installedLocation": "/home/ubuntu/swm/pnf_simulators/192.168.1.2/opt/install/v1/ran_du_pkg2",
- "version": "v1",
+ "installedLocation": "/home/admin/swm/pnf_simulators/192.168.1.2/opt/install/v1/ran_du_pkg2",
+ "version": "v1",
"name": "ran_du_pkg2"
- },
+ },
"ran_du_pkg1-v1": {
- "installedLocation": "/home/ubuntu/swm/pnf_simulators/192.168.1.2/opt/install/v1/ran_du_pkg1",
- "version": "v1",
+ "installedLocation": "/home/admin/swm/pnf_simulators/192.168.1.2/opt/install/v1/ran_du_pkg1",
+ "version": "v1",
"name": "ran_du_pkg1"
}
}
diff --git a/test/mocks/emssimulator/swm/pnf_simulators/192.168.1.2/opt/install/v2/installed_sw.json b/test/mocks/emssimulator/swm/pnf_simulators/192.168.1.2/opt/install/v2/installed_sw.json
index e55c7f5f0..3fed1839f 100644
--- a/test/mocks/emssimulator/swm/pnf_simulators/192.168.1.2/opt/install/v2/installed_sw.json
+++ b/test/mocks/emssimulator/swm/pnf_simulators/192.168.1.2/opt/install/v2/installed_sw.json
@@ -1,12 +1,12 @@
{
"ran_du_pkg1-v2": {
- "installedLocation": "/home/ubuntu/swm/pnf_simulators/192.168.1.2/opt/install/v2/ran_du_pkg1",
- "version": "v2",
+ "installedLocation": "/home/admin/swm/pnf_simulators/192.168.1.2/opt/install/v2/ran_du_pkg1",
+ "version": "v2",
"name": "ran_du_pkg1"
- },
+ },
"ran_du_pkg2-v2": {
- "installedLocation": "/home/ubuntu/swm/pnf_simulators/192.168.1.2/opt/install/v2/ran_du_pkg2",
- "version": "v2",
+ "installedLocation": "/home/admin/swm/pnf_simulators/192.168.1.2/opt/install/v2/ran_du_pkg2",
+ "version": "v2",
"name": "ran_du_pkg2"
}
}
diff --git a/test/mocks/emssimulator/swm/swFallback b/test/mocks/emssimulator/swm/swFallback
index 9d6608c23..506eee0d4 100755
--- a/test/mocks/emssimulator/swm/swFallback
+++ b/test/mocks/emssimulator/swm/swFallback
@@ -1,108 +1,29 @@
-#!/usr/bin/python
+#!/usr/bin/python3
+# ============LICENSE_START=======================================================
+# ONAP - SO
+# ================================================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+import sys
import argparse
import json
-import sys
-import os
-import shutil
import conf
import ems_util
-
-
-def sw_fallback(ne_info_list):
- ne_list = []
- num_failure = 0
-
- for ne_info in ne_info_list:
- if ne_info.get("status") == conf.STATUS_DOWNLOADING:
- ne_info["status"] = conf.STATUS_ACTIVATED
- ems_util.update_ne_info(ne_info)
-
- ne_entry = {
- "nEIdentification": ne_info["nEIdentification"],
- "swFallbackStatus": "fallbackSuccessful"
- }
- ne_list.append(ne_entry)
- continue
-
- sw_install_dir_in_ne = conf.PNF_SIMULATORS_DIR + '/' + ne_info['omIP'] + conf.PNF_SW_INSTALL_DIR
-
- if ne_info.get("status") == conf.STATUS_INSTALLING:
- old_sw_version = ne_info.get("currentSwVersion", "")
- current_sw_version = ne_info.get("targetSwVersion", "")
- else:
- old_sw_version = ne_info.get("oldSwVersion", "")
- current_sw_version = ne_info.get("currentSwVersion", "")
-
- old_sw_dir = sw_install_dir_in_ne + '/' + old_sw_version
-
- if not old_sw_version or not os.path.isdir(old_sw_dir):
- ne_entry = {
- "nEIdentification": ne_info["nEIdentification"],
- "swFallbackStatus": "fallbackUnsuccessful"
- }
- ne_list.append(ne_entry)
-
- num_failure += 1
- continue
-
- current_sw_dir = sw_install_dir_in_ne + '/' + current_sw_version
-
- if current_sw_version and os.path.isdir(current_sw_dir) and current_sw_dir != old_sw_dir:
- shutil.rmtree(current_sw_dir, ignore_errors=True)
-
- old_cwd = os.getcwd()
- os.chdir(sw_install_dir_in_ne)
- if os.path.islink(conf.CURRENT_VERSION_DIR):
- os.remove(conf.CURRENT_VERSION_DIR)
- os.symlink(old_sw_version, conf.CURRENT_VERSION_DIR)
- os.chdir(old_cwd)
-
- installed_sw_db = old_sw_dir + '/' + conf.INSTALLED_SW
- if os.path.isfile(installed_sw_db):
- with open(installed_sw_db) as f_installed_sw:
- installed_sw_table = json.load(f_installed_sw)
- if not installed_sw_table:
- installed_sw_table = {}
- else:
- installed_sw_table = {}
-
- ne_info["installedSw"] = installed_sw_table
- if "oldSwVersion" in ne_info:
- ne_info["currentSwVersion"] = ne_info["oldSwVersion"]
- del ne_info["oldSwVersion"]
-
- if "targetSwVersion" in ne_info:
- del ne_info["targetSwVersion"]
-
- if "downloadedSwLocation" in ne_info:
- if os.path.isdir(ne_info["downloadedSwLocation"]):
- shutil.rmtree(ne_info["downloadedSwLocation"], ignore_errors=True)
- del ne_info["downloadedSwLocation"]
-
- ne_info["status"] = conf.STATUS_ACTIVATED
- ems_util.update_ne_info(ne_info)
-
- ne_entry = {
- "nEIdentification": ne_info["nEIdentification"],
- "swFallbackStatus": "fallbackSuccessful"
- }
- ne_list.append(ne_entry)
-
- if num_failure == 0:
- result = conf.RESULT_SUCCESS
- elif num_failure == len(ne_info_list):
- result = conf.RESULT_FAILURE
- else:
- result = conf.RESULT_PARTLY
-
- ret_value = {
- "nEList": ne_list,
- "result": result
- }
-
- return ret_value
+import sw_fallback
def main():
@@ -114,8 +35,8 @@ def main():
ne_info_list = ems_util.get_ne_info_list_from_db(args.filter)
- ret_value = sw_fallback(ne_info_list)
- print json.dumps(ret_value)
+ ret_value = sw_fallback.fallback(ne_info_list)
+ print(json.dumps(ret_value))
if ret_value["result"] == conf.RESULT_SUCCESS:
sys.exit(conf.RET_CODE_SUCCESS)
diff --git a/test/mocks/emssimulator/swm/sw_fallback.py b/test/mocks/emssimulator/swm/sw_fallback.py
new file mode 100644
index 000000000..3037d4575
--- /dev/null
+++ b/test/mocks/emssimulator/swm/sw_fallback.py
@@ -0,0 +1,129 @@
+# ============LICENSE_START=======================================================
+# ONAP - SO
+# ================================================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+import os
+import logging
+import json
+import shutil
+
+import conf
+import ems_util
+
+
+OPERATION_NAME = "swFallback"
+logging.basicConfig(level=logging.INFO, format=conf.LOGGER_FORMAT, filename=ems_util.get_log_file(OPERATION_NAME))
+logger = logging.getLogger(OPERATION_NAME)
+
+
+def fallback(ne_info_list):
+ logger.info("NE info list: %s" % ne_info_list)
+
+ ne_list = []
+ num_failure = 0
+
+ for ne_info in ne_info_list:
+ if ne_info.get("status") == conf.STATUS_DOWNLOADING:
+ ne_info["status"] = conf.STATUS_ACTIVATED
+ ems_util.update_ne_info(ne_info)
+
+ ne_entry = {
+ "nEIdentification": ne_info["nEIdentification"],
+ "swFallbackStatus": "fallbackSuccessful"
+ }
+ ne_list.append(ne_entry)
+ continue
+
+ sw_install_dir_in_ne = ems_util.get_install_dir(ne_info['omIP'])
+
+ if ne_info.get("status") == conf.STATUS_INSTALLING:
+ old_sw_version = ne_info.get("currentSwVersion", "")
+ current_sw_version = ne_info.get("targetSwVersion", "")
+ else:
+ old_sw_version = ne_info.get("oldSwVersion", "")
+ current_sw_version = ne_info.get("currentSwVersion", "")
+
+ old_sw_dir = os.path.join(sw_install_dir_in_ne, old_sw_version)
+
+ if not old_sw_version or not os.path.isdir(old_sw_dir):
+ ne_entry = {
+ "nEIdentification": ne_info["nEIdentification"],
+ "swFallbackStatus": "fallbackUnsuccessful"
+ }
+ logger.error("oldSwVersion (%s) is empty or old SW directory (%s) does not exist" % (old_sw_version, old_sw_dir))
+ ne_list.append(ne_entry)
+
+ num_failure += 1
+ continue
+
+ current_sw_dir = os.path.join(sw_install_dir_in_ne, current_sw_version)
+
+ if current_sw_version and os.path.isdir(current_sw_dir) and current_sw_dir != old_sw_dir:
+ shutil.rmtree(current_sw_dir, ignore_errors=True)
+
+ old_cwd = os.getcwd()
+ os.chdir(sw_install_dir_in_ne)
+ if os.path.islink(conf.CURRENT_VERSION_DIR):
+ os.remove(conf.CURRENT_VERSION_DIR)
+ os.symlink(old_sw_version, conf.CURRENT_VERSION_DIR)
+ os.chdir(old_cwd)
+
+ installed_sw_db = os.path.join(old_sw_dir, conf.INSTALLED_SW_FILE)
+ if os.path.isfile(installed_sw_db):
+ with open(installed_sw_db) as f_installed_sw:
+ installed_sw_table = json.load(f_installed_sw)
+ if not installed_sw_table:
+ installed_sw_table = {}
+ else:
+ installed_sw_table = {}
+
+ ne_info["installedSw"] = installed_sw_table
+ if "oldSwVersion" in ne_info:
+ ne_info["currentSwVersion"] = ne_info["oldSwVersion"]
+ del ne_info["oldSwVersion"]
+
+ if "targetSwVersion" in ne_info:
+ del ne_info["targetSwVersion"]
+
+ if "downloadedSwLocation" in ne_info:
+ if os.path.isdir(ne_info["downloadedSwLocation"]):
+ shutil.rmtree(ne_info["downloadedSwLocation"], ignore_errors=True)
+ del ne_info["downloadedSwLocation"]
+
+ ne_info["status"] = conf.STATUS_ACTIVATED
+ ems_util.update_ne_info(ne_info)
+
+ ne_entry = {
+ "nEIdentification": ne_info["nEIdentification"],
+ "swFallbackStatus": "fallbackSuccessful"
+ }
+ ne_list.append(ne_entry)
+
+ if num_failure == 0:
+ result = conf.RESULT_SUCCESS
+ elif num_failure == len(ne_info_list):
+ result = conf.RESULT_FAILURE
+ else:
+ result = conf.RESULT_PARTLY
+ logger.info("Fallback SW result: %s" % result)
+
+ ret_value = {
+ "nEList": ne_list,
+ "result": result
+ }
+
+ return ret_value
diff --git a/test/mocks/emssimulator/swm/upgrade-post-check b/test/mocks/emssimulator/swm/upgrade-post-check
index 799afbccc..e5dfae573 100755
--- a/test/mocks/emssimulator/swm/upgrade-post-check
+++ b/test/mocks/emssimulator/swm/upgrade-post-check
@@ -1,74 +1,52 @@
-#!/usr/bin/python
+#!/usr/bin/python3
+# ============LICENSE_START=======================================================
+# ONAP - SO
+# ================================================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
import sys
import json
import conf
-import ems_util
-
-
-def upgrade_postcheck(pnf_id, old_sw_version, target_sw_version, rule_name, tmp_file=None):
- ne_info = ems_util.get_ne_info_from_db_by_id(pnf_id)
-
- if not ne_info:
- ret_value = {
- "result": conf.RESULT_FAILURE,
- "reason": "Can not find NE %s" % pnf_id
- }
- return ret_value
-
- old_sw_version_in_db = ne_info.get("oldSwVersion", "")
- current_sw_version_in_db = ne_info.get("currentSwVersion", "")
-
- if old_sw_version != old_sw_version_in_db:
- ret_value = {
- "result": conf.RESULT_FAILURE,
- "reason": "Old SW version %s in PNF is not matched with oldSwVersion %s" %
- (old_sw_version_in_db, old_sw_version)
- }
- return ret_value
-
- if target_sw_version != current_sw_version_in_db:
- ret_value = {
- "result": conf.RESULT_FAILURE,
- "reason": "Current SW version %s in PNF is not matched with targetSwVersion %s" %
- (current_sw_version_in_db, target_sw_version)
- }
- return ret_value
-
- ne_info["checkStatus"] = conf.STATUS_POSTCHECKED
- ems_util.update_ne_info(ne_info)
-
- ret_value = {
- "result": conf.RESULT_SUCCESS
- }
-
- return ret_value
+import upgrade_post_check
def main():
- # {{pnfId}} {{oldSwVersion}} {{targetSwVersion}} {{ruleName}} /tmp/tmp-{{Id}}
+ # {{pnfName}} {{oldSwVersion}} {{targetSwVersion}} {{ruleName}} {{additionalDataFile}}
if len(sys.argv) < 5:
ret_value = {
"result": conf.RESULT_FAILURE,
"reason": "Missing parameters"
}
- print json.dumps(ret_value)
+ print(json.dumps(ret_value))
sys.exit(conf.RET_CODE_FAILURE)
-
- if len(sys.argv) >= 5:
- pnf_id = sys.argv[1]
+ else:
+ pnf_name = sys.argv[1]
old_sw_version = sys.argv[2]
target_sw_version = sys.argv[3]
rule_name = sys.argv[4]
- tmp_file = None
+ additional_data_file = None
if len(sys.argv) >= 6:
- tmp_file = sys.argv[5]
+ additional_data_file = sys.argv[5]
- ret_value = upgrade_postcheck(pnf_id, old_sw_version, target_sw_version, rule_name, tmp_file)
- print json.dumps(ret_value)
+ ret_value = upgrade_post_check.post_check(
+ pnf_name, old_sw_version, target_sw_version, rule_name, additional_data_file)
+ print(json.dumps(ret_value))
if ret_value["result"] == conf.RESULT_SUCCESS:
sys.exit(conf.RET_CODE_SUCCESS)
diff --git a/test/mocks/emssimulator/swm/upgrade-pre-check b/test/mocks/emssimulator/swm/upgrade-pre-check
index 6bf867f48..5b0726b63 100755
--- a/test/mocks/emssimulator/swm/upgrade-pre-check
+++ b/test/mocks/emssimulator/swm/upgrade-pre-check
@@ -1,65 +1,52 @@
-#!/usr/bin/python
+#!/usr/bin/python3
+# ============LICENSE_START=======================================================
+# ONAP - SO
+# ================================================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
import sys
import json
import conf
-import ems_util
-
-
-def upgrade_precheck(pnf_id, old_sw_version, target_sw_version, rule_name, tmp_file=None):
- ne_info = ems_util.get_ne_info_from_db_by_id(pnf_id)
-
- if not ne_info:
- ret_value = {
- "result": conf.RESULT_FAILURE,
- "reason": "Can not find NE %s" % pnf_id
- }
- return ret_value
-
- current_sw_version_in_db = ne_info.get("currentSwVersion", "")
-
- if old_sw_version != current_sw_version_in_db:
- ret_value = {
- "result": conf.RESULT_FAILURE,
- "reason": "Current SW version %s in PNF is not matched with oldSwVersion %s" %
- (current_sw_version_in_db, old_sw_version)
- }
- return ret_value
-
- ne_info["checkStatus"] = conf.STATUS_PRECHECKED
- ems_util.update_ne_info(ne_info)
-
- ret_value = {
- "result": conf.RESULT_SUCCESS
- }
-
- return ret_value
+import upgrade_pre_check
def main():
- # {{pnfId}} {{oldSwVersion}} {{targetSwVersion}} {{ruleName}} /tmp/tmp-{{Id}}
+ # {{pnfName}} {{oldSwVersion}} {{targetSwVersion}} {{ruleName}} {{additionalDataFile}}
if len(sys.argv) < 5:
ret_value = {
"result": conf.RESULT_FAILURE,
"reason": "Missing parameters"
}
- print json.dumps(ret_value)
+ print(json.dumps(ret_value))
sys.exit(conf.RET_CODE_FAILURE)
-
- if len(sys.argv) >= 5:
- pnf_id = sys.argv[1]
+ else:
+ pnf_name = sys.argv[1]
old_sw_version = sys.argv[2]
target_sw_version = sys.argv[3]
rule_name = sys.argv[4]
- tmp_file = None
+ additional_data_file = None
if len(sys.argv) >= 6:
- tmp_file = sys.argv[5]
+ additional_data_file = sys.argv[5]
- ret_value = upgrade_precheck(pnf_id, old_sw_version, target_sw_version, rule_name, tmp_file)
- print json.dumps(ret_value)
+ ret_value = upgrade_pre_check.pre_check(
+ pnf_name, old_sw_version, target_sw_version, rule_name, additional_data_file)
+ print(json.dumps(ret_value))
if ret_value["result"] == conf.RESULT_SUCCESS:
sys.exit(conf.RET_CODE_SUCCESS)
diff --git a/test/mocks/emssimulator/swm/upgrade_post_check.py b/test/mocks/emssimulator/swm/upgrade_post_check.py
new file mode 100644
index 000000000..5140e0603
--- /dev/null
+++ b/test/mocks/emssimulator/swm/upgrade_post_check.py
@@ -0,0 +1,76 @@
+# ============LICENSE_START=======================================================
+# ONAP - SO
+# ================================================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+import logging
+
+import conf
+import ems_util
+
+
+OPERATION_NAME = "upgrade-post-check"
+logging.basicConfig(level=logging.INFO, format=conf.LOGGER_FORMAT, filename=ems_util.get_log_file(OPERATION_NAME))
+logger = logging.getLogger(OPERATION_NAME)
+
+
+def post_check(pnf_name, old_sw_version, target_sw_version, rule_name, additional_data_file=None):
+ logger.info("PostCheck for oldSwVersion: %s, targetSwVersion: %s, ruleName: %s, additionalDataFile: %s" %
+ (old_sw_version, target_sw_version, rule_name, additional_data_file))
+
+ ne_info = ems_util.get_ne_info_from_db_by_id(pnf_name)
+
+ if not ne_info:
+ ret_value = {
+ "result": conf.RESULT_FAILURE,
+ "reason": "Can not find NE %s" % pnf_name
+ }
+
+ logger.error(ret_value["reason"])
+ return ret_value
+
+ old_sw_version_in_db = ne_info.get("oldSwVersion", "")
+ current_sw_version_in_db = ne_info.get("currentSwVersion", "")
+
+ if old_sw_version != old_sw_version_in_db:
+ ret_value = {
+ "result": conf.RESULT_FAILURE,
+ "reason": "Old SW version %s in PNF is not matched with oldSwVersion %s" %
+ (old_sw_version_in_db, old_sw_version)
+ }
+
+ logger.error(ret_value["reason"])
+ return ret_value
+
+ if target_sw_version != current_sw_version_in_db:
+ ret_value = {
+ "result": conf.RESULT_FAILURE,
+ "reason": "Current SW version %s in PNF is not matched with targetSwVersion %s" %
+ (current_sw_version_in_db, target_sw_version)
+ }
+
+ logger.error(ret_value["reason"])
+ return ret_value
+
+ ne_info["checkStatus"] = conf.STATUS_POSTCHECKED
+ ems_util.update_ne_info(ne_info)
+ logger.info("PostCheck SW success, check status: %s" % ne_info["checkStatus"])
+
+ ret_value = {
+ "result": conf.RESULT_SUCCESS
+ }
+
+ return ret_value
diff --git a/test/mocks/emssimulator/swm/upgrade_pre_check.py b/test/mocks/emssimulator/swm/upgrade_pre_check.py
new file mode 100644
index 000000000..ee0423acc
--- /dev/null
+++ b/test/mocks/emssimulator/swm/upgrade_pre_check.py
@@ -0,0 +1,65 @@
+# ============LICENSE_START=======================================================
+# ONAP - SO
+# ================================================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+import logging
+
+import conf
+import ems_util
+
+
+OPERATION_NAME = "upgrade-pre-check"
+logging.basicConfig(level=logging.INFO, format=conf.LOGGER_FORMAT, filename=ems_util.get_log_file(OPERATION_NAME))
+logger = logging.getLogger(OPERATION_NAME)
+
+
+def pre_check(pnf_name, old_sw_version, target_sw_version, rule_name, additional_data_file=None):
+ logger.info("PreCheck for oldSwVersion: %s, targetSwVersion: %s, ruleName: %s, additionalDataFile: %s" %
+ (old_sw_version, target_sw_version, rule_name, additional_data_file))
+
+ ne_info = ems_util.get_ne_info_from_db_by_id(pnf_name)
+
+ if not ne_info:
+ ret_value = {
+ "result": conf.RESULT_FAILURE,
+ "reason": "Can not find NE %s" % pnf_name
+ }
+
+ logger.error(ret_value["reason"])
+ return ret_value
+
+ current_sw_version_in_db = ne_info.get("currentSwVersion", "")
+
+ if old_sw_version != current_sw_version_in_db:
+ ret_value = {
+ "result": conf.RESULT_FAILURE,
+ "reason": "Current SW version %s in PNF is not matched with oldSwVersion %s" %
+ (current_sw_version_in_db, old_sw_version)
+ }
+
+ logger.error(ret_value["reason"])
+ return ret_value
+
+ ne_info["checkStatus"] = conf.STATUS_PRECHECKED
+ ems_util.update_ne_info(ne_info)
+ logger.info("PreCheck SW success, check status: %s" % ne_info["checkStatus"])
+
+ ret_value = {
+ "result": conf.RESULT_SUCCESS
+ }
+
+ return ret_value
diff --git a/test/mocks/hvvessimulator/hvves_sim.yaml b/test/mocks/hvvessimulator/hvves_sim.yaml
index c037036f4..dc868b55a 100644
--- a/test/mocks/hvvessimulator/hvves_sim.yaml
+++ b/test/mocks/hvvessimulator/hvves_sim.yaml
@@ -3,7 +3,7 @@ description: Heat template for HV-VES simulator deployment
heat_template_version: 2013-05-23
parameters:
- name: { description: Instance name, label: Name, type: string, default: hvves-sim }
+ name: { description: Instance name, label: Name, type: string, default: hvves-sim }
flavor_name: { description: Instance flavor to be used, label: Flavor Name, type: string }
image_name: { description: Ubuntu 16.04 image to be used, label: Image Name, type: string }
private_net_id: { description: Private network id, label: Private Network ID, type: string }
@@ -222,4 +222,3 @@ outputs:
hvves-sim_public_ip:
description: HV-VES simualtor floating IP
value: { get_attr: [ hvves-sim_public, floating_ip_address ] }
-
diff --git a/test/mocks/mass-pnf-sim/.gitignore b/test/mocks/mass-pnf-sim/.gitignore
deleted file mode 100644
index 7111c7f28..000000000
--- a/test/mocks/mass-pnf-sim/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-pnf-sim-lw-*
-.env/
-pnf-sim-lightweight/logs/*
-files/onap/*
diff --git a/test/mocks/mass-pnf-sim/README.md b/test/mocks/mass-pnf-sim/README.md
deleted file mode 100644
index 07f74e2b7..000000000
--- a/test/mocks/mass-pnf-sim/README.md
+++ /dev/null
@@ -1,48 +0,0 @@
-### Mass PNF simulator
-The purpose of this simulator is to mimic the PNF for benchmark purposes.
-This variant is based on the PNF simulator and use several components.
-The modification are focusing on the following areas:
- -add a script configuring and governing multiple instances of PNF simualtor
- -removing parts which are not required for benchmark purposes.
- -add functionality which creates and maintains the ROP files
- -add functionality to query the actual ROP files and construct VES events based on them
-
-
-
-###Pre-configuration
-The ipstart should align to a /28 Ip address range start (e.g. 10.11.0.16, 10.11.0.32)
-
-For debug purposes, you can use your own IP address as VES collector, use "ip" command to determine it.
-
-Example:
-./mass-pnf-sim.py --bootstrap 2 --urlves http://10.148.95.??:10000/eventListener/v7 --ipfileserver 10.148.95.??? --typefileserver sftp --ipstart 10.11.0.16
-
-Note that the file creator is started at a time of the bootstrapping.
-Stop/start will not re-launch it.
-
-###Replacing VES for test purposes
-`sudo nc -vv -l -k -p 10000`
-
-###Start
-Define the amount of simulators to be launched
-./mass-pnf-sim.py --start 2
-
-###Trigger
-./mass-pnf-sim.py --trigger 2
-
-###Trigger only a subset of the simulators
-./mass-pnf-sim.py --triggerstart 0 --triggerend 3
-#this will trigger 0,1,2,3
-
-./mass-pnf-sim.py --triggerstart 4 --triggerend 5
-#this will trigger 4,5
-
-###Stop and clean
-./mass-pnf-sim.py --stop 2
-./mass-pnf-sim.py --clean
-
-###Verbose printout from Python
-python3 -m trace --trace --count -C . ./mass-pnf-sim.py .....
-
-###Cleaning and recovery after incorrect configuration
-docker stop $(docker ps -aq); docker rm $(docker ps -aq)
diff --git a/test/mocks/mass-pnf-sim/clean.sh b/test/mocks/mass-pnf-sim/clean.sh
deleted file mode 100755
index 28df0ef78..000000000
--- a/test/mocks/mass-pnf-sim/clean.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-
-killall ROP_file_creator.sh
-
-docker stop $(docker ps -aq); docker rm $(docker ps -aq)
-
-./mass-pnf-sim.py --clean
-
diff --git a/test/mocks/mass-pnf-sim/diagnostic.sh b/test/mocks/mass-pnf-sim/diagnostic.sh
deleted file mode 100755
index 99e35cd0a..000000000
--- a/test/mocks/mass-pnf-sim/diagnostic.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/bash
-
-echo "======= docker ps"
-docker ps
-
-echo "======= Docker image cache"
-docker images nexus3.onap.org:10003/onap/masspnf-simulator
-
-export NUM_OF_SIMS=`find pnf-sim-lw* -maxdepth 0 | wc -l`
-echo $NUM_OF_SIMS
-
-if [ "$NUM_OF_SIMS" -gt 0 ];
-then
- echo "======= docker-compose, first instance"
- cat pnf-sim-lw-0/docker-compose.yml
-
- echo "======= Java config.yml, first instance"
- cat pnf-sim-lw-0/config/config.yml
-fi
-
-if (("$NUM_OF_SIMS" > 2));
-then
- echo "======= docker-compose, last instance"
- cat pnf-sim-lw-$(($NUM_OF_SIMS-1))/docker-compose.yml
-
- echo "======= Java config.yml, last instance"
- cat pnf-sim-lw-$(($NUM_OF_SIMS-1))/config/config.yml
-fi
-
-
diff --git a/test/mocks/mass-pnf-sim/mass-pnf-sim.py b/test/mocks/mass-pnf-sim/mass-pnf-sim.py
deleted file mode 100755
index c3a17b0b1..000000000
--- a/test/mocks/mass-pnf-sim/mass-pnf-sim.py
+++ /dev/null
@@ -1,226 +0,0 @@
-#!/usr/bin/env python3
-import argparse
-import sys
-from subprocess import *
-from subprocess import STDOUT
-import subprocess
-import ipaddress
-import time
-
-parser = argparse.ArgumentParser()
-parser.add_argument(
- '--bootstrap',
- help='Bootstrapping the system',
-)
-
-parser.add_argument(
- '--trigger',
- help='Trigger one single VES event from each simulator',
-)
-
-
-parser.add_argument(
- '--triggerstart',
- help='Trigger only a subset of the simulators (note --triggerend)',
-)
-
-parser.add_argument(
- '--triggerend',
- help='Last instance to trigger',
-)
-
-parser.add_argument(
- '--urlves',
- help='URL of the VES collector',
-)
-
-parser.add_argument(
- '--ipfileserver',
- help='Visible IP of the file server (SFTP/FTPS) to be included in the VES event',
-)
-
-parser.add_argument(
- '--typefileserver',
- help='Type of the file server (SFTP/FTPS) to be included in the VES event',
-)
-
-parser.add_argument(
- '--ipstart',
- help='IP address range beginning',
-)
-
-parser.add_argument(
- '--clean',
- action='store_true',
- help='Cleaning work-dirs',
-)
-
-parser.add_argument(
- '--start',
- help='Starting instances',
-)
-
-parser.add_argument(
- '--status',
- help='Status',
-)
-
-parser.add_argument(
- '--stop',
- help='Stopping instances',
-)
-
-args = parser.parse_args()
-
-if args.bootstrap and args.ipstart and args.urlves:
- print("Bootstrap:")
-
- start_port=2000
- ftps_pasv_port_start=8000
- ftps_pasv_port_num_of_ports=10
-
- ftps_pasv_port_end=ftps_pasv_port_start + ftps_pasv_port_num_of_ports
-
-
- for i in range(int(args.bootstrap)):
- print("PNF simulator instance: " + str(i) + ".")
-
- ip_subnet = ipaddress.ip_address(args.ipstart) + int(0 + (i * 16))
- print("\tIp Subnet:" + str(ip_subnet))
- # The IP ranges are in distance of 16 compared to each other.
- # This is matching the /28 subnet mask used in the dockerfile inside.
-
- ip_gw = ipaddress.ip_address(args.ipstart) + int(1 + (i * 16))
- print("\tIP Gateway:" + str(ip_gw))
-
- IpPnfSim = ipaddress.ip_address(args.ipstart) + int(2 + (i * 16))
- print("\tIp Pnf SIM:" + str(IpPnfSim))
-
- IpFileServer = args.ipfileserver
- TypeFileServer = args.typefileserver
-
-
- PortSftp=start_port +1
- PortFtps=start_port +2
- start_port +=2
- UrlFtps = str(ipaddress.ip_address(args.ipstart) + int(3 + (i * 16)))
- print("\tUrl Ftps: " + str(UrlFtps))
-
- UrlSftp = str(ipaddress.ip_address(args.ipstart) + int(4 + (i * 16)))
- print("\tUrl Sftp: " + str(UrlSftp))
-
- foldername = "pnf-sim-lw-" + str(i)
- completed = subprocess.run('mkdir ' + foldername, shell=True)
- print('\tCreating folder:', completed.stdout)
- completed = subprocess.run(
- 'cp -r pnf-sim-lightweight/* ' +
- foldername,
- shell=True)
- print('\tCloning folder:', completed.stdout)
-
- composercmd = "./simulator.sh compose " + \
- str(ip_gw) + " " + \
- str(ip_subnet) + " " + \
- str(i) + " " + \
- str(args.urlves) + " " + \
- str(IpPnfSim) + " " + \
- str(IpFileServer) + " " + \
- str(TypeFileServer) + " " + \
- str(PortSftp) + " " + \
- str(PortFtps) + " " + \
- str(UrlFtps) + " " + \
- str(UrlSftp) + " " + \
- str(ftps_pasv_port_start) + " " + \
- str(ftps_pasv_port_end)
-
- completed = subprocess.run(
- 'set -x; cd ' +
- foldername +
- '; ' +
- composercmd,
- shell=True)
- print('Cloning:', completed.stdout)
-
- ftps_pasv_port_start += ftps_pasv_port_num_of_ports + 1
- ftps_pasv_port_end += ftps_pasv_port_num_of_ports +1
-
- completed = subprocess.run('set -x; cd pnf-sim-lightweight; ./simulator.sh build ', shell=True)
- print("Build docker image: ", completed.stdout)
-
- sys.exit()
-
-if args.clean:
- completed = subprocess.run('rm -rf ./pnf-sim-lw-*', shell=True)
- print('Deleting:', completed.stdout)
- sys.exit()
-
-if args.start:
-
- for i in range(int(args.start)):
- foldername = "pnf-sim-lw-" + str(i)
-
- completed = subprocess.run(
- 'set -x ; cd ' +
- foldername +
- "; bash -x ./simulator.sh start",
- shell=True)
- print('Starting:', completed.stdout)
-
- time.sleep(5)
-
-if args.status:
-
- for i in range(int(args.status)):
- foldername = "pnf-sim-lw-" + str(i)
-
- completed = subprocess.run(
- 'cd ' +
- foldername +
- "; ./simulator.sh status",
- shell=True)
- print('Status:', completed.stdout)
-
-if args.stop:
- for i in range(int(args.stop)):
- foldername = "pnf-sim-lw-" + str(i)
-
- completed = subprocess.run(
- 'cd ' +
- foldername +
- "; ./simulator.sh stop " + str(i),
- shell=True)
- print('Stopping:', completed.stdout)
-
-
-if args.trigger:
- print("Triggering VES sending:")
-
- for i in range(int(args.trigger)):
- foldername = "pnf-sim-lw-" + str(i)
-
- completed = subprocess.run(
- 'cd ' +
- foldername +
- "; ./simulator.sh trigger-simulator",
- shell=True)
- print('Status:', completed.stdout)
-
-if args.triggerstart and args.triggerend:
- print("Triggering VES sending by a range of simulators:")
-
- for i in range(int(args.triggerstart), int(args.triggerend)+1):
- foldername = "pnf-sim-lw-" + str(i)
- print("Instance being processed:" + str(i))
-
- completed = subprocess.run(
- 'cd ' +
- foldername +
- "; ./simulator.sh trigger-simulator",
- shell=True)
- print('Status:', completed.stdout)
-
-
-
-else:
- print("No instruction was defined")
- sys.exit()
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/README.md b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/README.md
deleted file mode 100644
index 0e2b668a4..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/README.md
+++ /dev/null
@@ -1,39 +0,0 @@
-##Local development shortcuts:
-####To start listening on port 10000 for test purposes
-`nc -l -k -p 10000`
-####Test the command above:
-`echo "Hello World" | nc localhost 10000`
-
-####Trigger the pnf simulator locally:
-
-```
-~/dev/git/integration/test/mocks/mass-pnf-sim/pnf-sim-lightweight$ curl -s -X POST -H "Content-Type: application/json" -H "X-ONAP-RequestID: 123" -H "X-InvocationID: 456" -d @config/config.json
-http://localhost:5000/simulator/start
-```
-#### VES event sending
-the default action is to send a VES Message every 15 minutes and the total duration of the VES FileReady Message sending is 1 day (these values can be changed in config/config.json)
-
-Message from the stdout of nc:
-
-```
-POST / HTTP/1.1
-Content-Type: application/json
-X-ONAP-RequestID: 123
-X-InvocationID: 3a256e95-2594-4b11-b25c-68c4baeb5c20
-Content-Length: 734
-Host: localhost:10000
-Connection: Keep-Alive
-User-Agent: Apache-HttpClient/4.5.5 (Java/1.8.0_162)
-Accept-Encoding: gzip,deflate
-```
-
-```javascript
-{"event":{"commonEventHeader":{"startEpochMicrosec":"1551865758690","sourceId":"val13","eventId":"registration_51865758",
-"nfcNamingCode":"oam","internalHeaderFields":{},"priority":"Normal","version":"4.0.1","reportingEntityName":"NOK6061ZW3",
-"sequence":"0","domain":"notification","lastEpochMicrosec":"1551865758690","eventName":"pnfRegistration_Nokia_5gDu",
-"vesEventListenerVersion":"7.0.1","sourceName":"NOK6061ZW3","nfNamingCode":"gNB"},
-"notificationFields":{"notificationFieldsVersion":"2.0","changeType":"FileReady","changeIdentifier":"PM_MEAS_FILES",
-"arrayOfNamedHashMap":[{"name":"10MB.tar.gz","hashMap":{
-"location":"ftpes://10.11.0.68/10MB.tar.gz","fileFormatType":"org.3GPP.32.435#measCollec",
-"fileFormatVersion":"V10","compression":"gzip"}}]}}}
-``` \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/ROP_file_creator.sh b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/ROP_file_creator.sh
deleted file mode 100755
index 6ea6ffde0..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/ROP_file_creator.sh
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/bin/bash
-MAIN_DIRECTORY=${PWD##*/}
-FULL_DIRECTORY=${PWD}
-FILE_DIRECTORY=$FULL_DIRECTORY/files/onap
-FILE_TEMPLATE=$FULL_DIRECTORY/templates/file_template_new.xml.gz
-UPDATE_MINS=15
-NUM_FILES=96
-
-rm -rf $FILE_DIRECTORY
-mkdir -p "$FILE_DIRECTORY"
-
-for ((n=0;n<$NUM_FILES;n++))
-do
- if [[ "$OSTYPE" == "linux-gnu" ]]; then # Linux OS
- DATE=$(date -d $(($UPDATE_MINS*($n+1)-1))" minutes ago" +"%Y%m%d")
- TIME_START=$(date -d $(($UPDATE_MINS*($n+1)-1))" minutes ago" +"%H%M%z")
- TIME_END=$(date -d $(($UPDATE_MINS*$n))" minutes ago" +"%H%M%z")
- elif [[ "$OSTYPE" == "darwin"* ]]; then # Mac OS
- DATE=$(date -v "-"$(($UPDATE_MINS*($n+1)-1))"M" +"%Y%m%d")
- TIME_START=$(date -v "-"$(($UPDATE_MINS*($n+1)-1))"M" +"%H%M%z")
- TIME_END=$(date -v "-"$(($UPDATE_MINS*$n))"M" +"%H%M%z")
- else
- echo "ERROR: OS not supported"
- exit 1
- fi
-
- FILE_NAME_TIMESTAMP="A"$DATE"."$TIME_START"-"$TIME_END
- FILE_NAME=$FILE_NAME_TIMESTAMP"_"$HOSTNAME"-"$MAIN_DIRECTORY".xml.gz"
- cp $FILE_TEMPLATE $FILE_DIRECTORY/$FILE_NAME
-
- TIMESTAMP_ARRAY[$n]=$FILE_NAME_TIMESTAMP
-done
-
-while true
-do
- sleep $(($UPDATE_MINS*60))
- OLD_TIMESTAMP=${TIMESTAMP_ARRAY[$NUM_FILES-1]}
- unset TIMESTAMP_ARRAY[$NUM_FILES-1]
-
- TIME_END=$(date +"%H%M%z")
- if [[ "$OSTYPE" == "linux-gnu" ]]; then # Linux OS
- DATE=$(date -d $(($UPDATE_MINS-1))" minutes ago" +"%Y%m%d")
- TIME_START=$(date -d $(($UPDATE_MINS-1))" minutes ago" +"%H%M%z")
- elif [[ "$OSTYPE" == "darwin"* ]]; then # Mac OS
- DATE=$(date -v "-"$(($UPDATE_MINS-1))"M" +"%Y%m%d")
- TIME_START=$(date -v "-"$(($UPDATE_MINS-1))"M" +"%H%M%z")
- else
- echo "ERROR: OS not supported"
- exit 1
- fi
-
- NEW_TIMESTAMP="A"$DATE"."$TIME_START"-"$TIME_END
- OLD_FILE_NAME=$OLD_TIMESTAMP"_"$HOSTNAME"-"$MAIN_DIRECTORY".xml.gz"
- NEW_FILE_NAME=$NEW_TIMESTAMP"_"$HOSTNAME"-"$MAIN_DIRECTORY".xml.gz"
- mv $FILE_DIRECTORY/$OLD_FILE_NAME $FILE_DIRECTORY/$NEW_FILE_NAME
- #echo "Renamed OLD file: "$OLD_FILE_NAME" to NEW file: "$NEW_FILE_NAME # uncomment for debugging
-
- TIMESTAMP_ARRAY=("$NEW_TIMESTAMP" "${TIMESTAMP_ARRAY[@]}")
-done
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/config.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/config.json
deleted file mode 100644
index 9d2ba7e08..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/config.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
- "simulatorParams": {
- "testDuration": "86400",
- "messageInterval": "900"
- },
- "commonEventHeaderParams": {
- "eventName": "Noti_RnNode-Ericsson_FileReady",
- "nfNamingCode": "gNB",
- "nfcNamingCode": "oam"
- },
- "notificationParams": {
- "changeIdentifier": "PM_MEAS_FILES",
- "changeType": "FileReady"
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/config.yml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/config.yml
deleted file mode 100644
index f21329eff..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/config.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-urlves: http://localhost:10000/eventListener/v7
-urlftps: ftps://onap:pano@10.11.0.67
-urlsftp: sftp://onap:pano@10.11.0.68
-#when run in simulator, it does not have own IP
-ippnfsim: localhost
-typefileserver: sftp
-...
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/netconf.env b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/netconf.env
deleted file mode 100644
index ef79365ec..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/netconf.env
+++ /dev/null
@@ -1,10 +0,0 @@
-
-//to run in simulator
-//NETCONF_ADDRESS=netopeer
-
-//to run in docker
-NETCONF_ADDRESS=localhost
-
-NETCONF_PORT=830
-NETCONF_MODEL=pnf-simulator
-NETCONF_MAIN_CONTAINER=config
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/vsftpd_ssl-TEMPLATE.conf b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/vsftpd_ssl-TEMPLATE.conf
deleted file mode 100644
index 75bb974a3..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/vsftpd_ssl-TEMPLATE.conf
+++ /dev/null
@@ -1,59 +0,0 @@
-# Server Config
-anonymous_enable=NO
-local_enable=YES
-write_enable=YES
-local_umask=022
-dirmessage_enable=YES
-
-# Security and User auth
-chroot_local_user=YES
-pam_service_name=vsftpd_virtual
-virtual_use_local_privs=YES
-chmod_enable=NO
-user_config_dir=/etc/vsftpd/user_conf
-user_sub_token=$USER
-#local_root=/srv/$USER
-local_root=/srv/
-userlist_enable=NO
-allow_writeable_chroot=YES
-
-# Logging
-log_ftp_protocol=YES
-xferlog_enable=YES
-xferlog_std_format=YES
-#xferlog_file=/dev/stdout
-syslog_enable=NO
-dual_log_enable=YES
-
-# Remap all login users to this username
-guest_enable=YES
-guest_username=ftp
-hide_ids=YES
-
-# Networking
-connect_from_port_20=NO
-listen=YES
-tcp_wrappers=YES
-pasv_min_port=${FTPS_PASV_MIN}
-pasv_max_port=${FTPS_PASV_MAX}
-
-# SSL
-ssl_enable=Yes
-require_ssl_reuse=NO
-force_local_data_ssl=YES
-force_local_logins_ssl=YES
-ssl_ciphers=HIGH
-allow_anon_ssl=NO
-
-ssl_tlsv1=YES
-ssl_sslv2=YES
-ssl_sslv3=YES
-rsa_cert_file=/etc/ssl/private/ftp.crt
-rsa_private_key_file=/etc/ssl/private/ftp.key
-
-require_cert=YES
-ssl_request_cert=YES
-ca_certs_file=/etc/ssl/private/dfc.crt
-
-write_enable=YES
-pasv_address=${IPFILESERVER}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/deployment/PnP_PNF_sim_heat_template.yml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/deployment/PnP_PNF_sim_heat_template.yml
deleted file mode 100644
index 4bf6758d9..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/deployment/PnP_PNF_sim_heat_template.yml
+++ /dev/null
@@ -1,118 +0,0 @@
-description: Heat template that deploys PnP PNF simulator
-heat_template_version: '2013-05-23'
-parameters:
- flavor_name: {description: Type of instance (flavor) to be used, label: Flavor,
- type: string}
- image_name: {description: Image to be used for compute instance, label: Image name
- or ID, type: string}
- key_name: {description: Public/Private key pair name, label: Key pair name, type: string}
- public_net_id: {description: Public network that enables remote connection to VNF,
- label: Public network name or ID, type: string}
- private_net_id: {type: string, description: Private network id, label: Private network name or ID}
- private_subnet_id: {type: string, description: Private subnet id, label: Private subnetwork name or ID}
- proxy: {type: string, description: Proxy, label: Proxy, default: ""}
-resources:
- PNF_PnP_simualtor:
- type: OS::Nova::Server
- properties:
- key_name: { get_param: key_name }
- image: { get_param: image_name }
- flavor: { get_param: flavor_name }
- networks:
- - port: { get_resource: PNF_PnP_simualtor_port0 }
- user_data_format: RAW
- user_data:
- str_replace:
- template: |
- #!/bin/bash
-
- set_versions () {
- DOCKER_COMPOSE_VERSION=1.22.0
- }
-
-
- enable_root_ssh () {
- sed -i 's/PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config
- sed -i 's/PasswordAuthentication.*/PasswordAuthentication yes/' /etc/ssh/sshd_config
- service sshd restart
- echo -e "arthur\narthur" | passwd root
- }
-
- update_os () {
- dnf -y install fedora-upgrade
- }
-
- docker_remove () {
- dnf -y remove docker \
- docker-client \
- docker-client-latest \
- docker-common \
- docker-latest \
- docker-latest-logrotate \
- docker-logrotate \
- docker-selinux \
- docker-engine-selinux \
- docker-engine
- }
-
- docker_install_and_configure () {
- dnf -y install dnf-plugins-core
- dnf config-manager \
- --add-repo \
- https://download.docker.com/linux/fedora/docker-ce.repo
- dnf -y install docker-ce
- systemctl start docker
- mkdir -p /etc/systemd/system/docker.service.d/
- cat > /etc/systemd/system/docker.service.d/override.conf<< EOF
- [Service]
- Environment="HTTP_PROXY=$proxy"
- Environment="HTTPS_PROXY=$proxy"
- EOF
- systemctl daemon-reload
- systemctl restart docker
- }
- docker_compose_install () {
- curl -L https://github.com/docker/compose/releases/download/$DOCKER_COMPOSE_VERSION/docker-compose-$(uname -s)-$(uname -m) -o /usr/local/bin/docker-compose
- chmod +x /usr/local/bin/docker-compose
- }
- pnf_sim_file_checkout () {
- cd /root; git clone https://gerrit.onap.org/r/integration
- printf "{\n "simulatorParams": {\n "vesServerUrl": "http://VES-HOST:VES-PORT/eventListener/v7",\n "testDuration": "10",\n "messageInterval": "1"\n },\n "commonEventHeaderParams": {\n "eventName": "pnfRegistration_Nokia_5gDu",\n "nfNamingCode": "gNB",\n "nfcNamingCode": "oam",\n "sourceName": "NOK6061ZW3",\n "sourceId": "val13",\n "reportingEntityName": "NOK6061ZW3"\n },\n "pnfRegistrationParams": {\n "serialNumber": "6061ZW3",\n "vendorName": "Nokia",\n "oamV4IpAddress": "val3",\n "oamV6IpAddress": "val4",\n "unitFamily": "BBU",\n "modelNumber": "val6",\n "softwareVersion": "val7",\n "unitType": "val8"\n }\n}" > integration/test/mocks/pnfsimulator/config/config.json
- }
-
- start_simulator () {
- docker login -u docker -p docker nexus3.onap.org:10003
- cd ~/integration/test/mocks/pnfsimulator
- ./simulator.sh start
- }
-
- set_versions
- enable_root_ssh
- update_os
- docker_remove
- docker_install_and_configure
- docker_compose_install
- pnf_sim_file_checkout
- start_simulator
- params:
- $proxy: { get_param: proxy }
- PNF_PnP_simualtor_port0:
- type: OS::Neutron::Port
- properties:
- network_id: { get_param: private_net_id }
- security_groups:
- - default
- fixed_ips:
- - subnet_id: { get_param: private_subnet_id }
- PNF_PnP_simualtor_public:
- type: OS::Neutron::FloatingIP
- properties:
- floating_network_id: { get_param: public_net_id }
- port_id: { get_resource: PNF_PnP_simualtor_port0 }
-outputs:
- PNF_PnP_simualtor_private_ip:
- description: IP address of PNF_PnP_simualtor in private network
- value: { get_attr: [ PNF_PnP_simualtor, first_address ] }
- PNF_PnP_simualtor_public_ip:
- description: Floating IP address of PNF_PnP_simualtor in public network
- value: { get_attr: [ PNF_PnP_simualtor_public, floating_ip_address ] } \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/deployment/PnP_PNF_sim_heat_template_Ubuntu_16_04.yml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/deployment/PnP_PNF_sim_heat_template_Ubuntu_16_04.yml
deleted file mode 100644
index 5dd8e6d58..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/deployment/PnP_PNF_sim_heat_template_Ubuntu_16_04.yml
+++ /dev/null
@@ -1,165 +0,0 @@
-description: Heat template that deploys PnP PNF simulator
-heat_template_version: '2013-05-23'
-outputs:
- PNF_PnP_simualtor_private_ip:
- description: IP address of PNF_PnP_simualtor in private network
- value:
- get_attr: [PNF_PnP_simualtor, first_address]
- PNF_PnP_simualtor_public_ip:
- description: Floating IP address of PNF_PnP_simualtor in public network
- value:
- get_attr: [PNF_PnP_simualtor_public, floating_ip_address]
-parameters:
- flavor_name:
- description: Type of instance (flavor) to be used
- label: Flavor
- type: string
- image_name:
- description: Ubuntu 16.04 image to be used
- label: Image name or ID
- type: string
- key_name:
- description: Public/Private key pair name
- label: Key pair name
- type: string
- private_net_id:
- description: Private network id
- label: Private network name or ID
- type: string
- private_subnet_id:
- description: Private subnet id
- label: Private subnetwork name or ID
- type: string
- public_net_id:
- description: Public network that enables remote connection to VNF
- label: Public network name or ID
- type: string
- security_group:
- default: default
- description: Security group to be used
- label: Security Groups
- type: string
- proxy:
- default: ''
- description: Proxy
- label: Proxy
- type: string
- correlation_id:
- default: 'someId'
- description: Correlation ID
- label: Correlation ID
- type: string
- VES-HOST:
- default: 'VES-HOST'
- description: VES collector host ip
- label: VES ip
- type: string
- VES-PORT:
- default: 'VES-PORT'
- description: VES collector host port
- label: VES port
- type: string
-resources:
- PNF_PnP_simualtor:
- properties:
- flavor:
- get_param: flavor_name
- image:
- get_param: image_name
- key_name:
- get_param: key_name
- networks:
- - port:
- get_resource: PNF_PnP_simualtor_port0
- user_data:
- str_replace:
- params:
- $proxy:
- get_param: proxy
- $VES-PORT:
- get_param: VES-PORT
- $VES-HOST:
- get_param: VES-HOST
- $correlation_id:
- get_param: correlation_id
- template: |
- #!/bin/bash
-
- set_versions () {
- DOCKER_VERSION=17.03
- DOCKER_COMPOSE_VERSION=1.22.0
- PROTOBUF_VERSION=3.6.1
- }
-
- enable_root_ssh () {
- sed -i 's/PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config
- sed -i 's/PasswordAuthentication.*/PasswordAuthentication yes/' /etc/ssh/sshd_config
- service sshd restart
- echo -e "onap\nonap" | passwd root
- }
-
- update_os () {
- rm -rf /var/lib/apt/lists/*
- apt-get clean
- apt-get update
- }
-
- docker_install_and_configure () {
- curl "https://releases.rancher.com/install-docker/$DOCKER_VERSION.sh" | sh
- mkdir -p /etc/systemd/system/docker.service.d/
- cat > /etc/systemd/system/docker.service.d/docker.conf << EOF
- [Service]
- ExecStart=
- ExecStart=/usr/bin/dockerd -H fd:// --insecure-registry=nexus3.onap.org:10003
- Environment="HTTP_PROXY=$proxy"
- Environment="HTTPS_PROXY=$proxy"
- EOF
- systemctl daemon-reload
- systemctl restart docker
- apt-mark hold docker-ce
- docker login -u docker -p docker nexus3.onap.org:10003
- }
-
- docker_compose_install () {
- curl -L "https://github.com/docker/compose/releases/download/$DOCKER_COMPOSE_VERSION/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
- chmod +x /usr/local/bin/docker-compose
- }
-
- pnf_sim_file_checkout () {
- cd /root; git clone https://gerrit.onap.org/r/integration
- printf "{\n \"simulatorParams\": {\n \"vesServerUrl\": \"http://$VES-HOST:$VES-PORT/eventListener/v7\",\n \"testDuration\": \"10\",\n \"messageInterval\": \"1\"\n },\n \"commonEventHeaderParams\": {\n \"eventName\": \"pnfRegistration_Nokia_5gDu\",\n \"nfNamingCode\": \"gNB\",\n \"nfcNamingCode\": \"oam\",\n \"sourceName\": \"$correlation_id\",\n \"sourceId\": \"val13\",\n \"reportingEntityName\": \"NOK6061ZW3\"\n },\n \"pnfRegistrationParams\": {\n \"serialNumber\": \"6061ZW3\",\n \"vendorName\": \"Nokia\",\n \"oamV4IpAddress\": \"val3\",\n \"oamV6IpAddress\": \"val4\",\n \"unitFamily\": \"BBU\",\n \"modelNumber\": \"val6\",\n \"softwareVersion\": \"val7\",\n \"unitType\": \"val8\"\n }\n}" > integration/test/mocks/pnfsimulator/config/config.json
- }
-
- start_simulator () {
- docker login -u docker -p docker nexus3.onap.org:10003
- cd ~/integration/test/mocks/pnfsimulator
- ./simulator.sh start
- }
-
- set_versions
- enable_root_ssh
- update_os
- docker_install_and_configure
- docker_compose_install
- pnf_sim_file_checkout
- start_simulator
-
- type: OS::Nova::Server
- PNF_PnP_simualtor_port0:
- properties:
- fixed_ips:
- - subnet_id:
- get_param: private_subnet_id
- network_id:
- get_param: private_net_id
- security_groups:
- - get_param: security_group
- type: OS::Neutron::Port
- PNF_PnP_simualtor_public:
- properties:
- floating_network_id:
- get_param: public_net_id
- port_id:
- get_resource: PNF_PnP_simualtor_port0
- type: OS::Neutron::FloatingIP
-
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/docker-compose-template.yml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/docker-compose-template.yml
deleted file mode 100644
index d2c529033..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/docker-compose-template.yml
+++ /dev/null
@@ -1,64 +0,0 @@
-version: '2'
-networks:
- front-${I}:
- driver: bridge
- ipam:
- config:
- - gateway: "${IPGW}"
- subnet: "${IPSUBNET}/28"
- driver: default
-services:
- pnf-simulator-${I}:
- container_name: pnf-simulator-${I}
- image: nexus3.onap.org:10003/onap/masspnf-simulator:1.0.0-SNAPSHOT
- networks:
- front-${I}:
- ipv4_address: "${IPPNFSIM}"
- volumes:
- - ./logs:/var/log
- - ./json_schema:/json_schema
- - ./config/config.yml:/config/config.yml:rw
- - ./files/:/files/:rw
- environment:
- TZ: "${TIMEZONE}"
- env_file:
- - ./config/netconf.env
- restart: on-failure
-
- sftp-server-${I}:
- container_name: sftp-server-$I
- image: atmoz/sftp:alpine
- networks:
- front-${I}:
- ipv4_address: "${IPSFTP}"
- ports:
- - "${PORTSFTP}:22"
- volumes:
- - ./files/onap/:/home/onap/
- restart: on-failure
- command: onap:pano:1001
-
- ftpes-server-vsftpd-${I}:
- container_name: ftpes-server-vsftpd-$I
- image: docker.io/panubo/vsftpd
- networks:
- front-${I}:
- ipv4_address: "${IPFTPS}"
- ports:
- - "${PORTFTPS}:21"
- - "${FTPS_PASV_MIN}-${FTPS_PASV_MAX}:${FTPS_PASV_MIN}-${FTPS_PASV_MAX}"
- environment:
- FTP_USER: onap
- FTP_PASSWORD: pano
- PASV_ADDRESS: localhost
- PASV_MIN_PORT: ${FTPS_PASV_MIN}
- PASV_MAX_PORT: ${FTPS_PASV_MAX}
- volumes:
- - ./tls/ftp.crt:/etc/ssl/private/ftp.crt:ro
- - ./tls/ftp.key:/etc/ssl/private/ftp.key:ro
- - ./tls/dfc.crt:/etc/ssl/private/dfc.crt:ro
- - ./config/vsftpd_ssl.conf:/etc/vsftpd_ssl.conf:ro
- - ./files/onap/:/srv/
- restart: on-failure
- command: vsftpd /etc/vsftpd_ssl.conf
-
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/json_schema/input_validator.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/json_schema/input_validator.json
deleted file mode 100644
index 7b38e05d6..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/json_schema/input_validator.json
+++ /dev/null
@@ -1,113 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "type": "object",
- "properties": {
- "simulatorParams": {
- "type": "object",
- "properties": {
- "testDuration": {
- "type": "string"
- },
- "messageInterval": {
- "type": "string"
- }
- },
- "required": [
- "testDuration",
- "messageInterval"
- ]
- },
- "commonEventHeaderParams": {
- "type": "object",
- "properties": {
- "eventName": {
- "type": "string"
- },
- "nfNamingCode": {
- "type": "string"
- },
- "nfcNamingCode": {
- "type": "string"
- }
- },
- "required": [
- "eventName"
- ]
- },
-
-
- "pnfRegistrationParams": {
- "type": "object",
- "properties": {
- "serialNumber": {
- "type": "string"
- },
- "vendorName": {
- "type": "string"
- },
- "oamV4IpAddress": {
- "type": "string"
- },
- "oamV6IpAddress": {
- "type": "string"
- },
- "unitFamily": {
- "type": "string"
- },
- "modelNumber": {
- "type": "string"
- },
- "softwareVersion": {
- "type": "string"
- },
- "unitType": {
- "type": "string"
- }
- },
- "required": [
- "serialNumber",
- "vendorName",
- "oamV4IpAddress",
- "oamV6IpAddress",
- "unitFamily",
- "modelNumber",
- "softwareVersion",
- "unitType"
- ]
- },
- "notificationParams": {
- "type": "object",
- "properties": {
- "changeIdentifier": {
- "type": "string"
- },
- "changeType": {
- "type": "string"
- }
- },
- "required": [
- "changeIdentifier",
- "changeType"
- ]
- }
- },
-
- "oneOf": [
- {
- "required": [
- "simulatorParams",
- "commonEventHeaderParams",
- "pnfRegistrationParams"
- ]
- },
- {
- "required": [
- "simulatorParams",
- "commonEventHeaderParams",
- "notificationParams"
- ]
- }
- ]
-
-
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/json_schema/output_validator_ves_schema_30.0.1.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/json_schema/output_validator_ves_schema_30.0.1.json
deleted file mode 100644
index 76a661ca7..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/json_schema/output_validator_ves_schema_30.0.1.json
+++ /dev/null
@@ -1,2432 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "title": "VES Event Listener Common Event Format",
- "type": "object",
- "properties": {
- "event": {"$ref": "#/definitions/event"},
- "eventList": {"$ref": "#/definitions/eventList"}
- },
-
- "definitions": {
- "schemaHeaderBlock": {
- "description": "schema date, version, author and associated API",
- "type": "object",
- "properties": {
- "associatedApi": {
- "description": "VES Event Listener",
- "type": "string"
- },
- "lastUpdatedBy": {
- "description": "re2947",
- "type": "string"
- },
- "schemaDate": {
- "description": "July 31, 2018",
- "type": "string"
- },
- "schemaVersion": {
- "description": "30.0.1",
- "type": "number"
- }
- }
- },
- "schemaLicenseAndCopyrightNotice": {
- "description": "Copyright (c) 2018, AT&T Intellectual Property. All rights reserved",
- "type": "object",
- "properties": {
- "apacheLicense2.0": {
- "description": "Licensed under the Apache License, Version 2.0 (the 'License'); you may not use this file except in compliance with the License. You may obtain a copy of the License at:",
- "type": "string"
- },
- "licenseUrl": {
- "description": "http://www.apache.org/licenses/LICENSE-2.0",
- "type": "string"
- },
- "asIsClause": {
- "description": "Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.",
- "type": "string"
- },
- "permissionsAndLimitations": {
- "description": "See the License for the specific language governing permissions and limitations under the License.",
- "type": "string"
- }
- }
- },
- "arrayOfJsonObject": {
- "description": "array of json objects described by name, schema and other meta-information",
- "type": "array",
- "items": {
- "$ref": "#/definitions/jsonObject"
- }
- },
- "arrayOfNamedHashMap": {
- "description": "array of named hashMaps",
- "type": "array",
- "items": {
- "$ref": "#/definitions/namedHashMap"
- }
- },
- "codecsInUse": {
- "description": "number of times an identified codec was used over the measurementInterval",
- "type": "object",
- "properties": {
- "codecIdentifier": { "type": "string" },
- "numberInUse": { "type": "integer" }
- },
- "additionalProperties": false,
- "required": [ "codecIdentifier", "numberInUse" ]
- },
- "commonEventHeader": {
- "description": "fields common to all events",
- "type": "object",
- "properties": {
- "domain": {
- "description": "the eventing domain associated with the event",
- "type": "string",
- "enum": [
- "fault",
- "heartbeat",
- "measurement",
- "mobileFlow",
- "notification",
- "other",
- "pnfRegistration",
- "sipSignaling",
- "stateChange",
- "syslog",
- "thresholdCrossingAlert",
- "voiceQuality"
- ]
- },
- "eventId": {
- "description": "event key that is unique to the event source",
- "type": "string"
- },
- "eventName": {
- "description": "unique event name",
- "type": "string"
- },
- "eventType": {
- "description": "for example - applicationNf, guestOS, hostOS, platform",
- "type": "string"
- },
- "internalHeaderFields": { "$ref": "#/definitions/internalHeaderFields" },
- "lastEpochMicrosec": {
- "description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds",
- "type": "number"
- },
- "nfcNamingCode": {
- "description": "3 character network function component type, aligned with vfc naming standards",
- "type": "string"
- },
- "nfNamingCode": {
- "description": "4 character network function type, aligned with nf naming standards",
- "type": "string"
- },
- "nfVendorName": {
- "description": "network function vendor name",
- "type": "string"
- },
- "priority": {
- "description": "processing priority",
- "type": "string",
- "enum": [
- "High",
- "Medium",
- "Normal",
- "Low"
- ]
- },
- "reportingEntityId": {
- "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process",
- "type": "string"
- },
- "reportingEntityName": {
- "description": "name of the entity reporting the event, for example, an EMS name; may be the same as sourceName",
- "type": "string"
- },
- "sequence": {
- "description": "ordering of events communicated by an event source instance or 0 if not needed",
- "type": "integer"
- },
- "sourceId": {
- "description": "UUID identifying the entity experiencing the event issue; must be populated by the ATT enrichment process",
- "type": "string"
- },
- "sourceName": {
- "description": "name of the entity experiencing the event issue",
- "type": "string"
- },
- "startEpochMicrosec": {
- "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds",
- "type": "number"
- },
- "timeZoneOffset": {
- "description": "UTC offset for the local time zone of the device as UTC+/-hh.mm",
- "type": "string"
- },
- "version": {
- "description": "version of the event header",
- "type": "string",
- "enum": [ "4.0.1" ]
- },
- "vesEventListenerVersion": {
- "description": "version of the VES Event Listener API",
- "type": "string",
- "enum": [ "7.0.1" ]
- }
- },
- "additionalProperties": false,
- "required": [ "domain", "eventId", "eventName", "lastEpochMicrosec",
- "priority", "reportingEntityName", "sequence", "sourceName",
- "startEpochMicrosec", "version", "vesEventListenerVersion" ]
- },
- "counter": {
- "description": "performance counter",
- "type": "object",
- "properties": {
- "criticality": { "type": "string", "enum": [ "CRIT", "MAJ" ] },
- "hashMap": { "$ref": "#/definitions/hashMap" },
- "thresholdCrossed": { "type": "string" }
- },
- "additionalProperties": false,
- "required": [ "criticality", "hashMap", "thresholdCrossed" ]
- },
- "cpuUsage": {
- "description": "usage of an identified CPU",
- "type": "object",
- "properties": {
- "cpuCapacityContention": {
- "description": "the amount of time the CPU cannot run due to contention, in milliseconds over the measurementInterval",
- "type": "number"
- },
- "cpuDemandAvg": {
- "description": "the total CPU time that the NF/NFC/VM could use if there was no contention, in milliseconds over the measurementInterval",
- "type": "number"
- },
- "cpuDemandMhz": {
- "description": "CPU demand in megahertz",
- "type": "number"
- },
- "cpuDemandPct": {
- "description": "CPU demand as a percentage of the provisioned capacity",
- "type": "number"
- },
- "cpuIdentifier": {
- "description": "cpu identifer",
- "type": "string"
- },
- "cpuIdle": {
- "description": "percentage of CPU time spent in the idle task",
- "type": "number"
- },
- "cpuLatencyAvg": {
- "description": "percentage of time the VM is unable to run because it is contending for access to the physical CPUs",
- "type": "number"
- },
- "cpuOverheadAvg": {
- "description": "the overhead demand above available allocations and reservations, in milliseconds over the measurementInterval",
- "type": "number"
- },
- "cpuSwapWaitTime": {
- "description": "swap wait time. in milliseconds over the measurementInterval",
- "type": "number"
- },
- "cpuUsageInterrupt": {
- "description": "percentage of time spent servicing interrupts",
- "type": "number"
- },
- "cpuUsageNice": {
- "description": "percentage of time spent running user space processes that have been niced",
- "type": "number"
- },
- "cpuUsageSoftIrq": {
- "description": "percentage of time spent handling soft irq interrupts",
- "type": "number"
- },
- "cpuUsageSteal": {
- "description": "percentage of time spent in involuntary wait which is neither user, system or idle time and is effectively time that went missing",
- "type": "number"
- },
- "cpuUsageSystem": {
- "description": "percentage of time spent on system tasks running the kernel",
- "type": "number"
- },
- "cpuUsageUser": {
- "description": "percentage of time spent running un-niced user space processes",
- "type": "number"
- },
- "cpuWait": {
- "description": "percentage of CPU time spent waiting for I/O operations to complete",
- "type": "number"
- },
- "percentUsage": {
- "description": "aggregate cpu usage of the virtual machine on which the xNFC reporting the event is running",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [ "cpuIdentifier", "percentUsage" ]
- },
- "diskUsage": {
- "description": "usage of an identified disk",
- "type": "object",
- "properties": {
- "diskBusResets": {
- "description": "number of bus resets over the measurementInterval",
- "type": "number"
- },
- "diskCommandsAborted": {
- "description": "number of disk commands aborted over the measurementInterval",
- "type": "number"
- },
- "diskCommandsAvg": {
- "description": "average number of commands per second over the measurementInterval",
- "type": "number"
- },
- "diskFlushRequests": {
- "description": "total flush requests of the disk cache over the measurementInterval",
- "type": "number"
- },
- "diskFlushTime": {
- "description": "milliseconds spent on disk cache flushing over the measurementInterval",
- "type": "number"
- },
- "diskIdentifier": {
- "description": "disk identifier",
- "type": "string"
- },
- "diskIoTimeAvg": {
- "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the average over the measurement interval",
- "type": "number"
- },
- "diskIoTimeLast": {
- "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the last value measurement within the measurement interval",
- "type": "number"
- },
- "diskIoTimeMax": {
- "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the maximum value measurement within the measurement interval",
- "type": "number"
- },
- "diskIoTimeMin": {
- "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the minimum value measurement within the measurement interval",
- "type": "number"
- },
- "diskMergedReadAvg": {
- "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskMergedReadLast": {
- "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the last value measurement within the measurement interval",
- "type": "number"
- },
- "diskMergedReadMax": {
- "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the maximum value measurement within the measurement interval",
- "type": "number"
- },
- "diskMergedReadMin": {
- "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the minimum value measurement within the measurement interval",
- "type": "number"
- },
- "diskMergedWriteAvg": {
- "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskMergedWriteLast": {
- "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the last value measurement within the measurement interval",
- "type": "number"
- },
- "diskMergedWriteMax": {
- "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the maximum value measurement within the measurement interval",
- "type": "number"
- },
- "diskMergedWriteMin": {
- "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the minimum value measurement within the measurement interval",
- "type": "number"
- },
- "diskOctetsReadAvg": {
- "description": "number of octets per second read from a disk or partition; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskOctetsReadLast": {
- "description": "number of octets per second read from a disk or partition; provide the last measurement within the measurement interval",
- "type": "number"
- },
- "diskOctetsReadMax": {
- "description": "number of octets per second read from a disk or partition; provide the maximum measurement within the measurement interval",
- "type": "number"
- },
- "diskOctetsReadMin": {
- "description": "number of octets per second read from a disk or partition; provide the minimum measurement within the measurement interval",
- "type": "number"
- },
- "diskOctetsWriteAvg": {
- "description": "number of octets per second written to a disk or partition; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskOctetsWriteLast": {
- "description": "number of octets per second written to a disk or partition; provide the last measurement within the measurement interval",
- "type": "number"
- },
- "diskOctetsWriteMax": {
- "description": "number of octets per second written to a disk or partition; provide the maximum measurement within the measurement interval",
- "type": "number"
- },
- "diskOctetsWriteMin": {
- "description": "number of octets per second written to a disk or partition; provide the minimum measurement within the measurement interval",
- "type": "number"
- },
- "diskOpsReadAvg": {
- "description": "number of read operations per second issued to the disk; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskOpsReadLast": {
- "description": "number of read operations per second issued to the disk; provide the last measurement within the measurement interval",
- "type": "number"
- },
- "diskOpsReadMax": {
- "description": "number of read operations per second issued to the disk; provide the maximum measurement within the measurement interval",
- "type": "number"
- },
- "diskOpsReadMin": {
- "description": "number of read operations per second issued to the disk; provide the minimum measurement within the measurement interval",
- "type": "number"
- },
- "diskOpsWriteAvg": {
- "description": "number of write operations per second issued to the disk; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskOpsWriteLast": {
- "description": "number of write operations per second issued to the disk; provide the last measurement within the measurement interval",
- "type": "number"
- },
- "diskOpsWriteMax": {
- "description": "number of write operations per second issued to the disk; provide the maximum measurement within the measurement interval",
- "type": "number"
- },
- "diskOpsWriteMin": {
- "description": "number of write operations per second issued to the disk; provide the minimum measurement within the measurement interval",
- "type": "number"
- },
- "diskPendingOperationsAvg": {
- "description": "queue size of pending I/O operations per second; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskPendingOperationsLast": {
- "description": "queue size of pending I/O operations per second; provide the last measurement within the measurement interval",
- "type": "number"
- },
- "diskPendingOperationsMax": {
- "description": "queue size of pending I/O operations per second; provide the maximum measurement within the measurement interval",
- "type": "number"
- },
- "diskPendingOperationsMin": {
- "description": "queue size of pending I/O operations per second; provide the minimum measurement within the measurement interval",
- "type": "number"
- },
- "diskReadCommandsAvg": {
- "description": "average number of read commands issued per second to the disk over the measurementInterval",
- "type": "number"
- },
- "diskTime": {
- "description": "nanoseconds spent on disk cache reads/writes within the measurement interval",
- "type": "number"
- },
- "diskTimeReadAvg": {
- "description": "milliseconds a read operation took to complete; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskTimeReadLast": {
- "description": "milliseconds a read operation took to complete; provide the last measurement within the measurement interval",
- "type": "number"
- },
- "diskTimeReadMax": {
- "description": "milliseconds a read operation took to complete; provide the maximum measurement within the measurement interval",
- "type": "number"
- },
- "diskTimeReadMin": {
- "description": "milliseconds a read operation took to complete; provide the minimum measurement within the measurement interval",
- "type": "number"
- },
- "diskTimeWriteAvg": {
- "description": "milliseconds a write operation took to complete; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskTimeWriteLast": {
- "description": "milliseconds a write operation took to complete; provide the last measurement within the measurement interval",
- "type": "number"
- },
- "diskTimeWriteMax": {
- "description": "milliseconds a write operation took to complete; provide the maximum measurement within the measurement interval",
- "type": "number"
- },
- "diskTimeWriteMin": {
- "description": "milliseconds a write operation took to complete; provide the minimum measurement within the measurement interval",
- "type": "number"
- },
- "diskTotalReadLatencyAvg": {
- "description": "average read time from the perspective of a Guest OS: sum of the Kernel Read Latency and Physical Device Read Latency in milliseconds over the measurement interval",
- "type": "number"
- },
- "diskTotalWriteLatencyAvg": {
- "description": "average write time from the perspective of a Guest OS: sum of the Kernel Write Latency and Physical Device Write Latency in milliseconds over the measurement interval",
- "type": "number"
- },
- "diskWeightedIoTimeAvg": {
- "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the average within the collection interval",
- "type": "number"
- },
- "diskWeightedIoTimeLast": {
- "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the last within the collection interval",
- "type": "number"
- },
- "diskWeightedIoTimeMax": {
- "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the maximum within the collection interval",
- "type": "number"
- },
- "diskWeightedIoTimeMin": {
- "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the minimum within the collection interval",
- "type": "number"
- },
- "diskWriteCommandsAvg": {
- "description": "average number of write commands issued per second to the disk over the measurementInterval",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [ "diskIdentifier" ]
- },
- "endOfCallVqmSummaries": {
- "description": "provides end of call voice quality metrics",
- "type": "object",
- "properties": {
- "adjacencyName": {
- "description": " adjacency name",
- "type": "string"
- },
- "endpointAverageJitter": {
- "description": "endpoint average jitter",
- "type": "number"
- },
- "endpointDescription": {
- "description": "either Caller or Callee",
- "type": "string",
- "enum": ["Caller", "Callee"]
- },
- "endpointMaxJitter": {
- "description": "endpoint maximum jitter",
- "type": "number"
- },
- "endpointRtpOctetsDiscarded": {
- "description": "",
- "type": "number"
- },
- "endpointRtpOctetsLost": {
- "description": "endpoint RTP octets lost",
- "type": "number"
- },
- "endpointRtpOctetsReceived": {
- "description": "",
- "type": "number"
- },
- "endpointRtpOctetsSent": {
- "description": "",
- "type": "number"
- },
- "endpointRtpPacketsDiscarded": {
- "description": "",
- "type": "number"
- },
- "endpointRtpPacketsLost": {
- "description": "endpoint RTP packets lost",
- "type": "number"
- },
- "endpointRtpPacketsReceived": {
- "description": "",
- "type": "number"
- },
- "endpointRtpPacketsSent": {
- "description": "",
- "type": "number"
- },
- "localAverageJitter": {
- "description": "Local average jitter",
- "type": "number"
- },
- "localAverageJitterBufferDelay": {
- "description": "Local average jitter delay",
- "type": "number"
- },
- "localMaxJitter": {
- "description": "Local maximum jitter",
- "type": "number"
- },
- "localMaxJitterBufferDelay": {
- "description": "Local maximum jitter delay",
- "type": "number"
- },
- "localRtpOctetsDiscarded": {
- "description": "",
- "type": "number"
- },
- "localRtpOctetsLost": {
- "description": "Local RTP octets lost",
- "type": "number"
- },
- "localRtpOctetsReceived": {
- "description": "",
- "type": "number"
- },
- "localRtpOctetsSent": {
- "description": "",
- "type": "number"
- },
- "localRtpPacketsDiscarded": {
- "description": "",
- "type": "number"
- },
- "localRtpPacketsLost": {
- "description": "Local RTP packets lost",
- "type": "number"
- },
- "localRtpPacketsReceived": {
- "description": "",
- "type": "number"
- },
- "localRtpPacketsSent": {
- "description": "",
- "type": "number"
- },
- "mosCqe": {
- "description": "1-5 1dp",
- "type": "number"
- },
- "oneWayDelay": {
- "description": "one-way path delay in milliseconds",
- "type": "number"
- },
- "packetLossPercent": {
- "description" : "Calculated percentage packet loss based on Endpoint RTP packets lost (as reported in RTCP) and Local RTP packets sent. Direction is based on Endpoint description (Caller, Callee). Decimal (2 dp)",
- "type": "number"
- },
- "rFactor": {
- "description": "0-100",
- "type": "number"
- },
- "roundTripDelay": {
- "description": "millisecs",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [ "adjacencyName", "endpointDescription" ]
- },
- "event": {
- "description": "the root level of the common event format",
- "type": "object",
- "properties": {
- "commonEventHeader": { "$ref": "#/definitions/commonEventHeader" },
- "faultFields": { "$ref": "#/definitions/faultFields" },
- "heartbeatFields": { "$ref": "#/definitions/heartbeatFields" },
- "measurementFields": { "$ref": "#/definitions/measurementFields" },
- "mobileFlowFields": { "$ref": "#/definitions/mobileFlowFields" },
- "notificationFields": { "$ref": "#/definitions/notificationFields" },
- "otherFields": { "$ref": "#/definitions/otherFields" },
- "pnfRegistrationFields": { "$ref": "#/definitions/pnfRegistrationFields" },
- "sipSignalingFields": { "$ref": "#/definitions/sipSignalingFields" },
- "stateChangeFields": { "$ref": "#/definitions/stateChangeFields" },
- "syslogFields": { "$ref": "#/definitions/syslogFields" },
- "thresholdCrossingAlertFields": { "$ref": "#/definitions/thresholdCrossingAlertFields" },
- "voiceQualityFields": { "$ref": "#/definitions/voiceQualityFields" }
- },
- "additionalProperties": false,
- "required": [ "commonEventHeader" ]
- },
- "eventList": {
- "description": "array of events",
- "type": "array",
- "items": {
- "$ref": "#/definitions/event"
- }
- },
- "faultFields": {
- "description": "fields specific to fault events",
- "type": "object",
- "properties": {
- "alarmAdditionalInformation": { "$ref": "#/definitions/hashMap" },
- "alarmCondition": {
- "description": "alarm condition reported by the device",
- "type": "string"
- },
- "alarmInterfaceA": {
- "description": "card, port, channel or interface name of the device generating the alarm",
- "type": "string"
- },
- "eventCategory": {
- "description": "Event category, for example: license, link, routing, security, signaling",
- "type": "string"
- },
- "eventSeverity": {
- "description": "event severity",
- "type": "string",
- "enum": [
- "CRITICAL",
- "MAJOR",
- "MINOR",
- "WARNING",
- "NORMAL"
- ]
- },
- "eventSourceType": {
- "description": "type of event source; examples: card, host, other, port, portThreshold, router, slotThreshold, switch, virtualMachine, virtualNetworkFunction",
- "type": "string"
- },
- "faultFieldsVersion": {
- "description": "version of the faultFields block",
- "type": "string",
- "enum": [ "4.0" ]
- },
- "specificProblem": {
- "description": "short description of the alarm or problem",
- "type": "string"
- },
- "vfStatus": {
- "description": "virtual function status enumeration",
- "type": "string",
- "enum": [
- "Active",
- "Idle",
- "Preparing to terminate",
- "Ready to terminate",
- "Requesting termination"
- ]
- }
- },
- "additionalProperties": false,
- "required": [ "alarmCondition", "eventSeverity", "eventSourceType",
- "faultFieldsVersion", "specificProblem", "vfStatus" ]
- },
- "filesystemUsage": {
- "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second",
- "type": "object",
- "properties": {
- "blockConfigured": { "type": "number" },
- "blockIops": { "type": "number" },
- "blockUsed": { "type": "number" },
- "ephemeralConfigured": { "type": "number" },
- "ephemeralIops": { "type": "number" },
- "ephemeralUsed": { "type": "number" },
- "filesystemName": { "type": "string" }
- },
- "additionalProperties": false,
- "required": [ "blockConfigured", "blockIops", "blockUsed", "ephemeralConfigured",
- "ephemeralIops", "ephemeralUsed", "filesystemName" ]
- },
- "gtpPerFlowMetrics": {
- "description": "Mobility GTP Protocol per flow metrics",
- "type": "object",
- "properties": {
- "avgBitErrorRate": {
- "description": "average bit error rate",
- "type": "number"
- },
- "avgPacketDelayVariation": {
- "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets",
- "type": "number"
- },
- "avgPacketLatency": {
- "description": "average delivery latency",
- "type": "number"
- },
- "avgReceiveThroughput": {
- "description": "average receive throughput",
- "type": "number"
- },
- "avgTransmitThroughput": {
- "description": "average transmit throughput",
- "type": "number"
- },
- "durConnectionFailedStatus": {
- "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval",
- "type": "number"
- },
- "durTunnelFailedStatus": {
- "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval",
- "type": "number"
- },
- "flowActivatedBy": {
- "description": "Endpoint activating the flow",
- "type": "string"
- },
- "flowActivationEpoch": {
- "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available",
- "type": "number"
- },
- "flowActivationMicrosec": {
- "description": "Integer microseconds for the start of the flow connection",
- "type": "number"
- },
- "flowActivationTime": {
- "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
- "type": "string"
- },
- "flowDeactivatedBy": {
- "description": "Endpoint deactivating the flow",
- "type": "string"
- },
- "flowDeactivationEpoch": {
- "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time",
- "type": "number"
- },
- "flowDeactivationMicrosec": {
- "description": "Integer microseconds for the start of the flow connection",
- "type": "number"
- },
- "flowDeactivationTime": {
- "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
- "type": "string"
- },
- "flowStatus": {
- "description": "connection status at reporting time as a working / inactive / failed indicator value",
- "type": "string"
- },
- "gtpConnectionStatus": {
- "description": "Current connection state at reporting time",
- "type": "string"
- },
- "gtpTunnelStatus": {
- "description": "Current tunnel state at reporting time",
- "type": "string"
- },
- "ipTosCountList": { "$ref": "#/definitions/hashMap" },
- "ipTosList": {
- "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'",
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "largePacketRtt": {
- "description": "large packet round trip time",
- "type": "number"
- },
- "largePacketThreshold": {
- "description": "large packet threshold being applied",
- "type": "number"
- },
- "maxPacketDelayVariation": {
- "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets",
- "type": "number"
- },
- "maxReceiveBitRate": {
- "description": "maximum receive bit rate",
- "type": "number"
- },
- "maxTransmitBitRate": {
- "description": "maximum transmit bit rate",
- "type": "number"
- },
- "mobileQciCosCountList": { "$ref": "#/definitions/hashMap" },
- "mobileQciCosList": {
- "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow",
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "numActivationFailures": {
- "description": "Number of failed activation requests, as observed by the reporting node",
- "type": "number"
- },
- "numBitErrors": {
- "description": "number of errored bits",
- "type": "number"
- },
- "numBytesReceived": {
- "description": "number of bytes received, including retransmissions",
- "type": "number"
- },
- "numBytesTransmitted": {
- "description": "number of bytes transmitted, including retransmissions",
- "type": "number"
- },
- "numDroppedPackets": {
- "description": "number of received packets dropped due to errors per virtual interface",
- "type": "number"
- },
- "numGtpEchoFailures": {
- "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2",
- "type": "number"
- },
- "numGtpTunnelErrors": {
- "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 11.1",
- "type": "number"
- },
- "numHttpErrors": {
- "description": "Http error count",
- "type": "number"
- },
- "numL7BytesReceived": {
- "description": "number of tunneled layer 7 bytes received, including retransmissions",
- "type": "number"
- },
- "numL7BytesTransmitted": {
- "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions",
- "type": "number"
- },
- "numLostPackets": {
- "description": "number of lost packets",
- "type": "number"
- },
- "numOutOfOrderPackets": {
- "description": "number of out-of-order packets",
- "type": "number"
- },
- "numPacketErrors": {
- "description": "number of errored packets",
- "type": "number"
- },
- "numPacketsReceivedExclRetrans": {
- "description": "number of packets received, excluding retransmission",
- "type": "number"
- },
- "numPacketsReceivedInclRetrans": {
- "description": "number of packets received, including retransmission",
- "type": "number"
- },
- "numPacketsTransmittedInclRetrans": {
- "description": "number of packets transmitted, including retransmissions",
- "type": "number"
- },
- "numRetries": {
- "description": "number of packet retries",
- "type": "number"
- },
- "numTimeouts": {
- "description": "number of packet timeouts",
- "type": "number"
- },
- "numTunneledL7BytesReceived": {
- "description": "number of tunneled layer 7 bytes received, excluding retransmissions",
- "type": "number"
- },
- "roundTripTime": {
- "description": "round trip time",
- "type": "number"
- },
- "tcpFlagCountList": { "$ref": "#/definitions/hashMap" },
- "tcpFlagList": {
- "description": "Array of unique TCP Flags observed in the flow",
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "timeToFirstByte": {
- "description": "Time in milliseconds between the connection activation and first byte received",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [ "avgBitErrorRate", "avgPacketDelayVariation", "avgPacketLatency",
- "avgReceiveThroughput", "avgTransmitThroughput",
- "flowActivationEpoch", "flowActivationMicrosec",
- "flowDeactivationEpoch", "flowDeactivationMicrosec",
- "flowDeactivationTime", "flowStatus",
- "maxPacketDelayVariation", "numActivationFailures",
- "numBitErrors", "numBytesReceived", "numBytesTransmitted",
- "numDroppedPackets", "numL7BytesReceived",
- "numL7BytesTransmitted", "numLostPackets",
- "numOutOfOrderPackets", "numPacketErrors",
- "numPacketsReceivedExclRetrans",
- "numPacketsReceivedInclRetrans",
- "numPacketsTransmittedInclRetrans",
- "numRetries", "numTimeouts", "numTunneledL7BytesReceived",
- "roundTripTime", "timeToFirstByte"
- ]
- },
- "hashMap": {
- "description": "an associative array which is an array of key:value pairs",
- "type": "object",
- "additionalProperties": { "type": "string" },
- "default": {}
- },
- "heartbeatFields": {
- "description": "optional field block for fields specific to heartbeat events",
- "type": "object",
- "properties": {
- "additionalFields": { "$ref": "#/definitions/hashMap" },
- "heartbeatFieldsVersion": {
- "description": "version of the heartbeatFields block",
- "type": "string",
- "enum": [ "3.0" ]
- },
- "heartbeatInterval": {
- "description": "current heartbeat interval in seconds",
- "type": "integer"
- }
- },
- "additionalProperties": false,
- "required": [ "heartbeatFieldsVersion", "heartbeatInterval" ]
- },
- "hugePages": {
- "description": "metrics on system hugepages",
- "type": "object",
- "properties": {
- "bytesFree": {
- "description": "number of free hugepages in bytes",
- "type": "number"
- },
- "bytesUsed": {
- "description": "number of used hugepages in bytes",
- "type": "number"
- },
- "hugePagesIdentifier": {
- "description": "hugePages identifier",
- "type": "number"
- },
- "percentFree": {
- "description": "number of free hugepages in percent",
- "type": "number"
- },
- "percentUsed": {
- "description": "number of free hugepages in percent",
- "type": "number"
- },
- "vmPageNumberFree": {
- "description": "number of free vmPages in numbers",
- "type": "number"
- },
- "vmPageNumberUsed": {
- "description": "number of used vmPages in numbers",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [ "hugePagesIdentifier" ]
- },
- "internalHeaderFields": {
- "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources",
- "type": "object"
- },
- "ipmi": {
- "description": "intelligent platform management interface metrics",
- "type": "object",
- "properties": {
- "exitAirTemperature": {
- "description": "system fan exit air flow temperature in celsius",
- "type": "number"
- },
- "frontPanelTemperature": {
- "description": "front panel temperature in celsius",
- "type": "number"
- },
- "ioModuleTemperature": {
- "description": "io module temperature in celsius",
- "type": "number"
- },
- "ipmiBaseboardTemperatureArray": {
- "description": "array of ipmiBaseboardTemperature objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiBaseboardTemperature"
- }
- },
- "ipmiBaseboardVoltageRegulatorArray": {
- "description": "array of ipmiBaseboardVoltageRegulator objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiBaseboardVoltageRegulator"
- }
- },
- "ipmiBatteryArray": {
- "description": "array of ipmiBattery objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiBattery"
- }
- },
- "ipmiFanArray": {
- "description": "array of ipmiFan objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiFan"
- }
- },
- "ipmiHsbpArray": {
- "description": "array of ipmiHsbp objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiHsbp"
- }
- },
- "ipmiGlobalAggregateTemperatureMarginArray": {
- "description": "array of ipmiGlobalAggregateTemperatureMargin objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiGlobalAggregateTemperatureMargin"
- }
- },
- "ipmiNicArray": {
- "description": "array of ipmiNic objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiNic"
- }
- },
- "ipmiPowerSupplyArray": {
- "description": "array of ipmiPowerSupply objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiPowerSupply"
- }
- },
- "ipmiProcessorArray": {
- "description": "array of ipmiProcessor objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiProcessor"
- }
- },
- "systemAirflow": {
- "description": "airfflow in cubic feet per minute (cfm)",
- "type": "number"
- }
- },
- "additionalProperties": false
- },
- "ipmiBaseboardTemperature": {
- "description": "intelligent platform management interface (ipmi) baseboard temperature metrics",
- "type": "object",
- "properties": {
- "baseboardTemperatureIdentifier": {
- "description": "identifier for the location where the temperature is taken",
- "type": "string"
- },
- "baseboardTemperature": {
- "description": "baseboard temperature in celsius",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [ "baseboardTemperatureIdentifier" ]
- },
- "ipmiBaseboardVoltageRegulator": {
- "description": "intelligent platform management interface (ipmi) baseboard voltage regulator metrics",
- "type": "object",
- "properties": {
- "baseboardVoltageRegulatorIdentifier": {
- "description": "identifier for the baseboard voltage regulator",
- "type": "string"
- },
- "voltageRegulatorTemperature": {
- "description": "voltage regulator temperature in celsius",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [ "baseboardVoltageRegulatorIdentifier" ]
- },
- "ipmiBattery": {
- "description": "intelligent platform management interface (ipmi) battery metrics",
- "type": "object",
- "properties": {
- "batteryIdentifier": {
- "description": "identifier for the battery",
- "type": "string"
- },
- "batteryType": {
- "description": "type of battery",
- "type": "string"
- },
- "batteryVoltageLevel": {
- "description": "battery voltage level",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [ "batteryIdentifier" ]
- },
- "ipmiFan": {
- "description": "intelligent platform management interface (ipmi) fan metrics",
- "type": "object",
- "properties": {
- "fanIdentifier": {
- "description": "identifier for the fan",
- "type": "string"
- },
- "fanSpeed": {
- "description": "fan speed in revolutions per minute (rpm)",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [ "fanIdentifier" ]
- },
- "ipmiGlobalAggregateTemperatureMargin": {
- "description": "intelligent platform management interface (ipmi) global aggregate temperature margin",
- "type": "object",
- "properties": {
- "ipmiGlobalAggregateTemperatureMarginIdentifier": {
- "description": "identifier for the ipmi global aggregate temperature margin metrics",
- "type": "string"
- },
- "globalAggregateTemperatureMargin": {
- "description": "the difference between the current global aggregate temperature, in celsius, and the global aggregate throttling thermal trip point",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [ "ipmiGlobalAggregateTemperatureMarginIdentifier", "globalAggregateTemperatureMargin" ]
- },
- "ipmiHsbp": {
- "description": "intelligent platform management interface (ipmi) hot swap backplane power metrics",
- "type": "object",
- "properties": {
- "hsbpIdentifier": {
- "description": "identifier for the hot swap backplane power unit",
- "type": "string"
- },
- "hsbpTemperature": {
- "description": "hot swap backplane power temperature in celsius",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [ "hsbpIdentifier" ]
- },
- "ipmiNic": {
- "description": "intelligent platform management interface (ipmi) network interface control card (nic) metrics",
- "type": "object",
- "properties": {
- "nicIdentifier": {
- "description": "identifier for the network interface control card",
- "type": "string"
- },
- "nicTemperature": {
- "description": "nic temperature in celsius",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [ "nicIdentifier" ]
- },
- "ipmiPowerSupply": {
- "description": "intelligent platform management interface (ipmi) power supply metrics",
- "type": "object",
- "properties": {
- "powerSupplyIdentifier": {
- "description": "identifier for the power supply",
- "type": "string"
- },
- "powerSupplyInputPower": {
- "description": "input power in watts",
- "type": "number"
- },
- "powerSupplyCurrentOutputPercent": {
- "description": "current output voltage as a percentage of the design specified level",
- "type": "number"
- },
- "powerSupplyTemperature": {
- "description": "power supply temperature in celsius",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [ "powerSupplyIdentifier" ]
- },
- "ipmiProcessor": {
- "description": "intelligent platform management interface processor metrics",
- "type": "object",
- "properties": {
- "processorIdentifier": {
- "description": "identifier for an ipmi processor",
- "type": "string"
- },
- "processorThermalControlPercent": {
- "description": "io module temperature in celsius",
- "type": "number"
- },
- "processorDtsThermalMargin": {
- "description": "front panel temperature in celsius",
- "type": "number"
- },
- "processorDimmAggregateThermalMarginArray": {
- "description": "array of processorDimmAggregateThermalMargin objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/processorDimmAggregateThermalMargin"
- }
- }
- },
- "additionalProperties": false,
- "required": [ "processorIdentifier" ]
- },
- "jsonObject": {
- "description": "json object schema, name and other meta-information along with one or more object instances",
- "type": "object",
- "properties": {
- "objectInstances": {
- "description": "one or more instances of the jsonObject",
- "type": "array",
- "items": {
- "$ref": "#/definitions/jsonObjectInstance"
- }
- },
- "objectName": {
- "description": "name of the JSON Object",
- "type": "string"
- },
- "objectSchema": {
- "description": "json schema for the object",
- "type": "string"
- },
- "objectSchemaUrl": {
- "description": "Url to the json schema for the object",
- "type": "string"
- },
- "nfSubscribedObjectName": {
- "description": "name of the object associated with the nfSubscriptonId",
- "type": "string"
- },
- "nfSubscriptionId": {
- "description": "identifies an openConfig telemetry subscription on a network function, which configures the network function to send complex object data associated with the jsonObject",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [ "objectInstances", "objectName" ]
- },
- "jsonObjectInstance": {
- "description": "meta-information about an instance of a jsonObject along with the actual object instance",
- "type": "object",
- "properties": {
- "jsonObject": { "$ref": "#/definitions/jsonObject" },
- "objectInstance": {
- "description": "an instance conforming to the jsonObject objectSchema",
- "type": "object"
- },
- "objectInstanceEpochMicrosec": {
- "description": "the unix time aka epoch time associated with this objectInstance--as microseconds elapsed since 1 Jan 1970 not including leap seconds",
- "type": "number"
- },
- "objectKeys": {
- "description": "an ordered set of keys that identifies this particular instance of jsonObject",
- "type": "array",
- "items": {
- "$ref": "#/definitions/key"
- }
- }
- },
- "additionalProperties": false
- },
- "key": {
- "description": "tuple which provides the name of a key along with its value and relative order",
- "type": "object",
- "properties": {
- "keyName": {
- "description": "name of the key",
- "type": "string"
- },
- "keyOrder": {
- "description": "relative sequence or order of the key with respect to other keys",
- "type": "integer"
- },
- "keyValue": {
- "description": "value of the key",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [ "keyName" ]
- },
- "latencyBucketMeasure": {
- "description": "number of counts falling within a defined latency bucket",
- "type": "object",
- "properties": {
- "countsInTheBucket": { "type": "number" },
- "highEndOfLatencyBucket": { "type": "number" },
- "lowEndOfLatencyBucket": { "type": "number" }
- },
- "additionalProperties": false,
- "required": [ "countsInTheBucket" ]
- },
- "load": {
- "description": "/proc/loadavg cpu utilization and io utilization metrics",
- "type": "object",
- "properties": {
- "longTerm": {
- "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 15 minutes using /proc/loadavg",
- "type": "number"
- },
- "midTerm": {
- "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 5 minutes using /proc/loadavg",
- "type": "number"
- },
- "shortTerm": {
- "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 1 minute using /proc/loadavg",
- "type": "number"
- }
- },
- "additionalProperties": false
- },
- "machineCheckException": {
- "description": "metrics on vm machine check exceptions",
- "type": "object",
- "properties": {
- "correctedMemoryErrors": {
- "description": "total hardware errors that were corrected by the hardware (e.g. data corruption corrected via  ECC) over the measurementInterval",
- "type": "number"
- },
- "correctedMemoryErrorsIn1Hr": {
- "description": "total hardware errors that were corrected by the hardware over the last one hour",
- "type": "number"
- },
- "uncorrectedMemoryErrors": {
- "description": "total uncorrected hardware errors that were detected by the hardware (e.g., causing data corruption) over the measurementInterval",
- "type": "number"
- },
- "uncorrectedMemoryErrorsIn1Hr": {
- "description": "total uncorrected hardware errors that were detected by the hardware over the last one hour",
- "type": "number"
- },
- "vmIdentifier": {
- "description": "virtual machine identifier associated with the machine check exception",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [ "vmIdentifier" ]
- },
- "measurementFields": {
- "description": "measurement fields",
- "type": "object",
- "properties": {
- "additionalFields": { "$ref": "#/definitions/hashMap" },
- "additionalMeasurements": {"$ref": "#/definitions/arrayOfNamedHashMap"},
- "additionalObjects": {"$ref": "#/definitions/arrayOfJsonObject"},
- "codecUsageArray": {
- "description": "array of codecs in use",
- "type": "array",
- "items": {
- "$ref": "#/definitions/codecsInUse"
- }
- },
- "concurrentSessions": {
- "description": "peak concurrent sessions for the VM or xNF over the measurementInterval",
- "type": "integer"
- },
- "configuredEntities": {
- "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the xNF",
- "type": "integer"
- },
- "cpuUsageArray": {
- "description": "usage of an array of CPUs",
- "type": "array",
- "items": {
- "$ref": "#/definitions/cpuUsage"
- }
- },
- "diskUsageArray": {
- "description": "usage of an array of disks",
- "type": "array",
- "items": {
- "$ref": "#/definitions/diskUsage"
- }
- },
- "featureUsageArray": { "$ref": "#/definitions/hashMap" },
- "filesystemUsageArray": {
- "description": "filesystem usage of the VM on which the xNFC reporting the event is running",
- "type": "array",
- "items": {
- "$ref": "#/definitions/filesystemUsage"
- }
- },
- "hugePagesArray": {
- "description": "array of metrics on hugepPages",
- "type": "array",
- "items": {
- "$ref": "#/definitions/hugePages"
- }
- },
- "ipmi": { "$ref": "#/definitions/ipmi" },
- "latencyDistribution": {
- "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-xNF configured ranges",
- "type": "array",
- "items": {
- "$ref": "#/definitions/latencyBucketMeasure"
- }
- },
- "loadArray": {
- "description": "array of system load metrics",
- "type": "array",
- "items": {
- "$ref": "#/definitions/load"
- }
- },
- "machineCheckExceptionArray": {
- "description": "array of machine check exceptions",
- "type": "array",
- "items": {
- "$ref": "#/definitions/machineCheckException"
- }
- },
- "meanRequestLatency": {
- "description": "mean seconds required to respond to each request for the VM on which the xNFC reporting the event is running",
- "type": "number"
- },
- "measurementInterval": {
- "description": "interval over which measurements are being reported in seconds",
- "type": "number"
- },
- "measurementFieldsVersion": {
- "description": "version of the measurementFields block",
- "type": "string",
- "enum": [ "4.0" ]
- },
- "memoryUsageArray": {
- "description": "memory usage of an array of VMs",
- "type": "array",
- "items": {
- "$ref": "#/definitions/memoryUsage"
- }
- },
- "numberOfMediaPortsInUse": {
- "description": "number of media ports in use",
- "type": "integer"
- },
- "requestRate": {
- "description": "peak rate of service requests per second to the xNF over the measurementInterval",
- "type": "number"
- },
- "nfcScalingMetric": {
- "description": "represents busy-ness of the network function from 0 to 100 as reported by the xNFC",
- "type": "integer"
- },
- "nicPerformanceArray": {
- "description": "usage of an array of network interface cards",
- "type": "array",
- "items": {
- "$ref": "#/definitions/nicPerformance"
- }
- },
- "processStatsArray": {
- "description": "array of metrics on system processes",
- "type": "array",
- "items": {
- "$ref": "#/definitions/processStats"
- }
- }
- },
- "additionalProperties": false,
- "required": [ "measurementInterval", "measurementFieldsVersion" ]
- },
- "memoryUsage": {
- "description": "memory usage of an identified virtual machine",
- "type": "object",
- "properties": {
- "memoryBuffered": {
- "description": "kibibytes of temporary storage for raw disk blocks",
- "type": "number"
- },
- "memoryCached": {
- "description": "kibibytes of memory used for cache",
- "type": "number"
- },
- "memoryConfigured": {
- "description": "kibibytes of memory configured in the virtual machine on which the xNFC reporting the event is running",
- "type": "number"
- },
- "memoryDemand": {
- "description": "host demand in kibibytes",
- "type": "number"
- },
- "memoryFree": {
- "description": "kibibytes of physical RAM left unused by the system",
- "type": "number"
- },
- "memoryLatencyAvg": {
- "description": "Percentage of time the VM is waiting to access swapped or compressed memory",
- "type": "number"
- },
- "memorySharedAvg": {
- "description": "shared memory in kilobytes",
- "type": "number"
- },
- "memorySlabRecl": {
- "description": "the part of the slab that can be reclaimed such as caches measured in kibibytes",
- "type": "number"
- },
- "memorySlabUnrecl": {
- "description": "the part of the slab that cannot be reclaimed even when lacking memory measured in kibibytes",
- "type": "number"
- },
- "memorySwapInAvg": {
- "description": "Amount of memory swapped-in from host cache in kibibytes",
- "type": "number"
- },
- "memorySwapInRateAvg": {
- "description": "rate at which memory is swapped from disk into active memory during the interval in kilobytes per second",
- "type": "number"
- },
- "memorySwapOutAvg": {
- "description": "Amount of memory swapped-out to host cache in kibibytes",
- "type": "number"
- },
- "memorySwapOutRateAvg": {
- "description": "rate at which memory is being swapped from active memory to disk during the current interval in kilobytes per second",
- "type": "number"
- },
- "memorySwapUsedAvg": {
- "description": "space used for caching swapped pages in the host cache in kibibytes",
- "type": "number"
- },
- "memoryUsed": {
- "description": "total memory minus the sum of free, buffered, cached and slab memory measured in kibibytes",
- "type": "number"
- },
- "percentMemoryUsage": {
- "description": "Percentage of memory usage; value = (memoryUsed / (memoryUsed + memoryFree) x 100 if denomintor is nonzero, or 0, if otherwise",
- "type": "number"
- },
- "vmIdentifier": {
- "description": "virtual machine identifier associated with the memory metrics",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [ "memoryFree", "memoryUsed", "vmIdentifier" ]
- },
- "mobileFlowFields": {
- "description": "mobileFlow fields",
- "type": "object",
- "properties": {
- "additionalFields": { "$ref": "#/definitions/hashMap" },
- "applicationType": {
- "description": "Application type inferred",
- "type": "string"
- },
- "appProtocolType": {
- "description": "application protocol",
- "type": "string"
- },
- "appProtocolVersion": {
- "description": "application protocol version",
- "type": "string"
- },
- "cid": {
- "description": "cell id",
- "type": "string"
- },
- "connectionType": {
- "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc",
- "type": "string"
- },
- "ecgi": {
- "description": "Evolved Cell Global Id",
- "type": "string"
- },
- "flowDirection": {
- "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow",
- "type": "string"
- },
- "gtpPerFlowMetrics": { "$ref": "#/definitions/gtpPerFlowMetrics" },
- "gtpProtocolType": {
- "description": "GTP protocol",
- "type": "string"
- },
- "gtpVersion": {
- "description": "GTP protocol version",
- "type": "string"
- },
- "httpHeader": {
- "description": "HTTP request header, if the flow connects to a node referenced by HTTP",
- "type": "string"
- },
- "imei": {
- "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device",
- "type": "string"
- },
- "imsi": {
- "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device",
- "type": "string"
- },
- "ipProtocolType": {
- "description": "IP protocol type e.g., TCP, UDP, RTP...",
- "type": "string"
- },
- "ipVersion": {
- "description": "IP protocol version e.g., IPv4, IPv6",
- "type": "string"
- },
- "lac": {
- "description": "location area code",
- "type": "string"
- },
- "mcc": {
- "description": "mobile country code",
- "type": "string"
- },
- "mnc": {
- "description": "mobile network code",
- "type": "string"
- },
- "mobileFlowFieldsVersion": {
- "description": "version of the mobileFlowFields block",
- "type": "string",
- "enum": [ "4.0" ]
- },
- "msisdn": {
- "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device",
- "type": "string"
- },
- "otherEndpointIpAddress": {
- "description": "IP address for the other endpoint, as used for the flow being reported on",
- "type": "string"
- },
- "otherEndpointPort": {
- "description": "IP Port for the reporting entity, as used for the flow being reported on",
- "type": "integer"
- },
- "otherFunctionalRole": {
- "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...",
- "type": "string"
- },
- "rac": {
- "description": "routing area code",
- "type": "string"
- },
- "radioAccessTechnology": {
- "description": "Radio Access Technology e.g., 2G, 3G, LTE",
- "type": "string"
- },
- "reportingEndpointIpAddr": {
- "description": "IP address for the reporting entity, as used for the flow being reported on",
- "type": "string"
- },
- "reportingEndpointPort": {
- "description": "IP port for the reporting entity, as used for the flow being reported on",
- "type": "integer"
- },
- "sac": {
- "description": "service area code",
- "type": "string"
- },
- "samplingAlgorithm": {
- "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied",
- "type": "integer"
- },
- "tac": {
- "description": "transport area code",
- "type": "string"
- },
- "tunnelId": {
- "description": "tunnel identifier",
- "type": "string"
- },
- "vlanId": {
- "description": "VLAN identifier used by this flow",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [ "flowDirection", "gtpPerFlowMetrics", "ipProtocolType", "ipVersion",
- "mobileFlowFieldsVersion", "otherEndpointIpAddress", "otherEndpointPort",
- "reportingEndpointIpAddr", "reportingEndpointPort" ]
- },
- "namedHashMap": {
- "description": "a hashMap which is associated with and described by a name",
- "type": "object",
- "properties": {
- "name": { "type": "string" },
- "hashMap": { "$ref": "#/definitions/hashMap" }
- },
- "additionalProperties": false,
- "required": [ "name", "hashMap" ]
- },
- "nicPerformance": {
- "description": "describes the performance and errors of an identified network interface card",
- "type": "object",
- "properties": {
- "administrativeState": {
- "description": "administrative state",
- "type": "string",
- "enum": [ "inService", "outOfService" ]
- },
- "nicIdentifier": {
- "description": "nic identification",
- "type": "string"
- },
- "operationalState": {
- "description": "operational state",
- "type": "string",
- "enum": [ "inService", "outOfService" ]
- },
- "receivedBroadcastPacketsAccumulated": {
- "description": "Cumulative count of broadcast packets received as read at the end of the measurement interval",
- "type": "number"
- },
- "receivedBroadcastPacketsDelta": {
- "description": "Count of broadcast packets received within the measurement interval",
- "type": "number"
- },
- "receivedDiscardedPacketsAccumulated": {
- "description": "Cumulative count of discarded packets received as read at the end of the measurement interval",
- "type": "number"
- },
- "receivedDiscardedPacketsDelta": {
- "description": "Count of discarded packets received within the measurement interval",
- "type": "number"
- },
- "receivedErrorPacketsAccumulated": {
- "description": "Cumulative count of error packets received as read at the end of the measurement interval",
- "type": "number"
- },
- "receivedErrorPacketsDelta": {
- "description": "Count of error packets received within the measurement interval",
- "type": "number"
- },
- "receivedMulticastPacketsAccumulated": {
- "description": "Cumulative count of multicast packets received as read at the end of the measurement interval",
- "type": "number"
- },
- "receivedMulticastPacketsDelta": {
- "description": "Count of multicast packets received within the measurement interval",
- "type": "number"
- },
- "receivedOctetsAccumulated": {
- "description": "Cumulative count of octets received as read at the end of the measurement interval",
- "type": "number"
- },
- "receivedOctetsDelta": {
- "description": "Count of octets received within the measurement interval",
- "type": "number"
- },
- "receivedTotalPacketsAccumulated": {
- "description": "Cumulative count of all packets received as read at the end of the measurement interval",
- "type": "number"
- },
- "receivedPercentDiscard": {
- "description": "Percentage of discarded packets received; value = (receivedDiscardedPacketsDelta / receivedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise",
- "type": "number"
- },
- "receivedPercentError": {
- "description": "Percentage of error packets received; value = (receivedErrorPacketsDelta / receivedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise.",
- "type": "number"
- },
- "receivedTotalPacketsDelta": {
- "description": "Count of all packets received within the measurement interval",
- "type": "number"
- },
- "receivedUnicastPacketsAccumulated": {
- "description": "Cumulative count of unicast packets received as read at the end of the measurement interval",
- "type": "number"
- },
- "receivedUnicastPacketsDelta": {
- "description": "Count of unicast packets received within the measurement interval",
- "type": "number"
- },
- "receivedUtilization": {
- "description": "Percentage of utilization received; value = (receivedOctetsDelta / (speed x (lastEpochMicrosec - startEpochMicrosec))) x 100, if denominator is nonzero, or 0, if otherwise",
- "type": "number"
- },
- "speed": {
- "description": "Speed configured in mbps",
- "type": "number"
- },
- "transmittedBroadcastPacketsAccumulated": {
- "description": "Cumulative count of broadcast packets transmitted as read at the end of the measurement interval",
- "type": "number"
- },
- "transmittedBroadcastPacketsDelta": {
- "description": "Count of broadcast packets transmitted within the measurement interval",
- "type": "number"
- },
- "transmittedDiscardedPacketsAccumulated": {
- "description": "Cumulative count of discarded packets transmitted as read at the end of the measurement interval",
- "type": "number"
- },
- "transmittedDiscardedPacketsDelta": {
- "description": "Count of discarded packets transmitted within the measurement interval",
- "type": "number"
- },
- "transmittedErrorPacketsAccumulated": {
- "description": "Cumulative count of error packets transmitted as read at the end of the measurement interval",
- "type": "number"
- },
- "transmittedErrorPacketsDelta": {
- "description": "Count of error packets transmitted within the measurement interval",
- "type": "number"
- },
- "transmittedMulticastPacketsAccumulated": {
- "description": "Cumulative count of multicast packets transmitted as read at the end of the measurement interval",
- "type": "number"
- },
- "transmittedMulticastPacketsDelta": {
- "description": "Count of multicast packets transmitted within the measurement interval",
- "type": "number"
- },
- "transmittedOctetsAccumulated": {
- "description": "Cumulative count of octets transmitted as read at the end of the measurement interval",
- "type": "number"
- },
- "transmittedOctetsDelta": {
- "description": "Count of octets transmitted within the measurement interval",
- "type": "number"
- },
- "transmittedTotalPacketsAccumulated": {
- "description": "Cumulative count of all packets transmitted as read at the end of the measurement interval",
- "type": "number"
- },
- "transmittedTotalPacketsDelta": {
- "description": "Count of all packets transmitted within the measurement interval",
- "type": "number"
- },
- "transmittedUnicastPacketsAccumulated": {
- "description": "Cumulative count of unicast packets transmitted as read at the end of the measurement interval",
- "type": "number"
- },
- "transmittedUnicastPacketsDelta": {
- "description": "Count of unicast packets transmitted within the measurement interval",
- "type": "number"
- },
- "transmittedPercentDiscard": {
- "description": "Percentage of discarded packets transmitted; value = (transmittedDiscardedPacketsDelta / transmittedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise",
- "type": "number"
- },
- "transmittedPercentError": {
- "description": "Percentage of error packets received; value = (transmittedErrorPacketsDelta / transmittedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise",
- "type": "number"
- },
- "transmittedUtilization": {
- "description": "Percentage of utilization transmitted; value = (transmittedOctetsDelta / (speed x (lastEpochMicrosec - startEpochMicrosec))) x 100, if denominator is nonzero, or 0, if otherwise.",
- "type": "number"
- },
- "valuesAreSuspect": {
- "description": "Indicates whether vNicPerformance values are likely inaccurate due to counter overflow or other condtions",
- "type": "string",
- "enum": [ "true", "false" ]
- }
- },
- "additionalProperties": false,
- "required": [ "nicIdentifier", "valuesAreSuspect" ]
- },
- "notificationFields": {
- "description": "notification fields",
- "type": "object",
- "properties": {
- "additionalFields": { "$ref": "#/definitions/hashMap" },
- "arrayOfNamedHashMap": {"$ref": "#/definitions/arrayOfNamedHashMap"},
- "changeContact": {
- "description": "identifier for a contact related to the change",
- "type": "string"
- },
- "changeIdentifier": {
- "description": "system or session identifier associated with the change",
- "type": "string"
- },
- "changeType": {
- "description": "describes what has changed for the entity",
- "type": "string"
- },
- "newState": {
- "description": "new state of the entity",
- "type": "string"
- },
- "oldState": {
- "description": "previous state of the entity",
- "type": "string"
- },
- "notificationFieldsVersion": {
- "description": "version of the notificationFields block",
- "type": "string",
- "enum": [ "2.0" ]
- },
- "stateInterface": {
- "description": "card or port name of the entity that changed state",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [ "changeIdentifier", "changeType", "notificationFieldsVersion" ]
- },
- "otherFields": {
- "description": "fields for events belonging to the 'other' domain of the commonEventHeader domain enumeration",
- "type": "object",
- "properties": {
- "arrayOfNamedHashMap": {"$ref": "#/definitions/arrayOfNamedHashMap"},
- "hashMap": {"$ref": "#/definitions/hashMap"},
- "jsonObjects": {"$ref": "#/definitions/arrayOfJsonObject"},
- "otherFieldsVersion": {
- "description": "version of the otherFields block",
- "type": "string",
- "enum": [ "3.0" ]
- }
- },
- "additionalProperties": false,
- "required": [ "otherFieldsVersion" ]
- },
- "pnfRegistrationFields": {
- "description": "hardware device registration fields",
- "type": "object",
- "properties": {
- "additionalFields": { "$ref": "#/definitions/hashMap" },
- "lastServiceDate": {
- "description": "TS 32.692 dateOfLastService = date of last service; e.g. 15022017",
- "type": "string"
- },
- "macAddress": {
- "description": "MAC address of OAM interface of the unit",
- "type": "string"
- },
- "manufactureDate": {
- "description": "TS 32.692 dateOfManufacture = manufacture date of the unit; 24032016",
- "type": "string"
- },
- "modelNumber": {
- "description": "TS 32.692 versionNumber = version of the unit from vendor; e.g. AJ02. Maps to AAI equip-model",
- "type": "string"
- },
- "oamV4IpAddress": {
- "description": "IPv4 m-plane IP address to be used by the manager to contact the PNF",
- "type": "string"
- },
- "oamV6IpAddress": {
- "description": "IPv6 m-plane IP address to be used by the manager to contact the PNF",
- "type": "string"
- },
- "pnfRegistrationFieldsVersion": {
- "description": "version of the pnfRegistrationFields block",
- "type": "string",
- "enum": [ "2.0" ]
- },
- "serialNumber": {
- "description": "TS 32.692 serialNumber = serial number of the unit; e.g. 6061ZW3",
- "type": "string"
- },
- "softwareVersion": {
- "description": "TS 32.692 swName = active SW running on the unit; e.g. 5gDUv18.05.201",
- "type": "string"
- },
- "unitFamily": {
- "description": "TS 32.692 vendorUnitFamilyType = general type of HW unit; e.g. BBU",
- "type": "string"
- },
- "unitType": {
- "description": "TS 32.692 vendorUnitTypeNumber = vendor name for the unit; e.g. Airscale",
- "type": "string"
- },
- "vendorName": {
- "description": "TS 32.692 vendorName = name of manufacturer; e.g. Nokia. Maps to AAI equip-vendor",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [ "pnfRegistrationFieldsVersion" ]
- },
- "processorDimmAggregateThermalMargin": {
- "description": "intelligent platform management interface (ipmi) processor dual inline memory module aggregate thermal margin metrics",
- "type": "object",
- "properties": {
- "processorDimmAggregateThermalMarginIdentifier": {
- "description": "identifier for the aggregate thermal margin metrics from the processor dual inline memory module",
- "type": "string"
- },
- "thermalMargin": {
- "description": "the difference between the DIMM's current temperature, in celsius, and the DIMM's throttling thermal trip point",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [ "processorDimmAggregateThermalMarginIdentifier", "thermalMargin" ]
- },
- "processStats": {
- "description": "metrics on system processes",
- "type": "object",
- "properties": {
- "forkRate": {
- "description": "the number of threads created since the last reboot",
- "type": "number"
- },
- "processIdentifier": {
- "description": "processIdentifier",
- "type": "string"
- },
- "psStateBlocked": {
- "description": "the number of processes in a blocked state",
- "type": "number"
- },
- "psStatePaging": {
- "description": "the number of processes in a paging state",
- "type": "number"
- },
- "psStateRunning": {
- "description": "the number of processes in a running state",
- "type": "number"
- },
- "psStateSleeping": {
- "description": "the number of processes in a sleeping state",
- "type": "number"
- },
- "psStateStopped": {
- "description": "the number of processes in a stopped state",
- "type": "number"
- },
- "psStateZombie": {
- "description": "the number of processes in a zombie state",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [ "processIdentifier" ]
- },
- "requestError": {
- "description": "standard request error data structure",
- "type": "object",
- "properties": {
- "messageId": {
- "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception",
- "type": "string"
- },
- "text": {
- "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1",
- "type": "string"
- },
- "url": {
- "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents",
- "type": "string"
- },
- "variables": {
- "description": "List of zero or more strings that represent the contents of the variables used by the message text",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [ "messageId", "text" ]
- },
- "sipSignalingFields": {
- "description": "sip signaling fields",
- "type": "object",
- "properties": {
- "additionalInformation": { "$ref": "#/definitions/hashMap"},
- "compressedSip": {
- "description": "the full SIP request/response including headers and bodies",
- "type": "string"
- },
- "correlator": {
- "description": "this is the same for all events on this call",
- "type": "string"
- },
- "localIpAddress": {
- "description": "IP address on xNF",
- "type": "string"
- },
- "localPort": {
- "description": "port on xNF",
- "type": "string"
- },
- "remoteIpAddress": {
- "description": "IP address of peer endpoint",
- "type": "string"
- },
- "remotePort": {
- "description": "port of peer endpoint",
- "type": "string"
- },
- "sipSignalingFieldsVersion": {
- "description": "version of the sipSignalingFields block",
- "type": "string",
- "enum": [ "3.0" ]
- },
- "summarySip": {
- "description": "the SIP Method or Response ('INVITE', '200 OK', 'BYE', etc)",
- "type": "string"
- },
- "vendorNfNameFields": {
- "$ref": "#/definitions/vendorNfNameFields"
- }
- },
- "additionalProperties": false,
- "required": [ "correlator", "localIpAddress", "localPort", "remoteIpAddress",
- "remotePort", "sipSignalingFieldsVersion", "vendorNfNameFields" ]
- },
- "stateChangeFields": {
- "description": "stateChange fields",
- "type": "object",
- "properties": {
- "additionalFields": { "$ref": "#/definitions/hashMap" },
- "newState": {
- "description": "new state of the entity",
- "type": "string",
- "enum": [
- "inService",
- "maintenance",
- "outOfService"
- ]
- },
- "oldState": {
- "description": "previous state of the entity",
- "type": "string",
- "enum": [
- "inService",
- "maintenance",
- "outOfService"
- ]
- },
- "stateChangeFieldsVersion": {
- "description": "version of the stateChangeFields block",
- "type": "string",
- "enum": [ "4.0" ]
- },
- "stateInterface": {
- "description": "card or port name of the entity that changed state",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [ "newState", "oldState", "stateChangeFieldsVersion", "stateInterface" ]
- },
- "syslogFields": {
- "description": "sysLog fields",
- "type": "object",
- "properties": {
- "additionalFields": { "$ref": "#/definitions/hashMap" },
- "eventSourceHost": {
- "description": "hostname of the device",
- "type": "string"
- },
- "eventSourceType": {
- "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction",
- "type": "string"
- },
- "syslogFacility": {
- "description": "numeric code from 0 to 23 for facility--see table in documentation",
- "type": "integer"
- },
- "syslogFieldsVersion": {
- "description": "version of the syslogFields block",
- "type": "string",
- "enum": [ "4.0" ]
- },
- "syslogMsg": {
- "description": "syslog message",
- "type": "string"
- },
- "syslogMsgHost": {
- "description": "hostname parsed from non-VES syslog message",
- "type": "string"
- },
- "syslogPri": {
- "description": "0-192 combined severity and facility",
- "type": "integer"
- },
- "syslogProc": {
- "description": "identifies the application that originated the message",
- "type": "string"
- },
- "syslogProcId": {
- "description": "a change in the value of this field indicates a discontinuity in syslog reporting",
- "type": "number"
- },
- "syslogSData": {
- "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs",
- "type": "string"
- },
- "syslogSdId": {
- "description": "0-32 char in format name@number for example ourSDID@32473",
- "type": "string"
- },
- "syslogSev": {
- "description": "numerical Code for severity derived from syslogPri as remaider of syslogPri / 8",
- "type": "string",
- "enum": [
- "Alert",
- "Critical",
- "Debug",
- "Emergency",
- "Error",
- "Info",
- "Notice",
- "Warning"
- ]
- },
- "syslogTag": {
- "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided",
- "type": "string"
- },
- "syslogTs": {
- "description": "timestamp parsed from non-VES syslog message",
- "type": "string"
- },
- "syslogVer": {
- "description": "IANA assigned version of the syslog protocol specification - typically 1",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [ "eventSourceType", "syslogFieldsVersion", "syslogMsg", "syslogTag" ]
- },
- "thresholdCrossingAlertFields": {
- "description": "fields specific to threshold crossing alert events",
- "type": "object",
- "properties": {
- "additionalFields": { "$ref": "#/definitions/hashMap"},
- "additionalParameters": {
- "description": "performance counters",
- "type": "array",
- "items": {
- "$ref": "#/definitions/counter"
- }
- },
- "alertAction": {
- "description": "Event action",
- "type": "string",
- "enum": [
- "CLEAR",
- "CONT",
- "SET"
- ]
- },
- "alertDescription": {
- "description": "Unique short alert description such as IF-SHUB-ERRDROP",
- "type": "string"
- },
- "alertType": {
- "description": "Event type",
- "type": "string",
- "enum": [
- "CARD-ANOMALY",
- "ELEMENT-ANOMALY",
- "INTERFACE-ANOMALY",
- "SERVICE-ANOMALY"
- ]
- },
- "alertValue": {
- "description": "Calculated API value (if applicable)",
- "type": "string"
- },
- "associatedAlertIdList": {
- "description": "List of eventIds associated with the event being reported",
- "type": "array",
- "items": { "type": "string" }
- },
- "collectionTimestamp": {
- "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
- "type": "string"
- },
- "dataCollector": {
- "description": "Specific performance collector instance used",
- "type": "string"
- },
- "elementType": {
- "description": "type of network element - internal ATT field",
- "type": "string"
- },
- "eventSeverity": {
- "description": "event severity or priority",
- "type": "string",
- "enum": [
- "CRITICAL",
- "MAJOR",
- "MINOR",
- "WARNING",
- "NORMAL"
- ]
- },
- "eventStartTimestamp": {
- "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
- "type": "string"
- },
- "interfaceName": {
- "description": "Physical or logical port or card (if applicable)",
- "type": "string"
- },
- "networkService": {
- "description": "network name - internal ATT field",
- "type": "string"
- },
- "possibleRootCause": {
- "description": "Reserved for future use",
- "type": "string"
- },
- "thresholdCrossingFieldsVersion": {
- "description": "version of the thresholdCrossingAlertFields block",
- "type": "string",
- "enum": [ "4.0" ]
- }
- },
- "additionalProperties": false,
- "required": [
- "additionalParameters",
- "alertAction",
- "alertDescription",
- "alertType",
- "collectionTimestamp",
- "eventSeverity",
- "eventStartTimestamp",
- "thresholdCrossingFieldsVersion"
- ]
- },
- "vendorNfNameFields": {
- "description": "provides vendor, nf and nfModule identifying information",
- "type": "object",
- "properties": {
- "vendorName": {
- "description": "network function vendor name",
- "type": "string"
- },
- "nfModuleName": {
- "description": "name of the nfModule generating the event",
- "type": "string"
- },
- "nfName": {
- "description": "name of the network function generating the event",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [ "vendorName" ]
- },
- "voiceQualityFields": {
- "description": "provides statistics related to customer facing voice products",
- "type": "object",
- "properties": {
- "additionalInformation": { "$ref": "#/definitions/hashMap"},
- "calleeSideCodec": {
- "description": "callee codec for the call",
- "type": "string"
- },
- "callerSideCodec": {
- "description": "caller codec for the call",
- "type": "string"
- },
- "correlator": {
- "description": "this is the same for all events on this call",
- "type": "string"
- },
- "endOfCallVqmSummaries": {
- "$ref": "#/definitions/endOfCallVqmSummaries"
- },
- "phoneNumber": {
- "description": "phone number associated with the correlator",
- "type": "string"
- },
- "midCallRtcp": {
- "description": "Base64 encoding of the binary RTCP data excluding Eth/IP/UDP headers",
- "type": "string"
- },
- "vendorNfNameFields": {
- "$ref": "#/definitions/vendorNfNameFields"
- },
- "voiceQualityFieldsVersion": {
- "description": "version of the voiceQualityFields block",
- "type": "string",
- "enum": [ "4.0" ]
- }
- },
- "additionalProperties": false,
- "required": [ "calleeSideCodec", "callerSideCodec", "correlator", "midCallRtcp",
- "vendorNfNameFields", "voiceQualityFieldsVersion" ]
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/netconf/pnf-simulator.data.xml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/netconf/pnf-simulator.data.xml
deleted file mode 100644
index 9d648bba7..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/netconf/pnf-simulator.data.xml
+++ /dev/null
@@ -1,4 +0,0 @@
-<config xmlns="http://nokia.com/pnf-simulator">
- <itemValue1>42</itemValue1>
- <itemValue2>35</itemValue2>
-</config>
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/netconf/pnf-simulator.yang b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/netconf/pnf-simulator.yang
deleted file mode 100644
index d7fc2f26e..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/netconf/pnf-simulator.yang
+++ /dev/null
@@ -1,9 +0,0 @@
-module pnf-simulator {
- namespace "http://nokia.com/pnf-simulator";
- prefix config;
- container config {
- config true;
- leaf itemValue1 {type uint32;}
- leaf itemValue2 {type uint32;}
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/pom.xml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/pom.xml
deleted file mode 100644
index abfa615cc..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/pom.xml
+++ /dev/null
@@ -1,345 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>org.onap.oparent</groupId>
- <artifactId>oparent</artifactId>
- <version>1.2.1</version>
- </parent>
-
- <groupId>org.onap.masspnfsimulator</groupId>
- <artifactId>masspnf-simulator</artifactId>
- <version>1.0.0-SNAPSHOT</version>
-
- <name>pnf-simulator-sandbox</name>
-
- <properties>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <maven.compiler.source>1.8</maven.compiler.source>
- <maven.compiler.target>1.8</maven.compiler.target>
- <maven.build.timestamp.format>yyyyMMdd'T'HHmmss</maven.build.timestamp.format>
-
- <simulator.main.class>org.onap.pnfsimulator.Main</simulator.main.class>
- <docker.image.tag>latest</docker.image.tag>
- <junit.jupiter.version>5.1.0</junit.jupiter.version>
- <junit.vintage.version>5.1.0</junit.vintage.version>
- <docker.image.name>onap/${project.artifactId}</docker.image.name>
-
- <dependency.directory.name>libs</dependency.directory.name>
- <dependency.directory.location>${project.build.directory}/${dependency.directory.name}
- </dependency.directory.location>
-
- <onap.nexus.dockerregistry.daily>nexus3.onap.org:10003</onap.nexus.dockerregistry.daily>
- </properties>
-
- <dependencies>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter</artifactId>
- <version>2.0.2.RELEASE</version>
- <exclusions>
- <exclusion>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-logging</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-web</artifactId>
- <version>2.0.2.RELEASE</version>
- </dependency>
-<dependency>
- <groupId>com.fasterxml.jackson.dataformat</groupId>
- <artifactId>jackson-dataformat-yaml</artifactId>
- <version>2.9.8</version>
-</dependency>
-
- <dependency>
- <groupId>ch.qos.logback</groupId>
- <artifactId>logback-classic</artifactId>
- <version>1.2.3</version>
- </dependency>
- <dependency>
- <groupId>ch.qos.logback</groupId>
- <artifactId>logback-core</artifactId>
- <version>1.2.3</version>
- </dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
- <version>1.7.25</version>
- </dependency>
- <dependency>
- <groupId>commons-io</groupId>
- <artifactId>commons-io</artifactId>
- <version>2.6</version>
- </dependency>
- <dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- <version>20180130</version>
- </dependency>
- <dependency>
- <groupId>org.everit.json</groupId>
- <artifactId>org.everit.json.schema</artifactId>
- <version>1.3.0</version>
- </dependency>
- <dependency>
- <groupId>com.github.fge</groupId>
- <artifactId>json-schema-validator</artifactId>
- <version>2.2.6</version>
- </dependency>
- <dependency>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- <version>2.8.2</version>
- </dependency>
- <dependency>
- <groupId>org.apache.httpcomponents</groupId>
- <artifactId>httpclient</artifactId>
- <version>4.5.5</version>
- </dependency>
- <dependency>
- <groupId>com.google.guava</groupId>
- <artifactId>guava</artifactId>
- <version>21.0</version>
- </dependency>
- <dependency>
- <groupId>commons-cli</groupId>
- <artifactId>commons-cli</artifactId>
- <version>1.4</version>
- </dependency>
- <dependency>
- <groupId>org.apache.commons</groupId>
- <artifactId>commons-lang3</artifactId>
- <version>3.7</version>
- </dependency>
- <dependency>
- <groupId>org.onosproject</groupId>
- <artifactId>jnc</artifactId>
- <version>1.0</version>
- </dependency>
-
- <dependency>
- <groupId>org.junit.jupiter</groupId>
- <artifactId>junit-jupiter-engine</artifactId>
- <version>5.1.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.junit.jupiter</groupId>
- <artifactId>junit-jupiter-migrationsupport</artifactId>
- <version>${junit.jupiter.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.assertj</groupId>
- <artifactId>assertj-core</artifactId>
- <version>3.9.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.sshd</groupId>
- <artifactId>sshd-core</artifactId>
- <version>0.9.0</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.mockito</groupId>
- <artifactId>mockito-core</artifactId>
- <version>2.18.3</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.springframework</groupId>
- <artifactId>spring-test</artifactId>
- <version>5.0.4.RELEASE</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-test</artifactId>
- <version>2.0.1.RELEASE</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-assembly-plugin</artifactId>
- <configuration>
- <descriptor>src/assembly/resources.xml</descriptor>
- <finalName>${project.artifactId}-${project.version}</finalName>
- </configuration>
- <executions>
- <execution>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <version>3.7.0</version>
- <configuration>
- <source>${maven.compiler.source}</source>
- <target>${maven.compiler.target}</target>
- <showWarnings>true</showWarnings>
- <showDeprecation>true</showDeprecation>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-jar-plugin</artifactId>
- <version>3.0.2</version>
- <configuration>
- <archive>
- <manifestEntries>
- <Main-Class>${simulator.main.class}</Main-Class>
- <Build-Time>${maven.build.timestamp}</Build-Time>
- </manifestEntries>
- </archive>
- </configuration>
- </plugin>
- <plugin>
- <groupId>pl.project13.maven</groupId>
- <artifactId>git-commit-id-plugin</artifactId>
- <version>2.2.4</version>
- <executions>
- <execution>
- <id>get-commit-info</id>
- <goals>
- <goal>revision</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <dotGitDirectory>${project.basedir}/.git</dotGitDirectory>
- <generateGitPropertiesFile>true</generateGitPropertiesFile>
- <includeOnlyProperties>git.commit.id.abbrev</includeOnlyProperties>
- </configuration>
- </plugin>
- <plugin>
- <artifactId>maven-surefire-plugin</artifactId>
- <version>2.19</version>
- <dependencies>
- <dependency>
- <groupId>org.junit.platform</groupId>
- <artifactId>junit-platform-surefire-provider</artifactId>
- <version>1.1.1</version>
- </dependency>
- </dependencies>
- <configuration>
- <detail>true</detail>
- <printSummary>true</printSummary>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-dependency-plugin</artifactId>
- <configuration>
- <outputDirectory>${dependency.directory.location}</outputDirectory>
- <includeScope>runtime</includeScope>
- <silent>true</silent>
- </configuration>
- <executions>
- <execution>
- <id>copy-external-dependencies</id>
- <phase>package</phase>
- <goals>
- <goal>copy-dependencies</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>com.spotify</groupId>
- <artifactId>docker-maven-plugin</artifactId>
- <version>1.0.0</version>
- <configuration>
- <registryUrl>${onap.nexus.dockerregistry.daily}</registryUrl>
- <imageName>${onap.nexus.dockerregistry.daily}/${docker.image.name}</imageName>
- <forceTags>true</forceTags>
- <imageTags>
- <tag>${project.version}</tag>
- <tag>${project.version}-${maven.build.timestamp}</tag>
- </imageTags>
- <baseImage>openjdk:8-jre-alpine</baseImage>
- <cmd>java -cp ${dependency.directory.name}/*:${project.build.finalName}.jar ${simulator.main.class}</cmd>
- <resources>
- <resource>
- <targetPath>${dependency.directory.name}</targetPath>
- <directory>${dependency.directory.location}</directory>
- </resource>
- <resource>
- <targetPath>/</targetPath>
- <directory>${project.build.directory}</directory>
- <include>${project.build.finalName}.jar</include>
- </resource>
- </resources>
- <forceTags>true</forceTags>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.jacoco</groupId>
- <artifactId>jacoco-maven-plugin</artifactId>
- <version>0.8.1</version>
- <configuration>
- <excludes>
- <exclude>org/onap/pnfsimulator/Main.class</exclude>
- </excludes>
- </configuration>
- <executions>
- <execution>
- <id>default-prepare-agent</id>
- <goals>
- <goal>prepare-agent</goal>
- </goals>
- </execution>
- <execution>
- <id>report</id>
- <phase>prepare-package</phase>
- <goals>
- <goal>report</goal>
- </goals>
- </execution>
- <execution>
- <id>check</id>
- <goals>
- <goal>check</goal>
- </goals>
- <configuration>
- <rules>
- <rule>
- <element>CLASS</element>
- <limits>
- <limit>
- <value>COVEREDRATIO</value>
- <minimum>0.70</minimum>
- </limit>
- <limit>
- <counter>BRANCH</counter>
- <value>COVEREDRATIO</value>
- <minimum>0.75</minimum>
- </limit>
- </limits>
- </rule>
- </rules>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-</project>
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/simulator.sh b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/simulator.sh
deleted file mode 100755
index 3de96dafe..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/simulator.sh
+++ /dev/null
@@ -1,265 +0,0 @@
-#!/usr/bin/env bash
-
-set -euo pipefail
-
-COMPOSE_FILE_NAME=docker-compose.yml
-NETOPEER_CONTAINER_NAME=netopeer
-SIMULATOR_CONTAINER_NAME=pnf-simulator
-SIMULATOR_PORT=5000
-
-SIMULATOR_BASE=http://localhost:$SIMULATOR_PORT/simulator/
-SIMULATOR_START_URL=$SIMULATOR_BASE/start
-SIMULATOR_STOP_URL=$SIMULATOR_BASE/stop
-SIMULATOR_STATUS_URL=$SIMULATOR_BASE/status
-
-RUNNING_COMPOSE_CONFIG=$COMPOSE_FILE_NAME
-
-function main(){
-
- COMMAND=${1:-"help"}
-
- case $COMMAND in
- "compose")
- compose $2 $3 $4 $5 $6 $7 $8 $9 "${10}" "${11}" "${12}" "${13}" "${14}" ;;
- #IPGW, #IPSUBNET, #I, #URLVES, #IPPNFSIM, #IPFILESERVER, #TYPEFILESERVER, #PORTSFTP, #PORTFTPS, #IPFTPS, #IPSFTP, #FTPS_PASV_MIN, #FTPS_PAST_MAX
- "build")
- build_image;;
- "start")
- start $COMPOSE_FILE_NAME;;
- "stop")
- if [[ -z ${2+x} ]]
- then
- echo "Error: action 'stop' requires the instance identifier"
- exit
- fi
- stop $2;;
- "run-simulator")
- run_simulator;;
- "trigger-simulator")
- trigger_simulator;;
- "stop-simulator")
- stop_simulator;;
- "status")
- get_status;;
- "clear-logs")
- clear_logs;;
- *)
- print_help;;
- esac
-}
-
-
-function get_pnfsim_ip() {
-
- export IPPNFSIM=$(cat ./config/config.yml | grep ippnfsim | awk -F'[ ]' '{print $2}')
- echo "PNF-Sim IP: " $IPPNFSIM
-
- export SIMULATOR_BASE=http://$IPPNFSIM:$SIMULATOR_PORT/simulator/
- export SIMULATOR_START_URL=$SIMULATOR_BASE/start
- export SIMULATOR_STOP_URL=$SIMULATOR_BASE/stop
- export SIMULATOR_STATUS_URL=$SIMULATOR_BASE/status
-}
-
-function compose(){
- #creating custom docker-compose based on IP arguments
- #creting config.json by injecting the same IP
-
- export IPGW=$1
- export IPSUBNET=$2
- export I=$3
- export URLVES=$4
- export IPPNFSIM=$5
- export IPFILESERVER=$6
- export TYPEFILESERVER=$7
- export PORTSFTP=$8
- export PORTFTPS=$9
- export IPFTPS=${10}
- export IPSFTP=${11}
- export FTPS_PASV_MIN=${12}
- export FTPS_PASV_MAX=${13}
- LOCALTIME=$(ls -l /etc/localtime)
- export TIMEZONE=${LOCALTIME//*zoneinfo\/}
-
- #will insert $I to distinguish containers, networks properly
- #docker compose cannot substitute these, as they are keys, not values.
- envsubst < docker-compose-template.yml > docker-compose-temporary.yml
- #variable substitution
- docker-compose -f docker-compose-temporary.yml config > docker-compose.yml
- rm docker-compose-temporary.yml
-
- ./ROP_file_creator.sh $I &
-
- write_config $URLVES $IPFILESERVER $TYPEFILESERVER $PORTSFTP $PORTFTPS $IPPNFSIM
-
- cd config
- envsubst < vsftpd_ssl-TEMPLATE.conf > vsftpd_ssl.conf
- cd -
-
- set_vsftpd_file_owner
-
-}
-
-function build_image(){
- if [ -f pom.xml ]; then
- mvn clean package docker:build -Dcheckstyle.skip
- else
- echo "pom.xml file not found"
- exit 1
- fi
-}
-
-function set_vsftpd_file_owner() {
- sudo chown root ./config/vsftpd_ssl.conf
-}
-
-
-function write_config(){
- #building a YML file for usage in Java
- echo "urlves: $1" > config/config.yml
- echo "urlsftp: sftp://onap:pano@$2:$4" >> config/config.yml
- echo "urlftps: ftps://onap:pano@$2:$5" >> config/config.yml
- echo "ippnfsim: $6" >> config/config.yml
- echo "typefileserver: $3" >> config/config.yml
-}
-
-function start(){
-
- get_pnfsim_ip
- if [[ $(running_containers) ]]; then
- echo "Simulator containers are already up"
- else
- echo "Starting simulator containers using netconf model specified in config/netconf.env"
- set_vsftpd_file_owner
- archive_logs
- docker-compose -f $1 up -d
- RUNNING_COMPOSE_CONFIG=$1
- fi
-}
-
-function running_containers(){
- docker-compose -f $COMPOSE_FILE_NAME ps -q
-}
-
-function stop(){
- get_pnfsim_ip
- kill $(ps -ef | grep "[.]/ROP_file_creator.sh $1" | head -n 1 | awk '{print $2}')
-
- if [[ $(running_containers) ]]; then
- docker-compose -f $RUNNING_COMPOSE_CONFIG down
- docker-compose -f $RUNNING_COMPOSE_CONFIG rm
- else
- echo "Simulator containers are already down"
- fi
-}
-
-function trigger_simulator(){
-get_pnfsim_ip
-cat << EndOfMessage
-Simulator response:
-$(curl -s -X POST -H "Content-Type: application/json" -H "X-ONAP-RequestID: 123" -H "X-InvocationID: 456" -d @config/config.json $SIMULATOR_START_URL)
-EndOfMessage
-}
-
-function run_simulator(){
-get_pnfsim_ip
-cat << EndOfMessage
-Simulator response:
-$(curl -s -X POST -H "Content-Type: application/json" -H "X-ONAP-RequestID: 123" -H "X-InvocationID: 456" -d @config/$CONFIG_JSON $SIMULATOR_START_URL)
-EndOfMessage
-}
-
-function stop_simulator(){
-get_pnfsim_ip
-cat << EndOfMessage
-Simulator response:
-$(curl -s -X POST $SIMULATOR_STOP_URL)
-EndOfMessage
-}
-
-function get_status(){
- get_pnfsim_ip
- if [[ $(running_containers) ]]; then
- print_status
- else
- echo "Simulator containers are down"
- fi
-}
-
-function print_status(){
-get_pnfsim_ip
-cat << EndOfMessage
-$(docker-compose -f $RUNNING_COMPOSE_CONFIG ps)
-
-Simulator response:
-$(curl -s -X GET $SIMULATOR_STATUS_URL)
-EndOfMessage
-}
-
-function print_help(){
-cat << EndOfMessage
-Available options:
-build - locally builds simulator image from existing code
-start - starts simulator and netopeer2 containers using remote simulator image and specified model name
-compose - customize the docker-compose and configuration based on arguments
-trigger-simulator - start monitoring the ROP files and report periodically
-run-simulator - starts sending PNF registration messages with parameters specified in config.json
-stop-simulator - stop sending PNF registration messages
-stop - stops both containers
-status - prints simulator status
-clear-logs - deletes log folder
-
-Starting simulation:
-- Setup the instance of this simulator by:
- - ./simulator.sh compose IPGW IPSUBNET I URLVES IPPNFSIM IPFILESERVER TYPEFILESERVER PORTSFTP PORTFTPS IPFTPS IPSFTP
- where IPGW and IPSUBNET will be used for docker network
- where I is the integer suffix to differentiate instances
- where URLVES is the URL of the VES collector
- where IPPNFSIM, IPFILESERVER, IPFTPS, IPSFTP are the IP addresses for containers
- where TYPEFILESERVER is the type of fileserver, i.e., FTPS or SFTP
- where PORTSFTP, PORTFTPS are the SFTP and FTPS ports
- e.g. ./simulator.sh compose 10.11.0.65 10.11.0.64 3 http://10.11.0.69:10000/eventListener/v7 10.11.0.2 10.11.0.66 ftps 2001 2002 10.11.0.67 10.11.0.68
-
-- Setup environment with "./simulator.sh start". It will download required docker images from the internet and run them on docker machine
-- To start the simulation use "./simulator.sh run-simulator", which will start sending PNF registration messages with parameters specified in config.json {TODO, might not be needed}
-
-To stop simulation use "./simulator.sh stop-simulator" command. To check simulator's status use "./simulator.sh status".
-If you want to change message parameters simply edit config.json, then start the simulation with "./simulator.sh run-simulator" again
-Logs are written to logs/pnf-simulator.log.
-
-If you change the source code you have to rebuild image with "./simulator.sh build" and run "./simulator.sh start" again
-EndOfMessage
-}
-
-function archive_logs(){
-
- if [ -d logs ]; then
- echo "Moving log file to archive"
- DIR_PATH=logs/archive/simulator[$(timestamp)]
- mkdir -p $DIR_PATH
- if [ -f logs/pnfsimulator.log ]; then
- mv logs/pnfsimulator.log $DIR_PATH
- fi
-
- if [ -f logs/*.xml ]; then
- mv logs/*.xml $DIR_PATH
- fi
-
- else
- mkdir logs
- fi
-}
-
-function clear_logs(){
-
- if [[ $(running_containers) ]]; then
- echo "Cannot delete logs when simulator is running"
- else
- rm -rf logs
- fi
-}
-
-function timestamp(){
- date "+%Y-%m-%d_%T"
-}
-
-main $@ \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/assembly/resources.xml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/assembly/resources.xml
deleted file mode 100644
index 063c7100a..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/assembly/resources.xml
+++ /dev/null
@@ -1,75 +0,0 @@
-<assembly>
- <id>resources</id>
- <formats>
- <format>zip</format>
- </formats>
-
- <fileSets>
- <fileSet>
- <includes>
- <include>simulator.sh</include>
- </includes>
- <lineEnding>unix</lineEnding>
- <fileMode>0755</fileMode>
- </fileSet>
- <fileSet>
- <includes>
- <include>docker-compose.yml</include>
- </includes>
- <lineEnding>unix</lineEnding>
- <fileMode>0644</fileMode>
- </fileSet>
- <fileSet>
- <directory>config</directory>
- <outputDirectory>config</outputDirectory>
- <includes>
- <include>**/*</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>deployment</directory>
- <outputDirectory>deployment</outputDirectory>
- <includes>
- <include>**/*</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>ftpes</directory>
- <outputDirectory>ftpes</outputDirectory>
- <includes>
- <include>**/*</include>
- </includes>
- <fileMode>0644</fileMode>
-
- </fileSet>
- <fileSet>
- <directory>json_schema</directory>
- <outputDirectory>json_schema</outputDirectory>
- <includes>
- <include>**/*</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>netconf</directory>
- <outputDirectory>netconf</outputDirectory>
- <includes>
- <include>**/*</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>sftp</directory>
- <outputDirectory>sftp</outputDirectory>
- <includes>
- <include>**/*</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>ssh</directory>
- <outputDirectory>ssh</outputDirectory>
- <includes>
- <include>**/*</include>
- </includes>
-
- </fileSet>
- </fileSets>
-</assembly> \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/ConfigurationProvider.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/ConfigurationProvider.java
deleted file mode 100644
index 15c687e2c..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/ConfigurationProvider.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package org.onap.pnfsimulator;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
-import java.io.File;
-
-public class ConfigurationProvider {
- static PnfSimConfig conf = null;
-
- public static PnfSimConfig getConfigInstance() {
-
- ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
- try {
- File file = new File("./config/config.yml");
-
- conf = mapper.readValue(file, PnfSimConfig.class);
- System.out.println("Ves URL: " + conf.getUrlves());
- System.out.println("SFTP URL: " + conf.getUrlsftp());
- System.out.println("FTPS URL: " + conf.getUrlftps());
- System.out.println("PNF sim IP: " + conf.getIppnfsim());
-
- } catch (Exception e) {
- e.printStackTrace();
- }
- return conf;
- }
-
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/FileProvider.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/FileProvider.java
deleted file mode 100644
index beb564da8..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/FileProvider.java
+++ /dev/null
@@ -1,41 +0,0 @@
-package org.onap.pnfsimulator;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import org.onap.pnfsimulator.simulator.validation.NoRopFilesException;
-
-public class FileProvider {
-
- public List<String> getFiles() throws NoRopFilesException {
-
- List<String> files = queryFiles();
-
- files.sort(Collections.reverseOrder());
-
- List<String> fileListSorted = new ArrayList<>();
- for (String f : files) {
- fileListSorted.add(f);
- }
- return fileListSorted;
- }
-
- private static List<String> queryFiles() throws NoRopFilesException {
-
- File folder = new File("./files/onap/");
- File[] listOfFiles = folder.listFiles();
- if (listOfFiles == null || listOfFiles.length == 0) {
- throw new NoRopFilesException("No ROP files found in specified directory");
- }
-
- List<String> results = new ArrayList<>();
- for (int i = 0; i < listOfFiles.length; i++) {
- if (listOfFiles[i].isFile()) {
- results.add(listOfFiles[i].getName());
- }
- }
-
- return results;
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/Main.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/Main.java
deleted file mode 100644
index a66bedbcb..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/Main.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================ Copyright (C)
- * 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================ Licensed under
- * the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License. ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator;
-
-import java.util.concurrent.TimeUnit;
-import org.onap.pnfsimulator.message.MessageProvider;
-import org.onap.pnfsimulator.simulator.validation.JSONValidator;
-import org.springframework.boot.SpringApplication;
-import org.springframework.boot.autoconfigure.SpringBootApplication;
-import org.springframework.context.annotation.Bean;
-import org.springframework.scheduling.annotation.EnableAsync;
-
-@SpringBootApplication
-@EnableAsync
-public class Main {
-
- public static void main(String[] args) throws InterruptedException {
- SpringApplication.run(Main.class, args);
-
- TimeUnit.SECONDS.sleep(5);
- System.out.println("Start sending VES events");
-
-
- }
-
- @Bean
- public MessageProvider messageProvider() {
- return new MessageProvider();
- }
-
- @Bean
- public JSONValidator jsonValidator() {
- return new JSONValidator();
- }
-
-}
-
-
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/PnfSimConfig.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/PnfSimConfig.java
deleted file mode 100644
index 3dd4aba1c..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/PnfSimConfig.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package org.onap.pnfsimulator;
-
-public class PnfSimConfig {
- private String urlves;
- private String urlftps;
- private String urlsftp;
- private String ippnfsim;
- private String typefileserver;
-
- public String getTypefileserver() {
- return typefileserver;
- }
-
- public void setTypefileserver(String typefileserver) {
- this.typefileserver = typefileserver;
- }
-
-
- public String getUrlves() {
- return urlves;
- }
-
- public void setUrlves(String urlves) {
- this.urlves = urlves;
- }
-
- public String getUrlftps() {
- return urlftps;
- }
-
- public void setUrlftps(String urlftps) {
- this.urlftps = urlftps;
- }
-
- public String getUrlsftp() {
- return urlsftp;
- }
-
- public void setUrlsftp(String urlsftp) {
- this.urlsftp = urlsftp;
- }
-
- public void setIppnfsim(String ippnfsim) {
- this.ippnfsim = ippnfsim;
- }
-
- public String getIppnfsim() {
- return ippnfsim;
- }
-
- @Override
- public String toString() {
- return "PnfSimConfig [vesip=" + urlves + ", urlftps=" + urlftps + ", urlsftp=" + urlsftp + ", ippnfsim="
- + ippnfsim + ", typefileserver=" + typefileserver + "]";
- }
-
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/logback.xml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/logback.xml
deleted file mode 100644
index 0f6d9de6c..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/logback.xml
+++ /dev/null
@@ -1,50 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Configuration complete="true" compact="true">
-
- <Property name="outputFilename" value="pnfsimulator_output"/>
- <Property name="log-path" value="/var/log/ONAP/pnfsimulator"/>
- <Property name="archive" value="/var/log/ONAP/pnfsimulator/archive"/>
- <property name="maxFileSize" value="50MB"/>
- <property name="maxHistory" value="30"/>
- <property name="totalSizeCap" value="10GB"/>
-
- <appender name="Console" target="SYSTEM_OUT" class="ch.qos.logback.core.ConsoleAppender">
- <encoder>
- <Pattern>%nopexception%logger
- |%date{yyyy-MM-dd'T'HH:mm:ss.SSSXXX,UTC}
- |%level
- |%replace(%replace(%message){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%mdc){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%rootException){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%marker){'\t','\\\\t'}){'\n','\\\\n'}
- |%thread
- |%n</Pattern>
- </encoder>
- </appender>
-
- <appender name="ROLLING-FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <encoder>
- <pattern>%nopexception%logger
- |%date{yyyy-MM-dd'T'HH:mm:ss.SSSXXX,UTC}
- |%level
- |%replace(%replace(%message){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%mdc){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%rootException){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%marker){'\t','\\\\t'}){'\n','\\\\n'}
- |%thread
- |%n</pattern>
- </encoder>
- <File>${log-path}/${outputFilename}.log</File>
- <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
- <FileNamePattern>${archive}/${outputFilename}.%d{yyyy-MM-dd}.%i.log.zip</FileNamePattern>
- <MaxFileSize>${maxFileSize}</MaxFileSize>
- <MaxHistory>${maxHistory}</MaxHistory>
- <TotalSizeCap>${totalSizeCap}</TotalSizeCap>
- </rollingPolicy>
- </appender>
-
- <root level="debug">
- <appender-ref ref="Console" />
- <appender-ref ref="ROLLING-FILE" />
- </root>
-</Configuration>
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/logging/MDCVariables.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/logging/MDCVariables.java
deleted file mode 100644
index 8f6fe3b66..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/logging/MDCVariables.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.logging;
-
-public final class MDCVariables {
-
- public static final String X_ONAP_REQUEST_ID = "X-ONAP-RequestID";
- public static final String X_INVOCATION_ID = "X-InvocationID";
- public static final String REQUEST_ID = "RequestID";
- public static final String INVOCATION_ID = "InvocationID";
- public static final String INSTANCE_UUID = "InstanceUUID";
- public static final String RESPONSE_CODE = "ResponseCode";
- public static final String SERVICE_NAME = "ServiceName";
-
- private MDCVariables() {
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/JSONObjectFactory.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/JSONObjectFactory.java
deleted file mode 100644
index ded991044..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/JSONObjectFactory.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.message;
-
-import static org.onap.pnfsimulator.message.MessageConstants.COMPRESSION;
-import static org.onap.pnfsimulator.message.MessageConstants.COMPRESSION_VALUE;
-import static org.onap.pnfsimulator.message.MessageConstants.EVENT_ID;
-import static org.onap.pnfsimulator.message.MessageConstants.FILE_FORMAT_TYPE;
-import static org.onap.pnfsimulator.message.MessageConstants.FILE_FORMAT_TYPE_VALUE;
-import static org.onap.pnfsimulator.message.MessageConstants.FILE_FORMAT_VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.FILE_FORMAT_VERSION_VALUE;
-import static org.onap.pnfsimulator.message.MessageConstants.HASH_MAP;
-import static org.onap.pnfsimulator.message.MessageConstants.INTERNAL_HEADER_FIELDS;
-import static org.onap.pnfsimulator.message.MessageConstants.LAST_EPOCH_MICROSEC;
-import static org.onap.pnfsimulator.message.MessageConstants.LOCATION;
-import static org.onap.pnfsimulator.message.MessageConstants.NAME;
-import static org.onap.pnfsimulator.message.MessageConstants.NOTIFICATION_FIELDS_VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.NOTIFICATION_FIELDS_VERSION_VALUE;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_LAST_SERVICE_DATE;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_MANUFACTURE_DATE;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_REGISTRATION_FIELDS_VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_REGISTRATION_FIELDS_VERSION_VALUE;
-import static org.onap.pnfsimulator.message.MessageConstants.PRIORITY;
-import static org.onap.pnfsimulator.message.MessageConstants.PRIORITY_NORMAL;
-import static org.onap.pnfsimulator.message.MessageConstants.REPORTING_ENTITY_NAME;
-import static org.onap.pnfsimulator.message.MessageConstants.SEQUENCE;
-import static org.onap.pnfsimulator.message.MessageConstants.SEQUENCE_NUMBER;
-import static org.onap.pnfsimulator.message.MessageConstants.SOURCE_NAME;
-import static org.onap.pnfsimulator.message.MessageConstants.START_EPOCH_MICROSEC;
-import static org.onap.pnfsimulator.message.MessageConstants.TIME_ZONE_OFFSET;
-import static org.onap.pnfsimulator.message.MessageConstants.VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.VERSION_NUMBER;
-import static org.onap.pnfsimulator.message.MessageConstants.VES_EVENT_LISTENER_VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.VES_EVENT_LISTENER_VERSION_NUMBER;
-import java.io.File;
-import java.util.List;
-import java.util.TimeZone;
-import org.json.JSONArray;
-import org.json.JSONObject;
-
-final class JSONObjectFactory {
-
- static JSONObject generateConstantCommonEventHeader() {
- JSONObject commonEventHeader = new JSONObject();
- long timestamp = System.currentTimeMillis();
- commonEventHeader.put(EVENT_ID, generateEventId());
- commonEventHeader.put(TIME_ZONE_OFFSET, generateTimeZone(timestamp));
- commonEventHeader.put(LAST_EPOCH_MICROSEC, timestamp);
- commonEventHeader.put(PRIORITY, PRIORITY_NORMAL);
- commonEventHeader.put(SEQUENCE, SEQUENCE_NUMBER);
- commonEventHeader.put(START_EPOCH_MICROSEC, timestamp);
- commonEventHeader.put(INTERNAL_HEADER_FIELDS, new JSONObject());
- commonEventHeader.put(VERSION, VERSION_NUMBER);
- commonEventHeader.put(VES_EVENT_LISTENER_VERSION, VES_EVENT_LISTENER_VERSION_NUMBER);
- String absPath = new File("").getAbsolutePath();
- String nodeName = absPath.substring(absPath.lastIndexOf(File.separator)+1);
- commonEventHeader.put(SOURCE_NAME, nodeName);
- commonEventHeader.put(REPORTING_ENTITY_NAME, nodeName);
- return commonEventHeader;
- }
-
- static JSONObject generatePnfRegistrationFields() {
- JSONObject pnfRegistrationFields = new JSONObject();
- pnfRegistrationFields.put(PNF_REGISTRATION_FIELDS_VERSION, PNF_REGISTRATION_FIELDS_VERSION_VALUE);
- pnfRegistrationFields.put(PNF_LAST_SERVICE_DATE, String.valueOf(System.currentTimeMillis()));
- pnfRegistrationFields.put(PNF_MANUFACTURE_DATE, String.valueOf(System.currentTimeMillis()));
- return pnfRegistrationFields;
- }
-
- static JSONObject generateNotificationFields() {
- JSONObject notificationFields = new JSONObject();
- notificationFields.put(NOTIFICATION_FIELDS_VERSION, NOTIFICATION_FIELDS_VERSION_VALUE);
- return notificationFields;
- }
-
- static JSONArray generateArrayOfNamedHashMap(List<String> fileList, String xnfUrl) {
- JSONArray arrayOfNamedHashMap = new JSONArray();
-
- for (String fileName : fileList) {
- JSONObject namedHashMap = new JSONObject();
- namedHashMap.put(NAME, fileName);
-
- JSONObject hashMap = new JSONObject();
- hashMap.put(FILE_FORMAT_TYPE, FILE_FORMAT_TYPE_VALUE);
- hashMap.put(LOCATION, xnfUrl.concat(fileName));
- hashMap.put(FILE_FORMAT_VERSION, FILE_FORMAT_VERSION_VALUE);
- hashMap.put(COMPRESSION, COMPRESSION_VALUE);
- namedHashMap.put(HASH_MAP, hashMap);
-
- arrayOfNamedHashMap.put(namedHashMap);
- }
-
-
- return arrayOfNamedHashMap;
- }
-
-
- static String generateEventId() {
- String timeAsString = String.valueOf(System.currentTimeMillis());
- return String.format("FileReady_%s", timeAsString);
- }
-
- static String generateTimeZone(long timestamp) {
- TimeZone timeZone = TimeZone.getDefault();
- int offsetInMillis = timeZone.getOffset(timestamp);
- String offsetHHMM = String.format("%02d:%02d", Math.abs(offsetInMillis / 3600000),
- Math.abs((offsetInMillis / 60000) % 60));
- return ("UTC" + (offsetInMillis >= 0 ? "+" : "-") + offsetHHMM);
- }
-
- private JSONObjectFactory() {
-
- }
-
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/MessageConstants.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/MessageConstants.java
deleted file mode 100644
index 6ff6e5dc8..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/MessageConstants.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.message;
-
-public final class MessageConstants {
-
- public static final String SIMULATOR_PARAMS = "simulatorParams";
- public static final String COMMON_EVENT_HEADER_PARAMS = "commonEventHeaderParams";
- public static final String PNF_REGISTRATION_PARAMS = "pnfRegistrationParams";
- public static final String NOTIFICATION_PARAMS = "notificationParams";
-
- static final String COMMON_EVENT_HEADER = "commonEventHeader";
- static final String PNF_REGISTRATION_FIELDS = "pnfRegistrationFields";
- static final String NOTIFICATION_FIELDS = "notificationFields";
- static final String EVENT = "event";
-
- //=============================================================================================
- //Simulation parameters
- public static final String VES_SERVER_URL = "vesServerUrl";
- public static final String TEST_DURATION = "testDuration";
- public static final String MESSAGE_INTERVAL = "messageInterval";
-
- //=============================================================================================
- //commonEventHeader
- //parameters
- static final String DOMAIN = "domain";
- static final String EVENT_ID = "eventId";
- static final String TIME_ZONE_OFFSET = "timeZoneOffset";
- static final String EVENT_TYPE = "eventType";
- static final String LAST_EPOCH_MICROSEC = "lastEpochMicrosec";
- static final String PRIORITY = "priority";
- static final String SEQUENCE = "sequence";
- static final String START_EPOCH_MICROSEC = "startEpochMicrosec";
- static final String INTERNAL_HEADER_FIELDS = "internalHeaderFields";
- static final String VERSION = "version";
- static final String VES_EVENT_LISTENER_VERSION = "vesEventListenerVersion";
- static final String SOURCE_NAME = "sourceName";
- static final String REPORTING_ENTITY_NAME = "reportingEntityName";
- //constant values
- static final int SEQUENCE_NUMBER = 0;
- static final String VERSION_NUMBER = "4.0.1";
- static final String VES_EVENT_LISTENER_VERSION_NUMBER = "7.0.1";
- static final String PRIORITY_NORMAL = "Normal";
-
- //=============================================================================================
- //PNF registration
- //parameters
- static final String PNF_REGISTRATION_FIELDS_VERSION = "pnfRegistrationFieldsVersion";
- static final String PNF_LAST_SERVICE_DATE = "lastServiceDate";
- static final String PNF_MANUFACTURE_DATE = "manufactureDate";
- //constant values
- static final String PNF_REGISTRATION_FIELDS_VERSION_VALUE = "2.0";
- static final String DOMAIN_PNF_REGISTRATION ="pnfRegistration";
-
- //=============================================================================================
- // Notifications
- //parameters
- static final String NOTIFICATION_FIELDS_VERSION = "notificationFieldsVersion";
- static final String ARRAY_OF_NAMED_HASH_MAP = "arrayOfNamedHashMap";
- static final String NAME = "name";
- static final String HASH_MAP = "hashMap";
- static final String FILE_FORMAT_TYPE = "fileFormatType";
- static final String LOCATION = "location";
- static final String FILE_FORMAT_VERSION = "fileFormatVersion";
- static final String COMPRESSION = "compression";
-
- //constant values
- static final String NOTIFICATION_FIELDS_VERSION_VALUE = "2.0";
- static final String DOMAIN_NOTIFICATION ="notification";
- static final String FILE_FORMAT_TYPE_VALUE = "org.3GPP.32.435#measCollec";
- static final String FILE_FORMAT_VERSION_VALUE = "V10";
- static final String COMPRESSION_VALUE = "gzip";
-
- private MessageConstants() {
- }
-
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/MessageProvider.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/MessageProvider.java
deleted file mode 100644
index c86362509..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/MessageProvider.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================ Copyright (C)
- * 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================ Licensed under
- * the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License. ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.message;
-
-import static org.onap.pnfsimulator.message.MessageConstants.ARRAY_OF_NAMED_HASH_MAP;
-import static org.onap.pnfsimulator.message.MessageConstants.COMMON_EVENT_HEADER;
-import static org.onap.pnfsimulator.message.MessageConstants.DOMAIN;
-import static org.onap.pnfsimulator.message.MessageConstants.DOMAIN_NOTIFICATION;
-import static org.onap.pnfsimulator.message.MessageConstants.DOMAIN_PNF_REGISTRATION;
-import static org.onap.pnfsimulator.message.MessageConstants.EVENT;
-import static org.onap.pnfsimulator.message.MessageConstants.EVENT_TYPE;
-import static org.onap.pnfsimulator.message.MessageConstants.NOTIFICATION_FIELDS;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_REGISTRATION_FIELDS;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import org.json.JSONArray;
-import org.json.JSONObject;
-
-public class MessageProvider {
-
- public JSONObject createMessage(JSONObject commonEventHeaderParams, Optional<JSONObject> pnfRegistrationParams,
- Optional<JSONObject> notificationParams) {
- List<String> emptyList = new ArrayList<>();
- String emptyString = "";
- return createMessage(commonEventHeaderParams, pnfRegistrationParams, notificationParams, emptyList, emptyString);
- }
-
- public JSONObject createMessage(JSONObject commonEventHeaderParams, Optional<JSONObject> pnfRegistrationParams,
- Optional<JSONObject> notificationParams, List<String> fileList, String xnfUrl) {
-
- if (!pnfRegistrationParams.isPresent() && !notificationParams.isPresent()) {
- throw new IllegalArgumentException(
- "Both PNF registration and notification parameters objects are not present");
- }
- JSONObject event = new JSONObject();
-
- JSONObject commonEventHeader = JSONObjectFactory.generateConstantCommonEventHeader();
- Map<String, Object> commonEventHeaderFields = commonEventHeaderParams.toMap();
- commonEventHeaderFields.forEach((key, value) -> {
- commonEventHeader.put(key, value);
- });
-
- JSONObject pnfRegistrationFields = JSONObjectFactory.generatePnfRegistrationFields();
- pnfRegistrationParams.ifPresent(jsonObject -> {
- copyParametersToFields(jsonObject.toMap(), pnfRegistrationFields);
- commonEventHeader.put(DOMAIN, DOMAIN_PNF_REGISTRATION);
- commonEventHeader.put(EVENT_TYPE, DOMAIN_PNF_REGISTRATION);
- event.put(PNF_REGISTRATION_FIELDS, pnfRegistrationFields);
- });
-
- JSONObject notificationFields = JSONObjectFactory.generateNotificationFields();
- notificationParams.ifPresent(jsonObject -> {
- copyParametersToFields(jsonObject.toMap(), notificationFields);
- JSONArray arrayOfNamedHashMap = JSONObjectFactory.generateArrayOfNamedHashMap(fileList, xnfUrl);
- notificationFields.put(ARRAY_OF_NAMED_HASH_MAP, arrayOfNamedHashMap);
- commonEventHeader.put(DOMAIN, DOMAIN_NOTIFICATION);
- event.put(NOTIFICATION_FIELDS, notificationFields);
- });
-
- event.put(COMMON_EVENT_HEADER, commonEventHeader);
- JSONObject root = new JSONObject();
- root.put(EVENT, event);
- return root;
- }
-
- private void copyParametersToFields(Map<String, Object> paramersMap, JSONObject fieldsJsonObject) {
- paramersMap.forEach((key, value) -> {
- fieldsJsonObject.put(key, value);
- });
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfConfigurationCheckingTask.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfConfigurationCheckingTask.java
deleted file mode 100644
index bb173aef2..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfConfigurationCheckingTask.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/// *
-// * ============LICENSE_START=======================================================
-// * PNF-REGISTRATION-HANDLER
-// * ================================================================================
-// * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
-// * ================================================================================
-// * Licensed under the Apache License, Version 2.0 (the "License");
-// * you may not use this file except in compliance with the License.
-// * You may obtain a copy of the License at
-// *
-// * http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing, software
-// * distributed under the License is distributed on an "AS IS" BASIS,
-// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// * See the License for the specific language governing permissions and
-// * limitations under the License.
-// * ============LICENSE_END=========================================================
-// */
-//
-// package org.onap.pnfsimulator.netconfmonitor;
-//
-// import com.tailf.jnc.JNCException;
-// import java.io.IOException;
-// import java.util.TimerTask;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationCache;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationReader;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationWriter;
-// import org.slf4j.Logger;
-// import org.slf4j.LoggerFactory;
-//
-// public class NetconfConfigurationCheckingTask extends TimerTask {
-//
-// private static final Logger LOGGER =
-/// LoggerFactory.getLogger(NetconfConfigurationCheckingTask.class);
-//
-// private final NetconfConfigurationReader reader;
-// private final NetconfConfigurationWriter writer;
-// private final NetconfConfigurationCache cache;
-//
-// public NetconfConfigurationCheckingTask(NetconfConfigurationReader reader,
-// NetconfConfigurationWriter writer,
-// NetconfConfigurationCache cache) {
-// this.reader = reader;
-// this.writer = writer;
-// this.cache = cache;
-// }
-//
-// @Override
-// public void run() {
-// try {
-// String currentConfiguration = reader.read();
-// if (!currentConfiguration.equals(cache.getConfiguration())) {
-// LOGGER.info("Configuration has changed, new configuration:\n\n{}", currentConfiguration);
-// writer.writeToFile(currentConfiguration);
-// cache.update(currentConfiguration);
-// }
-// } catch (IOException | JNCException e) {
-// LOGGER.warn("Error during configuration reading: {}", e.getMessage());
-// }
-// }
-// }
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorService.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorService.java
deleted file mode 100644
index 4e484b9d9..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorService.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/// *
-// * ============LICENSE_START=======================================================
-// * PNF-REGISTRATION-HANDLER
-// * ================================================================================ Copyright (C)
-// * 2018 NOKIA Intellectual Property. All rights reserved.
-// * ================================================================================ Licensed under
-// * the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
-// * with the License. You may obtain a copy of the License at
-// *
-// * http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing, software distributed under the
-/// License
-// * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-/// express
-// * or implied. See the License for the specific language governing permissions and limitations
-/// under
-// * the License. ============LICENSE_END=========================================================
-// */
-//
-// package org.onap.pnfsimulator.netconfmonitor;
-//
-// import com.tailf.jnc.JNCException;
-// import java.io.IOException;
-// import java.util.Timer;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationCache;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationReader;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationWriter;
-// import org.slf4j.Logger;
-// import org.slf4j.LoggerFactory;
-// import org.springframework.beans.factory.annotation.Autowired;
-//
-//// @Service
-// public class NetconfMonitorService {
-// private static final Logger LOGGER = LoggerFactory.getLogger(NetconfMonitorService.class);
-// private static final long timePeriod = 1000L;
-// private static final long startDelay = 0;
-//
-// private Timer timer;
-// private NetconfConfigurationReader reader;
-// private NetconfConfigurationWriter writer;
-// private NetconfConfigurationCache cache;
-//
-// @Autowired
-// public NetconfMonitorService(Timer timer, NetconfConfigurationReader reader,
-/// NetconfConfigurationWriter writer,
-// NetconfConfigurationCache cache) {
-// this.timer = timer;
-// this.reader = reader;
-// this.writer = writer;
-// this.cache = cache;
-// }
-//
-// // @PostConstruct
-// public void start() {
-// setStartConfiguration();
-// NetconfConfigurationCheckingTask task = new NetconfConfigurationCheckingTask(reader, writer,
-/// cache);
-// timer.scheduleAtFixedRate(task, startDelay, timePeriod);
-// }
-//
-// private void setStartConfiguration() {
-// try {
-// String configuration = reader.read();
-// writer.writeToFile(configuration);
-// cache.update(configuration);
-// } catch (IOException | JNCException e) {
-// LOGGER.warn("Error during configuration reading: {}", e.getMessage());
-// }
-// }
-// }
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceConfiguration.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceConfiguration.java
deleted file mode 100644
index d97315ba4..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceConfiguration.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/// *
-// * ============LICENSE_START=======================================================
-// * PNF-REGISTRATION-HANDLER
-// * ================================================================================
-// * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
-// * ================================================================================
-// * Licensed under the Apache License, Version 2.0 (the "License");
-// * you may not use this file except in compliance with the License.
-// * You may obtain a copy of the License at
-// *
-// * http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing, software
-// * distributed under the License is distributed on an "AS IS" BASIS,
-// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// * See the License for the specific language governing permissions and
-// * limitations under the License.
-// * ============LICENSE_END=========================================================
-// */
-//
-// package org.onap.pnfsimulator.netconfmonitor;
-//
-// import com.tailf.jnc.JNCException;
-// import com.tailf.jnc.NetconfSession;
-// import com.tailf.jnc.SSHConnection;
-// import com.tailf.jnc.SSHSession;
-// import java.io.IOException;
-// import java.util.Map;
-// import java.util.Timer;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationCache;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationReader;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationWriter;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConnectionParams;
-// import org.slf4j.Logger;
-// import org.slf4j.LoggerFactory;
-// import org.springframework.context.annotation.Bean;
-// import org.springframework.context.annotation.Configuration;
-//
-// @Configuration
-// public class NetconfMonitorServiceConfiguration {
-//
-// private static final Logger LOGGER =
-/// LoggerFactory.getLogger(NetconfMonitorServiceConfiguration.class);
-// private static final Map<String, String> enviroment = System.getenv();
-//
-// private static final String LOG_PATH = "/var/log";
-//
-// private static final String NETCONF_ADDRESS = "NETCONF_ADDRESS";
-// private static final String NETCONF_PORT = "NETCONF_PORT";
-// private static final String NETCONF_MODEL = "NETCONF_MODEL";
-// private static final String NETCONF_MAIN_CONTAINER = "NETCONF_MAIN_CONTAINER";
-//
-// private static final String DEFAULT_NETCONF_ADDRESS = "localhost";
-// private static final int DEFAULT_NETCONF_PORT = 830;
-// private static final String DEFAULT_NETCONF_MODEL = "pnf-simulator";
-// private static final String DEFAULT_NETCONF_MAIN_CONTAINER = "config";
-//
-// private static final String DEFAULT_NETCONF_USER = "netconf";
-// private static final String DEFAULT_NETCONF_PASSWORD = "netconf";
-//
-// @Bean
-// public Timer timer() {
-// return new Timer("NetconfMonitorServiceTimer");
-// }
-//
-// @Bean
-// public NetconfConfigurationCache configurationCache() {
-// return new NetconfConfigurationCache();
-// }
-//
-// @Bean
-// public NetconfConfigurationReader configurationReader() throws IOException, JNCException {
-// NetconfConnectionParams params = resolveConnectionParams();
-// LOGGER.info("Configuration params are : {}", params);
-// NetconfSession session = createNetconfSession(params);
-// return new NetconfConfigurationReader(session, buildModelPath());
-// }
-//
-// NetconfSession createNetconfSession(NetconfConnectionParams params) throws IOException,
-/// JNCException {
-// SSHConnection sshConnection = new SSHConnection(params.address, params.port);
-// sshConnection.authenticateWithPassword(params.user, params.password);
-// return new NetconfSession( new SSHSession(sshConnection));
-// }
-//
-// @Bean
-// public NetconfConfigurationWriter netconfConfigurationWriter() {
-// return new NetconfConfigurationWriter(LOG_PATH);
-// }
-//
-// private String buildModelPath() {
-// return String.format("/%s:%s",
-// enviroment.getOrDefault(NETCONF_MODEL, DEFAULT_NETCONF_MODEL),
-// enviroment.getOrDefault(NETCONF_MAIN_CONTAINER, DEFAULT_NETCONF_MAIN_CONTAINER));
-// }
-//
-// NetconfConnectionParams resolveConnectionParams() {
-// return new NetconfConnectionParams(
-// enviroment.getOrDefault(NETCONF_ADDRESS, DEFAULT_NETCONF_ADDRESS),
-// resolveNetconfPort(),
-// DEFAULT_NETCONF_USER,
-// DEFAULT_NETCONF_PASSWORD);
-// }
-//
-// private int resolveNetconfPort() {
-// try {
-// return Integer.parseInt(enviroment.get(NETCONF_PORT));
-// } catch (NumberFormatException e) {
-// LOGGER.warn("Invalid netconf port: {}. Default netconf port {} is set.", e.getMessage(),
-// DEFAULT_NETCONF_PORT);
-// return DEFAULT_NETCONF_PORT;
-// }
-// }
-// }
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationCache.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationCache.java
deleted file mode 100644
index 39721841b..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationCache.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.netconfmonitor.netconf;
-
-public class NetconfConfigurationCache {
-
- private String configuration = "";
-
- public String getConfiguration() {
- return configuration;
- }
-
- public void update(String configuration) {
- this.configuration = configuration;
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationReader.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationReader.java
deleted file mode 100644
index e41e58f78..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationReader.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================ Copyright (C)
- * 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================ Licensed under
- * the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License. ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.netconfmonitor.netconf;
-
-import com.tailf.jnc.JNCException;
-import com.tailf.jnc.NetconfSession;
-import java.io.IOException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-public class NetconfConfigurationReader {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(NetconfConfigurationReader.class);
- private final NetconfSession session;
- private final String netconfModelPath;
-
- public NetconfConfigurationReader(NetconfSession session, String netconfModelPath) {
- LOGGER.warn("netconfModelPath: {}", netconfModelPath);
- this.session = session;
- this.netconfModelPath = netconfModelPath;
- }
-
- public String read() throws IOException, JNCException {
- return session.getConfig(netconfModelPath).first().toXMLString();
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationWriter.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationWriter.java
deleted file mode 100644
index 40030796f..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationWriter.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.netconfmonitor.netconf;
-
-import java.io.BufferedWriter;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import org.onap.pnfsimulator.rest.util.DateUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class NetconfConfigurationWriter {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(NetconfConfigurationWriter.class);
- private static final DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd_HH:mm:ss");
- private String pathToLog;
-
- public NetconfConfigurationWriter(String pathToLog) {
- this.pathToLog = pathToLog;
- }
-
- public void writeToFile(String configuration) {
- String fileName = String.format("%s/config[%s].xml", pathToLog, DateUtil.getTimestamp(dateFormat));
- try (BufferedWriter writer = new BufferedWriter(new FileWriter(fileName))) {
- writer.write(configuration);
- LOGGER.info("Configuration wrote to file {}/{} ", pathToLog, fileName);
- } catch (IOException e) {
- LOGGER.warn("Failed to write configuration to file: {}", e.getMessage());
- }
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConnectionParams.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConnectionParams.java
deleted file mode 100644
index 1d6eb89bf..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConnectionParams.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.netconfmonitor.netconf;
-
-public class NetconfConnectionParams {
-
- public final String address;
- public final int port;
- public final String user;
- public final String password;
-
- public NetconfConnectionParams(String address, int port, String user, String password) {
- this.address = address;
- this.port = port;
- this.user = user;
- this.password = password;
- }
-
- @Override
- public String toString() {
- return String.format("NetconfConnectionParams{address=%s, port=%d, user=%s, password=%s}",
- address,
- port,
- user,
- password);
- }
-} \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/SimulatorController.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/SimulatorController.java
deleted file mode 100644
index 2a685eac8..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/SimulatorController.java
+++ /dev/null
@@ -1,232 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================ Copyright (C)
- * 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================ Licensed under
- * the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License. ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest;
-
-import static org.onap.pnfsimulator.logging.MDCVariables.INSTANCE_UUID;
-import static org.onap.pnfsimulator.logging.MDCVariables.INVOCATION_ID;
-import static org.onap.pnfsimulator.logging.MDCVariables.REQUEST_ID;
-import static org.onap.pnfsimulator.logging.MDCVariables.RESPONSE_CODE;
-import static org.onap.pnfsimulator.logging.MDCVariables.SERVICE_NAME;
-import static org.onap.pnfsimulator.logging.MDCVariables.X_INVOCATION_ID;
-import static org.onap.pnfsimulator.logging.MDCVariables.X_ONAP_REQUEST_ID;
-import static org.onap.pnfsimulator.message.MessageConstants.COMMON_EVENT_HEADER_PARAMS;
-import static org.onap.pnfsimulator.message.MessageConstants.SIMULATOR_PARAMS;
-import static org.onap.pnfsimulator.rest.util.ResponseBuilder.MESSAGE;
-import static org.onap.pnfsimulator.rest.util.ResponseBuilder.REMAINING_TIME;
-import static org.onap.pnfsimulator.rest.util.ResponseBuilder.SIMULATOR_STATUS;
-import static org.onap.pnfsimulator.rest.util.ResponseBuilder.TIMESTAMP;
-import static org.springframework.http.HttpStatus.BAD_REQUEST;
-import static org.springframework.http.HttpStatus.INTERNAL_SERVER_ERROR;
-import static org.springframework.http.HttpStatus.OK;
-import com.github.fge.jsonschema.core.exceptions.ProcessingException;
-import java.io.IOException;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.Optional;
-import java.util.UUID;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.onap.pnfsimulator.message.MessageConstants;
-import org.onap.pnfsimulator.rest.util.DateUtil;
-import org.onap.pnfsimulator.rest.util.ResponseBuilder;
-import org.onap.pnfsimulator.simulator.Simulator;
-import org.onap.pnfsimulator.simulator.SimulatorFactory;
-import org.onap.pnfsimulator.simulator.validation.JSONValidator;
-import org.onap.pnfsimulator.simulator.validation.ValidationException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.slf4j.MDC;
-import org.slf4j.Marker;
-import org.slf4j.MarkerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.http.HttpHeaders;
-import org.springframework.http.ResponseEntity;
-import org.springframework.web.bind.annotation.GetMapping;
-import org.springframework.web.bind.annotation.PostMapping;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RestController;
-
-@RestController
-@RequestMapping("/simulator")
-public class SimulatorController {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(Simulator.class);
- private static final DateFormat RESPONSE_DATE_FORMAT = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss,SSS");
- private final Marker ENTRY = MarkerFactory.getMarker("ENTRY");
- private Simulator simulator;
- private JSONValidator validator;
- private SimulatorFactory factory;
-
- @Autowired
- public SimulatorController(JSONValidator validator, SimulatorFactory factory) {
- this.validator = validator;
- this.factory = factory;
- }
-
- @PostMapping("start")
- public ResponseEntity start(@RequestHeader HttpHeaders headers, @RequestBody String message) {
- MDC.put(REQUEST_ID, headers.getFirst(X_ONAP_REQUEST_ID));
- MDC.put(INVOCATION_ID, headers.getFirst(X_INVOCATION_ID));
- MDC.put(INSTANCE_UUID, UUID.randomUUID().toString());
- MDC.put(SERVICE_NAME, "/simulator/start");
- LOGGER.info(ENTRY, "Simulator starting");
-
- if (isSimulatorRunning()) {
- MDC.put(RESPONSE_CODE, BAD_REQUEST.toString());
- return ResponseBuilder.status(BAD_REQUEST).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Cannot start simulator since it's already running").build();
- }
-
- try {
- validator.validate(message, "json_schema/input_validator.json");
- JSONObject root = new JSONObject(message);
- JSONObject simulatorParams = root.getJSONObject(SIMULATOR_PARAMS);
- JSONObject commonEventHeaderParams = root.getJSONObject(COMMON_EVENT_HEADER_PARAMS);
- Optional<JSONObject> pnfRegistrationFields = root.has(MessageConstants.PNF_REGISTRATION_PARAMS)
- ? Optional.of(root.getJSONObject(MessageConstants.PNF_REGISTRATION_PARAMS))
- : Optional.empty();
- Optional<JSONObject> notificationFields = root.has(MessageConstants.NOTIFICATION_PARAMS)
- ? Optional.of(root.getJSONObject(MessageConstants.NOTIFICATION_PARAMS))
- : Optional.empty();
- simulator =
- factory.create(simulatorParams, commonEventHeaderParams, pnfRegistrationFields, notificationFields);
- simulator.start();
-
- MDC.put(RESPONSE_CODE, OK.toString());
- return ResponseBuilder.status(OK).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Simulator started").build();
-
- } catch (JSONException e) {
- MDC.put(RESPONSE_CODE, BAD_REQUEST.toString());
- LOGGER.warn("Cannot start simulator, invalid json format: {}", e.getMessage());
- LOGGER.debug("Received json has invalid format", e);
- return ResponseBuilder.status(BAD_REQUEST).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Cannot start simulator, invalid json format").build();
-
- } catch (ProcessingException | ValidationException | IOException e) {
- MDC.put(RESPONSE_CODE, BAD_REQUEST.toString());
- LOGGER.warn("Json validation failed: {}", e.getMessage());
- return ResponseBuilder.status(BAD_REQUEST).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Cannot start simulator - Json format is not compatible with schema definitions")
- .build();
-
- } catch (Exception e) {
- MDC.put(RESPONSE_CODE, INTERNAL_SERVER_ERROR.toString());
- LOGGER.error("Cannot start simulator - unexpected exception", e);
- return ResponseBuilder.status(INTERNAL_SERVER_ERROR)
- .put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Unexpected exception: " + e.getMessage()).build();
- } finally {
- MDC.clear();
- }
- }
-
- @PostMapping("startmassmode")
- public ResponseEntity startmassmode(@RequestHeader HttpHeaders headers, @RequestBody String message) {
- MDC.put(REQUEST_ID, headers.getFirst(X_ONAP_REQUEST_ID));
- MDC.put(INVOCATION_ID, headers.getFirst(X_INVOCATION_ID));
- MDC.put(INSTANCE_UUID, UUID.randomUUID().toString());
- MDC.put(SERVICE_NAME, "/simulator/start");
- LOGGER.info(ENTRY, "Simulator starting");
-
- if (isSimulatorRunning()) {
- MDC.put(RESPONSE_CODE, BAD_REQUEST.toString());
- return ResponseBuilder.status(BAD_REQUEST).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Cannot start simulator since it's already running").build();
- }
-
- try {
- validator.validate(message, "json_schema/input_validator.json");
- JSONObject root = new JSONObject(message);
- JSONObject simulatorParams = root.getJSONObject(SIMULATOR_PARAMS);
- JSONObject commonEventHeaderParams = root.getJSONObject(COMMON_EVENT_HEADER_PARAMS);
- Optional<JSONObject> pnfRegistrationFields = root.has(MessageConstants.PNF_REGISTRATION_PARAMS)
- ? Optional.of(root.getJSONObject(MessageConstants.PNF_REGISTRATION_PARAMS))
- : Optional.empty();
- Optional<JSONObject> notificationFields = root.has(MessageConstants.NOTIFICATION_PARAMS)
- ? Optional.of(root.getJSONObject(MessageConstants.NOTIFICATION_PARAMS))
- : Optional.empty();
- simulator =
- factory.create(simulatorParams, commonEventHeaderParams, pnfRegistrationFields, notificationFields);
- simulator.start();
-
- MDC.put(RESPONSE_CODE, OK.toString());
- return ResponseBuilder.status(OK).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Simulator started").build();
-
- } catch (JSONException e) {
- MDC.put(RESPONSE_CODE, BAD_REQUEST.toString());
- LOGGER.warn("Cannot start simulator, invalid json format: {}", e.getMessage());
- LOGGER.debug("Received json has invalid format", e);
- return ResponseBuilder.status(BAD_REQUEST).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Cannot start simulator, invalid json format").build();
-
- } catch (ProcessingException | ValidationException | IOException e) {
- MDC.put(RESPONSE_CODE, BAD_REQUEST.toString());
- LOGGER.warn("Json validation failed: {}", e.getMessage());
- return ResponseBuilder.status(BAD_REQUEST).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Cannot start simulator - Json format is not compatible with schema definitions")
- .build();
-
- } catch (Exception e) {
- MDC.put(RESPONSE_CODE, INTERNAL_SERVER_ERROR.toString());
- LOGGER.error("Cannot start simulator - unexpected exception", e);
- return ResponseBuilder.status(INTERNAL_SERVER_ERROR)
- .put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Unexpected exception: " + e.getMessage()).build();
- } finally {
- MDC.clear();
- }
- }
-
-
-
- @GetMapping("status")
- public ResponseEntity status() {
- if (isSimulatorRunning()) {
- ResponseBuilder responseBuilder = ResponseBuilder.status(OK)
- .put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT)).put(SIMULATOR_STATUS, "RUNNING");
-
- return !simulator.isEndless() ? responseBuilder.put(REMAINING_TIME, simulator.getRemainingTime()).build()
- : responseBuilder.build();
- } else {
- return ResponseBuilder.status(OK).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(SIMULATOR_STATUS, "NOT RUNNING").build();
- }
- }
-
- @PostMapping("stop")
- public ResponseEntity stop() {
- if (isSimulatorRunning()) {
- simulator.interrupt();
-
- return ResponseBuilder.status(OK).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Simulator successfully stopped").build();
- } else {
- return ResponseBuilder.status(BAD_REQUEST).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Cannot stop simulator, because it's not running").build();
- }
- }
-
- private boolean isSimulatorRunning() {
- return simulator != null && simulator.isAlive();
- }
-}
-
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/util/DateUtil.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/util/DateUtil.java
deleted file mode 100644
index 284d58904..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/util/DateUtil.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest.util;
-
-import java.text.DateFormat;
-import java.util.Date;
-
-public final class DateUtil {
-
- private DateUtil() {
- }
-
- public static String getTimestamp(DateFormat dateFormat) {
-
- return dateFormat.format(new Date());
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/util/ResponseBuilder.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/util/ResponseBuilder.java
deleted file mode 100644
index 98f4588c1..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/util/ResponseBuilder.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest.util;
-
-import java.util.LinkedHashMap;
-import java.util.Map;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
-
-public class ResponseBuilder {
-
- public static final String TIMESTAMP = "timestamp";
- public static final String MESSAGE = "message";
- public static final String SIMULATOR_STATUS = "simulatorStatus";
- public static final String REMAINING_TIME = "remainingTime";
-
- private HttpStatus httpStatus;
- private Map<String, Object> body = new LinkedHashMap<>();
-
- private ResponseBuilder(HttpStatus httpStatus) {
- this.httpStatus = httpStatus;
- }
-
- public static ResponseBuilder status(HttpStatus httpStatus) {
-
- return new ResponseBuilder(httpStatus);
- }
-
- public ResponseBuilder put(String key, Object value) {
-
- body.put(key, value);
- return this;
- }
-
- public ResponseEntity build() {
-
- if (body.isEmpty()) {
- return ResponseEntity.status(httpStatus).build();
- }
-
- return ResponseEntity.status(httpStatus).body(body);
- }
-
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/Simulator.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/Simulator.java
deleted file mode 100644
index ba114760f..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/Simulator.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================ Copyright (C)
- * 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================ Licensed under
- * the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License. ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import com.github.fge.jsonschema.core.exceptions.ProcessingException;
-import java.io.IOException;
-import java.time.Duration;
-import java.time.Instant;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import org.json.JSONObject;
-import org.onap.pnfsimulator.FileProvider;
-import org.onap.pnfsimulator.message.MessageProvider;
-import org.onap.pnfsimulator.simulator.client.HttpClientAdapter;
-import org.onap.pnfsimulator.simulator.client.HttpClientAdapterImpl;
-import org.onap.pnfsimulator.simulator.validation.JSONValidator;
-import org.onap.pnfsimulator.simulator.validation.NoRopFilesException;
-import org.onap.pnfsimulator.simulator.validation.ValidationException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.slf4j.MDC;
-import org.slf4j.Marker;
-import org.slf4j.MarkerFactory;
-
-public class Simulator extends Thread {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(Simulator.class);
- private final Marker EXIT = MarkerFactory.getMarker("EXIT");
- private Map<String, String> contextMap = MDC.getCopyOfContextMap();
- private boolean isEndless;
- private String vesUrl;
- private HttpClientAdapter httpClient;
- private JSONObject messageBody;
- private Duration duration;
- private Duration interval;
- private Instant endTime;
- private JSONObject commonEventHeaderParams;
- private Optional<JSONObject> pnfRegistrationParams;
- private Optional<JSONObject> notificationParams;
- private String xnfUrl;
- private static final String DEFAULT_OUTPUT_SCHEMA_PATH = "json_schema/output_validator_ves_schema_30.0.1.json";
- private FileProvider fileProvider;
- private Exception thrownException = null;
-
- private Simulator() {}
-
- public static Builder builder() {
- return new Builder();
- }
-
- @Override
- public void run() {
- setMdcContextMap(contextMap);
- LOGGER.info("Simulation started - duration: {}, interval: {}s", getDuration(), interval.getSeconds());
- endTime = Instant.now().plus(duration);
- while (isEndless || runningTimeNotExceeded()) {
- try {
-
- List<String> fileList = fileProvider.getFiles();
- MessageProvider messageProvider = new MessageProvider();
- JSONValidator validator = new JSONValidator();
- messageBody = messageProvider.createMessage(this.commonEventHeaderParams, this.pnfRegistrationParams,
- this.notificationParams, fileList, this.xnfUrl);
- validator.validate(messageBody.toString(), DEFAULT_OUTPUT_SCHEMA_PATH);
-
- LOGGER.info("Message to be sent:\n" + getMessage());
- httpClient.send(messageBody.toString(), vesUrl);
- Thread.sleep(interval.toMillis());
- } catch (InterruptedException | ValidationException | ProcessingException | IOException | NoRopFilesException e) {
- LOGGER.info("Simulation stopped due to an exception: " + e);
- thrownException = e;
- return;
- }
- }
- LOGGER.info(EXIT, "Simulation finished");
- MDC.clear();
- }
-
- private void setMdcContextMap(Map<String, String> mdcContextMap) {
- if (mdcContextMap != null)
- MDC.setContextMap(mdcContextMap);
- }
-
- private String getMessage() {
- return messageBody.toString(4);
- }
-
- private String getDuration() {
- return isEndless() ? "infinity" : duration.getSeconds() + "s";
- }
-
- private boolean runningTimeNotExceeded() {
- return Instant.now().isBefore(endTime);
- }
-
- public boolean isEndless() {
- return isEndless;
- }
-
- public Exception getThrownException() {
- return thrownException;
- }
-
- public long getRemainingTime() {
- return Duration.between(Instant.now(), endTime).getSeconds();
- }
-
- public static class Builder {
-
- private String vesUrl;
- private HttpClientAdapter httpClient;
- //private JSONObject messageBody;
- private Duration duration;
- private Duration interval;
- private Optional<JSONObject> notificationParams;
- private Optional<JSONObject> pnfRegistrationParams;
- private JSONObject commonEventHeaderParams;
- private String xnfUrl;
- private FileProvider fileProvider;
-
- private Builder() {
- this.vesUrl = "";
- this.httpClient = new HttpClientAdapterImpl();
- //this.messageBody = new JSONObject();
- this.duration = Duration.ZERO;
- this.interval = Duration.ZERO;
- this.commonEventHeaderParams = new JSONObject();
- }
-
- public Builder withVesUrl(String vesUrl) {
- this.vesUrl = vesUrl;
- return this;
- }
-
- public Builder withCustomHttpClientAdapter(HttpClientAdapter httpClient) {
- this.httpClient = httpClient;
- return this;
- }
-
- /*public Builder withMessageBody(JSONObject messageBody) {
- this.messageBody = messageBody;
- return this;
- }*/
-
- public Builder withDuration(Duration duration) {
- this.duration = duration;
- return this;
- }
-
-
- public Builder withInterval(Duration interval) {
- this.interval = interval;
- return this;
- }
-
- public Builder withCommonEventHeaderParams(JSONObject commonEventHeaderParams) {
- this.commonEventHeaderParams = commonEventHeaderParams;
- return this;
- }
-
- public Builder withNotificationParams(Optional<JSONObject> notificationParams) {
- this.notificationParams = notificationParams;
- return this;
- }
-
- public Builder withPnfRegistrationParams(Optional<JSONObject> pnfRegistrationParams) {
- this.pnfRegistrationParams = pnfRegistrationParams;
- return this;
- }
-
- public Builder withXnfUrl(String xnfUrl) {
- this.xnfUrl = xnfUrl;
- return this;
- }
-
- public Builder withFileProvider(FileProvider fileProvider) {
- this.fileProvider = fileProvider;
- return this;
- }
-
- public Simulator build() {
- Simulator simulator = new Simulator();
- simulator.vesUrl = this.vesUrl;
- simulator.httpClient = this.httpClient;
- //simulator.messageBody = this.messageBody;
- simulator.duration = this.duration;
- simulator.interval = this.interval;
- simulator.xnfUrl = this.xnfUrl;
- simulator.fileProvider = this.fileProvider;
- simulator.commonEventHeaderParams = this.commonEventHeaderParams;
- simulator.pnfRegistrationParams = this.pnfRegistrationParams;
- simulator.notificationParams = this.notificationParams;
- simulator.isEndless = duration.equals(Duration.ZERO);
- return simulator;
- }
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/SimulatorFactory.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/SimulatorFactory.java
deleted file mode 100644
index 851e6ad1d..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/SimulatorFactory.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================ Copyright (C)
- * 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================ Licensed under
- * the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License. ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import static java.lang.Integer.parseInt;
-import static org.onap.pnfsimulator.message.MessageConstants.MESSAGE_INTERVAL;
-import static org.onap.pnfsimulator.message.MessageConstants.TEST_DURATION;
-import java.time.Duration;
-import java.util.Optional;
-import org.json.JSONObject;
-import org.onap.pnfsimulator.ConfigurationProvider;
-import org.onap.pnfsimulator.FileProvider;
-import org.onap.pnfsimulator.PnfSimConfig;
-import org.springframework.stereotype.Service;
-
-@Service
-public class SimulatorFactory {
-
- public Simulator create(JSONObject simulatorParams, JSONObject commonEventHeaderParams,
- Optional<JSONObject> pnfRegistrationParams, Optional<JSONObject> notificationParams) {
- PnfSimConfig configuration = ConfigurationProvider.getConfigInstance();
-
- String xnfUrl = null;
- if (configuration.getTypefileserver().equals("sftp")) {
- xnfUrl = configuration.getUrlsftp() + "/";
- } else if (configuration.getTypefileserver().equals("ftps")) {
- xnfUrl = configuration.getUrlftps() + "/";
- }
-
- String urlVes = configuration.getUrlves();
- Duration duration = Duration.ofSeconds(parseInt(simulatorParams.getString(TEST_DURATION)));
- Duration interval = Duration.ofSeconds(parseInt(simulatorParams.getString(MESSAGE_INTERVAL)));
-
- return Simulator.builder().withVesUrl(urlVes).withXnfUrl(xnfUrl).withDuration(duration)
- .withFileProvider(new FileProvider()).withCommonEventHeaderParams(commonEventHeaderParams)
- .withNotificationParams(notificationParams).withPnfRegistrationParams(pnfRegistrationParams)
- .withInterval(interval).build();
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapter.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapter.java
deleted file mode 100644
index 47f2e3112..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapter.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator.client;
-
-public interface HttpClientAdapter {
-
- void send(String content, String url);
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImpl.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImpl.java
deleted file mode 100644
index f0c9917f5..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImpl.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator.client;
-
-import static org.onap.pnfsimulator.logging.MDCVariables.REQUEST_ID;
-import static org.onap.pnfsimulator.logging.MDCVariables.X_INVOCATION_ID;
-import static org.onap.pnfsimulator.logging.MDCVariables.X_ONAP_REQUEST_ID;
-
-import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-import java.util.UUID;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.config.RequestConfig;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.entity.StringEntity;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.apache.http.util.EntityUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.slf4j.MDC;
-import org.slf4j.Marker;
-import org.slf4j.MarkerFactory;
-
-public class HttpClientAdapterImpl implements HttpClientAdapter {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(HttpClientAdapterImpl.class);
- private static final String CONTENT_TYPE = "Content-Type";
- private static final String APPLICATION_JSON = "application/json";
- private final Marker INVOKE = MarkerFactory.getMarker("INVOKE");
- private static final RequestConfig CONFIG = RequestConfig.custom()
- .setConnectTimeout(1000)
- .setConnectionRequestTimeout(1000)
- .setSocketTimeout(1000)
- .build();
-
- private HttpClient client;
-
- public HttpClientAdapterImpl() {
- this.client = HttpClientBuilder
- .create()
- .setDefaultRequestConfig(CONFIG)
- .build();
- }
-
- @Override
- public void send(String content, String url) {
- try {
- HttpPost request = createRequest(content, url);
- HttpResponse response = client.execute(request);
- EntityUtils.consumeQuietly(response.getEntity());
- LOGGER.info(INVOKE, "Message sent, ves response code: {}", response.getStatusLine());
- } catch (IOException e) {
- LOGGER.warn("Error sending message to ves: {}", e.getMessage());
- }
- }
-
- HttpClientAdapterImpl(HttpClient client) {
- this.client = client;
- }
-
- private HttpPost createRequest(String content, String url) throws UnsupportedEncodingException {
- HttpPost request = new HttpPost(url);
- StringEntity stringEntity = new StringEntity(content);
- request.addHeader(CONTENT_TYPE, APPLICATION_JSON);
- request.addHeader(X_ONAP_REQUEST_ID, MDC.get(REQUEST_ID));
- request.addHeader(X_INVOCATION_ID, UUID.randomUUID().toString());
- request.setEntity(stringEntity);
- return request;
- }
-} \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/JSONValidator.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/JSONValidator.java
deleted file mode 100644
index 89135f9b4..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/JSONValidator.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator.validation;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.github.fge.jackson.JsonLoader;
-import com.github.fge.jsonschema.core.exceptions.ProcessingException;
-import com.github.fge.jsonschema.core.report.LogLevel;
-import com.github.fge.jsonschema.core.report.ProcessingMessage;
-import com.github.fge.jsonschema.core.report.ProcessingReport;
-import com.github.fge.jsonschema.main.JsonSchema;
-import com.github.fge.jsonschema.main.JsonSchemaFactory;
-import com.google.gson.JsonParser;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.stream.Collectors;
-import java.util.stream.StreamSupport;
-
-public class JSONValidator {
-
- public void validate(String data, String jsonSchemaPath)
- throws ValidationException, ProcessingException, IOException {
- String jsonSchema = readJsonSchemaAsString(jsonSchemaPath);
- JsonNode jsonData = JsonLoader.fromString(data);
- ProcessingReport report = createJsonSchema(jsonSchema).validate(jsonData);
-
- if (!report.isSuccess()) {
- throw new ValidationException(constructValidationErrors(report));
- }
- }
-
- private String readJsonSchemaAsString(String schemaPath) throws IOException {
- try (FileReader reader = new FileReader(schemaPath)) {
- return new JsonParser().parse(reader).toString();
- }
- }
-
- private JsonSchema createJsonSchema(String schema) throws ProcessingException, IOException {
- return JsonSchemaFactory.byDefault().getJsonSchema(JsonLoader.fromString(schema));
- }
-
- private String constructValidationErrors(ProcessingReport report) {
- return StreamSupport.stream(report.spliterator(), false)
- .filter(entry -> entry.getLogLevel() == LogLevel.ERROR)
- .map(ProcessingMessage::getMessage)
- .collect(Collectors.joining("\n"));
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/resources/application.properties b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/resources/application.properties
deleted file mode 100644
index 9740eff3c..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/resources/application.properties
+++ /dev/null
@@ -1,6 +0,0 @@
-server.port=5000
-logging.level.root=ERROR
-logging.level.org.springframework=ERROR
-logging.level.org.springframework.data=ERROR
-logging.level.org.onap.pnfsimulator=TRACE
-logging.file=logs/log/application.log \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/message/JSONObjectFactoryTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/message/JSONObjectFactoryTest.java
deleted file mode 100644
index da41afd0c..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/message/JSONObjectFactoryTest.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.message;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.onap.pnfsimulator.message.MessageConstants.EVENT_ID;
-import static org.onap.pnfsimulator.message.MessageConstants.INTERNAL_HEADER_FIELDS;
-import static org.onap.pnfsimulator.message.MessageConstants.LAST_EPOCH_MICROSEC;
-import static org.onap.pnfsimulator.message.MessageConstants.NOTIFICATION_FIELDS_VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.NOTIFICATION_FIELDS_VERSION_VALUE;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_LAST_SERVICE_DATE;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_MANUFACTURE_DATE;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_REGISTRATION_FIELDS_VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_REGISTRATION_FIELDS_VERSION_VALUE;
-import static org.onap.pnfsimulator.message.MessageConstants.PRIORITY;
-import static org.onap.pnfsimulator.message.MessageConstants.PRIORITY_NORMAL;
-import static org.onap.pnfsimulator.message.MessageConstants.REPORTING_ENTITY_NAME;
-import static org.onap.pnfsimulator.message.MessageConstants.SEQUENCE;
-import static org.onap.pnfsimulator.message.MessageConstants.SEQUENCE_NUMBER;
-import static org.onap.pnfsimulator.message.MessageConstants.SOURCE_NAME;
-import static org.onap.pnfsimulator.message.MessageConstants.START_EPOCH_MICROSEC;
-import static org.onap.pnfsimulator.message.MessageConstants.TIME_ZONE_OFFSET;
-import static org.onap.pnfsimulator.message.MessageConstants.VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.VERSION_NUMBER;
-import static org.onap.pnfsimulator.message.MessageConstants.VES_EVENT_LISTENER_VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.VES_EVENT_LISTENER_VERSION_NUMBER;
-import org.json.JSONObject;
-import org.junit.jupiter.api.Test;
-
-public class JSONObjectFactoryTest {
-
- @Test
- public void generateConstantCommonEventHeader_shouldCreateProperly(){
- JSONObject commonEventHeader = JSONObjectFactory.generateConstantCommonEventHeader();
- assertEquals(11,commonEventHeader.toMap().size());
- assertTrue(commonEventHeader.has(EVENT_ID));
- assertTrue(commonEventHeader.has(TIME_ZONE_OFFSET));
- assertTrue(commonEventHeader.has(LAST_EPOCH_MICROSEC));
- assertTrue(commonEventHeader.has(PRIORITY));
- assertTrue(commonEventHeader.has(SEQUENCE));
- assertTrue(commonEventHeader.has(START_EPOCH_MICROSEC));
- assertTrue(commonEventHeader.has(INTERNAL_HEADER_FIELDS));
- assertTrue(commonEventHeader.has(VERSION));
- assertTrue(commonEventHeader.has(SOURCE_NAME));
- assertTrue(commonEventHeader.has(REPORTING_ENTITY_NAME));
- assertEquals(commonEventHeader.get(PRIORITY),PRIORITY_NORMAL);
- assertEquals(commonEventHeader.get(SEQUENCE),SEQUENCE_NUMBER);
- assertEquals(commonEventHeader.get(VERSION),VERSION_NUMBER);
- assertEquals(commonEventHeader.get(VES_EVENT_LISTENER_VERSION),VES_EVENT_LISTENER_VERSION_NUMBER);
- }
-
- @Test
- public void generateConstantPnfRegistrationFields_shouldCreateProperly(){
- JSONObject pnfRegistrationFields = JSONObjectFactory.generatePnfRegistrationFields();
- assertEquals(3,pnfRegistrationFields.toMap().size());
- assertTrue(pnfRegistrationFields.has(PNF_REGISTRATION_FIELDS_VERSION));
- assertEquals(pnfRegistrationFields.get(PNF_REGISTRATION_FIELDS_VERSION), PNF_REGISTRATION_FIELDS_VERSION_VALUE);
- assertTrue(pnfRegistrationFields.has(PNF_LAST_SERVICE_DATE));
- assertTrue(pnfRegistrationFields.has(PNF_MANUFACTURE_DATE));
- }
-
- @Test
- public void generateEventId_shouldCreateProperly(){
- String eventId = JSONObjectFactory.generateEventId();
- assertTrue(eventId.startsWith("FileReady_"));
- }
-
- @Test
- public void generateNotificationFields_shouldCreateProperly(){
- JSONObject notificationFields = JSONObjectFactory.generateNotificationFields();
- assertEquals(1,notificationFields.keySet().size());
- assertEquals(NOTIFICATION_FIELDS_VERSION_VALUE,notificationFields.get(NOTIFICATION_FIELDS_VERSION));
-
- }
-
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/message/MessageProviderTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/message/MessageProviderTest.java
deleted file mode 100644
index 0fa8a12ee..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/message/MessageProviderTest.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.message;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.onap.pnfsimulator.message.MessageConstants.COMMON_EVENT_HEADER;
-import static org.onap.pnfsimulator.message.MessageConstants.EVENT;
-import static org.onap.pnfsimulator.message.MessageConstants.NOTIFICATION_FIELDS;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_REGISTRATION_FIELDS;
-import java.util.Optional;
-import org.json.JSONObject;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
-
-public class MessageProviderTest {
-
- private static final String testParamsPnfRegistration =
- "{\"pnfKey1\": \"pnfVal1\",\"pnfKey2\": \"pnfVal2\",\"pnfKey3\": \"pnfVal3\",\"pnfKey4\": \"pnfVal4\"}";
-
- private static final String testParamsNotification =
- "{\"notKey1\": \"notVal1\",\"notKey2\": \"notVal2\",\"notKey3\": \"notVal3\",\"notKey4\": \"notVal4\"}";
-
- private static MessageProvider messageProvider;
-
- @BeforeAll
- public static void setup() {
- messageProvider = new MessageProvider();
- }
-
- @Test
- public void createMessage_should_throw_when_given_empty_arguments() {
- assertThrows(IllegalArgumentException.class,
- () -> messageProvider.createMessage(new JSONObject(), Optional.empty(), Optional.empty()),
- "Params object cannot be null");
- }
-
- @Test
- public void createMessage_should_create_constant_message_when_no_params_specified() {
- JSONObject message = messageProvider.createMessage(new JSONObject(), Optional.ofNullable(new JSONObject()),
- Optional.ofNullable(new JSONObject()));
- JSONObject event = message.getJSONObject(EVENT);
-
- JSONObject commonEventHeader = event.getJSONObject(COMMON_EVENT_HEADER);
- JSONObject pnfRegistrationFields = event.getJSONObject(PNF_REGISTRATION_FIELDS);
- JSONObject notificationFields = event.getJSONObject(NOTIFICATION_FIELDS);
-
- JSONObject expectedCommonEventHeader = JSONObjectFactory.generateConstantCommonEventHeader();
- JSONObject expectedPnfRegistrationFields = JSONObjectFactory.generatePnfRegistrationFields();
- JSONObject expectedNotificationFields = JSONObjectFactory.generateNotificationFields();
-
- expectedCommonEventHeader
- .toMap()
- .forEach((key, val) -> assertTrue(commonEventHeader.has(key),
- () -> String.format("Key %s is not present", key)));
-
- expectedPnfRegistrationFields
- .toMap()
- .forEach((key, val) -> assertTrue(pnfRegistrationFields.has(key),
- () -> String.format("Key %s is not present", key)));
-
- expectedNotificationFields
- .toMap()
- .forEach((key, val) -> assertTrue(notificationFields.has(key),
- () -> String.format("Key %s is not present", key)));
- }
-
- @Test
- public void createMessage_should_throw_exception_when_params_specified_as_empty() {
- assertThrows(IllegalArgumentException.class,
- () -> messageProvider.createMessage(new JSONObject(), Optional.empty(),
- Optional.empty()));
- }
-
- @Test
- public void createMessage_should_add_specified_params_to_valid_subobjects_with_event_pnf_registration() {
- JSONObject message = messageProvider
- .createMessage(new JSONObject(), Optional.of(new JSONObject(testParamsPnfRegistration)), Optional.empty());
- JSONObject event = message.getJSONObject(EVENT);
-
- JSONObject commonEventHeader = event.getJSONObject(COMMON_EVENT_HEADER);
- assertEquals(13, commonEventHeader.keySet().size());
-
- JSONObject pnfRegistrationFields = event.getJSONObject(PNF_REGISTRATION_FIELDS);
- assertEquals("pnfVal1", pnfRegistrationFields.getString("pnfKey1"));
- assertEquals("pnfVal2", pnfRegistrationFields.getString("pnfKey2"));
- }
-
- @Test
- public void createMessage_should_add_specified_params_to_valid_subobjects_with_event_notification() {
- JSONObject message = messageProvider
- .createMessage(new JSONObject(), Optional.empty(), Optional.of(new JSONObject(testParamsNotification)));
- JSONObject event = message.getJSONObject(EVENT);
-
- JSONObject commonEventHeader = event.getJSONObject(COMMON_EVENT_HEADER);
- assertEquals(12, commonEventHeader.keySet().size());
-
- JSONObject notificationFields = event.getJSONObject(NOTIFICATION_FIELDS);
- assertEquals("notVal1", notificationFields.getString("notKey1"));
- assertEquals("notVal2", notificationFields.getString("notKey2"));
- }
-
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfConfigurationCheckingTaskTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfConfigurationCheckingTaskTest.java
deleted file mode 100644
index df5a13db2..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfConfigurationCheckingTaskTest.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/// *
-// * ============LICENSE_START=======================================================
-// * PNF-REGISTRATION-HANDLER
-// * ================================================================================
-// * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
-// * ================================================================================
-// * Licensed under the Apache License, Version 2.0 (the "License");
-// * you may not use this file except in compliance with the License.
-// * You may obtain a copy of the License at
-// *
-// * http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing, software
-// * distributed under the License is distributed on an "AS IS" BASIS,
-// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// * See the License for the specific language governing permissions and
-// * limitations under the License.
-// * ============LICENSE_END=========================================================
-// */
-//
-// package org.onap.pnfsimulator.netconfmonitor;
-//
-// import static org.mockito.ArgumentMatchers.any;
-// import static org.mockito.Mockito.never;
-// import static org.mockito.Mockito.verify;
-// import static org.mockito.Mockito.when;
-//
-// import com.tailf.jnc.JNCException;
-// import java.io.IOException;
-// import org.junit.jupiter.api.BeforeEach;
-// import org.junit.jupiter.api.Test;
-// import org.mockito.Mock;
-// import org.mockito.MockitoAnnotations;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationCache;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationReader;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationWriter;
-//
-// class NetconfConfigurationCheckingTaskTest {
-//
-// private NetconfConfigurationCheckingTask checkingTask;
-//
-// @Mock
-// private NetconfConfigurationReader reader;
-// @Mock
-// private NetconfConfigurationWriter writer;
-// @Mock
-// private NetconfConfigurationCache cache;
-//
-// @BeforeEach
-// void setup() {
-// MockitoAnnotations.initMocks(this);
-// checkingTask = new NetconfConfigurationCheckingTask(reader, writer, cache);
-// }
-//
-// @Test
-// void run_should_update_configuration_when_changed() throws IOException, JNCException {
-// String configuration = "newConfiguration";
-// when(reader.read()).thenReturn(configuration);
-// when(cache.getConfiguration()).thenReturn("oldConfiguration");
-//
-// checkingTask.run();
-//
-// verify(reader).read();
-// verify(cache).getConfiguration();
-// verify(writer).writeToFile(configuration);
-// verify(cache).update(configuration);
-// }
-//
-// @Test
-// void run_should_not_update_configuration_when_same() throws IOException, JNCException {
-// String configuration = "configuration";
-// when(reader.read()).thenReturn(configuration);
-// when(cache.getConfiguration()).thenReturn("configuration");
-//
-// checkingTask.run();
-//
-// verify(reader).read();
-// verify(cache).getConfiguration();
-// verify(writer, never()).writeToFile(configuration);
-// verify(cache, never()).update(configuration);
-// }
-//
-// @Test
-// void run_should_not_take_any_action_when_failed_to_read_configuration() throws IOException,
-/// JNCException {
-// when(reader.read()).thenThrow(new IOException());
-//
-// checkingTask.run();
-//
-// verify(reader).read();
-// verify(cache, never()).getConfiguration();
-// verify(writer, never()).writeToFile(any());
-// verify(cache, never()).update(any());
-// }
-// }
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceConfigurationTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceConfigurationTest.java
deleted file mode 100644
index 3ff234b27..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceConfigurationTest.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/// *
-// * ============LICENSE_START=======================================================
-// * PNF-REGISTRATION-HANDLER
-// * ================================================================================
-// * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
-// * ================================================================================
-// * Licensed under the Apache License, Version 2.0 (the "License");
-// * you may not use this file except in compliance with the License.
-// * You may obtain a copy of the License at
-// *
-// * http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing, software
-// * distributed under the License is distributed on an "AS IS" BASIS,
-// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// * See the License for the specific language governing permissions and
-// * limitations under the License.
-// * ============LICENSE_END=========================================================
-// */
-//
-// package org.onap.pnfsimulator.netconfmonitor;
-//
-// import static org.junit.jupiter.api.Assertions.assertNotNull;
-// import static org.mockito.ArgumentMatchers.any;
-// import static org.mockito.Mockito.doReturn;
-// import static org.mockito.Mockito.mock;
-// import static org.mockito.Mockito.spy;
-// import static org.mockito.Mockito.verify;
-//
-// import com.tailf.jnc.JNCException;
-// import com.tailf.jnc.NetconfSession;
-// import java.io.IOException;
-// import org.junit.jupiter.api.BeforeEach;
-// import org.junit.jupiter.api.Test;
-// import org.mockito.Mock;
-//
-// class NetconfMonitorServiceConfigurationTest {
-//
-// private NetconfMonitorServiceConfiguration configuration;
-//
-// @Mock
-// private NetconfSession netconfSession;
-//
-// @BeforeEach
-// void setup() {
-// netconfSession = mock(NetconfSession.class);
-// configuration = spy(new NetconfMonitorServiceConfiguration());
-// }
-//
-// @Test
-// void readNetconfConfiguration() throws IOException, JNCException {
-// doReturn(netconfSession).when(configuration).createNetconfSession(any());
-//
-// assertNotNull(configuration.configurationReader());
-// verify(configuration).createNetconfSession(any());
-// }
-//
-// @Test
-// void configurationCacheIsNotNull() {
-// assertNotNull(configuration.configurationCache());
-// }
-//
-// @Test
-// void netconfConfigurationWriterIsNotNull() {
-// assertNotNull(configuration.netconfConfigurationWriter());
-// }
-//
-// @Test
-// void timerIsNotNull() {
-// assertNotNull(configuration.timer());
-// }
-// }
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceTest.java
deleted file mode 100644
index f8690c5ce..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceTest.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/// *
-// * ============LICENSE_START=======================================================
-// * PNF-REGISTRATION-HANDLER
-// * ================================================================================
-// * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
-// * ================================================================================
-// * Licensed under the Apache License, Version 2.0 (the "License");
-// * you may not use this file except in compliance with the License.
-// * You may obtain a copy of the License at
-// *
-// * http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing, software
-// * distributed under the License is distributed on an "AS IS" BASIS,
-// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// * See the License for the specific language governing permissions and
-// * limitations under the License.
-// * ============LICENSE_END=========================================================
-// */
-//
-// package org.onap.pnfsimulator.netconfmonitor;
-//
-// import static org.mockito.ArgumentMatchers.anyString;
-// import static org.mockito.Mockito.any;
-// import static org.mockito.Mockito.anyLong;
-// import static org.mockito.Mockito.doNothing;
-// import static org.mockito.Mockito.times;
-// import static org.mockito.Mockito.verify;
-// import static org.mockito.Mockito.when;
-//
-// import com.tailf.jnc.JNCException;
-// import java.io.IOException;
-// import java.util.Timer;
-// import org.junit.jupiter.api.BeforeEach;
-// import org.junit.jupiter.api.Test;
-// import org.mockito.Mock;
-// import org.mockito.MockitoAnnotations;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationCache;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationReader;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationWriter;
-//
-// class NetconfMonitorServiceTest {
-//
-// private NetconfMonitorService service;
-//
-// @Mock
-// private Timer timer;
-// @Mock
-// private NetconfConfigurationReader reader;
-// @Mock
-// private NetconfConfigurationWriter writer;
-// @Mock
-// private NetconfConfigurationCache cache;
-//
-// @BeforeEach
-// void setup() {
-// MockitoAnnotations.initMocks(this);
-// service = new NetconfMonitorService(timer, reader, writer, cache);
-// }
-//
-// @Test
-// void startNetconfService() throws IOException, JNCException {
-// when(reader.read()).thenReturn("message");
-// doNothing().when(writer).writeToFile(anyString());
-// doNothing().when(cache).update(anyString());
-//
-// service.start();
-//
-// verify(cache, times(1)).update(anyString());
-// verify(writer, times(1)).writeToFile(anyString());
-// verify(timer, times(1)).scheduleAtFixedRate(any(), anyLong(), anyLong());
-// }
-// }
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationCacheTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationCacheTest.java
deleted file mode 100644
index 56f62ac50..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationCacheTest.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.netconfmonitor.netconf;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import org.junit.jupiter.api.Test;
-
-public class NetconfConfigurationCacheTest {
-
- private static final String CONFIGURATION = "sampleConfiguration";
-
- @Test
- void changeConfigurationAfterUpdate() {
- NetconfConfigurationCache configurationCache = new NetconfConfigurationCache();
- configurationCache.update(CONFIGURATION);
-
- assertEquals(CONFIGURATION, configurationCache.getConfiguration());
- }
-} \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationReaderTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationReaderTest.java
deleted file mode 100644
index 65b2bc32e..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationReaderTest.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.netconfmonitor.netconf;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import com.tailf.jnc.Element;
-import com.tailf.jnc.JNCException;
-import com.tailf.jnc.NetconfSession;
-import com.tailf.jnc.NodeSet;
-import java.io.IOException;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-
-class NetconfConfigurationReaderTest {
-
- private static final String NETCONF_MODEL_PATH = "";
- private static final String EXPECTED_STRING_XML = "<?xml version=\"1.0\"?>";
- private NetconfConfigurationReader reader;
-
- @Mock
- private NetconfSession netconfSession;
- @Mock
- private NodeSet nodeSet;
- @Mock
- private Element element;
-
- @BeforeEach
- void setup() {
- MockitoAnnotations.initMocks(this);
- reader = new NetconfConfigurationReader(netconfSession, NETCONF_MODEL_PATH);
- }
-
- @Test
- void properlyReadXML() throws IOException, JNCException {
- when(netconfSession.getConfig(anyString())).thenReturn(nodeSet);
- when(nodeSet.first()).thenReturn(element);
- when(element.toXMLString()).thenReturn(EXPECTED_STRING_XML);
-
- String result = reader.read();
-
- verify(netconfSession).getConfig(anyString());
- verify(nodeSet).first();
- verify(element).toXMLString();
- assertEquals(EXPECTED_STRING_XML, result);
- }
-} \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationWriterTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationWriterTest.java
deleted file mode 100644
index 2baee21b7..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationWriterTest.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.netconfmonitor.netconf;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertFalse;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import org.apache.commons.io.FileUtils;
-import org.junit.Rule;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.migrationsupport.rules.EnableRuleMigrationSupport;
-import org.junit.rules.TemporaryFolder;
-
-@EnableRuleMigrationSupport
-class NetconfConfigurationWriterTest {
-
- private static final String TEST_CONFIGURATION = "test-configuration";
-
- @Rule
- public TemporaryFolder temporaryFolder = new TemporaryFolder();
-
- @Test
- void writeToFile_should_write_sample_config_when_directory_exists() throws IOException {
- File file = temporaryFolder.newFolder("temp");
- NetconfConfigurationWriter configurationWriter = new NetconfConfigurationWriter(file.getPath());
-
- configurationWriter.writeToFile(TEST_CONFIGURATION);
-
- File[] files = file.listFiles();
- assertEquals(1, files.length);
-
- String content = FileUtils.readFileToString(files[0], "UTF-8");
- assertEquals(TEST_CONFIGURATION, content);
- }
-
- @Test
- void writeToFile_should_not_write_config_when_directory_doesnt_exist() {
- String logFolderPath = "/not/existing/logs";
- NetconfConfigurationWriter configurationWriter = new NetconfConfigurationWriter(logFolderPath);
-
- configurationWriter.writeToFile(TEST_CONFIGURATION);
-
- assertFalse(Files.exists(Paths.get(logFolderPath)));
- }
-} \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/SimulatorControllerTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/SimulatorControllerTest.java
deleted file mode 100644
index d1db8d55c..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/SimulatorControllerTest.java
+++ /dev/null
@@ -1,226 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest;
-
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_COMMON_EVENT_HEADER_PARAMS;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_NOTIFICATION_PARAMS;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-import java.time.Duration;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Optional;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.onap.pnfsimulator.FileProvider;
-import org.onap.pnfsimulator.simulator.Simulator;
-import org.onap.pnfsimulator.simulator.SimulatorFactory;
-import org.onap.pnfsimulator.simulator.client.HttpClientAdapter;
-import org.onap.pnfsimulator.simulator.validation.JSONValidator;
-import org.onap.pnfsimulator.simulator.validation.NoRopFilesException;
-import org.onap.pnfsimulator.simulator.validation.ValidationException;
-import org.springframework.test.web.servlet.MockMvc;
-import org.springframework.test.web.servlet.setup.MockMvcBuilders;
-
-class SimulatorControllerTest {
-
- private static final String START_URL = "/simulator/start";
- private static final String STOP_URL = "/simulator/stop";
- private static final String STATUS_URL = "/simulator/status";
- private static final String JSON_MSG_EXPRESSION = "$.message";
- private static final String JSON_STATUS_EXPRESSION = "$.simulatorStatus";
- private static final String TEST_VES_URL = "http://localhost:10000/eventListener/v7";
- private static final String TEST_XNF_URL = "sftp://onap:pano@10.11.0.68" + "/";
- private static final String PROPER_JSON = "{\n" +
- " \"simulatorParams\": {\n" +
- " \"testDuration\": \"10\",\n" +
- " \"messageInterval\": \"1\"\n" +
- " },\n" +
- " \"commonEventHeaderParams\": {\n" +
- " \"eventName\": \"val11\",\n" +
- " \"nfNamingCode\": \"val12\",\n" +
- " \"nfcNamingCode\": \"val13\",\n" +
- " \"sourceName\": \"val14\",\n" +
- " \"sourceId\": \"val15\",\n" +
- " \"reportingEntityName\": \"val16\",\n" +
- " },\n" +
-
- " \"pnfRegistrationParams\": {\n" +
- " \"SerialNumber\": \"val1\",\n" +
- " \"VendorName\": \"val2\",\n" +
- " \"OamIpv4Address\": \"val3\",\n" +
- " \"OamIpv6Address\": \"val4\",\n" +
- " \"Family\": \"val5\",\n" +
- " \"ModelNumber\": \"val6\",\n" +
- " \"SoftwareVersion\": \"val7\",\n" +
- " }\n" +
- "}";
- private static final String WRONG_JSON = "{\n" +
- " \"mes\": {\n" +
- " \"vesServerUrl\": \"http://10.154.187.70:8080/eventListener/v5\",\n" +
- " \"testDuration\": \"10\",\n" +
- " \"messageInterval\": \"1\"\n" +
- " },\n" +
- " \"messageParams\": {\n" +
- " \"sourceName\": \"val12\",\n" +
- " \"sourceId\": \"val13\",\n" +
- " \"reportingEntityName\": \"val14\"\n" +
- " }\n" +
- "}\n";
-
- private MockMvc mockMvc;
-
- @InjectMocks
- private SimulatorController controller;
-
- @Mock
- private SimulatorFactory factory;
- @Mock
- private JSONValidator validator;
-
- private Simulator simulator;
-
- private FileProvider fileProvider = mock(FileProvider.class);
-
- private void createSampleFileList() {
- List<String> fileList = new ArrayList<>();
- fileList.add("A20190401.1608+0000-1622+0000_excl-eeiwbue-perf-large-pnf-sim-lw-1.xml.gz");
- fileList.add("A20190401.1623+0000-1637+0000_excl-eeiwbue-perf-large-pnf-sim-lw-1.xml.gz");
-
- try {
- doReturn(fileList).when(fileProvider).getFiles();
- } catch (NoRopFilesException e) {
- e.printStackTrace();
- }
- }
-
- @BeforeEach
- void setup() {
- MockitoAnnotations.initMocks(this);
- createSampleFileList();
- simulator = createEndlessSimulator();
- mockMvc = MockMvcBuilders
- .standaloneSetup(controller)
- .build();
- }
-
- private Simulator createEndlessSimulator() {
- return spy(Simulator.builder()
- .withCustomHttpClientAdapter(mock(HttpClientAdapter.class))
- .withCommonEventHeaderParams(VALID_COMMON_EVENT_HEADER_PARAMS)
- .withPnfRegistrationParams(Optional.empty())
- .withNotificationParams(VALID_NOTIFICATION_PARAMS)
- .withVesUrl(TEST_VES_URL)
- .withXnfUrl(TEST_XNF_URL)
- .withFileProvider(fileProvider)
- .withInterval(Duration.ofMinutes(1))
- .build());
- }
-
- @Test
- void wrongJSONFormatOnStart() throws Exception {
- when(factory.create(any(),any(), any(),any())).thenReturn(simulator);
- doThrow(new ValidationException("")).when(validator).validate(anyString(), anyString());
-
- mockMvc.perform(post("/simulator/start").content(WRONG_JSON))
- .andExpect(status().isBadRequest())
- .andExpect(jsonPath("$.message").value("Cannot start simulator - Json format " +
- "is not compatible with schema definitions"));
- verify(validator).validate(anyString(), anyString());
- }
-
- @Test
- void startSimulatorProperly() throws Exception {
- startSimulator();
-
- verify(validator).validate(anyString(), anyString());
- verify(factory).create(any(),any(), any(),any());
- verify(simulator).start();
- }
-
- @Test
- void notStartWhenAlreadyRunning() throws Exception {
- startSimulator();
-
- mockMvc
- .perform(post(START_URL).content(PROPER_JSON))
- .andExpect(status().isBadRequest())
- .andExpect(jsonPath(JSON_MSG_EXPRESSION).value("Cannot start simulator since it's already running"));
- }
-
- @Test
- void stopSimulatorWhenRunning() throws Exception {
- startSimulator();
-
- mockMvc
- .perform(post(STOP_URL))
- .andExpect(status().isOk())
- .andExpect(jsonPath(JSON_MSG_EXPRESSION).value("Simulator successfully stopped"));
- }
-
- @Test
- void getNotRunningMessageWhenOff() throws Exception {
- mockMvc
- .perform(post(STOP_URL))
- .andExpect(status().isBadRequest())
- .andExpect(jsonPath(JSON_MSG_EXPRESSION).value("Cannot stop simulator, because it's not running"));
- }
-
- @Test
- void getRunningStatusWhenOn() throws Exception {
- startSimulator();
-
- mockMvc
- .perform(get(STATUS_URL))
- .andExpect(status().isOk())
- .andExpect(jsonPath(JSON_STATUS_EXPRESSION).value("RUNNING"));
- }
-
- @Test
- void getNotRunningStatusWhenOff() throws Exception {
- mockMvc
- .perform(get(STATUS_URL))
- .andExpect(status().isOk())
- .andExpect(jsonPath(JSON_STATUS_EXPRESSION).value("NOT RUNNING"));
- }
-
- private void startSimulator() throws Exception {
- when(factory.create(any(), any(), any(),any())).thenReturn(simulator);
-
- mockMvc
- .perform(post(START_URL).content(PROPER_JSON))
- .andExpect(status().isOk())
- .andExpect(jsonPath(JSON_MSG_EXPRESSION).value("Simulator started"));
- }
-} \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/util/DateUtilTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/util/DateUtilTest.java
deleted file mode 100644
index 99b9af7ec..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/util/DateUtilTest.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest.util;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import org.junit.jupiter.api.Test;
-
-class DateUtilTest {
-
- @Test
- void getFormattedDate() {
- Calendar currentCalendar = Calendar.getInstance();
- String expectedResult = String.valueOf(currentCalendar.get(Calendar.YEAR));
-
- assertEquals(expectedResult, DateUtil.getTimestamp(new SimpleDateFormat("yyyy")));
- }
-} \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/util/ResponseBuilderTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/util/ResponseBuilderTest.java
deleted file mode 100644
index 59e1e3b4f..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/util/ResponseBuilderTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest.util;
-
-import static org.junit.jupiter.api.Assertions.assertAll;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNull;
-
-import java.util.Map;
-import org.junit.jupiter.api.Test;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
-
-public class ResponseBuilderTest {
-
-
- private static final HttpStatus SAMPLE_STATUS = HttpStatus.OK;
-
- @Test
- void response_should_have_empty_body_when_built_immediately() {
- ResponseEntity responseEntity = ResponseBuilder.status(SAMPLE_STATUS).build();
-
- assertAll(
- () -> assertEquals(responseEntity.getStatusCode(), SAMPLE_STATUS),
- () -> assertNull(responseEntity.getBody())
- );
- }
-
- @Test
- void builder_should_set_response_status_and_body() {
- String key = "key";
- String value = "value";
- ResponseEntity response = ResponseBuilder
- .status(SAMPLE_STATUS)
- .put(key, value)
- .build();
-
- Map<String, Object> body = (Map<String, Object>) response.getBody();
-
- assertAll(
- () -> assertEquals(SAMPLE_STATUS, response.getStatusCode()),
- () -> assertEquals(value, body.get(key))
- );
- }
-
-
-} \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/SimulatorFactoryTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/SimulatorFactoryTest.java
deleted file mode 100644
index d8e60c18d..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/SimulatorFactoryTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import static org.junit.Assert.assertNotNull;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_SIMULATOR_PARAMS;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_COMMON_EVENT_HEADER_PARAMS;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_NOTIFICATION_PARAMS;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_PNF_REGISTRATION_PARAMS;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_SIMULATOR_PARAMS;
-import java.util.Optional;
-import org.json.JSONException;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-class SimulatorFactoryTest {
-
-
- private SimulatorFactory simulatorFactory;
-
- @BeforeEach
- void setUp() {
- simulatorFactory = new SimulatorFactory();
- }
-
- @Test
- void should_successfully_create_simulator_given_valid_pnf_registration_params() {
- assertNotNull(simulatorFactory.create(VALID_SIMULATOR_PARAMS, VALID_COMMON_EVENT_HEADER_PARAMS,
- VALID_PNF_REGISTRATION_PARAMS, Optional.empty()));
- }
-
- @Test
- void should_successfully_create_simulator_given_valid_notification_params_and_valid_output_message() {
- assertNotNull(simulatorFactory.create(VALID_SIMULATOR_PARAMS, VALID_COMMON_EVENT_HEADER_PARAMS,
- Optional.empty(), VALID_NOTIFICATION_PARAMS));
- }
-
- @Test
- void should_throw_given_invalid_simulator_params() {
- assertThrows(
- JSONException.class,
- () -> simulatorFactory.create(INVALID_SIMULATOR_PARAMS, VALID_COMMON_EVENT_HEADER_PARAMS,
- VALID_PNF_REGISTRATION_PARAMS, VALID_NOTIFICATION_PARAMS));
- }
-}
-
-
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/SimulatorTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/SimulatorTest.java
deleted file mode 100644
index fb812b598..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/SimulatorTest.java
+++ /dev/null
@@ -1,204 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import static org.junit.Assert.assertNull;
-import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.junit.jupiter.api.Assertions.assertTimeout;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_NOTIFICATION_PARAMS;
-import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_PNF_REGISTRATION_PARAMS_1;
-import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_PNF_REGISTRATION_PARAMS_2;
-import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_PNF_REGISTRATION_PARAMS_3;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_COMMON_EVENT_HEADER_PARAMS;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_NOTIFICATION_PARAMS;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_PNF_REGISTRATION_PARAMS;
-import java.time.Duration;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Optional;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.function.Executable;
-import org.mockito.Mockito;
-import org.onap.pnfsimulator.FileProvider;
-import org.onap.pnfsimulator.simulator.client.HttpClientAdapter;
-import org.onap.pnfsimulator.simulator.validation.NoRopFilesException;
-import org.onap.pnfsimulator.simulator.validation.ValidationException;
-
-public class SimulatorTest {
-
- private static final String TEST_VES_URL = "http://localhost:10000/eventListener/v7";
- private static final String TEST_XNF_URL = "sftp://onap:pano@10.11.0.68" + "/";
- private FileProvider fileProvider = mock(FileProvider.class);
-
- private void createSampleFileList() {
- List<String> fileList = new ArrayList<>();
- fileList.add("A20190401.1608+0000-1622+0000_excl-eeiwbue-perf-large-pnf-sim-lw-1.xml.gz");
- fileList.add("A20190401.1623+0000-1637+0000_excl-eeiwbue-perf-large-pnf-sim-lw-1.xml.gz");
-
- try {
- doReturn(fileList).when(fileProvider).getFiles();
- } catch (NoRopFilesException e) {
- e.printStackTrace();
- }
- }
-
- @Test
- void builder_should_create_endless_simulator_when_duration_not_specified() {
- Simulator simulator = Simulator
- .builder()
- .withDuration(Duration.ofSeconds(1))
- .withVesUrl(TEST_VES_URL).build();
-
- assertFalse(simulator.isEndless());
-
- simulator = Simulator
- .builder()
- .withVesUrl(TEST_VES_URL).build();
-
- assertTrue(simulator.isEndless());
- }
-
- @Test
- void simulator_should_stop_when_interrupted() {
- createSampleFileList();
-
- HttpClientAdapter httpClientMock = Mockito.mock(HttpClientAdapter.class);
- Simulator simulator = Simulator.builder()
- .withInterval(Duration.ofSeconds(1))
- .withCustomHttpClientAdapter(httpClientMock)
- .withCommonEventHeaderParams(VALID_COMMON_EVENT_HEADER_PARAMS)
- .withPnfRegistrationParams(Optional.empty())
- .withNotificationParams(VALID_NOTIFICATION_PARAMS)
- .withVesUrl(TEST_VES_URL)
- .withXnfUrl(TEST_XNF_URL)
- .withCustomHttpClientAdapter(httpClientMock)
- .withFileProvider(fileProvider).build();
-
- simulator.start();
- simulator.interrupt();
-
- assertTimeout(Duration.ofSeconds(1), (Executable) simulator::join);
- }
-
- @Test
- void should_throw_noropfiles_exception_given_empty_filelist() {
- Simulator simulator = Simulator.builder()
- .withDuration(Duration.ofMillis(100))
- .withInterval(Duration.ofMillis(100))
- .withCommonEventHeaderParams(VALID_COMMON_EVENT_HEADER_PARAMS)
- .withPnfRegistrationParams(VALID_PNF_REGISTRATION_PARAMS)
- .withNotificationParams(Optional.empty())
- .withVesUrl(TEST_VES_URL)
- .withXnfUrl(TEST_XNF_URL)
- .withFileProvider(new FileProvider()).build();
- simulator.run();
- Exception e = simulator.getThrownException();
- assertTrue(e instanceof NoRopFilesException);
- }
-
- @Test
- void should_throw_validation_exception_given_invalid_params() {
- createSampleFileList();
-
- Simulator simulator = Simulator.builder()
- .withDuration(Duration.ofMillis(100))
- .withInterval(Duration.ofMillis(100))
- .withCommonEventHeaderParams(VALID_COMMON_EVENT_HEADER_PARAMS)
- .withPnfRegistrationParams(INVALID_PNF_REGISTRATION_PARAMS_1)
- .withNotificationParams(Optional.empty())
- .withVesUrl(TEST_VES_URL)
- .withXnfUrl(TEST_XNF_URL)
- .withFileProvider(fileProvider).build();
- simulator.run();
- Exception e = simulator.getThrownException();
- assertTrue(e instanceof ValidationException);
-
- simulator = Simulator.builder()
- .withDuration(Duration.ofMillis(100))
- .withInterval(Duration.ofMillis(100))
- .withCommonEventHeaderParams(VALID_COMMON_EVENT_HEADER_PARAMS)
- .withPnfRegistrationParams(INVALID_PNF_REGISTRATION_PARAMS_2)
- .withNotificationParams(Optional.empty())
- .withVesUrl(TEST_VES_URL)
- .withXnfUrl(TEST_XNF_URL)
- .withFileProvider(fileProvider).build();
- simulator.run();
- e = simulator.getThrownException();
- assertTrue(e instanceof ValidationException);
-
- simulator = Simulator.builder()
- .withDuration(Duration.ofMillis(100))
- .withInterval(Duration.ofMillis(100))
- .withCommonEventHeaderParams(VALID_COMMON_EVENT_HEADER_PARAMS)
- .withPnfRegistrationParams(INVALID_PNF_REGISTRATION_PARAMS_3)
- .withNotificationParams(Optional.empty())
- .withVesUrl(TEST_VES_URL)
- .withXnfUrl(TEST_XNF_URL)
- .withFileProvider(fileProvider).build();
- simulator.run();
- e = simulator.getThrownException();
- assertTrue(e instanceof ValidationException);
-
- simulator = Simulator.builder()
- .withDuration(Duration.ofMillis(100))
- .withInterval(Duration.ofMillis(100))
- .withCommonEventHeaderParams(VALID_COMMON_EVENT_HEADER_PARAMS)
- .withPnfRegistrationParams(VALID_PNF_REGISTRATION_PARAMS)
- .withNotificationParams(INVALID_NOTIFICATION_PARAMS)
- .withVesUrl(TEST_VES_URL)
- .withXnfUrl(TEST_XNF_URL)
- .withFileProvider(fileProvider).build();
- simulator.run();
- e = simulator.getThrownException();
- assertTrue(e instanceof ValidationException);
- }
-
- @Test
- void simulator_should_send_fileready_message() {
- createSampleFileList();
-
- HttpClientAdapter httpClientMock = Mockito.mock(HttpClientAdapter.class);
- Simulator simulator = Simulator.builder()
- .withDuration(Duration.ofMillis(100))
- .withInterval(Duration.ofMillis(100))
- .withCommonEventHeaderParams(VALID_COMMON_EVENT_HEADER_PARAMS)
- .withPnfRegistrationParams(Optional.empty())
- .withNotificationParams(VALID_NOTIFICATION_PARAMS)
- .withVesUrl(TEST_VES_URL)
- .withXnfUrl(TEST_XNF_URL)
- .withCustomHttpClientAdapter(httpClientMock)
- .withFileProvider(fileProvider).build();
- simulator.run();
- Exception e = simulator.getThrownException();
- assertNull(e);
-
- assertTimeout(Duration.ofMillis(150), (Executable) simulator::join);
- verify(httpClientMock, times(1)).send(anyString(), eq(TEST_VES_URL));
- }
-}
-
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/TestMessages.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/TestMessages.java
deleted file mode 100644
index d92b3c2c5..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/TestMessages.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.Optional;
-import org.json.JSONObject;
-
-public final class TestMessages {
-
- static final JSONObject VALID_SIMULATOR_PARAMS = new JSONObject(getContent("validSimulatorParams.json"));
- public static final JSONObject VALID_COMMON_EVENT_HEADER_PARAMS = new JSONObject(getContent("validCommonEventHeaderParams.json"));
- static final Optional<JSONObject> VALID_PNF_REGISTRATION_PARAMS = Optional
- .of(new JSONObject(getContent("validPnfRegistrationParams.json")));
- public static final Optional<JSONObject> VALID_NOTIFICATION_PARAMS = Optional
- .of(new JSONObject(getContent("validNotificationParams.json")));
-
- static final JSONObject INVALID_SIMULATOR_PARAMS = new JSONObject(
- "{\n" +
- " \"vesServerUrl\": \"http://10.42.111.42:8080/eventListener/v5\",\n" +
- " \"messageInterval\": \"1\"\n" +
- "}");
-
-
- static final Optional<JSONObject> INVALID_PNF_REGISTRATION_PARAMS_1 = Optional.of(new JSONObject(
- "{\n" +
- " \"pnfSerialNumber\": \"val1\",\n" +
- " \"pnfVendorName\": \"val2\",\n" +
- " \"pnfFamily\": \"val5\",\n" +
- " \"pnfModelNumber\": \"val6\",\n" +
- " \"pnfSoftwareVersion\": \"val7\",\n" +
- " \"pnfType\": \"val8\",\n" +
- " \"eventName\": \"val9\",\n" +
- " \"nfNamingCode\": \"val10\",\n" +
- " \"nfcNamingCode\": \"val11\",\n" +
- " \"sourceName\": \"val12\",\n" +
- " \"sourceId\": \"val13\",\n" +
- " \"reportingEntityName\": \"val14\"\n" +
- "}"));
-
- static final Optional<JSONObject> INVALID_PNF_REGISTRATION_PARAMS_2 = Optional.of(new JSONObject(
- "{\n" +
- " \"pnfVendorName\": \"val2\",\n" +
- " \"pnfOamIpv4Address\": \"val3\",\n" +
- " \"pnfOamIpv6Address\": \"val4\",\n" +
- " \"pnfFamily\": \"val5\",\n" +
- " \"pnfModelNumber\": \"val6\",\n" +
- " \"pnfSoftwareVersion\": \"val7\",\n" +
- " \"pnfType\": \"val8\",\n" +
- " \"eventName\": \"val9\",\n" +
- " \"nfNamingCode\": \"val10\",\n" +
- " \"nfcNamingCode\": \"val11\",\n" +
- " \"sourceName\": \"val12\",\n" +
- " \"sourceId\": \"val13\",\n" +
- " \"reportingEntityName\": \"val14\"\n" +
- "}"));
-
- static final Optional<JSONObject> INVALID_PNF_REGISTRATION_PARAMS_3 = Optional.of(new JSONObject(
- "{\n" +
- " \"pnfSerialNumber\": \"val1\",\n" +
- " \"pnfOamIpv4Address\": \"val3\",\n" +
- " \"pnfFamily\": \"val5\",\n" +
- " \"pnfModelNumber\": \"val6\",\n" +
- " \"pnfSoftwareVersion\": \"val7\",\n" +
- " \"pnfType\": \"val8\",\n" +
- " \"eventName\": \"val9\",\n" +
- " \"nfNamingCode\": \"val10\",\n" +
- " \"nfcNamingCode\": \"val11\",\n" +
- " \"sourceName\": \"val12\",\n" +
- " \"sourceId\": \"val13\",\n" +
- " \"reportingEntityName\": \"val14\"\n" +
- "}"));
-
- static final Optional<JSONObject> INVALID_NOTIFICATION_PARAMS = Optional.of(new JSONObject(
- "{\n" +
- " \"mother\": \"val1\",\n" +
- " \"father\": \"val3\",\n" +
- "}"));
-
-
- private TestMessages() {
- }
-
- private static String getContent(String fileName) {
- try {
- String pathAsString = TestMessages.class.getResource(fileName).getPath();
- StringBuilder stringBuilder = new StringBuilder();
- Files.readAllLines(Paths.get(pathAsString)).forEach(line -> {
- stringBuilder.append(line);
- });
- return stringBuilder.toString();
- } catch (IOException e) {
- throw new RuntimeException(String.format("Cannot read JSON file %s", fileName));
- }
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImplTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImplTest.java
deleted file mode 100644
index a4fb9eb04..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImplTest.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator.client;
-
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.verify;
-import static org.mockito.MockitoAnnotations.initMocks;
-
-import java.io.IOException;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.HttpClient;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mock;
-
-class HttpClientAdapterImplTest {
-
- private HttpClientAdapter adapter;
-
- @Mock
- private HttpClient httpClient;
- @Mock
- private HttpResponse httpResponse;
-
- @BeforeEach
- void setup() {
- initMocks(this);
- adapter = new HttpClientAdapterImpl(httpClient);
- }
-
- @Test
- void send_should_successfully_send_request_given_valid_url() throws IOException {
- doReturn(httpResponse).when(httpClient).execute(any());
-
- adapter.send("test-msg", "http://valid-url");
-
- verify(httpClient).execute(any());
- verify(httpResponse).getStatusLine();
- }
-
- @Test
- void send_should_not_send_request_given_invalid_url() throws IOException {
- doThrow(new IOException("test")).when(httpClient).execute(any());
-
- adapter.send("test-msg", "http://invalid-url");
-
- verify(httpClient).execute(any());
- verify(httpResponse, never()).getStatusLine();
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/validation/JSONValidatorTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/validation/JSONValidatorTest.java
deleted file mode 100644
index 30dfe065e..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/validation/JSONValidatorTest.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator.validation;
-
-import static org.junit.jupiter.api.Assertions.assertThrows;
-
-import com.github.fge.jsonschema.core.exceptions.InvalidSchemaException;
-import com.github.fge.jsonschema.core.exceptions.ProcessingException;
-import java.io.IOException;
-import java.net.URL;
-import org.json.JSONObject;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-class JSONValidatorTest {
-
- private final static String VALID_SCHEMA_NAME = "valid-test-schema.json";
- private final static String INVALID_SCHEMA_NAME = "invalid-test-schema.json";
-
- private JSONValidator validator;
-
- @BeforeEach
- void setUp() {
- validator = new JSONValidator();
- }
-
- @Test
- void validate_should_not_throw_given_valid_json() throws ProcessingException, IOException, ValidationException {
- validator.validate(getValidJsonString(), getResourcePath(VALID_SCHEMA_NAME));
- }
-
- @Test
- void validate_should_not_throw_when_optional_parameter_missing()
- throws ProcessingException, IOException, ValidationException {
-
- String invalidJsonString = new JSONObject()
- .put("key1", "value1")
- .put("key2", "value2")
- .toString();
-
- validator.validate(invalidJsonString, getResourcePath(VALID_SCHEMA_NAME));
- }
-
- @Test
- void validate_should_throw_when_mandatory_parameter_missing() {
-
- String invalidJsonString = new JSONObject()
- .put("key1", "value1")
- .put("key3", "value3")
- .toString();
-
- assertThrows(
- ValidationException.class,
- () -> validator.validate(invalidJsonString, getResourcePath(VALID_SCHEMA_NAME)));
- }
-
- @Test
- void validate_should_throw_when_invalid_json_format() {
- String invalidJsonString = "{" +
- "\"key1\": \"value1\"" +
- "\"key2\": \"value2" +
- "}";
-
- assertThrows(
- IOException.class,
- () -> validator.validate(invalidJsonString, getResourcePath(VALID_SCHEMA_NAME)));
- }
-
- @Test
- void validate_should_throw_when_invalid_schema_format() {
- assertThrows(
- InvalidSchemaException.class,
- () -> validator.validate(getValidJsonString(), getResourcePath(INVALID_SCHEMA_NAME)));
- }
-
- @Test
- void validate_should_throw_when_invalid_schema_path() {
-
- assertThrows(
- IOException.class,
- () -> validator.validate(getValidJsonString(), "/not/existing/path/schema.json"));
- }
-
- private String getResourcePath(String schemaFileName) {
- URL result = getClass()
- .getClassLoader()
- .getResource(schemaFileName);
-
- if (result == null) {
- throw new IllegalArgumentException("Given file doesn't exist");
- } else {
- return result
- .toString()
- .replace("file:", "");
- }
- }
-
- private String getValidJsonString() {
- return new JSONObject()
- .put("key1", "value1")
- .put("key2", "value2")
- .put("key3", "value3")
- .toString();
- }
-} \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/invalid-test-schema.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/invalid-test-schema.json
deleted file mode 100644
index 8c37c822b..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/invalid-test-schema.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "properties": {
- "key1": {
- "type": "string"
- },
- "key2": {
- "type": "string"
- },
- "key3": {
- "type": "string"
- },
- "required": [
- "key1",
- "key2"
- ]
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/logback-test.xml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/logback-test.xml
deleted file mode 100644
index d7966fe60..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/logback-test.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Configuration complete="true" compact="true">
-
- <Property name="outputFilename" value="pnfsimulator_output"/>
- <Property name="log-path" value="${java.io.tmpdir}"/>
- <property name="maxFileSize" value="50MB"/>
- <property name="maxHistory" value="30"/>
- <property name="totalSizeCap" value="10GB"/>
-
- <appender name="Console" target="SYSTEM_OUT" class="ch.qos.logback.core.ConsoleAppender">
- <encoder>
- <Pattern>%nopexception%logger
- |%date{yyyy-MM-dd'T'HH:mm:ss.SSSXXX,UTC}
- |%level
- |%replace(%replace(%message){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%mdc){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%rootException){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%marker){'\t','\\\\t'}){'\n','\\\\n'}
- |%thread
- |%n</Pattern>
- </encoder>
- </appender>
-
- <appender name="ROLLING-FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <encoder>
- <pattern>%nopexception%logger
- |%date{yyyy-MM-dd'T'HH:mm:ss.SSSXXX,UTC}
- |%level
- |%replace(%replace(%message){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%mdc){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%rootException){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%marker){'\t','\\\\t'}){'\n','\\\\n'}
- |%thread
- |%n</pattern>
- </encoder>
- <File>${log-path}/${outputFilename}.log</File>
- <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
- <FileNamePattern>${log-path}/${outputFilename}.%d{yyyy-MM-dd}.%i.log.zip</FileNamePattern>
- <MaxFileSize>${maxFileSize}</MaxFileSize>
- <MaxHistory>${maxHistory}</MaxHistory>
- <TotalSizeCap>${totalSizeCap}</TotalSizeCap>
- </rollingPolicy>
- </appender>
-
- <root level="info">
- <appender-ref ref="Console" />
- <appender-ref ref="ROLLING-FILE" />
- </root>
-</Configuration>
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validCommonEventHeaderParams.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validCommonEventHeaderParams.json
deleted file mode 100644
index b988da0f7..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validCommonEventHeaderParams.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "eventName": "Noti_RnNode-Ericsson_FileReady",
- "nfNamingCode": "gNB",
- "nfcNamingCode": "oam"
-} \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validNotificationParams.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validNotificationParams.json
deleted file mode 100644
index af0cdf409..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validNotificationParams.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "changeIdentifier": "PM_MEAS_FILES",
- "changeType": "FileReady"
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validPnfRegistrationParams.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validPnfRegistrationParams.json
deleted file mode 100644
index b95f8e60a..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validPnfRegistrationParams.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "serialNumber": "6061ZW3",
- "vendorName": "Nokia",
- "oamV4IpAddress": "val3",
- "oamV6IpAddress": "val4",
- "unitFamily": "BBU",
- "modelNumber": "val6",
- "softwareVersion": "val7",
- "unitType": "val8"
-} \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validSimulatorParams.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validSimulatorParams.json
deleted file mode 100644
index 018f185c4..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validSimulatorParams.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "testDuration": "10",
- "messageInterval": "1"
-} \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/valid-test-schema.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/valid-test-schema.json
deleted file mode 100644
index 26e48a5e8..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/valid-test-schema.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "properties": {
- "key1": {
- "type": "string"
- },
- "key2": {
- "type": "string"
- },
- "key3": {
- "type": "string"
- }
- },
- "required": [
- "key1",
- "key2"
- ]
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/templates/.gitattributes b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/templates/.gitattributes
deleted file mode 100644
index f7d3c009b..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/templates/.gitattributes
+++ /dev/null
@@ -1 +0,0 @@
-*.gz binary \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/templates/file_template_new.xml.gz b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/templates/file_template_new.xml.gz
deleted file mode 100644
index 1ec9ef412..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/templates/file_template_new.xml.gz
+++ /dev/null
Binary files differ
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/README.md b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/README.md
deleted file mode 100644
index 5edfeddec..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/README.md
+++ /dev/null
@@ -1,4 +0,0 @@
-# To verify the certificate expiration dates:
-
-openssl x509 -enddate -noout -in dfc.crt
-openssl x509 -enddate -noout -in ftp.crt
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/dfc.crt b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/dfc.crt
deleted file mode 100644
index f747f20bb..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/dfc.crt
+++ /dev/null
@@ -1,19 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDFjCCAf4CCQCqH10CLXcbUDANBgkqhkiG9w0BAQsFADBNMQswCQYDVQQGEwJT
-RTEKMAgGA1UECAwBMDEKMAgGA1UEBwwBMDEKMAgGA1UECgwBMDEMMAoGA1UECwwD
-RVNUMQwwCgYDVQQDDANFU1QwHhcNMTkwNDA0MDgwMjQwWhcNMjAwNDAzMDgwMjQw
-WjBNMQswCQYDVQQGEwJTRTEKMAgGA1UECAwBMDEKMAgGA1UEBwwBMDEKMAgGA1UE
-CgwBMDEMMAoGA1UECwwDRVNUMQwwCgYDVQQDDANFU1QwggEiMA0GCSqGSIb3DQEB
-AQUAA4IBDwAwggEKAoIBAQDAY7wM9khd7OIaZsfZv8EzWUnLTepzSx6hlAvjSbU/
-ZBpq94QOJWt22hQQSHA+Vdl8184PoyOKX+qRfbKJCtyArbv0DoWjlv16WNs938A/
-0TGFh+6xA464/GQmP/AXEuI0tSa2GEgXkhZ0uy6Pmdq+8sD6YcRyVCeqLTq8bqSq
-YbpPrKnbZsd3l7PzpYCZgZLfWoENQ7nuT+C7j4pGGnPKpGn4ubiscV3nTI6nwU19
-ANexToikIL1v18z+gJdva8QtKih5dt9+2QJuJ6lPcwTa7xVkMmuVyr5FXwL11yII
-gKRtknWObU7BjcKkQOG3xnWXZTNzLV/th8GCCWJuRFPbAgMBAAEwDQYJKoZIhvcN
-AQELBQADggEBAGZa23j04vghZY+/81FyRsoeJs8o9aDeK5KLT9JUpRaZlu4AEXSN
-I3oEhencocc07DTndfxw7eSALnTD9ibGUxnRyEvgtW595ajb7A3TtEKGsFzXFvar
-y9RdVm6RRA4Bty8UyLW75ZpAlfc3twCmsRwKjShXeN90Yn3sjpcEU83q8fZBU35C
-xrgrW34gphPtuzZ7kvCJGjhYIfIsTcWObvDGUROtlRhb8w2v1K4G3UcfwDTBixK/
-e61mMUbhoqVyoMYgSuU4idY+n4Evjs5IwalXM1daUZmCGGZoreD/9aSpDEeUWnQy
-vqYC0YY0VJkOe+WI6JZ2r49BtRtl/jxWDUI=
------END CERTIFICATE-----
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/ftp.crt b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/ftp.crt
deleted file mode 100644
index f412d013c..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/ftp.crt
+++ /dev/null
@@ -1,19 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDFjCCAf4CCQDaiGveWOXqNjANBgkqhkiG9w0BAQsFADBNMQswCQYDVQQGEwJT
-RTEKMAgGA1UECAwBMDEKMAgGA1UEBwwBMDEKMAgGA1UECgwBMDEMMAoGA1UECwwD
-RVNUMQwwCgYDVQQDDANFU1QwHhcNMTkwNDA0MDgwMjQwWhcNMjAwNDAzMDgwMjQw
-WjBNMQswCQYDVQQGEwJTRTEKMAgGA1UECAwBMDEKMAgGA1UEBwwBMDEKMAgGA1UE
-CgwBMDEMMAoGA1UECwwDRVNUMQwwCgYDVQQDDANFU1QwggEiMA0GCSqGSIb3DQEB
-AQUAA4IBDwAwggEKAoIBAQDSsF7lN/gXpevQpGj8W/3g3h9AVOE83Z49yJAdyIHF
-PQz6PI+bKutYdORCUZkzsl2fegLzkXl4CmoVIkJRBL1SZkzQXKe+fjfuRr9PQKCC
-lp/LA161Qak+9pz2Oc1lfLbgEdv22RLji3akCQso3G9vlx+rLHPRgbew0iiTViJP
-v3CHwiY89t1ai149OEywhjsJBJjBoj6fvxfvv46QmK7FuV5Tz0vTL/eB/Z9P7jm+
-twHRz9Ae4s97c6UhbFKafHLrwdMK+yz7qe55tpgthCgoedeSB0gXWIiS7RY18dEN
-JUB/FAt64LfOiKBl0aUbcQOgUinorhCN8gcNTn7Hrn+1AgMBAAEwDQYJKoZIhvcN
-AQELBQADggEBAKVGHkTLe5R/fG/C7prxiknD+QXo9WACcZNVKMuKhpJwQh1iwc4h
-4tq9lj//giyRrt+yPVQF8pRTiIdeewLVyf5O1ugxzb68UtHzVJWD6ooFqGmyPFkm
-WOdLvtgbasGPZvO6y8HZA3kxKgONbYcL0sdtRWpp5b+KTEyvN/50jAtvkB/wlaee
-emgdRdsSVZqg1p8dUfF6j3Alzsuff7YzEZEZPoJKYdb1vikvj21+LdzTDSj5WRno
-PWXQhdTTqN5/TNMZRHJp/UZY6hVmQL+ILqVYGiOPotfxGNUyo+WsKJwZfZnq4adh
-BzdSIIsDCZB34Njz/qjIXh307/seNWWhNFw=
------END CERTIFICATE-----
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/ftp.key b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/ftp.key
deleted file mode 100644
index f90c781d3..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/ftp.key
+++ /dev/null
@@ -1,27 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEA0rBe5Tf4F6Xr0KRo/Fv94N4fQFThPN2ePciQHciBxT0M+jyP
-myrrWHTkQlGZM7Jdn3oC85F5eApqFSJCUQS9UmZM0Fynvn437ka/T0CggpafywNe
-tUGpPvac9jnNZXy24BHb9tkS44t2pAkLKNxvb5cfqyxz0YG3sNIok1YiT79wh8Im
-PPbdWotePThMsIY7CQSYwaI+n78X77+OkJiuxbleU89L0y/3gf2fT+45vrcB0c/Q
-HuLPe3OlIWxSmnxy68HTCvss+6nuebaYLYQoKHnXkgdIF1iIku0WNfHRDSVAfxQL
-euC3zoigZdGlG3EDoFIp6K4QjfIHDU5+x65/tQIDAQABAoIBAEs+G5XG6D4hzlbD
-8I53l/JvwT9rUMk46GNuNjG8wsOa6wCPEkY7DLOZg08/7vOsdo0WuOkdggDhz0Le
-6koe5DICQNqEzI9WakkZUQdPsEMS4dxRxsf6vCO1VRcGS5k78d+R4TmGCp3i16r7
-Y9Xi65UxpmiuRmqC5gQq+bysnTJXKUhK/3NCPa7Bwo7hgASJcI55Nk58KHokIv84
-7dweKuIIxeKAR4/094q243lTu1n273J+ckjVMWWZROIIn2E+CrjAVh59DaC7QX6d
-kWdPwvjYiwH2LBqzJ3dKvLmk6XZZ5bcjmBYXSiS1ahjQ8zhDdLoHBt/gDXcmtOVw
-gZPjgn0CgYEA6nJGMig2SRB25CqtA/gwLbQV54th0Vxj1SapoO+W4jIUEPBRJN1t
-0JkM9mTMWWBiEzZi4ICpJpgT/+iGXx6q6WZZqOvbWWS4yjuC+wLCttt2yriFkmlA
-eylz0rYTGm5gQ3wpAUuf0wOfqVICfQ2KnASY0p8g2fSjOI0/pULpX18CgYEA5g72
-UVspmBLqaA/PGYip/7neh00sGs95b9Wh1UqvcrstxkgR4LyMOReTBNwL+39kyZPv
-LNsfMtaiezIfSF+o77RiXnBsYQ/ZnKBNZ7cTRr76f5fKStzRSzHoQrf+98YkSfg3
-sI1vYH/hnfbd/6ti8Wiloc7O28IHCwG4vGXWPWsCgYEA4Oqaj1nmonfKJ6ENdSZw
-Shhff1BHmovxNrctuzi/Xue+OnXi0uQfiySZu/P926zMyjO97jVgkacKYNMZvj10
-qA/J6nXDbOJlKZaoVNlUJgsrztXxRwH0m3OsKzUD5LKJZZTC3fxIKy9pyA3mV0Rb
-eswqNL32zUKWKBXSPmCP9S8CgYEAoJIk3dfRCjF5pjQSinHWpYRniuwUMopI6gOj
-tqkwF9YJNvGqeL9g56BKnwOu4VSjVgaI0zgFaIhiU7ZCG1OZJ+UxG1VAb+aOLkG8
-hP1E2QYG9PNO4D2LXv5wa9DchrenMHGs/O9ao3HeWVCq1f4bSv+NS4h63Otp2wbS
-l7xEcg0CgYBcrapVzHfGn73Z9tgs7GOVEmoXKoT6Q8yrdwwhVrcWdDHHtLFPso21
-eA9dhbbawaEmIReWtNKvo+Q2kxn+WcLIL6jWaASsJH4cnnRl58IYcUv8cb6o6G34
-nTkWuhXm5XBUupacr8lqz5bXtoRBU7BcSnqXd20dTR5QEHU5Rrg/uw==
------END RSA PRIVATE KEY-----
diff --git a/test/mocks/mass-pnf-sim/requirements.txt b/test/mocks/mass-pnf-sim/requirements.txt
deleted file mode 100644
index 442e1c37b..000000000
--- a/test/mocks/mass-pnf-sim/requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-argparse
-ipaddress
diff --git a/test/mocks/mass-pnf-sim/setup.sh b/test/mocks/mass-pnf-sim/setup.sh
deleted file mode 100755
index 4e49a7e3e..000000000
--- a/test/mocks/mass-pnf-sim/setup.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-
-virtualenv --version > /dev/null || { echo 'Virtualenv command is not available, exiting' ; sleep 10; exit 1; }
-pip3 --version > /dev/null || { echo 'python3-pip package is not available, exiting' ; sleep 10; exit 1; }
-
-
-if [ -d ".env" ]; then
- echo ".env is prepared"
-else
- virtualenv --no-site-packages --distribute -p python3 .env
-fi
-
-source .env/bin/activate && pip3 install -r requirements.txt \ No newline at end of file
diff --git a/test/mocks/netconf-pnp-simulator/README.md b/test/mocks/netconf-pnp-simulator/README.md
new file mode 100644
index 000000000..df3211844
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/README.md
@@ -0,0 +1,9 @@
+# NETCONF Plug-and-Play Simulator
+
+Instead of a single docker image aggregating all Yang models and simulation logic, this simulator uses a modular
+approach that is reflected on this directory structure:
+
+- engine: Contains only the core NETCONF engine and files required to build the
+ docker image;
+- modules: The modules containing the Yang models and its corresponding
+ applications goes here.
diff --git a/test/mocks/netconf-pnp-simulator/docs/README.rst b/test/mocks/netconf-pnp-simulator/docs/README.rst
new file mode 100644
index 000000000..ec2a15834
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/docs/README.rst
@@ -0,0 +1,113 @@
+NETCONF Plug-and-Play Simulator
+===============================
+
+.. sectnum::
+
+|ci-badge| |release-badge| |docker-badge|
+
+.. |ci-badge| image:: https://github.com/blue-onap/netconf-pnp-simulator/workflows/CI/badge.svg
+ :alt: CI
+.. |release-badge| image:: https://img.shields.io/github/v/tag/blue-onap/netconf-pnp-simulator?label=Release
+ :alt: GitHub tag
+.. |docker-badge| image:: https://img.shields.io/badge/docker%20registry-Quay.io-red
+ :target: https://quay.io/repository/blue-onap/netconf-pnp-simulator?tab=tags
+
+Overview
+--------
+
+This project builds a modular engine that allows the creation of NETCONF-enabled devices simulators,
+either physical (PNF), virtual (VNF), or cloud-native (CNF)
+
+Simply put, it's a docker container running Sysrepo and Netopeer2 servers enhanced with a plugger script that
+performs the following actions at start-time:
+
+1. Configures TLS and SSH secure accesses to the Netopeer2 server;
+2. Installs multiple YANG models into sysrepo datastore;
+3. Launches the corresponding subscriber applications.
+
+The picture below unveils the architecture of this solution.
+
+.. image:: images/Architecture.png
+ :width: 511px
+
+A YANG module contains the following files:
+
+.. list-table::
+ :widths: 10 50
+ :header-rows: 1
+
+ * - Filename
+ - Purpose
+ * - ``model.yang``
+ - The YANG model specified according to `RFC-6020 <https://tools.ietf.org/html/rfc6020>`_ and named after the module's name, e.g., *mynetconf.yang*.
+ * - ``startup.json`` or ``startup.xml``
+ - An optional data file with the initial values of the model. Both JSON and XML formats are supported.
+ * - ``subscriber.py``
+ - The Python 3 application that implements the behavioral aspects of the YANG model. If you don't supply one, a generic subscriber that logs all received events will be used.
+ * - ``requirements.txt``
+ - [Optional] Lists the additional Python packages required by the application, specified in the `Requirements File Format <https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format>`_.
+
+Application
+-----------
+
+The ``subscriber.py`` application can implement any wanted passive or active behaviour:
+
+**Passive Behaviour**: The subscriber will receive an event for each modification externally applied to the YANG model.
+
+**Active Behaviour**: At any point in time the subscriber can proactively change its own YANG model.
+
+Runtime Configuration
+---------------------
+
+Customizing TLS and SSH accesses
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The distributed docker image comes with a sample configuration for TLS and SSH, that can be found at
+``/config/tls`` and ``/config/ssh`` directories respectively. The user can replace one or both configurations
+by mounting a custom directory under the respective TLS or SSH mounting point.
+
+TLS Configuration
+^^^^^^^^^^^^^^^^^
+
+You need to provide the following PEM files under ``/config/tls``:
+
+.. list-table::
+ :widths: 10 50
+ :header-rows: 1
+
+ * - File
+ - Contents
+ * - ``server_key.pem``
+ - The server's private key in plain (*not* protected by a passphrase).
+ * - ``server_cert.pem``
+ - The corresponding server's X.509v3 certificate.
+ * - ``ca.pem``
+ - The Certificate Authority (CA) certificate.
+
+.. TIP:: You can reload the configuration at runtime by running ``docker exec <CONTAINER NAME or ID> /opt/bin/reconfigure-tls.sh``
+
+SSH Configuration
+^^^^^^^^^^^^^^^^^
+
+For the SSH connection, you need to provide the public SSH key in one of these 3 files under ``/config/ssh``
+in order of preference:
+
+- ``id_ecdsa.pub``; or
+- ``id_dsa.pub``; or
+- ``id_rsa.pub``
+
+.. TIP:: You can reload the configuration at runtime by running ``docker exec <CONTAINER NAME or ID> /opt/bin/reconfigure-ssh.sh``
+
+Python Virtual Environment Support
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Python programs usually use additional packages not included in the standard Python distribution,
+like the ``requests`` package, for example.
+We support this scenario by creating isolated Python environments for each custom-provided module whenever
+a ``requirements.txt`` file is present in the module directory.
+
+Example Module
+--------------
+
+The directory ``examples/mynetconf`` contains an example YANG model and its subscriber along with a
+Docker Compose configuration file to launch a basic simulator.
diff --git a/test/mocks/netconf-pnp-simulator/docs/examples/mynetconf/data.json b/test/mocks/netconf-pnp-simulator/docs/examples/mynetconf/data.json
new file mode 100644
index 000000000..63872eef9
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/docs/examples/mynetconf/data.json
@@ -0,0 +1,10 @@
+{
+ "mynetconf:netconflist": {
+ "netconf": [
+ {
+ "netconf-id": 3,
+ "netconf-param": 3
+ }
+ ]
+ }
+}
diff --git a/test/mocks/netconf-pnp-simulator/docs/examples/mynetconf/docker-compose.yml b/test/mocks/netconf-pnp-simulator/docs/examples/mynetconf/docker-compose.yml
new file mode 100644
index 000000000..6266b6acf
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/docs/examples/mynetconf/docker-compose.yml
@@ -0,0 +1,12 @@
+version: '3'
+
+services:
+ netopeer2:
+ image: nexus3.onap.org:10001/onap/integration/simulators/netconf-pnp-simulator:2.8.2
+ container_name: mynetconf
+ restart: always
+ ports:
+ - "830:830"
+ - "6513:6513"
+ volumes:
+ - ./:/config/modules/mynetconf
diff --git a/test/mocks/netconf-pnp-simulator/docs/examples/mynetconf/model.yang b/test/mocks/netconf-pnp-simulator/docs/examples/mynetconf/model.yang
new file mode 100644
index 000000000..6c8c36ab0
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/docs/examples/mynetconf/model.yang
@@ -0,0 +1,29 @@
+module mynetconf {
+ yang-version 1.1;
+ namespace "urn:mynetconf:test";
+
+ prefix nft;
+
+ organization
+ "mynetconf";
+ contact
+ "my netconf address";
+ description
+ "yang model for mynetconf";
+ revision "2019-03-01" {
+ description
+ "initial version";
+ }
+
+ container netconflist {
+ list netconf {
+ key netconf-id;
+ leaf netconf-id {
+ type uint16;
+ }
+ leaf netconf-param {
+ type uint32;
+ }
+ }
+ }
+}
diff --git a/test/mocks/netconf-pnp-simulator/docs/examples/mynetconf/subscriber.py b/test/mocks/netconf-pnp-simulator/docs/examples/mynetconf/subscriber.py
new file mode 100755
index 000000000..612729675
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/docs/examples/mynetconf/subscriber.py
@@ -0,0 +1,136 @@
+#!/usr/bin/env python3
+
+__author__ = "Mislav Novakovic <mislav.novakovic@sartura.hr>"
+__copyright__ = "Copyright 2018, Deutsche Telekom AG"
+__license__ = "Apache 2.0"
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This sample application demonstrates use of Python programming language bindings for sysrepo library.
+# Original c application was rewritten in Python to show similarities and differences
+# between the two.
+#
+# Most notable difference is in the very different nature of languages, c is weakly statically typed language
+# while Python is strongly dynamically typed. Python code is much easier to read and logic easier to comprehend
+# for smaller scripts. Memory safety is not an issue but lower performance can be expected.
+#
+# The original c implementation is also available in the source, so one can refer to it to evaluate trade-offs.
+
+import sysrepo as sr
+import sys
+
+
+# Helper function for printing changes given operation, old and new value.
+def print_change(op, old_val, new_val):
+ if op == sr.SR_OP_CREATED:
+ print(f"CREATED: {new_val.to_string()}")
+ elif op == sr.SR_OP_DELETED:
+ print(f"DELETED: {old_val.to_string()}")
+ elif op == sr.SR_OP_MODIFIED:
+ print(f"MODIFIED: {old_val.to_string()} to {new_val.to_string()}")
+ elif op == sr.SR_OP_MOVED:
+ print(f"MOVED: {new_val.xpath()} after {old_val.xpath()}")
+
+
+# Helper function for printing events.
+def ev_to_str(ev):
+ if ev == sr.SR_EV_VERIFY:
+ return "verify"
+ elif ev == sr.SR_EV_APPLY:
+ return "apply"
+ elif ev == sr.SR_EV_ABORT:
+ return "abort"
+ else:
+ return "unknown"
+
+
+# Function to print current configuration state.
+# It does so by loading all the items of a session and printing them out.
+def print_current_config(session, module_name):
+ select_xpath = f"/{module_name}:*//*"
+
+ values = session.get_items(select_xpath)
+
+ if values is not None:
+ print("========== BEGIN CONFIG ==========")
+ for i in range(values.val_cnt()):
+ print(values.val(i).to_string(), end='')
+ print("=========== END CONFIG ===========")
+
+
+# Function to be called for subscribed client of given session whenever configuration changes.
+def module_change_cb(sess, module_name, event, private_ctx):
+ try:
+ print("========== Notification " + ev_to_str(event) + " =============================================")
+ if event == sr.SR_EV_APPLY:
+ print_current_config(sess, module_name)
+
+ print("========== CHANGES: =============================================")
+
+ change_path = f"/{module_name}:*"
+
+ it = sess.get_changes_iter(change_path)
+
+ while True:
+ change = sess.get_change_next(it)
+ if change is None:
+ break
+ print_change(change.oper(), change.old_val(), change.new_val())
+
+ print("========== END OF CHANGES =======================================")
+ except Exception as e:
+ print(e)
+
+ return sr.SR_ERR_OK
+
+
+def main():
+ # Notable difference between c implementation is using exception mechanism for open handling unexpected events.
+ # Here it is useful because `Connection`, `Session` and `Subscribe` could throw an exception.
+ try:
+ module_name = "ietf-interfaces"
+ if len(sys.argv) > 1:
+ module_name = sys.argv[1]
+ else:
+ print("\nYou can pass the module name to be subscribed as the first argument")
+
+ print(f"Application will watch for changes in {module_name}")
+
+ # connect to sysrepo
+ conn = sr.Connection(module_name)
+
+ # start session
+ sess = sr.Session(conn)
+
+ # subscribe for changes in running config */
+ subscribe = sr.Subscribe(sess)
+
+ subscribe.module_change_subscribe(module_name, module_change_cb)
+
+ try:
+ print_current_config(sess, module_name)
+ except Exception as e:
+ print(e)
+
+ print("========== STARTUP CONFIG APPLIED AS RUNNING ==========")
+
+ sr.global_loop()
+
+ print("Application exit requested, exiting.")
+
+ except Exception as e:
+ print(e)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/mocks/netconf-pnp-simulator/docs/images/Architecture.png b/test/mocks/netconf-pnp-simulator/docs/images/Architecture.png
new file mode 100644
index 000000000..da95c9142
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/docs/images/Architecture.png
Binary files differ
diff --git a/test/mocks/netconf-pnp-simulator/engine/Dockerfile b/test/mocks/netconf-pnp-simulator/engine/Dockerfile
new file mode 100644
index 000000000..fb91d0053
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/Dockerfile
@@ -0,0 +1,204 @@
+#-
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Nordix Foundation.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+FROM python:3.7.7-alpine3.11 as build
+
+ARG zlog_version=1.2.14
+ARG libyang_version=v1.0-r5
+ARG sysrepo_version=v0.7.9
+ARG libnetconf2_version=v0.12-r2
+ARG netopeer2_version=v0.7-r2
+
+WORKDIR /usr/src
+
+RUN set -eux \
+ && apk add \
+ autoconf \
+ bash \
+ build-base \
+ cmake \
+ curl-dev \
+ file \
+ git \
+ libev-dev \
+ libssh-dev \
+ openssh-keygen \
+ openssl \
+ openssl-dev \
+ pcre-dev \
+ pkgconfig \
+ protobuf-c-dev \
+ swig \
+ # for troubleshooting
+ ctags \
+ the_silver_searcher \
+ vim
+
+RUN git config --global advice.detachedHead false
+
+ENV PKG_CONFIG_PATH=/opt/lib64/pkgconfig
+ENV LD_LIBRARY_PATH=/opt/lib:/opt/lib64
+
+
+# libyang
+COPY patches/libyang/ ./patches/libyang/
+RUN set -eux \
+ && git clone --branch $libyang_version --depth 1 https://github.com/CESNET/libyang.git \
+ && cd libyang \
+ && for p in ../patches/libyang/*.patch; do patch -p1 -i $p; done \
+ && mkdir build && cd build \
+ && cmake -DCMAKE_BUILD_TYPE:String="Release" -DENABLE_BUILD_TESTS=OFF \
+ -DCMAKE_INSTALL_PREFIX:PATH=/opt \
+ -DGEN_LANGUAGE_BINDINGS=OFF \
+ .. \
+ && make -j2 \
+ && make install
+
+RUN set -eux \
+ && git clone --depth 1 https://github.com/sysrepo/libredblack.git \
+ && cd libredblack \
+ && ./configure --prefix=/opt --without-rbgen \
+ && make \
+ && make install
+
+# zlog
+RUN set -eux \
+ && git clone --branch $zlog_version --depth 1 https://github.com/HardySimpson/zlog \
+ && cd zlog/src \
+ && make PREFIX=/opt \
+ && make install PREFIX=/opt
+
+# sysrepo
+COPY patches/sysrepo/ ./patches/sysrepo/
+RUN set -eux \
+ && git clone --branch $sysrepo_version --depth 1 https://github.com/sysrepo/sysrepo.git \
+ && cd sysrepo \
+ && for p in ../patches/sysrepo/*.patch; do patch -p1 -i $p; done \
+ && mkdir build && cd build \
+ && cmake -DCMAKE_BUILD_TYPE:String="Release" -DENABLE_TESTS=OFF \
+ -DREPOSITORY_LOC:PATH=/opt/etc/sysrepo \
+ -DCMAKE_INSTALL_PREFIX:PATH=/opt \
+ -DGEN_PYTHON_VERSION=3 \
+ -DPYTHON_MODULE_PATH:PATH=/opt/lib/python3.7/site-packages \
+ -DBUILD_EXAMPLES=0 \
+ -DBUILD_CPP_EXAMPLES=0 \
+ .. \
+ && make -j2 \
+ && make install
+
+# libnetconf2
+COPY patches/libnetconf2/ ./patches/libnetconf2/
+RUN set -eux \
+ && git clone --branch $libnetconf2_version --depth 1 https://github.com/CESNET/libnetconf2.git \
+ && cd libnetconf2 \
+ && for p in ../patches/libnetconf2/*.patch; do patch -p1 -i $p; done \
+ && mkdir build && cd build \
+ && cmake -DCMAKE_BUILD_TYPE:String="Release" -DENABLE_BUILD_TESTS=OFF \
+ -DCMAKE_INSTALL_PREFIX:PATH=/opt \
+ -DENABLE_PYTHON=OFF \
+ .. \
+ && make \
+ && make install
+
+# keystore
+COPY patches/Netopeer2/ ./patches/Netopeer2/
+RUN set -eux \
+ && git clone --branch $netopeer2_version --depth 1 https://github.com/CESNET/Netopeer2.git \
+ && cd Netopeer2 \
+ && for p in ../patches/Netopeer2/*.patch; do patch -p1 -i $p; done \
+ && cd keystored \
+ && mkdir build && cd build \
+ && cmake -DCMAKE_BUILD_TYPE:String="Release" \
+ -DCMAKE_INSTALL_PREFIX:PATH=/opt \
+ -DMODEL_INSTALL=ON \
+ .. \
+ && make -j2 \
+ && make install
+
+# netopeer2
+RUN set -eux \
+ && cd Netopeer2/server \
+ && mkdir build && cd build \
+ && cmake -DCMAKE_BUILD_TYPE:String="Release" \
+ -DCMAKE_INSTALL_PREFIX:PATH=/opt \
+ .. \
+ && make -j2 \
+ && make install
+
+FROM python:3.7.7-alpine3.11 as stage0
+RUN apk upgrade --no-cache --available
+
+FROM scratch
+LABEL authors="eliezio.oliveira@est.tech"
+
+COPY --from=stage0 / /
+
+RUN set -eux \
+ && apk add --no-cache \
+ coreutils \
+ libcurl \
+ libev \
+ libssh \
+ openssl \
+ pcre \
+ protobuf-c \
+ xmlstarlet
+
+COPY --from=build /opt/ /opt/
+
+ENV LD_LIBRARY_PATH=/opt/lib:/opt/lib64
+ENV PYTHONPATH=/opt/lib/python3.7/site-packages
+
+COPY patches/supervisor/ /usr/src/patches/supervisor/
+
+RUN set -eux \
+ # the patches for supervisor package only work for 4.1 release
+ && pip install --no-cache-dir loguru supervisor==4.1.0 virtualenv \
+ && cd /usr/local/lib/python3.7/site-packages \
+ && for p in /usr/src/patches/supervisor/*.patch; do patch -p1 -i $p; done
+
+COPY config/ /config
+VOLUME /config
+COPY templates/ /templates
+
+# finish setup and add netconf user
+RUN adduser --system --disabled-password --gecos 'Netconf User' netconf
+
+# This is NOT a robust health check but it does help tox-docker to detect when
+# it can start the tests.
+HEALTHCHECK --interval=1s --start-period=2s --retries=10 CMD test -f /run/netopeer2-server.pid
+
+# SSH
+EXPOSE 830
+
+# TLS
+EXPOSE 6513
+
+COPY supervisord.conf /etc/supervisord.conf
+RUN mkdir /etc/supervisord.d
+
+COPY zlog.conf /opt/etc/
+
+# Sensible defaults for loguru configuration
+ENV LOGURU_FORMAT="<green>{time:YYYY-DD-MM HH:mm:ss.SSS}</green> {level: <5} [{module}] <lvl>{message}</lvl>"
+ENV LOGURU_COLORIZE=True
+
+COPY entrypoint.sh common.sh configure-*.sh reconfigure-*.sh generic_subscriber.py /opt/bin/
+
+CMD /opt/bin/entrypoint.sh
diff --git a/test/mocks/netconf-pnp-simulator/engine/LICENSE b/test/mocks/netconf-pnp-simulator/engine/LICENSE
new file mode 100644
index 000000000..c6aae559e
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/LICENSE
@@ -0,0 +1,13 @@
+Copyright (C) 2020 Nordix Foundation
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/test/mocks/netconf-pnp-simulator/engine/common.sh b/test/mocks/netconf-pnp-simulator/engine/common.sh
new file mode 100644
index 000000000..80e882a06
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/common.sh
@@ -0,0 +1,155 @@
+#!/bin/ash
+# shellcheck disable=SC2086
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Nordix Foundation.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+set -o errexit
+set -o pipefail
+set -o nounset
+[ "${SHELL_XTRACE:-false}" = "true" ] && set -o xtrace
+
+export PATH=/opt/bin:/usr/local/bin:/usr/bin:/bin
+
+CONFIG=/config
+TEMPLATES=/templates
+
+PROC_NAME=${0##*/}
+PROC_NAME=${PROC_NAME%.sh}
+
+WORKDIR=$(mktemp -d)
+trap "rm -rf $WORKDIR" EXIT
+
+function now_ms() {
+ # Requires coreutils package
+ date +"%Y-%m-%d %H:%M:%S.%3N"
+}
+
+function log() {
+ local level=$1
+ shift
+ local message="$*"
+ >&2 printf "%s %-5s [%s] %s\n" "$(now_ms)" $level $PROC_NAME "$message"
+}
+
+find_file() {
+ local dir=$1
+ shift
+ for app in "$@"; do
+ if [ -f $dir/$app ]; then
+ echo -n $dir/$app
+ break
+ fi
+ done
+}
+
+
+# Extracts the body of a PEM file by removing the dashed header and footer
+alias pem_body='grep -Fv -- -----'
+
+wait_for_file() {
+ local file=$1
+ local timeout=$2
+
+ local i=0
+ while [ $i -lt $timeout ]; do
+ if [ -e $file ]; then
+ return
+ fi
+        sleep 1; i=$((i+1))
+ done
+
+ false
+}
+
+kill_service() {
+ local service=$1
+
+ pid_file=/run/${service}.pid
+ pid=$(cat $pid_file)
+ log INFO Killing $service pid=$pid
+ rm -f $pid_file
+ kill $pid
+ if ! wait_for_file $pid_file 10; then
+ log ERROR Timeout while waiting $service to restart
+ exit 1
+ fi
+}
+
+# ------------------------------------
+# SSH Common Definitions and Functions
+# ------------------------------------
+
+SSH_CONFIG=$CONFIG/ssh
+
+configure_ssh() {
+ local datastore=$1
+ local operation=$2
+ local dir=$3
+
+ log INFO Configure SSH ingress service
+ ssh_pubkey=$(find_file $SSH_CONFIG id_ecdsa.pub id_dsa.pub id_rsa.pub)
+ test -n "$ssh_pubkey"
+ name=${ssh_pubkey##*/}
+ name=${name%%.pub}
+ set -- $(cat $ssh_pubkey)
+ xmlstarlet ed --pf --omit-decl \
+ --update '//_:name[text()="netconf"]/following-sibling::_:authorized-key/_:name' --value "$name" \
+ --update '//_:name[text()="netconf"]/following-sibling::_:authorized-key/_:algorithm' --value "$1" \
+ --update '//_:name[text()="netconf"]/following-sibling::_:authorized-key/_:key-data' --value "$2" \
+ $dir/ietf-system.xml | \
+ sysrepocfg --datastore=$datastore --permanent --format=xml ietf-system --${operation}=-
+}
+
+
+# ------------------------------------
+# SSL Common Definitions and Functions
+# ------------------------------------
+
+TLS_CONFIG=$CONFIG/tls
+KEY_PATH=/opt/etc/keystored/keys
+
+configure_tls() {
+ local datastore=$1
+ local operation=$2
+ local dir=$3
+
+ log INFO Update server private key
+ cp $TLS_CONFIG/server_key.pem $KEY_PATH
+
+ log INFO Load CA and server certificates
+ ca_cert=$(pem_body $TLS_CONFIG/ca.pem)
+ server_cert=$(pem_body $TLS_CONFIG/server_cert.pem)
+ out=$(mktemp -p $WORKDIR ietf-keystore.XXXXXX.xml)
+ xmlstarlet ed --pf --omit-decl \
+ --update '//_:name[text()="server_cert"]/following-sibling::_:certificate' --value "$server_cert" \
+ --update '//_:name[text()="ca"]/following-sibling::_:certificate' --value "$ca_cert" \
+ $dir/ietf-keystore.xml > $out
+ sysrepocfg --datastore=$datastore --format=xml ietf-keystore --${operation}=$out
+ # The '--permanent' option was causing sysrepod to crash
+ if [ "$datastore" != "startup" ]; then
+ sysrepocfg --datastore=startup --format=xml ietf-keystore --${operation}=$out
+ fi
+
+ log INFO Configure TLS ingress service
+ ca_fingerprint=$(openssl x509 -noout -fingerprint -in $TLS_CONFIG/ca.pem | cut -d= -f2)
+ xmlstarlet ed --pf --omit-decl \
+ --update '//_:name[text()="netconf"]/preceding-sibling::_:fingerprint' --value "02:$ca_fingerprint" \
+ $dir/ietf-netconf-server.xml | \
+ sysrepocfg --datastore=$datastore --permanent --format=xml ietf-netconf-server --${operation}=-
+}
diff --git a/test/mocks/netconf-pnp-simulator/engine/config/modules/turing-machine/startup.xml b/test/mocks/netconf-pnp-simulator/engine/config/modules/turing-machine/startup.xml
new file mode 100644
index 000000000..453b3accf
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/config/modules/turing-machine/startup.xml
@@ -0,0 +1,72 @@
+<turing-machine xmlns="http://example.net/turing-machine">
+ <transition-function>
+ <delta>
+ <label>left summand</label>
+ <input>
+ <state>0</state>
+ <symbol>1</symbol>
+ </input>
+ </delta>
+ <delta>
+ <label>separator</label>
+ <input>
+ <state>0</state>
+ <symbol>0</symbol>
+ </input>
+ <output>
+ <state>1</state>
+ <symbol>1</symbol>
+ </output>
+ </delta>
+ <delta>
+ <label>right summand</label>
+ <input>
+ <state>1</state>
+ <symbol>1</symbol>
+ </input>
+ </delta>
+ <delta>
+ <label>right end</label>
+ <input>
+ <state>1</state>
+ <symbol/>
+ </input>
+ <output>
+ <state>2</state>
+ <head-move>left</head-move>
+ </output>
+ </delta>
+ <delta>
+ <label>write separator</label>
+ <input>
+ <state>2</state>
+ <symbol>1</symbol>
+ </input>
+ <output>
+ <state>3</state>
+ <symbol>0</symbol>
+ <head-move>left</head-move>
+ </output>
+ </delta>
+ <delta>
+ <label>go home</label>
+ <input>
+ <state>3</state>
+ <symbol>1</symbol>
+ </input>
+ <output>
+ <head-move>left</head-move>
+ </output>
+ </delta>
+ <delta>
+ <label>final step</label>
+ <input>
+ <state>3</state>
+ <symbol/>
+ </input>
+ <output>
+ <state>4</state>
+ </output>
+ </delta>
+ </transition-function>
+</turing-machine>
diff --git a/test/mocks/netconf-pnp-simulator/engine/config/modules/turing-machine/turing-machine.yang b/test/mocks/netconf-pnp-simulator/engine/config/modules/turing-machine/turing-machine.yang
new file mode 100644
index 000000000..abd6794b0
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/config/modules/turing-machine/turing-machine.yang
@@ -0,0 +1,262 @@
+module turing-machine {
+
+ namespace "http://example.net/turing-machine";
+
+ prefix "tm";
+
+ description
+ "Data model for the Turing Machine.";
+
+ revision 2013-12-27 {
+ description
+ "Initial revision.";
+ }
+
+ /* Typedefs */
+
+ typedef tape-symbol {
+ type string {
+ length "0..1";
+ }
+ description
+ "Type of symbols appearing in tape cells.
+
+ A blank is represented as an empty string where necessary.";
+ }
+
+ typedef cell-index {
+ type int64;
+ description
+ "Type for indexing tape cells.";
+ }
+
+ typedef state-index {
+ type uint16;
+ description
+ "Type for indexing states of the control unit.";
+ }
+
+ typedef head-dir {
+ type enumeration {
+ enum left;
+ enum right;
+ }
+ default "right";
+ description
+ "Possible directions for moving the read/write head, one cell
+ to the left or right (default).";
+ }
+
+ /* Groupings */
+
+ grouping tape-cells {
+ description
+ "The tape of the Turing Machine is represented as a sparse
+ array.";
+ list cell {
+ key "coord";
+ description
+ "List of non-blank cells.";
+ leaf coord {
+ type cell-index;
+ description
+ "Coordinate (index) of the tape cell.";
+ }
+ leaf symbol {
+ type tape-symbol {
+ length "1";
+ }
+ description
+ "Symbol appearing in the tape cell.
+
+ Blank (empty string) is not allowed here because the
+ 'cell' list only contains non-blank cells.";
+ }
+ }
+ }
+
+ /* State data and Configuration */
+
+ container turing-machine {
+ description
+ "State data and configuration of a Turing Machine.";
+ leaf state {
+ type state-index;
+ config "false";
+ mandatory "true";
+ description
+ "Current state of the control unit.
+
+ The initial state is 0.";
+ }
+ leaf head-position {
+ type cell-index;
+ config "false";
+ mandatory "true";
+ description
+ "Position of tape read/write head.";
+ }
+ container tape {
+ config "false";
+ description
+ "The contents of the tape.";
+ uses tape-cells;
+ }
+ container transition-function {
+ description
+ "The Turing Machine is configured by specifying the
+ transition function.";
+ list delta {
+ key "label";
+ unique "input/state input/symbol";
+ description
+ "The list of transition rules.";
+ leaf label {
+ type string;
+ description
+ "An arbitrary label of the transition rule.";
+ }
+ container input {
+ description
+ "Input parameters (arguments) of the transition rule.";
+ leaf state {
+ type state-index;
+ mandatory "true";
+ description
+ "Current state of the control unit.";
+ }
+ leaf symbol {
+ type tape-symbol;
+ mandatory "true";
+ description
+ "Symbol read from the tape cell.";
+ }
+ }
+ container output {
+ description
+ "Output values of the transition rule.";
+ leaf state {
+ type state-index;
+ description
+ "New state of the control unit. If this leaf is not
+ present, the state doesn't change.";
+ }
+ leaf symbol {
+ type tape-symbol;
+ description
+ "Symbol to be written to the tape cell. If this leaf is
+ not present, the symbol doesn't change.";
+ }
+ leaf head-move {
+ type head-dir;
+ description
+ "Move the head one cell to the left or right";
+ }
+ }
+ }
+ }
+ }
+
+ /* RPCs */
+
+ rpc initialize {
+ description
+ "Initialize the Turing Machine as follows:
+
+ 1. Put the control unit into the initial state (0).
+
+ 2. Move the read/write head to the tape cell with coordinate
+ zero.
+
+ 3. Write the string from the 'tape-content' input parameter to
+ the tape, character by character, starting at cell 0. The
+       tape is otherwise empty.";
+ input {
+ leaf tape-content {
+ type string;
+ default "";
+ description
+ "The string with which the tape shall be initialized. The
+ leftmost symbol will be at tape coordinate 0.";
+ }
+ }
+ }
+
+ rpc run {
+ description
+ "Start the Turing Machine operation.";
+ }
+
+ rpc run-until {
+ description
+ "Start the Turing Machine operation and let it run until it is halted
+ or ALL the defined breakpoint conditions are satisfied.";
+ input {
+ leaf state {
+ type state-index;
+ description
+ "What state the control unit has to be at for the execution to be paused.";
+ }
+ leaf head-position {
+ type cell-index;
+ description
+ "Position of tape read/write head for which the breakpoint applies.";
+ }
+ container tape {
+ description
+ "What content the tape has to have for the breakpoint to apply.";
+ uses tape-cells;
+ }
+ }
+ output {
+ leaf step-count {
+ type uint64;
+ description
+ "The number of steps executed since the last 'run-until' call.";
+ }
+ leaf halted {
+ type boolean;
+ description
+ "'True' if the Turing machine is halted, 'false' if it is only paused.";
+ }
+ }
+ }
+
+ /* Notifications */
+
+ notification halted {
+ description
+ "The Turing Machine has halted. This means that there is no
+ transition rule for the current state and tape symbol.";
+ leaf state {
+ type state-index;
+ mandatory "true";
+ description
+ "The state of the control unit in which the machine has
+ halted.";
+ }
+ }
+
+ notification paused {
+ description
+ "The Turing machine has reached a breakpoint and was paused.";
+ leaf state {
+ type state-index;
+ mandatory "true";
+ description
+ "State of the control unit in which the machine was paused.";
+ }
+ leaf head-position {
+ type cell-index;
+ mandatory "true";
+ description
+ "Position of tape read/write head when the machine was paused.";
+ }
+ container tape {
+ description
+ "Content of the tape when the machine was paused.";
+ uses tape-cells;
+ }
+ }
+}
+
diff --git a/test/mocks/netconf-pnp-simulator/engine/config/ssh/id_rsa b/test/mocks/netconf-pnp-simulator/engine/config/ssh/id_rsa
new file mode 100644
index 000000000..bef767251
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/config/ssh/id_rsa
@@ -0,0 +1,27 @@
+-----BEGIN OPENSSH PRIVATE KEY-----
+b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABFwAAAAdzc2gtcn
+NhAAAAAwEAAQAAAQEA+KQmP43rUq7Dd06EMEiKKRMAxBabO5d9u2646qfGtL15hrjc7b5z
+Q7kMhg+ZSiw3ivnRks/mh3Qpw/9dKyFUlcZ8P0wshlhh7GB6V6itGXYieh11/BahspUyWV
+k4ki0g61MJPDKvLiJN0ncuRPchoCnJgORiJwULqjIWW87dSoS4cqeu/nSCfg92/yc0sj0R
+7fVzAaRk2DBLoYwTNmh+QvaZba5katjSBf3Ek9XonYU1dsDEqhpvY8AxuwsMQHmL2p9XGs
+2SKyH2O2v6w97G8Uj5cx/dp1IcPJtKp3iow1jvpMwiVZVkP6vXOoYOvTdrQXLWHpd1kxcN
+brG2xv2QhQAAA8A9tQcJPbUHCQAAAAdzc2gtcnNhAAABAQD4pCY/jetSrsN3ToQwSIopEw
+DEFps7l327brjqp8a0vXmGuNztvnNDuQyGD5lKLDeK+dGSz+aHdCnD/10rIVSVxnw/TCyG
+WGHsYHpXqK0ZdiJ6HXX8FqGylTJZWTiSLSDrUwk8Mq8uIk3Sdy5E9yGgKcmA5GInBQuqMh
+Zbzt1KhLhyp67+dIJ+D3b/JzSyPRHt9XMBpGTYMEuhjBM2aH5C9pltrmRq2NIF/cST1eid
+hTV2wMSqGm9jwDG7CwxAeYvan1cazZIrIfY7a/rD3sbxSPlzH92nUhw8m0qneKjDWO+kzC
+JVlWQ/q9c6hg69N2tBctYel3WTFw1usbbG/ZCFAAAAAwEAAQAAAQEA5a3kcxLrDV7Iyx3p
+eByaG6UlMP3c+ahQCeMWyBShtnXMm8mKs8cY/LckvXYNWPoNeMCaem63+eoxZo8vBldspk
+pKncIE4zkjg9H/UZhMBlgdMwah5XMWfTva3bRQUJ+FaDw1LLl5Hzmq/77+K9DJ7ASN0hWm
+IXtEsjxAcSBfRKQRDJbDJcniUOgmeeU6CwAv9IyLkGVJ77U8rTV6dq1LoqqTgFxPre/Uej
+AXUBPycwqH3eY+1sbF6+B5JrE3iwGBR1HFSJTqvPlGLYyQuwI/9IB6Lb6sNpYSLWMPsE6F
+4UasjHixgm+dggQcUF5FyA2d+FQ4fPTvkXNjhcelEOAWRQAAAIEA3hgaO/1j4A2fB02mho
+Nfc6XfGA24Z1MlFDGy2R+X9T73QGpLI24ruQ87V7tbNJapo1eMbiVZBzSN01oL2JILzQxJ
+ZFvjCKeNyTCEtpOqvB4kaU77H5T10qT81WkzlUwI/K5k5/rtur8VBioZo/2HsUrRNX/QBO
+/tKE4/xg6jl/AAAACBAP0R3HxL3xWBRi81+nF/g/WUcos0AwElOYXU1Ua1coBKXYr5Zh85
+pMrcjbInAtKD2QVPSzpvU8krpWSKCr8o4yTO/QtwTk6eIATRGjgJSPwxsFuvG4T6/gDOI7
+6ib6syomWm91rK4NTRcpSkgJzh4/OwiMRvWXbrcdvW6tk0ekHbAAAAgQD7hSlDCxgTFixb
+U40Zu5zn19/2GK+Vbr3wBWlaGsngjnpwq3ek3XGSBwJ7z7/J5B3RPEJxE8sdtd5APp7FF7
+fozImitj1D+WFP0Sy0HLLy6djn/rdFOoVZ4x00g5k9B7fqAd5YRaRaG8Uwnt18wcQnHZ1G
+7EBzCxu08G0XYEv1HwAAAAduZXRjb25mAQI=
+-----END OPENSSH PRIVATE KEY-----
diff --git a/test/mocks/netconf-pnp-simulator/engine/config/ssh/id_rsa.pub b/test/mocks/netconf-pnp-simulator/engine/config/ssh/id_rsa.pub
new file mode 100644
index 000000000..8c5994e17
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/config/ssh/id_rsa.pub
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD4pCY/jetSrsN3ToQwSIopEwDEFps7l327brjqp8a0vXmGuNztvnNDuQyGD5lKLDeK+dGSz+aHdCnD/10rIVSVxnw/TCyGWGHsYHpXqK0ZdiJ6HXX8FqGylTJZWTiSLSDrUwk8Mq8uIk3Sdy5E9yGgKcmA5GInBQuqMhZbzt1KhLhyp67+dIJ+D3b/JzSyPRHt9XMBpGTYMEuhjBM2aH5C9pltrmRq2NIF/cST1eidhTV2wMSqGm9jwDG7CwxAeYvan1cazZIrIfY7a/rD3sbxSPlzH92nUhw8m0qneKjDWO+kzCJVlWQ/q9c6hg69N2tBctYel3WTFw1usbbG/ZCF netconf
diff --git a/test/mocks/pnfsimulator/netconfsimulator/tls/ca.crt b/test/mocks/netconf-pnp-simulator/engine/config/tls/ca.pem
index 62593ab7c..62593ab7c 100644
--- a/test/mocks/pnfsimulator/netconfsimulator/tls/ca.crt
+++ b/test/mocks/netconf-pnp-simulator/engine/config/tls/ca.pem
diff --git a/test/mocks/netconf-pnp-simulator/engine/config/tls/netopeer2-client.sh b/test/mocks/netconf-pnp-simulator/engine/config/tls/netopeer2-client.sh
new file mode 100755
index 000000000..535f3fe63
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/config/tls/netopeer2-client.sh
@@ -0,0 +1,95 @@
+#!/bin/bash
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Nordix Foundation.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+# Performs a smoke-test of the NETCONF-Pnp-Simulator by establishing a TLS
+# connection and sending a dummy NETCONF Hello Message.
+
+set -euxo pipefail
+
+SERVER_HOST=localhost
+SERVER_PORT=6513
+
+SCRIPT_PATH=$(dirname $(realpath -s $0))
+
+CLIENT_CERT="
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgIBBzANBgkqhkiG9w0BAQsFADCBjDELMAkGA1UEBhMCQ1ox
+FjAUBgNVBAgMDVNvdXRoIE1vcmF2aWExDTALBgNVBAcMBEJybm8xDzANBgNVBAoM
+BkNFU05FVDEMMAoGA1UECwwDVE1DMRMwEQYDVQQDDApleGFtcGxlIENBMSIwIAYJ
+KoZIhvcNAQkBFhNleGFtcGxlY2FAbG9jYWxob3N0MB4XDTE1MDczMDA3MjcxOFoX
+DTM1MDcyNTA3MjcxOFowgYUxCzAJBgNVBAYTAkNaMRYwFAYDVQQIDA1Tb3V0aCBN
+b3JhdmlhMQ8wDQYDVQQKDAZDRVNORVQxDDAKBgNVBAsMA1RNQzEXMBUGA1UEAwwO
+ZXhhbXBsZSBjbGllbnQxJjAkBgkqhkiG9w0BCQEWF2V4YW1wbGVjbGllbnRAbG9j
+YWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAueCQaNQWoNmF
+K6LKu1p8U8ZWdWg/PvDdLsJyzfzl/Qw4UA68SfFNaY06zZl8QB9W02nr5kWeeMY0
+VA3adrPgOlvfx3oWlFbkETnMaN4OT3WTQ0Wt6jAWZDzVfopwpJPAzRPxACDftIqF
+GagYcF32hZlVNqqnVdbXh0S0EViweqp/dbG4VDUHSNVbglc+u4UbEzNIFXMdEFsJ
+ZpkynOmSiTsIATqIhb+2srkVgLwhfkC2qkuHQwAHdubuB07ObM2z01UhyEdDvEYG
+HwtYAGDBL2TAcsI0oGeVkRyuOkV0QY0UN7UEFI1yTYw+xZ42HgFx3uGwApCImxhb
+j69GBYWFqwIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVu
+U1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUXGpLeLnh2cSDARAV
+A7KrBxGYpo8wHwYDVR0jBBgwFoAUc1YQIqjZsHVwlea0AB4N+ilNI2gwDQYJKoZI
+hvcNAQELBQADggEBAJPV3RTXFRtNyOU4rjPpYeBAIAFp2aqGc4t2J1c7oPp/1n+l
+ZvjnwtlJpZHxMM783e2ryDQ6dkvXDf8kpwKlg3U3mkJ3xKkDdWrM4QwghXdCN519
+aa9qmu0zdFL+jUAaWlQ5tsceOrvbusCcbMqiFGk/QfpHqPv52SVWbYyUx7IX7DE+
+UjgsLHycfV/tlcx4ZE6soTzl9VdgSL/zmzG3rjsr58J80rXckLgBhvijgBlIAJvW
+fC7D0vaouvBInSFXymdPVoUDZ30cdGLf+hI/i/TfsEMOinLrXVdkSGNo6FXAHKSv
+XeB9oFKSzhQ7OPyRyqvEPycUSw/qD6FVr80oDDc=
+-----END CERTIFICATE-----
+"
+
+CLIENT_KEY="
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAueCQaNQWoNmFK6LKu1p8U8ZWdWg/PvDdLsJyzfzl/Qw4UA68
+SfFNaY06zZl8QB9W02nr5kWeeMY0VA3adrPgOlvfx3oWlFbkETnMaN4OT3WTQ0Wt
+6jAWZDzVfopwpJPAzRPxACDftIqFGagYcF32hZlVNqqnVdbXh0S0EViweqp/dbG4
+VDUHSNVbglc+u4UbEzNIFXMdEFsJZpkynOmSiTsIATqIhb+2srkVgLwhfkC2qkuH
+QwAHdubuB07ObM2z01UhyEdDvEYGHwtYAGDBL2TAcsI0oGeVkRyuOkV0QY0UN7UE
+FI1yTYw+xZ42HgFx3uGwApCImxhbj69GBYWFqwIDAQABAoIBAQCZN9kR8DGu6V7y
+t0Ax68asL8O5B/OKaHWKQ9LqpVrXmikZJOxkbzoGldow/CIFoU+q+Zbwu9aDa65a
+0wiP7Hoa4Py3q5XNNUrOQDyU/OYC7cI0I83WS0lJ2zOJGYj8wKae5Z81IeQFKGHK
+4lsy1OGPAvPRGh7RjUUgRavA2MCwe07rWRuDb/OJFe4Oh56UMEjwMiNBtMNtncog
+j1vr/qgRJdf9tf0zlJmLvUJ9+HSFFV9I/97LJyFhb95gAfHkjdVroLVgT3Cho+4P
+WtZaKCIGD0OwfOG2nLV4leXvRUk62/LMlB8NI9+JF7Xm+HCKbaWHNWC7mvWSLV58
+Zl4AbUWRAoGBANyJ6SFHFRHSPDY026SsdMzXR0eUxBAK7G70oSBKKhY+O1j0ocLE
+jI2krHJBhHbLlnvJVyMUaCUOTS5m0uDw9hgSsAqeSL3hL38kxVZw+KNG9Ouno1Fl
+KnE/xXHlPQyeGs/P8nAMzHZxQtEsQdQayJEhK2XXHTsy7Q3MxDisfVJ1AoGBANfD
+34gB+OMx6pwj7zk3qWbYXSX8xjCZMR0ciko+h4xeMP2N8B0oyoqC+v1ABMAtJ3wG
+sGZd0hV9gwM7OUM3SEwkn6oeg1GemWLcn4rlSmTnZc4aeVwrEWlnSNFX3s4g9l4u
+k8Ugu4MVJYqH8HuDQ5Ggl6/QAwPzMSEdCW0O+jOfAoGAIBRbegC5+t6m7Yegz4Ja
+dxV1g98K6f58x+MDsQu4tYWV4mmrQgaPH2dtwizvlMwmdpkh+LNWNtWuumowkJHc
+akIFo3XExQIFg6wYnGtQb4e5xrGa2xMpKlIJaXjb+YLiCYqJDG2ALFZrTrvuU2kV
+9a5qfqTc1qigvNolTM0iaaUCgYApmrZWhnLUdEKV2wP813PNxfioI4afxlpHD8LG
+sCn48gymR6E+Lihn7vuwq5B+8fYEH1ISWxLwW+RQUjIneNhy/jjfV8TgjyFqg7or
+0Sy4KjpiNI6kLBXOakELRNNMkeSPopGR2E7v5rr3bGD9oAD+aqX1G7oJH/KgPPYd
+Vl7+ZwKBgQDcHyWYrimjyUgKaQD2GmoO9wdcJYQ59ke9K+OuGlp4ti5arsi7N1tP
+B4f09aeELM2ASIuk8Q/Mx0jQFnm8lzRFXdewgvdPoZW/7VufM9O7dGPOc41cm2Dh
+yrTcXx/VmUBb+/fnXVEgCv7gylp/wtdTGHQBQJHR81jFBz0lnLj+gg==
+-----END RSA PRIVATE KEY-----
+"
+
+DUMMY_HELLO='<hello xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"/>]]>]]>'
+
+(echo -n "$DUMMY_HELLO"; sleep 1) | \
+ openssl s_client -connect $SERVER_HOST:$SERVER_PORT \
+ -state \
+ -CAfile $SCRIPT_PATH/ca.pem \
+ -cert <(echo "$CLIENT_CERT") \
+ -key <(echo "$CLIENT_KEY")
diff --git a/test/mocks/pnfsimulator/netconfsimulator/tls/server_cert.crt b/test/mocks/netconf-pnp-simulator/engine/config/tls/server_cert.pem
index c0e03a3f0..c0e03a3f0 100644
--- a/test/mocks/pnfsimulator/netconfsimulator/tls/server_cert.crt
+++ b/test/mocks/netconf-pnp-simulator/engine/config/tls/server_cert.pem
diff --git a/test/mocks/pnfsimulator/netconfsimulator/tls/server_key.pem b/test/mocks/netconf-pnp-simulator/engine/config/tls/server_key.pem
index d61c77bdf..d61c77bdf 100644
--- a/test/mocks/pnfsimulator/netconfsimulator/tls/server_key.pem
+++ b/test/mocks/netconf-pnp-simulator/engine/config/tls/server_key.pem
diff --git a/test/mocks/netconf-pnp-simulator/engine/configure-modules.sh b/test/mocks/netconf-pnp-simulator/engine/configure-modules.sh
new file mode 100755
index 000000000..4e1d17c25
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/configure-modules.sh
@@ -0,0 +1,102 @@
+#!/bin/ash
+# shellcheck disable=SC2086
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Nordix Foundation.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+set -eu
+
+HERE=${0%/*}
+source $HERE/common.sh
+
+MODELS_CONFIG=$CONFIG/modules
+BASE_VIRTUALENVS=$HOME/.local/share/virtualenvs
+GENERIC_SUBSCRIBER=/opt/bin/generic_subscriber.py
+
+install_and_configure_yang_model()
+{
+ local dir=$1
+ local model=$2
+
+ log INFO Importing Yang model \"$model\"
+ yang=$(find_file $dir $model.yang model.yang)
+ sysrepoctl --install --yang=$yang
+ data=$(find_file $dir startup.json startup.xml data.json data.xml)
+ if [ -n "$data" ]; then
+        log INFO Initializing Yang model \"$model\"
+ sysrepocfg --datastore=startup --import=$data $model
+ fi
+}
+
+configure_subscriber_execution()
+{
+ local dir=$1
+ local model=$2
+ local app=$3
+
+ APP_PATH=$PATH
+ if [ -r "$dir/requirements.txt" ]; then
+ env_dir=$(create_python_venv $dir $model)
+ APP_PATH=$env_dir/bin:$APP_PATH
+ fi
+ log INFO Preparing launching of module \"$model\" application
+ # shellcheck disable=SC2153
+ loguru_format="${LOGURU_FORMAT//\{module\}/$model}"
+ cat > /etc/supervisord.d/$model.conf <<EOF
+[program:subs-$model]
+command=$app $model
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+redirect_stderr=true
+autorestart=true
+environment=PATH=$APP_PATH,PYTHONUNBUFFERED="1",LOGURU_FORMAT="$loguru_format"
+EOF
+}
+
+create_python_venv()
+{
+ local dir=$1
+ local model=$2
+
+ log INFO Creating virtual environment for module $model
+ mkdir -p $BASE_VIRTUALENVS
+ env_dir=$BASE_VIRTUALENVS/$model
+ (
+ virtualenv --system-site-packages $env_dir
+ cd $env_dir
+ # shellcheck disable=SC1091
+ . ./bin/activate
+ pip install --no-cache-dir --requirement "$dir"/requirements.txt
+ ) 1>&2
+ echo $env_dir
+}
+
+for dir in "$MODELS_CONFIG"/*; do
+ if [ -d $dir ]; then
+ model=${dir##*/}
+ install_and_configure_yang_model $dir $model
+ app="$dir/subscriber.py"
+ if [ -x "$app" ]; then
+ log INFO Module $model is using its own subscriber
+ else
+ log WARN Module $model is using the generic subscriber
+ app=$GENERIC_SUBSCRIBER
+ fi
+ configure_subscriber_execution $dir $model $app
+ fi
+done
diff --git a/test/mocks/netconf-pnp-simulator/engine/container-tag.yaml b/test/mocks/netconf-pnp-simulator/engine/container-tag.yaml
new file mode 100644
index 000000000..9e6abab0d
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/container-tag.yaml
@@ -0,0 +1 @@
+tag: "2.8.6"
diff --git a/test/mocks/pnfsimulator/simulator-cli/requirements.txt b/test/mocks/netconf-pnp-simulator/engine/entrypoint.sh
index 4f962defb..378f33b3a 100644..100755
--- a/test/mocks/pnfsimulator/simulator-cli/requirements.txt
+++ b/test/mocks/netconf-pnp-simulator/engine/entrypoint.sh
@@ -1,8 +1,9 @@
-###
+#!/bin/ash
+# shellcheck disable=SC2086
+
+#-
# ============LICENSE_START=======================================================
-# Simulator
-# ================================================================================
-# Copyright (C) 2019 Nokia. All rights reserved.
+# Copyright (C) 2020 Nordix Foundation.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,9 +16,18 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
# ============LICENSE_END=========================================================
-###
-requests==2.20.1
-websockets==7.0
-asynctest
-mock
+
+set -eu
+
+HERE=${0%/*}
+source $HERE/common.sh
+
+configure_ssh startup merge $TEMPLATES
+configure_tls startup merge $TEMPLATES
+
+$HERE/configure-modules.sh
+
+exec /usr/local/bin/supervisord -c /etc/supervisord.conf
diff --git a/test/mocks/netconf-pnp-simulator/engine/generic_subscriber.py b/test/mocks/netconf-pnp-simulator/engine/generic_subscriber.py
new file mode 100755
index 000000000..66fd7b6ab
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/generic_subscriber.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python3
+
+__author__ = "Mislav Novakovic <mislav.novakovic@sartura.hr>"
+__copyright__ = "Copyright 2018, Deutsche Telekom AG"
+__license__ = "Apache 2.0"
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This sample application demonstrates use of Python programming language bindings for sysrepo library.
+# Original c application was rewritten in Python to show similarities and differences
+# between the two.
+#
+# Most notable difference is in the very different nature of languages, c is weakly statically typed language
+# while Python is strongly dynamically typed. Python code is much easier to read and logic easier to comprehend
+# for smaller scripts. Memory safety is not an issue but lower performance can be expected.
+#
+# The original c implementation is also available in the source, so one can refer to it to evaluate trade-offs.
+
+import sys
+
+import sysrepo as sr
+from loguru import logger
+
+
+# Helper function for printing changes given operation, old and new value.
+def print_change(op, old_val, new_val):
+ if op == sr.SR_OP_CREATED:
+ logger.info(f"CREATED: {new_val.to_string()}")
+ elif op == sr.SR_OP_DELETED:
+ logger.info(f"DELETED: {old_val.to_string()}")
+ elif op == sr.SR_OP_MODIFIED:
+ logger.info(f"MODIFIED: {old_val.to_string()} to {new_val.to_string()}")
+ elif op == sr.SR_OP_MOVED:
+ logger.info(f"MOVED: {new_val.xpath()} after {old_val.xpath()}")
+
+
+# Helper function for printing events.
+def ev_to_str(ev):
+ if ev == sr.SR_EV_VERIFY:
+ return "verify"
+ elif ev == sr.SR_EV_APPLY:
+ return "apply"
+ elif ev == sr.SR_EV_ABORT:
+ return "abort"
+ else:
+ return "unknown"
+
+
+# Function to print current configuration state.
+# It does so by loading all the items of a session and printing them out.
+def print_current_config(session, module_name):
+ select_xpath = f"/{module_name}:*//*"
+
+ values = session.get_items(select_xpath)
+
+ if values is not None:
+ logger.info("========== BEGIN CONFIG ==========")
+ for i in range(values.val_cnt()):
+ logger.info(f" {values.val(i).to_string().strip()}")
+ logger.info("=========== END CONFIG ===========")
+
+
+# Function to be called for subscribed client of given session whenever configuration changes.
+def module_change_cb(sess, module_name, event, private_ctx):
+ try:
+ logger.info("========== Notification " + ev_to_str(event) + " =============================================")
+ if event == sr.SR_EV_APPLY:
+ print_current_config(sess, module_name)
+
+ logger.info("========== CHANGES: =============================================")
+
+ change_path = f"/{module_name}:*"
+
+ it = sess.get_changes_iter(change_path)
+
+ while True:
+ change = sess.get_change_next(it)
+ if change is None:
+ break
+ print_change(change.oper(), change.old_val(), change.new_val())
+
+ logger.info("========== END OF CHANGES =======================================")
+ except Exception as e:
+ logger.error(e)
+
+ return sr.SR_ERR_OK
+
+
+def main():
+    # Notable difference from the C implementation is the use of the exception mechanism for handling unexpected events.
+ # Here it is useful because `Connection`, `Session` and `Subscribe` could throw an exception.
+ try:
+ module_name = sys.argv[1]
+ logger.info(f"Application will watch for changes in {module_name}")
+
+ # connect to sysrepo
+ conn = sr.Connection(module_name)
+
+ # start session
+ sess = sr.Session(conn)
+
+ # subscribe for changes in running config */
+ subscribe = sr.Subscribe(sess)
+
+ subscribe.module_change_subscribe(module_name, module_change_cb)
+
+ try:
+ print_current_config(sess, module_name)
+ except Exception as e:
+ logger.error(e)
+
+ logger.info("========== STARTUP CONFIG APPLIED AS RUNNING ==========")
+
+ sr.global_loop()
+
+ logger.info("Application exit requested, exiting.")
+
+ except Exception as e:
+ logger.error(e)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/mocks/netconf-pnp-simulator/engine/patches/Netopeer2/01-fix-grep-count.patch b/test/mocks/netconf-pnp-simulator/engine/patches/Netopeer2/01-fix-grep-count.patch
new file mode 100644
index 000000000..00bc93085
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/patches/Netopeer2/01-fix-grep-count.patch
@@ -0,0 +1,35 @@
+diff --git a/keystored/scripts/model-install.sh b/keystored/scripts/model-install.sh
+index a350950..671dd16 100755
+--- a/keystored/scripts/model-install.sh
++++ b/keystored/scripts/model-install.sh
+@@ -13,7 +13,7 @@ local_path=$(dirname $0)
+ is_yang_module_installed() {
+ module=$1
+
+- $SYSREPOCTL -l | grep --count "^$module [^|]*|[^|]*| Installed .*$" > /dev/null
++ $SYSREPOCTL -l | grep -c "^$module [^|]*|[^|]*| Installed .*$" > /dev/null
+ }
+
+ install_yang_module() {
+diff --git a/server/scripts/model-install.sh.in b/server/scripts/model-install.sh.in
+index 589d639..760ce42 100755
+--- a/server/scripts/model-install.sh.in
++++ b/server/scripts/model-install.sh.in
+@@ -13,7 +13,7 @@ shopt -s failglob
+ is_yang_module_installed() {
+ module=$1
+
+- $SYSREPOCTL -l | grep --count "^$module [^|]*|[^|]*| Installed .*$" > /dev/null
++ $SYSREPOCTL -l | grep -c "^$module [^|]*|[^|]*| Installed .*$" > /dev/null
+ }
+
+ install_yang_module() {
+@@ -31,7 +31,7 @@ enable_yang_module_feature() {
+ module=$1
+ feature=$2
+
+- if ! $SYSREPOCTL -l | grep --count "^$module [^|]*|[^|]*|[^|]*|[^|]*|[^|]*|[^|]*|.* $feature.*$" > /dev/null; then
++ if ! $SYSREPOCTL -l | grep -c "^$module [^|]*|[^|]*|[^|]*|[^|]*|[^|]*|[^|]*|.* $feature.*$" > /dev/null; then
+ echo "- Enabling feature $feature in $module..."
+ $SYSREPOCTL -m $module -e $feature
+ else
diff --git a/test/mocks/netconf-pnp-simulator/engine/patches/Netopeer2/02-zlog.patch b/test/mocks/netconf-pnp-simulator/engine/patches/Netopeer2/02-zlog.patch
new file mode 100644
index 000000000..804b6525c
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/patches/Netopeer2/02-zlog.patch
@@ -0,0 +1,105 @@
+diff --git a/server/CMakeLists.txt b/server/CMakeLists.txt
+index f0c82c1..99c6a3d 100755
+--- a/server/CMakeLists.txt
++++ b/server/CMakeLists.txt
+@@ -130,6 +130,13 @@ add_library(serverobj OBJECT ${srcs})
+ # netopeer2-server target
+ add_executable(netopeer2-server $<TARGET_OBJECTS:serverobj> main.c)
+
++# dependencies - zlog
++find_library(ZLOG zlog)
++if(NOT ZLOG)
++ message(FATAL_ERROR "Unable to find zlog library.")
++endif()
++target_link_libraries(netopeer2-server ${ZLOG})
++
+ # dependencies - pthread
+ set(CMAKE_THREAD_PREFER_PTHREAD TRUE)
+ find_package(Threads REQUIRED)
+diff --git a/server/log.c b/server/log.c
+index e660635..6b8117b 100644
+--- a/server/log.c
++++ b/server/log.c
+@@ -27,6 +27,8 @@
+ #include <nc_server.h>
+ #include <sysrepo.h>
+
++#include <zlog.h>
++
+ volatile uint8_t np2_verbose_level;
+ uint8_t np2_libssh_verbose_level;
+ uint8_t np2_sr_verbose_level;
+@@ -102,44 +104,24 @@ np2_err_location(void)
+ static void
+ np2log(int priority, const char *fmt, ...)
+ {
+- char *format;
+ va_list ap;
+
+ va_start(ap, fmt);
+- vsyslog(priority, fmt, ap);
+- va_end(ap);
+-
+- if (np2_stderr_log) {
+- format = malloc(11 + strlen(fmt) + 2);
+- if (!format) {
+- fprintf(stderr, "ERROR: Memory allocation failed (%s:%d)", __FILE__, __LINE__);
+- return;
+- }
+-
+- switch (priority) {
+- case LOG_ERR:
+- sprintf(format, "[ERR]: %s\n", fmt);
++ switch (priority) {
++ case LOG_INFO:
++ vdzlog_info(fmt, ap);
+ break;
+ case LOG_WARNING:
+- sprintf(format, "[WRN]: %s\n", fmt);
+- break;
+- case LOG_INFO:
+- sprintf(format, "[INF]: %s\n", fmt);
++ vdzlog_warn(fmt, ap);
+ break;
+ case LOG_DEBUG:
+- sprintf(format, "[DBG]: %s\n", fmt);
++ vdzlog_debug(fmt, ap);
+ break;
+ default:
+- sprintf(format, "[UNKNOWN]: %s\n", fmt);
++ vdzlog_error(fmt, ap);
+ break;
+- }
+-
+- va_start(ap, fmt);
+- vfprintf(stderr, format, ap);
+- va_end(ap);
+-
+- free(format);
+ }
++ va_end(ap);
+ }
+
+ /**
+diff --git a/server/main.c b/server/main.c
+index 601e8a8..9d28931 100644
+--- a/server/main.c
++++ b/server/main.c
+@@ -39,6 +39,8 @@
+ #include <nc_server.h>
+ #include <sysrepo.h>
+
++#include <zlog.h>
++
+ #include "common.h"
+ #include "operations.h"
+ #include "netconf_monitoring.h"
+@@ -1545,6 +1547,8 @@ main(int argc, char *argv[])
+ openlog("netopeer2-server", LOG_PID, LOG_DAEMON);
+ np2_stderr_log = 1;
+
++ dzlog_init("/opt/etc/zlog.conf", "netopeer2-server");
++
+ /* process command line options */
+ while ((c = getopt(argc, argv, OPTSTRING)) != -1) {
+ switch (c) {
diff --git a/test/mocks/netconf-pnp-simulator/engine/patches/libnetconf2/01-configurable-PYTHON_MODULE_PATH.patch b/test/mocks/netconf-pnp-simulator/engine/patches/libnetconf2/01-configurable-PYTHON_MODULE_PATH.patch
new file mode 100644
index 000000000..3deb95c29
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/patches/libnetconf2/01-configurable-PYTHON_MODULE_PATH.patch
@@ -0,0 +1,14 @@
+--- a/python/CMakeLists.txt 2020-02-19 12:25:07.000000000 +0000
++++ b/python/CMakeLists.txt 2020-02-20 14:56:26.810463000 +0000
+@@ -22,7 +22,9 @@
+ configure_file(${CMAKE_CURRENT_SOURCE_DIR}/docs/Makefile.in ${CMAKE_CURRENT_SOURCE_DIR}/docs/Makefile)
+ add_custom_target(pyapi ALL COMMAND ${PYTHON} ${SETUP_PY} build -b ${PYAPI_BUILD_DIR} ${DEBUG})
+ add_custom_target(pyapidoc COMMAND make -f ${CMAKE_CURRENT_SOURCE_DIR}/docs/Makefile html)
+- execute_process(COMMAND ${PYTHON} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(plat_specific=True))"
+- OUTPUT_VARIABLE PYTHON_MODULE_PATH OUTPUT_STRIP_TRAILING_WHITESPACE)
++ if(NOT DEFINED PYTHON_MODULE_PATH)
++ execute_process(COMMAND ${PYTHON} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(plat_specific=True))"
++ OUTPUT_VARIABLE PYTHON_MODULE_PATH OUTPUT_STRIP_TRAILING_WHITESPACE)
++ endif()
+ install(CODE "execute_process(COMMAND ${PYTHON} ${SETUP_PY} build -b ${PYAPI_BUILD_DIR} install --install-lib=\$ENV{DESTDIR}/${PYTHON_MODULE_PATH})")
+ endif()
diff --git a/test/mocks/netconf-pnp-simulator/engine/patches/libnetconf2/02-fix-missing-include-dir.patch b/test/mocks/netconf-pnp-simulator/engine/patches/libnetconf2/02-fix-missing-include-dir.patch
new file mode 100644
index 000000000..556b9fd84
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/patches/libnetconf2/02-fix-missing-include-dir.patch
@@ -0,0 +1,11 @@
+--- a/python/setup.py.in 2020-02-20 20:04:33.000000000 +0000
++++ b/python/setup.py.in 2020-02-20 20:04:57.000000000 +0000
+@@ -13,7 +13,7 @@
+ "${CMAKE_CURRENT_COURCE_DIR}/rpc.h"
+ ],
+ libraries=["netconf2"],
+- extra_compile_args=["-Wall", "-I${CMAKE_CURRENT_BINARY_DIR}" @SSH_DEFINE@ @TLS_DEFINE@],
++ extra_compile_args=["-Wall", "-I${CMAKE_CURRENT_BINARY_DIR}", "-I${LIBYANG_INCLUDE_DIR}", "-I${LIBSSH_INCLUDE_DIR}" @SSH_DEFINE@ @TLS_DEFINE@],
+ extra_link_args=["-L${CMAKE_CURRENT_BINARY_DIR}/.."],
+ )
+
diff --git a/test/mocks/netconf-pnp-simulator/engine/patches/libnetconf2/03-fix-missing-pthread_rwlockattr_setkind_np.patch b/test/mocks/netconf-pnp-simulator/engine/patches/libnetconf2/03-fix-missing-pthread_rwlockattr_setkind_np.patch
new file mode 100644
index 000000000..65537a017
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/patches/libnetconf2/03-fix-missing-pthread_rwlockattr_setkind_np.patch
@@ -0,0 +1,20 @@
+diff --git a/src/session_server.c b/src/session_server.c
+index 636b1a2..57f2854 100644
+--- a/src/session_server.c
++++ b/src/session_server.c
+@@ -560,6 +560,7 @@ nc_server_init(struct ly_ctx *ctx)
+ errno=0;
+
+ if (pthread_rwlockattr_init(&attr) == 0) {
++#ifdef PTHREAD_RWLOCK_PREFER_WRITER_NONRECURSIVE_NP
+ if (pthread_rwlockattr_setkind_np(&attr, PTHREAD_RWLOCK_PREFER_WRITER_NONRECURSIVE_NP) == 0) {
+ if (pthread_rwlock_init(&server_opts.endpt_lock, &attr) != 0) {
+ ERR("%s: failed to init rwlock(%s).", __FUNCTION__, strerror(errno));
+@@ -570,6 +571,7 @@ nc_server_init(struct ly_ctx *ctx)
+ } else {
+ ERR("%s: failed set attribute (%s).", __FUNCTION__, strerror(errno));
+ }
++#endif
+ pthread_rwlockattr_destroy(&attr);
+ } else {
+ ERR("%s: failed init attribute (%s).", __FUNCTION__, strerror(errno));
diff --git a/test/mocks/netconf-pnp-simulator/engine/patches/libnetconf2/04-io-log.patch b/test/mocks/netconf-pnp-simulator/engine/patches/libnetconf2/04-io-log.patch
new file mode 100644
index 000000000..8c83e4b15
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/patches/libnetconf2/04-io-log.patch
@@ -0,0 +1,27 @@
+diff --git a/src/io.c b/src/io.c
+index 9c4fa9f..830fc9a 100644
+--- a/src/io.c
++++ b/src/io.c
+@@ -432,7 +432,7 @@ nc_read_msg_io(struct nc_session *session, int io_timeout, struct lyxml_elem **d
+ nc_session_io_unlock(session, __func__);
+ io_locked = 0;
+
+- DBG("Session %u: received message:\n%s\n", session->id, msg);
++ VRB("Session %u: received message:\n%s", session->id, msg);
+
+ /* build XML tree */
+ *data = lyxml_parse_mem(session->ctx, msg, 0);
+@@ -718,7 +718,7 @@ nc_write(struct nc_session *session, const void *buf, size_t count)
+ return -1;
+ }
+
+- DBG("Session %u: sending message:\n%.*s\n", session->id, count, buf);
++ VRB("Session %u: sending message:\n%.*s", session->id, count, buf);
+
+ do {
+ switch (session->ti_type) {
+@@ -1346,4 +1346,3 @@ nc_realloc(void *ptr, size_t size)
+
+ return ret;
+ }
+-
diff --git a/test/mocks/netconf-pnp-simulator/engine/patches/libyang/01-configurable-PYTHON_MODULE_PATH.patch b/test/mocks/netconf-pnp-simulator/engine/patches/libyang/01-configurable-PYTHON_MODULE_PATH.patch
new file mode 100644
index 000000000..167297f06
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/patches/libyang/01-configurable-PYTHON_MODULE_PATH.patch
@@ -0,0 +1,17 @@
+--- a/swig/python/CMakeLists.txt 2020-02-19 12:24:05.000000000 +0000
++++ b/swig/python/CMakeLists.txt 2020-02-20 14:54:59.279634000 +0000
+@@ -20,9 +20,11 @@
+
+ file(COPY "examples" DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
+
+-execute_process(COMMAND ${PYTHON_EXECUTABLE} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(plat_specific=True))"
+- OUTPUT_VARIABLE PYTHON_MODULE_PATH
+- OUTPUT_STRIP_TRAILING_WHITESPACE )
++if(NOT DEFINED PYTHON_MODULE_PATH)
++ execute_process(COMMAND ${PYTHON_EXECUTABLE} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(plat_specific=True))"
++ OUTPUT_VARIABLE PYTHON_MODULE_PATH
++ OUTPUT_STRIP_TRAILING_WHITESPACE )
++endif()
+
+ install( TARGETS _${PYTHON_SWIG_BINDING} DESTINATION ${PYTHON_MODULE_PATH})
+ install( FILES "${CMAKE_CURRENT_BINARY_DIR}/${PYTHON_SWIG_BINDING}.py" DESTINATION ${PYTHON_MODULE_PATH})
diff --git a/test/mocks/netconf-pnp-simulator/engine/patches/supervisor/01-std-log-format.patch b/test/mocks/netconf-pnp-simulator/engine/patches/supervisor/01-std-log-format.patch
new file mode 100644
index 000000000..528a37415
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/patches/supervisor/01-std-log-format.patch
@@ -0,0 +1,26 @@
+diff --git a/supervisor/loggers.py b/supervisor/loggers.py
+index 84d47ae..d23db3c 100644
+--- a/supervisor/loggers.py
++++ b/supervisor/loggers.py
+@@ -287,7 +287,7 @@ class LogRecord:
+ now = time.time()
+ msecs = (now - long(now)) * 1000
+ part1 = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(now))
+- asctime = '%s,%03d' % (part1, msecs)
++ asctime = '%s.%03d' % (part1, msecs)
+ levelname = LOG_LEVELS_BY_NUM[self.level]
+ msg = as_string(self.msg)
+ if self.kw:
+diff --git a/supervisor/options.py b/supervisor/options.py
+index 4e98340..fc19300 100644
+--- a/supervisor/options.py
++++ b/supervisor/options.py
+@@ -1463,7 +1463,7 @@ class ServerOptions(Options):
+
+ def make_logger(self):
+ # must be called after realize() and after supervisor does setuid()
+- format = '%(asctime)s %(levelname)s %(message)s\n'
++ format = '%(asctime)s %(levelname)-5s [supervisor] %(message)s\n'
+ self.logger = loggers.getLogger(self.loglevel)
+ if self.nodaemon:
+ loggers.handle_stdout(self.logger, format)
diff --git a/test/mocks/netconf-pnp-simulator/engine/patches/sysrepo/01-configurable-PYTHON_MODULE_PATH.patch b/test/mocks/netconf-pnp-simulator/engine/patches/sysrepo/01-configurable-PYTHON_MODULE_PATH.patch
new file mode 100644
index 000000000..3c6fa7b87
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/patches/sysrepo/01-configurable-PYTHON_MODULE_PATH.patch
@@ -0,0 +1,21 @@
+diff --git a/swig/python/CMakeLists.txt b/swig/python/CMakeLists.txt
+index 7d00a8b7..dc06da00 100644
+--- a/swig/python/CMakeLists.txt
++++ b/swig/python/CMakeLists.txt
+@@ -24,10 +24,12 @@ swig_link_libraries(${PYTHON_SWIG_BINDING} ${PYTHON_LIBRARIES} Sysrepo-cpp)
+
+ file(COPY "examples" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}")
+
+-execute_process(COMMAND
+- ${PYTHON_EXECUTABLE} -c
+- "from distutils.sysconfig import get_python_lib; print(get_python_lib())"
+-OUTPUT_VARIABLE PYTHON_MODULE_PATH OUTPUT_STRIP_TRAILING_WHITESPACE)
++if(NOT DEFINED PYTHON_MODULE_PATH)
++ execute_process(COMMAND
++ ${PYTHON_EXECUTABLE} -c
++ "from distutils.sysconfig import get_python_lib; print(get_python_lib())"
++ OUTPUT_VARIABLE PYTHON_MODULE_PATH OUTPUT_STRIP_TRAILING_WHITESPACE)
++endif()
+
+ install( FILES "${CMAKE_CURRENT_BINARY_DIR}/_${PYTHON_SWIG_BINDING}.so" DESTINATION ${PYTHON_MODULE_PATH} )
+ install( FILES "${CMAKE_CURRENT_BINARY_DIR}/${PYTHON_SWIG_BINDING}.py" DESTINATION ${PYTHON_MODULE_PATH} )
diff --git a/test/mocks/netconf-pnp-simulator/engine/patches/sysrepo/02-zlog.patch b/test/mocks/netconf-pnp-simulator/engine/patches/sysrepo/02-zlog.patch
new file mode 100644
index 000000000..0223563c3
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/patches/sysrepo/02-zlog.patch
@@ -0,0 +1,172 @@
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index 14c8467..5af087e 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -105,6 +105,11 @@ configure_file(${PROJECT_SOURCE_DIR}/inc/sysrepo/values.h.in ${PROJECT_BINARY_DI
+ configure_file(${PROJECT_SOURCE_DIR}/inc/sysrepo/xpath.h ${PROJECT_BINARY_DIR}/inc/sysrepo/xpath.h COPYONLY)
+
+ # find required libraries
++find_library(ZLOG zlog)
++if(NOT ZLOG)
++ message(FATAL_ERROR "zlog must be installed.")
++endif()
++
+ find_package(EV REQUIRED)
+ include_directories(${EV_INCLUDE_DIR})
+
+diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
+index 342ad9d..d026a81 100644
+--- a/src/CMakeLists.txt
++++ b/src/CMakeLists.txt
+@@ -85,9 +85,9 @@ add_dependencies(SR_SRC COMMON)
+ add_dependencies(SR_ENGINE COMMON)
+
+ if(USE_AVL_LIB)
+- set(LINK_LIBRARIES pthread ${AVL_LIBRARIES} ${EV_LIBRARIES} ${PROTOBUF-C_LIBRARIES} ${YANG_LIBRARIES})
++ set(LINK_LIBRARIES pthread ${AVL_LIBRARIES} ${EV_LIBRARIES} ${PROTOBUF-C_LIBRARIES} ${YANG_LIBRARIES} ${ZLOG})
+ else(USE_AVL_LIB)
+- set(LINK_LIBRARIES pthread ${REDBLACK_LIBRARIES} ${EV_LIBRARIES} ${PROTOBUF-C_LIBRARIES} ${YANG_LIBRARIES})
++ set(LINK_LIBRARIES pthread ${REDBLACK_LIBRARIES} ${EV_LIBRARIES} ${PROTOBUF-C_LIBRARIES} ${YANG_LIBRARIES} ${ZLOG})
+ endif(USE_AVL_LIB)
+
+ #handle rt library that doesn't exist on OS X
+diff --git a/src/common/sr_logger.c b/src/common/sr_logger.c
+index 8dd6f31..ea94044 100644
+--- a/src/common/sr_logger.c
++++ b/src/common/sr_logger.c
+@@ -29,6 +29,8 @@
+ #include <stdarg.h>
+ #include <pthread.h>
+
++#include <zlog.h>
++
+ #include "sr_common.h"
+ #include "sr_logger.h"
+
+@@ -76,6 +78,7 @@ void
+ sr_logger_init(const char *app_name)
+ {
+ #if SR_LOGGING_ENABLED
++ dzlog_init("/opt/etc/zlog.conf", app_name);
+ if (NULL != sr_syslog_identifier) {
+ /* if some syslog identifier was already set, release it as we are going to set new one */
+ free((char*)sr_syslog_identifier);
+diff --git a/src/common/sr_logger.h b/src/common/sr_logger.h
+index 37c3487..c95a68d 100644
+--- a/src/common/sr_logger.h
++++ b/src/common/sr_logger.h
+@@ -31,6 +31,8 @@
+ #include <syslog.h>
+ #include <pthread.h>
+
++#include <zlog.h>
++
+ #include "sr_constants.h"
+
+ /**
+@@ -156,37 +158,31 @@ extern __thread char strerror_buf [SR_MAX_STRERROR_LEN]; /**< thread local buffe
+ /**
+ * Internal output macro
+ */
+-#define SR_LOG__INTERNAL(LL, MSG, ...) \
+- do { \
+- if (sr_ll_stderr >= LL) \
+- SR_LOG__STDERR(LL, MSG, __VA_ARGS__) \
+- if (sr_ll_syslog >= LL) \
+- SR_LOG__SYSLOG(LL, MSG, __VA_ARGS__) \
+- if (NULL != sr_log_callback) \
+- SR_LOG__CALLBACK(LL, MSG, __VA_ARGS__) \
+- } while(0)
+-
+ #if SR_LOGGING_ENABLED
+
+ /** Prints an error message (with format specifiers). */
+-#define SR_LOG_ERR(MSG, ...) SR_LOG__INTERNAL(SR_LL_ERR, MSG, __VA_ARGS__)
++#define SR_LOG_ERR(MSG, ...) dzlog(__FILE__, sizeof(__FILE__)-1, __func__, sizeof(__func__)-1, \
++ __LINE__, ZLOG_LEVEL_ERROR, MSG, __VA_ARGS__)
+ /** Prints an error message. */
+-#define SR_LOG_ERR_MSG(MSG) SR_LOG__INTERNAL(SR_LL_ERR, MSG "%s", "")
++#define SR_LOG_ERR_MSG(MSG) SR_LOG_ERR(MSG "%s", "")
+
+ /** Prints a warning message (with format specifiers). */
+-#define SR_LOG_WRN(MSG, ...) SR_LOG__INTERNAL(SR_LL_WRN, MSG, __VA_ARGS__)
++#define SR_LOG_WRN(MSG, ...) dzlog(__FILE__, sizeof(__FILE__)-1, __func__, sizeof(__func__)-1, \
++ __LINE__, ZLOG_LEVEL_WARN, MSG, __VA_ARGS__)
+ /** Prints a warning message. */
+-#define SR_LOG_WRN_MSG(MSG) SR_LOG__INTERNAL(SR_LL_WRN, MSG "%s", "")
++#define SR_LOG_WRN_MSG(MSG) SR_LOG_WRN(MSG "%s", "")
+
+ /** Prints an informational message (with format specifiers). */
+-#define SR_LOG_INF(MSG, ...) SR_LOG__INTERNAL(SR_LL_INF, MSG, __VA_ARGS__)
++#define SR_LOG_INF(MSG, ...) dzlog(__FILE__, sizeof(__FILE__)-1, __func__, sizeof(__func__)-1, \
++ __LINE__, ZLOG_LEVEL_INFO, MSG, __VA_ARGS__)
+ /** Prints an informational message. */
+-#define SR_LOG_INF_MSG(MSG) SR_LOG__INTERNAL(SR_LL_INF, MSG "%s", "")
++#define SR_LOG_INF_MSG(MSG) SR_LOG_INF(MSG "%s", "")
+
+ /** Prints a development debug message (with format specifiers). */
+-#define SR_LOG_DBG(MSG, ...) SR_LOG__INTERNAL(SR_LL_DBG, MSG, __VA_ARGS__)
++#define SR_LOG_DBG(MSG, ...) dzlog(__FILE__, sizeof(__FILE__)-1, __func__, sizeof(__func__)-1, \
++ __LINE__, ZLOG_LEVEL_DEBUG, MSG, __VA_ARGS__)
+ /** Prints a development debug message. */
+-#define SR_LOG_DBG_MSG(MSG) SR_LOG__INTERNAL(SR_LL_DBG, MSG "%s", "")
++#define SR_LOG_DBG_MSG(MSG) SR_LOG_DBG(MSG "%s", "")
+
+ #else
+ #define SR_LOG_ERR(...)
+diff --git a/src/executables/sysrepocfg.c b/src/executables/sysrepocfg.c
+index 0000951..f48ed5e 100644
+--- a/src/executables/sysrepocfg.c
++++ b/src/executables/sysrepocfg.c
+@@ -2000,6 +2000,9 @@ main(int argc, char* argv[])
+ }
+ }
+
++ /* init logger */
++ sr_logger_init("sysrepocfg");
++
+ /* set log levels */
+ sr_log_stderr(SR_LL_ERR);
+ sr_log_syslog(SR_LL_NONE);
+diff --git a/src/executables/sysrepoctl.c b/src/executables/sysrepoctl.c
+index 3b02e7d..60ffd7e 100644
+--- a/src/executables/sysrepoctl.c
++++ b/src/executables/sysrepoctl.c
+@@ -1311,6 +1311,9 @@ main(int argc, char* argv[])
+ search_dir_count = 1;
+ }
+
++ /* init logger */
++ sr_logger_init("sysrepoctl");
++
+ /* set log levels */
+ sr_log_stderr(SR_LL_ERR);
+ sr_log_syslog(SR_LL_NONE);
+diff --git a/src/clientlib/client_library.c b/src/clientlib/client_library.c
+index c3da2e5..b3beab7 100644
+--- a/src/clientlib/client_library.c
++++ b/src/clientlib/client_library.c
+@@ -377,6 +377,11 @@ sr_connect(const char *app_name, const sr_conn_options_t opts, sr_conn_ctx_t **c
+
+ CHECK_NULL_ARG2(app_name, conn_ctx_p);
+
++ if (0 == connections_cnt) {
++ /* this is the first connection - initialize logging */
++ sr_logger_init(app_name);
++ }
++
+ SR_LOG_DBG_MSG("Connecting to Sysrepo Engine.");
+
+ /* create the connection */
+@@ -385,11 +390,6 @@ sr_connect(const char *app_name, const sr_conn_options_t opts, sr_conn_ctx_t **c
+
+ pthread_mutex_lock(&global_lock);
+
+- if (0 == connections_cnt) {
+- /* this is the first connection - initialize logging */
+- sr_logger_init(app_name);
+- }
+-
+ /* attempt to connect to sysrepo daemon socket */
+ rc = cl_socket_connect(connection, SR_DAEMON_SOCKET);
+ if (SR_ERR_OK != rc) {
diff --git a/test/mocks/pnfsimulator/simulator-cli/cli/client/__init__.py b/test/mocks/netconf-pnp-simulator/engine/reconfigure-ssh.sh
index aa8b4f995..7d3863340 100644..100755
--- a/test/mocks/pnfsimulator/simulator-cli/cli/client/__init__.py
+++ b/test/mocks/netconf-pnp-simulator/engine/reconfigure-ssh.sh
@@ -1,8 +1,8 @@
-###
+#!/bin/ash
+# shellcheck disable=SC2086
+
# ============LICENSE_START=======================================================
-# Simulator
-# ================================================================================
-# Copyright (C) 2019 Nokia. All rights reserved.
+# Copyright (C) 2020 Nordix Foundation.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,5 +15,18 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
# ============LICENSE_END=========================================================
-###
+
+set -eu
+
+HERE=${0%/*}
+source $HERE/common.sh
+
+SSH_CONFIG=$CONFIG/ssh
+
+sysrepocfg --format=xml --export=$WORKDIR/ietf-system.xml ietf-system
+configure_ssh running import $WORKDIR
+
+kill_service netopeer2-server
diff --git a/test/mocks/netconf-pnp-simulator/engine/reconfigure-tls.sh b/test/mocks/netconf-pnp-simulator/engine/reconfigure-tls.sh
new file mode 100755
index 000000000..10f32873a
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/reconfigure-tls.sh
@@ -0,0 +1,31 @@
+#!/bin/ash
+# shellcheck disable=SC2086
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Nordix Foundation.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+set -eu
+
+HERE=${0%/*}
+source $HERE/common.sh
+
+sysrepocfg --format=xml --export=$WORKDIR/ietf-keystore.xml ietf-keystore
+sysrepocfg --format=xml --export=$WORKDIR/ietf-netconf-server.xml ietf-netconf-server
+configure_tls running import $WORKDIR
+
+kill_service netopeer2-server
diff --git a/test/mocks/pnfsimulator/simulator-cli/setup.py b/test/mocks/netconf-pnp-simulator/engine/supervisord.conf
index 26578a047..980ac36c3 100644
--- a/test/mocks/pnfsimulator/simulator-cli/setup.py
+++ b/test/mocks/netconf-pnp-simulator/engine/supervisord.conf
@@ -1,8 +1,6 @@
-###
+#-
# ============LICENSE_START=======================================================
-# Simulator
-# ================================================================================
-# Copyright (C) 2019 Nokia. All rights reserved.
+# Copyright (C) 2020 Nordix Foundation.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,20 +13,40 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
# ============LICENSE_END=========================================================
-###
-import setuptools
+[supervisord]
+user=root
+nodaemon=true
+logfile=/dev/null
+logfile_maxbytes=0
+loglevel=info
+
+[program:sysrepod]
+command=/opt/bin/sysrepod -d -l3
+autorestart=true
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+redirect_stderr=true
+priority=1
+
+[program:sysrepo-plugind]
+command=/opt/bin/sysrepo-plugind -d -l3
+autorestart=true
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+redirect_stderr=true
+priority=2
+
+[program:netopeer2-server]
+command=/opt/bin/netopeer2-server -d -v3
+autorestart=true
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+redirect_stderr=true
+priority=3
-setuptools.setup(
- name="pnf_simulator_cli",
- version="5.0.0",
- description="Command line interface which allows to communicate with PNF SIMULATOR",
- packages=setuptools.find_packages(),
- data_files=['cli/data/logging.ini'],
- classifiers=["Programming Language :: Python :: 3"],
- install_requires=[
- 'requests==2.20.1',
- 'websockets==7.0'
- ]
-)
+[include]
+files=/etc/supervisord.d/*.conf
diff --git a/test/mocks/netconf-pnp-simulator/engine/templates/ietf-keystore.xml b/test/mocks/netconf-pnp-simulator/engine/templates/ietf-keystore.xml
new file mode 100644
index 000000000..ef02dedef
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/templates/ietf-keystore.xml
@@ -0,0 +1,20 @@
+<keystore xmlns="urn:ietf:params:xml:ns:yang:ietf-keystore">
+ <private-keys>
+ <private-key>
+ <name>server_key</name>
+ <certificate-chains>
+ <certificate-chain>
+ <name>server_cert</name>
+ <certificate></certificate>
+ </certificate-chain>
+ </certificate-chains>
+ </private-key>
+ </private-keys>
+ <trusted-certificates>
+ <name>trusted_ca_list</name>
+ <trusted-certificate>
+ <name>ca</name>
+ <certificate></certificate>
+ </trusted-certificate>
+ </trusted-certificates>
+</keystore>
diff --git a/test/mocks/netconf-pnp-simulator/engine/templates/ietf-netconf-server.xml b/test/mocks/netconf-pnp-simulator/engine/templates/ietf-netconf-server.xml
new file mode 100644
index 000000000..a6b6bedb1
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/templates/ietf-netconf-server.xml
@@ -0,0 +1,27 @@
+<netconf-server xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-server">
+ <listen>
+ <endpoint>
+ <name>tls_listen_endpt</name>
+ <tls>
+ <address>0.0.0.0</address>
+ <port>6513</port>
+ <certificates>
+ <certificate>
+ <name>server_cert</name>
+ </certificate>
+ </certificates>
+ <client-auth>
+ <trusted-ca-certs>trusted_ca_list</trusted-ca-certs>
+ <cert-maps>
+ <cert-to-name>
+ <id>1</id>
+ <fingerprint></fingerprint>
+ <map-type xmlns:x509c2n="urn:ietf:params:xml:ns:yang:ietf-x509-cert-to-name">x509c2n:specified</map-type>
+ <name>netconf</name>
+ </cert-to-name>
+ </cert-maps>
+ </client-auth>
+ </tls>
+ </endpoint>
+ </listen>
+</netconf-server>
diff --git a/test/mocks/netconf-pnp-simulator/engine/templates/ietf-system.xml b/test/mocks/netconf-pnp-simulator/engine/templates/ietf-system.xml
new file mode 100644
index 000000000..93b662f02
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/templates/ietf-system.xml
@@ -0,0 +1,12 @@
+<system xmlns="urn:ietf:params:xml:ns:yang:ietf-system">
+ <authentication>
+ <user>
+ <name>netconf</name>
+ <authorized-key>
+ <name></name>
+ <algorithm></algorithm>
+ <key-data></key-data>
+ </authorized-key>
+ </user>
+ </authentication>
+</system>
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/README b/test/mocks/netconf-pnp-simulator/engine/tests/README
new file mode 100644
index 000000000..295585dc2
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/README
@@ -0,0 +1,2 @@
+Borrowed from https://github.com/sysrepo/sysrepo-netopeer2-smoketests
+with some minor fixes
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/README b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/README
new file mode 100644
index 000000000..725b6b69b
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/README
@@ -0,0 +1,2 @@
+The files 'ca.pem', 'server_key.pem', and 'server_cert.pem' were copied from
+../../../config/tls directory.
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/ca.pem b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/ca.pem
new file mode 100644
index 000000000..62593ab7c
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/ca.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIID7TCCAtWgAwIBAgIJAMtE1NGAR5KoMA0GCSqGSIb3DQEBBQUAMIGMMQswCQYD
+VQQGEwJDWjEWMBQGA1UECAwNU291dGggTW9yYXZpYTENMAsGA1UEBwwEQnJubzEP
+MA0GA1UECgwGQ0VTTkVUMQwwCgYDVQQLDANUTUMxEzARBgNVBAMMCmV4YW1wbGUg
+Q0ExIjAgBgkqhkiG9w0BCQEWE2V4YW1wbGVjYUBsb2NhbGhvc3QwHhcNMTQwNzI0
+MTQxOTAyWhcNMjQwNzIxMTQxOTAyWjCBjDELMAkGA1UEBhMCQ1oxFjAUBgNVBAgM
+DVNvdXRoIE1vcmF2aWExDTALBgNVBAcMBEJybm8xDzANBgNVBAoMBkNFU05FVDEM
+MAoGA1UECwwDVE1DMRMwEQYDVQQDDApleGFtcGxlIENBMSIwIAYJKoZIhvcNAQkB
+FhNleGFtcGxlY2FAbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEArD3TDHPAMT2Z84orK4lMlarbgooIUCcRZyLe+QM+8KY8Hn+mGaxPEOTS
+L3ywszqefB/Utm2hPKLHX684iRC14ID9WDGHxPjvoPArhgFhfV+qnPfxKTgxZC12
+uOj4u1V9y+SkTCocFbRfXVBGpojrBuDHXkDMDEWNvr8/52YCv7bGaiBwUHolcLCU
+bmtKILCG0RNJyTaJpXQdAeq5Z1SJotpbfYFFtAXB32hVoLug1dzl2tjG9sb1wq3Q
+aDExcbC5w6P65qOkNoyym9ne6QlQagCqVDyFn3vcqkRaTjvZmxauCeUxXgJoXkyW
+cm0lM1KMHdoTArmchw2Dz0yHHSyDAQIDAQABo1AwTjAdBgNVHQ4EFgQUc1YQIqjZ
+sHVwlea0AB4N+ilNI2gwHwYDVR0jBBgwFoAUc1YQIqjZsHVwlea0AB4N+ilNI2gw
+DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAI/1KH60qnw9Xs2RGfi0/
+IKf5EynXt4bQX8EIyVKwSkYKe04zZxYfLIl/Q2HOPYoFmm3daj5ddr0ZS1i4p4fT
+UhstjsYWvXs3W/HhVmFUslakkn3PrswhP77fCk6eEJLxdfyJ1C7Uudq2m1isZbKi
+h+XF0mG1LxJaDMocSz4eAya7M5brwjy8DoOmA1TnLQFCVcpn+sCr7VC4wE/JqxyV
+hBCk/MuGqqM3B1j90bGFZ112ZOecyE0EDSr6IbiRBtmeNbEwOFjKXhNLYdxpBZ9D
+8A/368OckZkCrVLGuJNxK9UwCVTe8IhotHUqU9EqFDmxdV8oIdU/OzUwwNPA/Bd/
+9g==
+-----END CERTIFICATE-----
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/client_cert.pem b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/client_cert.pem
new file mode 100644
index 000000000..8e52dacfd
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/client_cert.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgIBBzANBgkqhkiG9w0BAQsFADCBjDELMAkGA1UEBhMCQ1ox
+FjAUBgNVBAgMDVNvdXRoIE1vcmF2aWExDTALBgNVBAcMBEJybm8xDzANBgNVBAoM
+BkNFU05FVDEMMAoGA1UECwwDVE1DMRMwEQYDVQQDDApleGFtcGxlIENBMSIwIAYJ
+KoZIhvcNAQkBFhNleGFtcGxlY2FAbG9jYWxob3N0MB4XDTE1MDczMDA3MjcxOFoX
+DTM1MDcyNTA3MjcxOFowgYUxCzAJBgNVBAYTAkNaMRYwFAYDVQQIDA1Tb3V0aCBN
+b3JhdmlhMQ8wDQYDVQQKDAZDRVNORVQxDDAKBgNVBAsMA1RNQzEXMBUGA1UEAwwO
+ZXhhbXBsZSBjbGllbnQxJjAkBgkqhkiG9w0BCQEWF2V4YW1wbGVjbGllbnRAbG9j
+YWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAueCQaNQWoNmF
+K6LKu1p8U8ZWdWg/PvDdLsJyzfzl/Qw4UA68SfFNaY06zZl8QB9W02nr5kWeeMY0
+VA3adrPgOlvfx3oWlFbkETnMaN4OT3WTQ0Wt6jAWZDzVfopwpJPAzRPxACDftIqF
+GagYcF32hZlVNqqnVdbXh0S0EViweqp/dbG4VDUHSNVbglc+u4UbEzNIFXMdEFsJ
+ZpkynOmSiTsIATqIhb+2srkVgLwhfkC2qkuHQwAHdubuB07ObM2z01UhyEdDvEYG
+HwtYAGDBL2TAcsI0oGeVkRyuOkV0QY0UN7UEFI1yTYw+xZ42HgFx3uGwApCImxhb
+j69GBYWFqwIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVu
+U1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUXGpLeLnh2cSDARAV
+A7KrBxGYpo8wHwYDVR0jBBgwFoAUc1YQIqjZsHVwlea0AB4N+ilNI2gwDQYJKoZI
+hvcNAQELBQADggEBAJPV3RTXFRtNyOU4rjPpYeBAIAFp2aqGc4t2J1c7oPp/1n+l
+ZvjnwtlJpZHxMM783e2ryDQ6dkvXDf8kpwKlg3U3mkJ3xKkDdWrM4QwghXdCN519
+aa9qmu0zdFL+jUAaWlQ5tsceOrvbusCcbMqiFGk/QfpHqPv52SVWbYyUx7IX7DE+
+UjgsLHycfV/tlcx4ZE6soTzl9VdgSL/zmzG3rjsr58J80rXckLgBhvijgBlIAJvW
+fC7D0vaouvBInSFXymdPVoUDZ30cdGLf+hI/i/TfsEMOinLrXVdkSGNo6FXAHKSv
+XeB9oFKSzhQ7OPyRyqvEPycUSw/qD6FVr80oDDc=
+-----END CERTIFICATE-----
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/client_key.pem b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/client_key.pem
new file mode 100644
index 000000000..7ccdab10c
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/client_key.pem
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAueCQaNQWoNmFK6LKu1p8U8ZWdWg/PvDdLsJyzfzl/Qw4UA68
+SfFNaY06zZl8QB9W02nr5kWeeMY0VA3adrPgOlvfx3oWlFbkETnMaN4OT3WTQ0Wt
+6jAWZDzVfopwpJPAzRPxACDftIqFGagYcF32hZlVNqqnVdbXh0S0EViweqp/dbG4
+VDUHSNVbglc+u4UbEzNIFXMdEFsJZpkynOmSiTsIATqIhb+2srkVgLwhfkC2qkuH
+QwAHdubuB07ObM2z01UhyEdDvEYGHwtYAGDBL2TAcsI0oGeVkRyuOkV0QY0UN7UE
+FI1yTYw+xZ42HgFx3uGwApCImxhbj69GBYWFqwIDAQABAoIBAQCZN9kR8DGu6V7y
+t0Ax68asL8O5B/OKaHWKQ9LqpVrXmikZJOxkbzoGldow/CIFoU+q+Zbwu9aDa65a
+0wiP7Hoa4Py3q5XNNUrOQDyU/OYC7cI0I83WS0lJ2zOJGYj8wKae5Z81IeQFKGHK
+4lsy1OGPAvPRGh7RjUUgRavA2MCwe07rWRuDb/OJFe4Oh56UMEjwMiNBtMNtncog
+j1vr/qgRJdf9tf0zlJmLvUJ9+HSFFV9I/97LJyFhb95gAfHkjdVroLVgT3Cho+4P
+WtZaKCIGD0OwfOG2nLV4leXvRUk62/LMlB8NI9+JF7Xm+HCKbaWHNWC7mvWSLV58
+Zl4AbUWRAoGBANyJ6SFHFRHSPDY026SsdMzXR0eUxBAK7G70oSBKKhY+O1j0ocLE
+jI2krHJBhHbLlnvJVyMUaCUOTS5m0uDw9hgSsAqeSL3hL38kxVZw+KNG9Ouno1Fl
+KnE/xXHlPQyeGs/P8nAMzHZxQtEsQdQayJEhK2XXHTsy7Q3MxDisfVJ1AoGBANfD
+34gB+OMx6pwj7zk3qWbYXSX8xjCZMR0ciko+h4xeMP2N8B0oyoqC+v1ABMAtJ3wG
+sGZd0hV9gwM7OUM3SEwkn6oeg1GemWLcn4rlSmTnZc4aeVwrEWlnSNFX3s4g9l4u
+k8Ugu4MVJYqH8HuDQ5Ggl6/QAwPzMSEdCW0O+jOfAoGAIBRbegC5+t6m7Yegz4Ja
+dxV1g98K6f58x+MDsQu4tYWV4mmrQgaPH2dtwizvlMwmdpkh+LNWNtWuumowkJHc
+akIFo3XExQIFg6wYnGtQb4e5xrGa2xMpKlIJaXjb+YLiCYqJDG2ALFZrTrvuU2kV
+9a5qfqTc1qigvNolTM0iaaUCgYApmrZWhnLUdEKV2wP813PNxfioI4afxlpHD8LG
+sCn48gymR6E+Lihn7vuwq5B+8fYEH1ISWxLwW+RQUjIneNhy/jjfV8TgjyFqg7or
+0Sy4KjpiNI6kLBXOakELRNNMkeSPopGR2E7v5rr3bGD9oAD+aqX1G7oJH/KgPPYd
+Vl7+ZwKBgQDcHyWYrimjyUgKaQD2GmoO9wdcJYQ59ke9K+OuGlp4ti5arsi7N1tP
+B4f09aeELM2ASIuk8Q/Mx0jQFnm8lzRFXdewgvdPoZW/7VufM9O7dGPOc41cm2Dh
+yrTcXx/VmUBb+/fnXVEgCv7gylp/wtdTGHQBQJHR81jFBz0lnLj+gg==
+-----END RSA PRIVATE KEY-----
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/server_cert.pem b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/server_cert.pem
new file mode 100644
index 000000000..c0e03a3f0
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/server_cert.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgIBCDANBgkqhkiG9w0BAQsFADCBjDELMAkGA1UEBhMCQ1ox
+FjAUBgNVBAgMDVNvdXRoIE1vcmF2aWExDTALBgNVBAcMBEJybm8xDzANBgNVBAoM
+BkNFU05FVDEMMAoGA1UECwwDVE1DMRMwEQYDVQQDDApleGFtcGxlIENBMSIwIAYJ
+KoZIhvcNAQkBFhNleGFtcGxlY2FAbG9jYWxob3N0MB4XDTE1MDczMDA3MjU1MFoX
+DTM1MDcyNTA3MjU1MFowgYUxCzAJBgNVBAYTAkNaMRYwFAYDVQQIDA1Tb3V0aCBN
+b3JhdmlhMQ8wDQYDVQQKDAZDRVNORVQxDDAKBgNVBAsMA1RNQzEXMBUGA1UEAwwO
+ZXhhbXBsZSBzZXJ2ZXIxJjAkBgkqhkiG9w0BCQEWF2V4YW1wbGVzZXJ2ZXJAbG9j
+YWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsdI1TBjzX1Pg
+QXFuPCw5/kQwU7qkrhirMcFAXhI8EoXepPa9fKAVuMjHW32P6nNzDpnhFe0YGdNl
+oIEN3hJJ87cVOqj4o7zZMbq3zVG2L8As7MTA8tYXm2fSC/0rIxxRRemcGUXM0q+4
+LEACjZj2pOKonaivF5VbhgNjPCO1Jj/TamUc0aViE577C9L9EiObGM+bGbabWk/K
+WKLsvxUc+sKZXaJ7psTVgpggJAkUszlmwOQgFiMSR53E9/CAkQYhzGVCmH44Vs6H
+zs3RZjOTbce4wr4ongiA5LbPeSNSCFjy9loKpaE1rtOjkNBVdiNPCQTmLuODXUTK
+gkeL+9v/OwIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVu
+U1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQU83qEtQDFzDvLoaII
+vqiU6k7j1uswHwYDVR0jBBgwFoAUc1YQIqjZsHVwlea0AB4N+ilNI2gwDQYJKoZI
+hvcNAQELBQADggEBAJ+QOLi4gPWGofMkLTqSsbv5xRvTw0xa/sJnEeiejtygAu3o
+McAsyevSH9EYVPCANxzISPzd9SFaO56HxWgcxLn9vi8ZNvo2wIp9zucNu285ced1
+K/2nDZfBmvBxXnj/n7spwqOyuoIc8sR7P7YyI806Qsfhk3ybNZE5UHJFZKDRQKvR
+J1t4nk9saeo87kIuNEDfYNdwYZzRfXoGJ5qIJQK+uJJv9noaIhfFowDW/G14Ji5p
+Vh/YtvnOPh7aBjOj8jmzk8MqzK+TZgT7GWu48Nd/NaV8g/DNg9hlN047LaNsJly3
+NX3+VBlpMnA4rKwl1OnmYSirIVh9RJqNwqe6k/k=
+-----END CERTIFICATE-----
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/server_key.pem b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/server_key.pem
new file mode 100644
index 000000000..d61c77bdf
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_initial/server_key.pem
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEowIBAAKCAQEAsdI1TBjzX1PgQXFuPCw5/kQwU7qkrhirMcFAXhI8EoXepPa9
+fKAVuMjHW32P6nNzDpnhFe0YGdNloIEN3hJJ87cVOqj4o7zZMbq3zVG2L8As7MTA
+8tYXm2fSC/0rIxxRRemcGUXM0q+4LEACjZj2pOKonaivF5VbhgNjPCO1Jj/TamUc
+0aViE577C9L9EiObGM+bGbabWk/KWKLsvxUc+sKZXaJ7psTVgpggJAkUszlmwOQg
+FiMSR53E9/CAkQYhzGVCmH44Vs6Hzs3RZjOTbce4wr4ongiA5LbPeSNSCFjy9loK
+paE1rtOjkNBVdiNPCQTmLuODXUTKgkeL+9v/OwIDAQABAoIBAG/4MG1JbL4C/7vV
+pBcpth7Aaznd1eJ2UB4VVOWnT8JOH2L6p1h5KRRhAP9AMkXsCnAQPyZiVAG3FlAZ
+01SZaY2YJDr6uQ3JVW4155TWtgSdWux//Ass+lJ17lJ0SRxjsV13ez6CsDWeRjc+
+2xy0S+KJgqk71XzhJG9fZLYyuddp3U/i3xFPUAcQM9xXKxcaD7g6LJf+a9pt6rim
+Eqq/pjJxDgTsRLARsazYuxrlOB445mvnLiYhOf2/MvI80jIUKaj8BeAhg49UIg/k
+mIh0xdevkcxBFer/BjBjscWaFjx14D6nkFMw7vtCum5KfalLN2edZKAzByOudGD4
+5KnRp3ECgYEA6vnSoNGg9Do80JOpXRGYWhcR1lIDO5yRW5rVagncCcW5Pn/GMtNd
+x2q6k1ks8mXKR9CxZrxZGqeYObZ9a/5SLih7ZkpiVWXG8ZiBIPhP6lnwm5OeIqLa
+hr0BYWcRfrGg1phj5uySZgsVBE+D8jH42O9ccdvrWv1OiryAHfKIcwMCgYEAwbs+
+HfQtvHOQXSYNhtOeA7IetkGy3cKVg2oILNcROvI96hS0MZKt1Rko0UAapx96eCIr
+el7vfdT0eUzNqt2wTKp1zmiG+SnX3fMDJNzMwu/jb/b4wQ20IHWNDnqcqTUVRUnL
+iksLFoHbTxsN5NpEQExcSt/zzP4qi1W2Bmo18WkCgYEAnhrk16LVux9ohiulHONW
+8N9u+BeM51JtGAcxrDzgGo85Gs2czdwc0K6GxdiN/rfxCKtqgqcfCWlVaxfYgo7I
+OxiwF17blXx7BVrJICcUlqpX1Ebac5HCmkCYqjJQuj/I6jv1lI7/3rt8M79RF+j5
++PXt7Qq97SZd78nwJrZni4MCgYAiPjZ8lOyAouyhilhZvI3xmUpUbMhw6jQDRnqr
+clhZUvgeqAoxuPuA7zGHywzq/WVoVqHYv28Vjs6noiu4R/chlf+8vD0fTYYadRnZ
+Ki4HRt+sqrrNZN6x3hVQudt3DSr1VFXl293Z3JonIWETUoE93EFz+qHdWg+rETtb
+ZuqiAQKBgD+HI/syLECyO8UynuEaDD7qPl87PJ/CmZLMxa2/ZZUjhaXAW7CJMaS6
+9PIzsLk33y3O4Qer0wx/tEdfnxMTBJrgGt/lFFdAKhSJroZ45l5apiavg1oZYp89
+jSd0lVxWSmrBjBZLnqOl336gzaBVkBD5ND+XUPdR1UuVQExJlem4
+-----END RSA PRIVATE KEY-----
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/README b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/README
new file mode 100644
index 000000000..89c12e26f
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/README
@@ -0,0 +1 @@
+Based on https://gist.github.com/zapstar/4b51d7cfa74c7e709fcdaace19233443
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/ca.pem b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/ca.pem
new file mode 100644
index 000000000..037188ee0
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/ca.pem
@@ -0,0 +1,21 @@
+-----BEGIN CERTIFICATE-----
+MIIDhTCCAm2gAwIBAgIJAMYVrUQvhZDMMA0GCSqGSIb3DQEBCwUAMGAxCzAJBgNV
+BAYTAlVTMRMwEQYDVQQIDApBY21lIFN0YXRlMRIwEAYDVQQHDAlBY21lIENpdHkx
+EjAQBgNVBAoMCUFjbWUgSW5jLjEUMBIGA1UEAwwLZXhhbXBsZS5jb20wHhcNMjAw
+NDE4MTgyMDM1WhcNNDAwNDE3MTgyMDM1WjBgMQswCQYDVQQGEwJVUzETMBEGA1UE
+CAwKQWNtZSBTdGF0ZTESMBAGA1UEBwwJQWNtZSBDaXR5MRIwEAYDVQQKDAlBY21l
+IEluYy4xFDASBgNVBAMMC2V4YW1wbGUuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEAlgIH3JqXrqqGMfz4pvgR6ZHtxKhyhtiH2RXLll4gubzKtLYo
+OwXIQjxXOi1Pcz7NIIGs19q4BJkLj0ogghM9pEKZT9elHOKLyx2yZdQl2FbSj4W3
+QoYeMKy7XHMQD35lXrG3FugyyywIRsqQQrmfp68OPCWanB5nWdddiu7aYgeHZwPY
+3jQ1XjOiHpoFSwV1/4VG1rHB55AqqFIc05Hwr9D3x4iXD6TaWO925ijfnJgCh1Ze
+fk2LT8v2imKjgIyXvgmut/ZXU+2Adcsn3f1HBA8rDdWlAuJAE5Ik4Kb2YPShEMFf
+w2RnQfWHQoghIfIhpGEpeszoWlJyd02R3C5jOQIDAQABo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwICBDAdBgNVHQ4EFgQUgkGhQz8P8R3yGIU8tVqU
+DuqrfskwDQYJKoZIhvcNAQELBQADggEBAIjqdOE/TwuOp+xDicIzwcZKtiDCESqd
+9hdqGoQC3Et0d98o6t4TmiqbT+uTcxdWPlDnEFGx6logE/pHZxb1IVKryMcKPIPH
+EyT7JN9KBiR2z0LLD9Ov/BC24HQk0JDbv8bC7ZWYL7nUzG/4n2IU2JYO1iGztiTj
+p4es4UxcnyzPEgN4FEICK4AYUuJAZ7KLVY8LbZAOAuOMt5HnnR+7SFMGYCkfFXTM
+ct3VHnnueA+XSX0vUN9hns+b59kUpC5dzTmPfxXRL1HSaZwkmUxqpAeDfPIkHuTm
+433XjfEI7wMU+00E3Hf08VWaXEp2daQgI32RmKlZO9AUd0c/nro2jLE=
+-----END CERTIFICATE-----
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/ca_key.pem b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/ca_key.pem
new file mode 100644
index 000000000..887f1a151
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/ca_key.pem
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCWAgfcmpeuqoYx
+/Pim+BHpke3EqHKG2IfZFcuWXiC5vMq0tig7BchCPFc6LU9zPs0ggazX2rgEmQuP
+SiCCEz2kQplP16Uc4ovLHbJl1CXYVtKPhbdChh4wrLtccxAPfmVesbcW6DLLLAhG
+ypBCuZ+nrw48JZqcHmdZ112K7tpiB4dnA9jeNDVeM6IemgVLBXX/hUbWscHnkCqo
+UhzTkfCv0PfHiJcPpNpY73bmKN+cmAKHVl5+TYtPy/aKYqOAjJe+Ca639ldT7YB1
+yyfd/UcEDysN1aUC4kATkiTgpvZg9KEQwV/DZGdB9YdCiCEh8iGkYSl6zOhaUnJ3
+TZHcLmM5AgMBAAECggEAGm6pK/ohmCl8E/rbZbB4l4ubNffollI5PctVYF2drpzR
+qx4d4KiYLPOs+xdY1JnQU1YGOtLTchv1qX4KVGFHj1Yc5bC962UP9O56rO7A7GoA
+GEIblKFFWJZXPWcZAWHoQtNVy7eGm75ahv7ShK9oroduHrMRl0jUNUR5uy1zVapw
+47m3Trzo7u1QF194N2SqQJajGVkwWmQ8V77+dvSnesoq5ZNLteLPooqDnesSZxFE
+Hus0ZuWz4WcCl9+OUXCZG9Q/lNm3aZMIR1ShpPC74KuKyfTjLoqACt8+8WQr/XD5
+tLDfm0EY+xdnaCke3HdESxTXDXCErHItYNrSRKOaAQKBgQDHAIRmqNuWqKWrd7hz
+cRanfzk7iHSKb40+EzSNEvNht+i/PrfuyU7e0aUQjQUwIPMznGGZHE+NIcRPPxSS
+zPD+Qye+cXMSXS08rB9LZe/VYHXBnFAHAH0rt63UzjnvNqsg6uH40rXuYPPcbtyP
+a74RUShNBp0F3zgegpdEoB0DCQKBgQDA+RsW3WCbm/eBrS/J6wb6Xd8/tj8hOJjP
+aMsijWK9F0LOwLgnrBO1tmrOcO7UPCk3MY4aMlPxyQ43JajoJ+HzHosj8plX3fT7
+/6c6hDyZmYDcghxs5aCcWn0lOoafvHzzNYK7Wrgh4twxFoSpy7QuETlYi8ifPr3j
+zjkz+YV6sQKBgQCE1LqLz9BrOv0CfDI5lFXbzdcE/utTcGxl7+nW9LxSELEh3ppl
+oCeuIV+9sXOyEXxkidC3o6cR/GUNxHxWFMgT3/2KaC24J0vHwNhOuqcg1XckmdLt
+KY1jfgJhFpqjKumFWmMldHiNuldsXu+IKBHBe1ucNnrfbYUHEIIqA3n6CQKBgBYj
+vl7mMTJJN6FSHFx/MYLCCF4H68BE/Qs2y6+AJybop0qPQ9GRZYWAk0pyHISPDm99
+qP8KbSUdWxsqn/Faugqpo28RY1R4a6YJ08bb6xP4T5d8+gPoaH/nxdnimBV1i6Rf
+rEsQgnWo0Hh1S+0rKNXsNfcZun/CtAiR3XBAHXdRAoGAXX97DyQmPaT28XGrT6Mq
+Hus73yJnEtSaRtl2HB9d7CEdKZyai5rnW7jV+WibxSNJbL0dTF5EPlzwCElnR6lD
+d0elYWbjEEr1z0QNEGKJTgH3IAlCnpv2ATqthRjAcxvrIZ/Pd9mh/2AjWl/2Wfd9
+a3/CHQC6qqYkGz2aBx3OZ3w=
+-----END PRIVATE KEY-----
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/client_cert.pem b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/client_cert.pem
new file mode 100644
index 000000000..d0f348933
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/client_cert.pem
@@ -0,0 +1,21 @@
+-----BEGIN CERTIFICATE-----
+MIIDgTCCAmmgAwIBAgIBAzANBgkqhkiG9w0BAQsFADBgMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQWNtZSBTdGF0ZTESMBAGA1UEBwwJQWNtZSBDaXR5MRIwEAYDVQQK
+DAlBY21lIEluYy4xFDASBgNVBAMMC2V4YW1wbGUuY29tMB4XDTIwMDQxODE4MjAz
+NVoXDTMwMDQxODE4MjAzNVowZzELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkFjbWUg
+U3RhdGUxEjAQBgNVBAcMCUFjbWUgQ2l0eTESMBAGA1UECgwJQWNtZSBJbmMuMRsw
+GQYDVQQDDBJjbGllbnQuZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IB
+DwAwggEKAoIBAQC/H/NjHx1yQYEiQF2he+RpkuubLJ83rpPKg6ArT+06SADYAmHM
+VYIG0QguIXn3Alp+VnRc5rqNgteQ6Z90ykrf9wY61PpPmUZd4LB7MXI04VlJqQhP
+MCt9O5Y53hV9ZXXxUwRJEZeC2qxMellDpwaO0G6RaWjjP/KpTIJfgvv0cEJdKBy4
+aJptr65dVg51JN3kNRWUf5hz5gKs2SwgBt2nkiRvSdo8lzxNQjeKKAcfGHEcUjB5
+DMNcCIMgFnW7S8aQVkFeOfQN3VOaDGfKA/lMxD9k93+cPIt9hiTwXPBvheaRiQrZ
+O1rDq9ctW4kf63H5zFOKJyaqhHoHpJ67ezs/AgMBAAGjPzA9MAwGA1UdEwEB/wQC
+MAAwDgYDVR0PAQH/BAQDAgWgMB0GA1UdDgQWBBROdFRq9lmHHgYayhAhnQ1D4RJ6
+0TANBgkqhkiG9w0BAQsFAAOCAQEAQ5fJIV6RhWLEACvxEA91e6NnT7WYNjcSV4Qq
+mJfQT7qEq8OrhLLCytew5HzWFrUt5hJvzp9j7T4oHTTqEggg0VABGBUdBAu5oi7j
+OAaT1sKekhe/LIBAeASMmgxlT+NzGBG0nUqUC4VI/36ZgiDDLbeeoPw3m4sZJ1KD
+EwVdI1HCIRA6Y0B8Fwlx2t6XFyiTsJoR3MlANyK+hRhdsFUWnLPmQBt4AGwJUhsU
+ljUDaz7D3qbl2V7nqxhChUVDIobDlw9v+asGzdsqll4EmNOszaQTGWhlv5BFbHoG
+u5ibVC6vISg27mbViL0OIQDNq016k8GJJZsLN/L0HMyyXYPcQQ==
+-----END CERTIFICATE-----
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/client_key.pem b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/client_key.pem
new file mode 100644
index 000000000..80fe4e91a
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/client_key.pem
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEowIBAAKCAQEAvx/zYx8dckGBIkBdoXvkaZLrmyyfN66TyoOgK0/tOkgA2AJh
+zFWCBtEILiF59wJaflZ0XOa6jYLXkOmfdMpK3/cGOtT6T5lGXeCwezFyNOFZSakI
+TzArfTuWOd4VfWV18VMESRGXgtqsTHpZQ6cGjtBukWlo4z/yqUyCX4L79HBCXSgc
+uGiaba+uXVYOdSTd5DUVlH+Yc+YCrNksIAbdp5Ikb0naPJc8TUI3iigHHxhxHFIw
+eQzDXAiDIBZ1u0vGkFZBXjn0Dd1TmgxnygP5TMQ/ZPd/nDyLfYYk8Fzwb4XmkYkK
+2Ttaw6vXLVuJH+tx+cxTiicmqoR6B6Seu3s7PwIDAQABAoIBAE3CihvCBRD/ZbKx
+zWZuKbhqdkFkHkNhW/ABLaFxm2si8HTyQygHgieT1GgwZpcA9iCAvEcv+KaqnVnw
+M1gpFd2Ze4dkL5NDIUYArMzyiSzKorE9fIv7ZTZGkBBrMwMZzKqqxAuWhLZQkdlr
+zfWgdyKT2uh+opYS5n/LCSAjAq+oaG7qICZq2V6NS2kKYJxBSnEalYaAQ++df3Bx
+D34iQA55AhKYrTcpwjmoVOxg5Itz8k1k07X+k8JQ953YHi8chwVDTFEG52cq+HVu
+tcMMrGEzYBzT4FjOsOZ3hjT7EVgTmEonQr26GuE5ZSjyvsfp05X+G40vBNu4SMRM
+WsT4PIECgYEA7MiO5mosIMW7ipoCEW5GCK7uJ+4H7d4EvKc4sCnxHnhVpH0kZU88
+4q7q8aKh25vKT5iNqCBE7SdJqlLGK1ooRQJqG2lXBElTDwOP71R8C8jfSNFFr1XI
+wbeqIJhuNveQPROep10UpwPG8JWAogYqr3lEky+loSuBvQSNjYnQPPkCgYEAzqLI
+iN5gHbQtza11iZkYESwDCyJNebynckhx3NLQQNQ1gUs3giO+HCO7Nqa4KbRhbmLn
+Ajan8dklNoTPSrGvFWRY5I098xbHQb35LPC1BPZDbI00VkJ3sGB4H0J9rf56sIDD
+BB5mN12xYNk4Jl1WgEurmxH5jWGLQmINUlBwX/cCgYAfQ1fCym/rH9BkO3Ncc8/h
+Y59kPERlvrOnaPjOIauJV2APaMp+adjjIS86Gjv+r/IlUkIZ2bDgExjh2S37GVtJ
+yUjTN7Rah4fk6pZ9hg0ezTXV+nOV8+Ce2y4mQZoDveoYdlezR1Hrv07sAwFJ40CN
+jJhmSps2zXTCzTAXaQPKmQKBgQCRa8pJWIa4INejShHP9mgTna++pDN2GyiUqxtG
+1y4skaveBDtaYSEn2JWmjopI/2MaNoxw6FolQDaKOclQvd+D5I0Su7v/WeZ9A99a
+m0Qp683jlTRiCIEHJb0j8r1UOCXMFbIpMeOpz0xH5lc32LRJsfdhOLMxppZE75CE
+f4u2XQKBgH3X+3p7T952Z2BtnaGXdjyu1XdE20S8FZrBAmC+NLoOA/bE2l66vwT0
+44v3v92DH27Z7rgyTDlPYJRtrKoIma6owOOHRLIMpiibXNUWcYANp9SgWcYrxW21
+nXIJj3zszWcDFa+shpQEgz0wOkFODbkDoae/dPTAYnmrUqY1fuar
+-----END RSA PRIVATE KEY-----
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/openssl_2way_auth.sh b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/openssl_2way_auth.sh
new file mode 100755
index 000000000..a6540fc87
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/openssl_2way_auth.sh
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+set -euo pipefail
+
+BASE_DN="/C=US/ST=Acme State/L=Acme City/O=Acme Inc."
+
+WORKDIR=$(mktemp -d)
+trap "rm -rf $WORKDIR" EXIT
+
+CA_DAYS=$((3652 * 2))
+PEER_DAYS=$((3652 * 1))
+
+CONFIG_FILE=$WORKDIR/openssl.cnf
+CA_SERIAL_FILE=$WORKDIR/ca.srl
+echo 01 > $CA_SERIAL_FILE
+
+cat > $CONFIG_FILE <<EOL
+[req]
+default_bits = 2048
+distinguished_name = req_distinguished_name
+prompt = no
+serial = $CA_SERIAL_FILE
+default_md = sha256
+
+[req_distinguished_name]
+C = US
+ST = Acme State
+L = Acme City
+O = Acme Inc.
+CN = example.com
+
+[ca]
+basicConstraints = critical, CA:TRUE
+keyUsage = critical, keyCertSign
+subjectKeyIdentifier = hash
+
+[peer]
+basicConstraints = critical, CA:FALSE
+keyUsage = critical, digitalSignature, keyEncipherment
+subjectKeyIdentifier = hash
+EOL
+
+# Generate a self signed certificate for the CA along with a key.
+# NOTE: I'm using -nodes, this means that once anybody gets
+# their hands on this particular key, they can become this CA.
+openssl req \
+ -x509 \
+ -nodes \
+ -days $CA_DAYS \
+ -newkey rsa:2048 \
+ -keyout ca_key.pem \
+ -out ca.pem \
+ -config $CONFIG_FILE \
+ -extensions ca
+
+# Create server private key and certificate request
+openssl genrsa -out server_key.pem 2048
+openssl req -new \
+ -key server_key.pem \
+ -out $WORKDIR/server.csr \
+ -subj "$BASE_DN/CN=server.example.com"
+
+# Create client private key and certificate request
+openssl genrsa -out client_key.pem 2048
+openssl req -new \
+ -key client_key.pem \
+ -out $WORKDIR/client.csr \
+ -subj "$BASE_DN/CN=client.example.com"
+
+# Generate certificates
+openssl x509 -req -days $PEER_DAYS -in $WORKDIR/server.csr \
+ -CA ca.pem -CAkey ca_key.pem \
+ -out server_cert.pem \
+ -sha256 \
+ -CAserial $CA_SERIAL_FILE \
+ -extfile $CONFIG_FILE \
+ -extensions peer
+openssl x509 -req -days $PEER_DAYS -in $WORKDIR/client.csr \
+ -CA ca.pem -CAkey ca_key.pem \
+ -out client_cert.pem \
+ -sha256 \
+ -CAserial $CA_SERIAL_FILE \
+ -extfile $CONFIG_FILE \
+ -extensions peer
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/server_cert.pem b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/server_cert.pem
new file mode 100644
index 000000000..8564438cb
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/server_cert.pem
@@ -0,0 +1,21 @@
+-----BEGIN CERTIFICATE-----
+MIIDgTCCAmmgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBgMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQWNtZSBTdGF0ZTESMBAGA1UEBwwJQWNtZSBDaXR5MRIwEAYDVQQK
+DAlBY21lIEluYy4xFDASBgNVBAMMC2V4YW1wbGUuY29tMB4XDTIwMDQxODE4MjAz
+NVoXDTMwMDQxODE4MjAzNVowZzELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkFjbWUg
+U3RhdGUxEjAQBgNVBAcMCUFjbWUgQ2l0eTESMBAGA1UECgwJQWNtZSBJbmMuMRsw
+GQYDVQQDDBJzZXJ2ZXIuZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IB
+DwAwggEKAoIBAQDTxZJCVEcJ0vLn9gWfE2z98wsBxyOEy0obeXzRi6DVIDa+lO71
+8uSSO4TaOlOkIRfGOoLlQASN4eXtCEub2dPn81ubmlRlOtYpnjikQ1GYoqHQ8z4w
+h4WuqPZDUwpMAQbbWNAle0klWPYF46s7t51U+JuY3gfAVLnmv11dg3ZOW0pYrC2/
+JbbFxAhGqkp4H4pgkvVaADi2tEtHnNchQ0nYiq14PB/UISZlpiYECk10OoP9Q4Q7
+2UHEn8GuGJoO7SkFSVQY5MUWZkHxe46r8sHaM1lWhHEOJWhUSeALUZKgq+mDfkLR
+M474xR0FFinkBOEv06jdVA4OccsEcdRohZiJAgMBAAGjPzA9MAwGA1UdEwEB/wQC
+MAAwDgYDVR0PAQH/BAQDAgWgMB0GA1UdDgQWBBQmOgcOsOQiQfzb9fZ0rICUmSSN
+kzANBgkqhkiG9w0BAQsFAAOCAQEADZ4cFI1KbZfwkwien/kNXtd/D5l72Q491CAF
+0z9xuLKepKtHu5yMFQGuzBhOG+LJj20DYcyfVx9Pr+X5fnYiQfWjv4H2fuqx4Bh2
+FcjcKHIQiGFyA02FMTFNIua8sNXY1vQk7JU424wSkugQdBp1a1yEzzuZDJ7upJqS
+6+8/nW1rjzeS4DNhswga3s12oor1iuESORGU+8D2i3yk9OgLuf/MenPxivJlFC49
+7SXvIw34c13+5bkoMQKnhzs3RVa28babhohviJ+yb8R8FA24hF3lI0C6pKHAtf+2
+lrXvUcxRkkxZi+8BrLdhb/Q9sYvI48aYrVVMeuagtkbnTUiH8Q==
+-----END CERTIFICATE-----
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/server_key.pem b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/server_key.pem
new file mode 100644
index 000000000..6c81826a6
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/data/tls_new/server_key.pem
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEA08WSQlRHCdLy5/YFnxNs/fMLAccjhMtKG3l80Yug1SA2vpTu
+9fLkkjuE2jpTpCEXxjqC5UAEjeHl7QhLm9nT5/Nbm5pUZTrWKZ44pENRmKKh0PM+
+MIeFrqj2Q1MKTAEG21jQJXtJJVj2BeOrO7edVPibmN4HwFS55r9dXYN2TltKWKwt
+vyW2xcQIRqpKeB+KYJL1WgA4trRLR5zXIUNJ2IqteDwf1CEmZaYmBApNdDqD/UOE
+O9lBxJ/BrhiaDu0pBUlUGOTFFmZB8XuOq/LB2jNZVoRxDiVoVEngC1GSoKvpg35C
+0TOO+MUdBRYp5AThL9Oo3VQODnHLBHHUaIWYiQIDAQABAoIBAFLjCpsBh4h103Ms
+3QhlPwyqew1oFyPbuZbFVzBhGUMxx5uSiXEkb4g42Yfcum4MMdT5g9Ac3Wt3FlpP
+G+DVQlaP7rQZlJzGiZpifkL0wlQem31AJ4AxGwbAxRqWvvn+kON7gISbG4cNqcWm
+VZgbBu6CG8yaYqhJwTVqgy2dzclexTQG39gFEwfFfYAu5tKlEO9GAqMGWdpLhoED
+h+mUV+f522ol37EksmesExzc3SRJpVV6So0KmH+a+1jdAYAz0W7bXsHDxsLiFw3R
+rlTB3jIskQBQALpIbkzv+KxO8tdsz9+FnLVlhZ6jDdN3whJwgTSlG4klGOwMTvnr
+vzPGUQECgYEA+Id4Am2HoXDuG/uNaDiqNMgM32mge5s25ysDEK6JhKtdNFKCLP2x
+VfHcc3g0W7dZAneXaoPeHJB+pdGo7OF5cO5NicX6pMKZS1ODK2ioME+8Lpr8uwp3
+Ss4a94G0c6qCzJndvQxLTP3fXhvmBomDFUHluy7B0287ZV9psnLZI+ECgYEA2iM9
+VQ6FoaxgQ8Mt+sskI+veR+i4J16FwaPXRf4x/GIf4FyYG4vfFkypOWsNH9MRnSfn
+H2JR+hj1apX2jducpCcaeRq2EJJ3n721rdwf9DGlEt8MoDR4qn1ZHj9s4rKolgSb
+wnz1UlDhIVwLG0H4Wp0Y6TfhnsLPLCWbv1IqCakCgYEAvuoR3ouVLQc7YnOS5QTi
+ezlR6i2SAmHxgxMff6kUKr4ZEyBur3ES0RrCZlFopyvpTGPiBQjXjsnRAEBWq+Fp
+EL9/AN7886QpbhvxH19+E96siIC2tFgN24EIZilVHaVWZSWtmJPhJHvBIuH7ifoI
+oEPG3kvEyU8hKXZqE5L2CwECgYEAkr34CVr+jFcAXzVSng6/3iZS3r7v+xP8GNqV
++7DXgXelB/JiJM3AIikqAcVBC/KaO8VXFma2zO6zUaNWO/HLeyYPCf7tEVLmhCBD
+spSNLmGjMYG45aDGt7IhHFcAcbRL8rdg7MHQ6jIccKuRkSGc56Ac3O7JqVpVsdYO
+4vJr8xkCgYB3t2iwhnHqT78bSHVCmGRL7zAK6aIm96nUEODIE3LI87JpecJV9blS
+ABwt3Pl6D35OTA1s0ShUc2qqUL7em+pPPlvKl63IQLZRo1W6qkukgkpQ+UvdsEVN
+ZJf7Kr9jlRXxTvwDXF+2b9eDfie8u70w9H9eliqcEvO4uLL+bsM8WA==
+-----END RSA PRIVATE KEY-----
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/nctest.py b/test/mocks/netconf-pnp-simulator/engine/tests/nctest.py
new file mode 100644
index 000000000..c508ca47a
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/nctest.py
@@ -0,0 +1,50 @@
+import logging
+
+from ncclient import manager, operations
+
+import settings
+
+LOGGER = logging.getLogger(__name__)
+
+
+def check_reply_ok(reply):
+ assert reply is not None
+ _log_netconf_msg("Received", reply.xml)
+ assert reply.ok is True
+ assert reply.error is None
+
+
+def check_reply_err(reply):
+ assert reply is not None
+ _log_netconf_msg("Received", reply.xml)
+ assert reply.ok is False
+ assert reply.error is not None
+
+
+def check_reply_data(reply):
+ check_reply_ok(reply)
+
+
+def _log_netconf_msg(header: str, body: str):
+ """Log a message using a format inspired by NETCONF 1.1 """
+ LOGGER.info("%s:\n\n#%d\n%s\n##", header, len(body), body)
+
+
+class NCTestCase:
+ """ Base class for NETCONF test cases. Provides a NETCONF connection and some helper methods. """
+
+ nc: manager.Manager
+
+ def setup(self):
+ self.nc = manager.connect(
+ host=settings.HOST,
+ port=settings.SSH_PORT,
+ username=settings.USERNAME,
+ key_filename=settings.SSH_KEY_FILENAME,
+ allow_agent=False,
+ look_for_keys=False,
+ hostkey_verify=False)
+ self.nc.raise_mode = operations.RaiseMode.NONE
+
+ def teardown(self):
+ self.nc.close_session()
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/settings.py b/test/mocks/netconf-pnp-simulator/engine/tests/settings.py
new file mode 100644
index 000000000..0c665c738
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/settings.py
@@ -0,0 +1,8 @@
+import os
+
+HOST = "127.0.0.1"
+# Set by tox-docker
+SSH_PORT = int(os.environ["NETCONF_PNP_SIMULATOR_830_TCP_PORT"])
+TLS_PORT = int(os.environ["NETCONF_PNP_SIMULATOR_6513_TCP_PORT"])
+USERNAME = "netconf"
+SSH_KEY_FILENAME = "../config/ssh/id_rsa"
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/test_basic_operations.py b/test/mocks/netconf-pnp-simulator/engine/tests/test_basic_operations.py
new file mode 100644
index 000000000..06164e6b5
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/test_basic_operations.py
@@ -0,0 +1,49 @@
+import nctest
+
+
+class TestBasicOperations(nctest.NCTestCase):
+ """ Tests basic NETCONF operations with no prerequisites on datastore content. """
+
+ def test_capabilities(self):
+ assert ":startup" in self.nc.server_capabilities
+ assert ":candidate" in self.nc.server_capabilities
+ assert ":validate" in self.nc.server_capabilities
+ assert ":xpath" in self.nc.server_capabilities
+
+ def test_get(self):
+ reply = self.nc.get()
+ nctest.check_reply_data(reply)
+
+ def test_get_config_startup(self):
+ reply = self.nc.get_config(source='startup')
+ nctest.check_reply_data(reply)
+
+ def test_get_config_running(self):
+ reply = self.nc.get_config(source='running')
+ nctest.check_reply_data(reply)
+
+ def test_copy_config(self):
+ reply = self.nc.copy_config(source='startup', target='candidate')
+ nctest.check_reply_ok(reply)
+
+ def test_neg_filter(self):
+ reply = self.nc.get(filter=("xpath", "/non-existing-module:non-existing-data"))
+ nctest.check_reply_err(reply)
+
+ def test_lock(self):
+ reply = self.nc.lock("startup")
+ nctest.check_reply_ok(reply)
+ reply = self.nc.lock("running")
+ nctest.check_reply_ok(reply)
+ reply = self.nc.lock("candidate")
+ nctest.check_reply_ok(reply)
+
+ reply = self.nc.lock("startup")
+ nctest.check_reply_err(reply)
+
+ reply = self.nc.unlock("startup")
+ nctest.check_reply_ok(reply)
+ reply = self.nc.unlock("running")
+ nctest.check_reply_ok(reply)
+ reply = self.nc.unlock("candidate")
+ nctest.check_reply_ok(reply)
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/test_tls.py b/test/mocks/netconf-pnp-simulator/engine/tests/test_tls.py
new file mode 100644
index 000000000..f0adf447f
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/test_tls.py
@@ -0,0 +1,115 @@
+import os
+import socket
+import ssl
+import tarfile
+import tempfile
+import time
+from io import StringIO
+from typing import List
+
+import docker
+import pytest
+from docker.models.containers import Container
+from lxml import etree
+from ncclient.transport.ssh import MSG_DELIM
+
+import settings
+
+HELLO_DTD = etree.DTD(StringIO("""
+<!ELEMENT hello (capabilities, session-id)>
+<!ATTLIST hello xmlns CDATA #REQUIRED>
+<!ELEMENT capabilities (capability+)>
+<!ELEMENT capability (#PCDATA)>
+<!ELEMENT session-id (#PCDATA)>
+"""))
+
+INITIAL_CONFIG_DIR = "data/tls_initial"
+NEW_CONFIG_DIR = "data/tls_new"
+
+
+class TestTLS:
+ container: Container
+
+ @classmethod
+ def setup_class(cls):
+ dkr = docker.from_env()
+ containers = dkr.containers.list(filters={"ancestor": "netconf-pnp-simulator:latest"})
+ assert len(containers) == 1
+ cls.container = containers[0]
+
+ def test_tls_connect(self):
+ nc_connect(INITIAL_CONFIG_DIR)
+
+ @pytest.mark.parametrize("round_id", [f"round #{i + 1}" for i in range(6)])
+ def test_tls_reconfiguration(self, round_id):
+ # pylint: disable=W0613
+ self.reconfigure_and_check(NEW_CONFIG_DIR, INITIAL_CONFIG_DIR)
+ self.reconfigure_and_check(INITIAL_CONFIG_DIR, NEW_CONFIG_DIR)
+
+ def reconfigure_and_check(self, good_config_dir: str, bad_config_dir: str):
+ with simple_tar([f"{good_config_dir}/{b}.pem" for b in ["ca", "server_key", "server_cert"]]) as config_tar:
+        status = self.container.put_archive("/config/tls", config_tar)
+ assert status
+ test_start = int(time.time())
+ exit_code, (_, err) = self.container.exec_run("/opt/bin/reconfigure-tls.sh", demux=True)
+ if exit_code != 0:
+ print(f"reconfigure-tls.sh failed with rc={exit_code}")
+ log_all("stderr", err)
+ log_all("Container Logs", self.container.logs(since=test_start))
+ assert False
+ nc_connect(good_config_dir)
+ # Exception matching must be compatible with Py36 and Py37+
+ with pytest.raises(ssl.SSLError, match=r".*\[SSL: CERTIFICATE_VERIFY_FAILED\].*"):
+ nc_connect(bad_config_dir)
+
+
+def log_all(heading: str, lines: object):
+ print(f"{heading}:")
+ if isinstance(lines, bytes):
+ lines = lines.decode("utf-8")
+ if isinstance(lines, str):
+ lines = lines.split("\n")
+ for line in lines:
+ print(" ", line)
+
+
+def simple_tar(paths: List[str]):
+ file = tempfile.NamedTemporaryFile()
+ with tarfile.open(mode="w", fileobj=file) as tar:
+ for path in paths:
+ abs_path = os.path.abspath(path)
+ tar.add(abs_path, arcname=os.path.basename(path), recursive=False)
+ file.seek(0)
+ return file
+
+
+def nc_connect(config_dir: str):
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) as sock:
+ context = ssl.create_default_context()
+ context.load_verify_locations(f"{config_dir}/ca.pem")
+ context.load_cert_chain(certfile=f"{config_dir}/client_cert.pem", keyfile=f"{config_dir}/client_key.pem")
+ context.check_hostname = False
+ with context.wrap_socket(sock, server_side=False, server_hostname=settings.HOST) as conn:
+ conn.connect((settings.HOST, settings.TLS_PORT))
+ buf = nc_read_msg(conn)
+ print(f"Received NETCONF HelloMessage:\n{buf}")
+ conn.close()
+ assert buf.endswith(MSG_DELIM)
+ hello_root = etree.XML(buf[:-len(MSG_DELIM)])
+ valid = HELLO_DTD.validate(hello_root)
+ if not valid:
+ log_all("Invalid NETCONF <hello> msg", list(HELLO_DTD.error_log.filter_from_errors()))
+ assert False
+
+
+def nc_read_msg(conn: ssl.SSLSocket):
+ buf = ''
+ while True:
+ data = conn.recv(4096)
+ if data:
+ buf += data.decode(encoding="utf-8")
+ if buf.endswith(MSG_DELIM):
+ break
+ else:
+ break
+ return buf
diff --git a/test/mocks/netconf-pnp-simulator/engine/tests/test_turing_machine.py b/test/mocks/netconf-pnp-simulator/engine/tests/test_turing_machine.py
new file mode 100644
index 000000000..8ac38b0f5
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tests/test_turing_machine.py
@@ -0,0 +1,130 @@
+import nctest
+
+_NAMESPACES = {
+ "nc": "urn:ietf:params:xml:ns:netconf:base:1.0",
+ "tm": "http://example.net/turing-machine"
+}
+
+
+def check_labels_only_in_data(data):
+ children = data.xpath("/nc:rpc-reply/nc:data/*", namespaces=_NAMESPACES)
+ assert children
+ for child in children:
+ assert child.tag.endswith("turing-machine")
+ children = data.xpath("/nc:rpc-reply/nc:data/tm:turing-machine/*", namespaces=_NAMESPACES)
+ assert children
+ for child in children:
+ assert child.tag.endswith("transition-function")
+ children = data.xpath("/nc:rpc-reply/nc:data/tm:turing-machine/tm:transition-function/*", namespaces=_NAMESPACES)
+ assert children
+ for child in children:
+ assert child.tag.endswith("delta")
+ children = data.xpath("/nc:rpc-reply/nc:data/tm:turing-machine/tm:transition-function/tm:delta/*",
+ namespaces=_NAMESPACES)
+ assert children
+ for child in children:
+ assert child.tag.endswith("label")
+
+
+def check_deltas_in_data(data):
+ deltas = data.xpath("/nc:rpc-reply/nc:data/tm:turing-machine/tm:transition-function/*", namespaces=_NAMESPACES)
+ assert deltas
+ for d in deltas:
+ assert d.tag.endswith("delta")
+
+
+class TestTuringMachine(nctest.NCTestCase):
+ """ Tests basic NETCONF operations on the turing-machine YANG module. """
+
+ def test_get(self):
+ reply = self.nc.get()
+ nctest.check_reply_data(reply)
+ check_deltas_in_data(reply.data)
+
+ def test_get_config_startup(self):
+ reply = self.nc.get_config(source="startup")
+ nctest.check_reply_data(reply)
+ check_deltas_in_data(reply.data)
+
+ def test_get_config_running(self):
+ reply = self.nc.get_config(source="running")
+ nctest.check_reply_data(reply)
+ check_deltas_in_data(reply.data)
+
+ def test_get_subtree_filter(self):
+ filter_xml = """<nc:filter xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
+ <turing-machine xmlns="http://example.net/turing-machine">
+ <transition-function>
+ <delta>
+ <label />
+ </delta>
+ </transition-function>
+ </turing-machine>
+ </nc:filter>"""
+ reply = self.nc.get_config(source="running", filter=filter_xml)
+ nctest.check_reply_data(reply)
+ check_deltas_in_data(reply.data)
+ check_labels_only_in_data(reply.data)
+
+ def test_get_xpath_filter(self):
+ # https://github.com/ncclient/ncclient/issues/166
+ filter_xml = """<nc:filter type="xpath" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0"
+ xmlns:tm="http://example.net/turing-machine"
+ select="/tm:turing-machine/transition-function/delta/label" />
+ """
+ reply = self.nc.get(filter=filter_xml)
+ nctest.check_reply_data(reply)
+ check_deltas_in_data(reply.data)
+ check_labels_only_in_data(reply.data)
+
+ def test_edit_config(self):
+ config_xml = """<nc:config xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
+ <turing-machine xmlns="http://example.net/turing-machine">
+ <transition-function>
+ <delta nc:operation="{}">
+ <label>test-transition-rule</label>
+ <input>
+ <symbol>{}</symbol>
+ <state>{}</state>
+ </input>
+ </delta>
+ </transition-function>
+ </turing-machine></nc:config>"""
+ # merge
+ reply = self.nc.edit_config(target='running', config=config_xml.format("merge", 9, 99))
+ nctest.check_reply_ok(reply)
+ # get
+ reply = self.nc.get_config(source="running")
+ nctest.check_reply_data(reply)
+ deltas = reply.data.xpath(
+ "/nc:rpc-reply/nc:data/tm:turing-machine/tm:transition-function/tm:delta[tm:label='test-transition-rule']",
+ namespaces=_NAMESPACES)
+ assert len(deltas) == 1
+ # create already existing - expect error
+ reply = self.nc.edit_config(target='running', config=config_xml.format("create", 9, 99))
+ nctest.check_reply_err(reply)
+ # replace
+ reply = self.nc.edit_config(target='running', config=config_xml.format("replace", 9, 88))
+ nctest.check_reply_ok(reply)
+ # get
+ reply = self.nc.get_config(source="running")
+ nctest.check_reply_data(reply)
+ states = reply.data.xpath(
+ "/nc:rpc-reply/nc:data/tm:turing-machine/tm:transition-function/tm:delta[tm:label='test-transition-rule']/"
+ "tm:input/tm:state",
+ namespaces=_NAMESPACES)
+ assert len(states) == 1
+ assert states[0].text == "88"
+ # delete
+ reply = self.nc.edit_config(target='running', config=config_xml.format("delete", 9, 88))
+ nctest.check_reply_ok(reply)
+ # delete non-existing - expect error
+ reply = self.nc.edit_config(target='running', config=config_xml.format("delete", 9, 88))
+ nctest.check_reply_err(reply)
+ # get - should be empty
+ reply = self.nc.get_config(source="running")
+ nctest.check_reply_data(reply)
+ deltas = reply.data.xpath(
+ "/nc:rpc-reply/nc:data/tm:turing-machine/tm:transition-function/tm:delta[tm:label='test-transition-rule']",
+ namespaces=_NAMESPACES)
+ assert not deltas
diff --git a/test/mocks/netconf-pnp-simulator/engine/tox.ini b/test/mocks/netconf-pnp-simulator/engine/tox.ini
new file mode 100644
index 000000000..9fd5d1d9f
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/tox.ini
@@ -0,0 +1,39 @@
+#-
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Nordix Foundation.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+[tox]
+envlist = py3
+requires = tox-docker == 1.7.0
+skipsdist = True
+
+[testenv]
+changedir = tests
+docker =
+ netconf-pnp-simulator:latest
+
+deps =
+ pytest == 6.2.2
+ docker == 4.4.4
+ lxml == 4.6.2
+ ncclient == 0.6.9
+commands = pytest -v
+
+[pytest]
+log_level = INFO
+log_format = %(asctime)s.%(msecs)03d %(levelname)-5s [%(name)s] %(message)s
diff --git a/test/mocks/netconf-pnp-simulator/engine/zlog.conf b/test/mocks/netconf-pnp-simulator/engine/zlog.conf
new file mode 100644
index 000000000..0a1c72b7e
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/engine/zlog.conf
@@ -0,0 +1,7 @@
+[formats]
+
+common = "%d(%F %T).%ms %-5V [%c] %m%n"
+
+[rules]
+
+*.INFO >stderr; common
diff --git a/test/mocks/netconf-pnp-simulator/modules/docker-compose.yml b/test/mocks/netconf-pnp-simulator/modules/docker-compose.yml
new file mode 100644
index 000000000..2ddf56407
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/modules/docker-compose.yml
@@ -0,0 +1,12 @@
+version: '3'
+
+services:
+ netconf-pnp-simulator:
+ image: nexus3.onap.org:10001/onap/integration/simulators/netconf-pnp-simulator:2.8.2
+ container_name: netconf-pnp-simulator
+ restart: always
+ ports:
+ - "830:830"
+ - "6513:6513"
+ volumes:
+ - ./:/config/modules
diff --git a/test/mocks/netconf-pnp-simulator/modules/pnf-sw-upgrade/LICENSE b/test/mocks/netconf-pnp-simulator/modules/pnf-sw-upgrade/LICENSE
new file mode 100644
index 000000000..3eface2c7
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/modules/pnf-sw-upgrade/LICENSE
@@ -0,0 +1,13 @@
+Copyright (C) 2019 Nordix Foundation
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/test/mocks/netconf-pnp-simulator/modules/pnf-sw-upgrade/pnf-sw-upgrade.yang b/test/mocks/netconf-pnp-simulator/modules/pnf-sw-upgrade/pnf-sw-upgrade.yang
new file mode 100644
index 000000000..6d413065e
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/modules/pnf-sw-upgrade/pnf-sw-upgrade.yang
@@ -0,0 +1,78 @@
+module pnf-sw-upgrade {
+ namespace "http://onap.org/pnf-sw-upgrade";
+ prefix upgrade;
+
+ import ietf-yang-types {
+ prefix yang;
+ }
+
+ revision "2019-12-03" {
+ description
+ "initial version";
+ }
+
+ container software-upgrade {
+ config true;
+ list upgrade-package {
+ key "id";
+ leaf id {
+ type string;
+ }
+ leaf current-status {
+ type enumeration {
+ enum CREATED;
+ enum INITIALIZED;
+ enum DOWNLOAD_IN_PROGRESS;
+ enum DOWNLOAD_COMPLETED;
+ enum ACTIVATION_IN_PROGRESS;
+ enum ACTIVATION_COMPLETED;
+ }
+ description
+ "List of possible states of the upgrade";
+ }
+ leaf state-change-time {
+ mandatory false;
+ description
+ "Date and time of the last state change.";
+ type yang:date-and-time;
+ }
+ leaf action {
+ mandatory false;
+ type enumeration {
+ enum NONE;
+ enum PRE_CHECK;
+ enum DOWNLOAD_NE_SW;
+ enum ACTIVATE_NE_SW;
+ enum CANCEL;
+ }
+ description
+ "List of possible actions for the upgrade";
+ }
+ leaf software-version {
+ type string;
+ description
+ "Possible name or release version of the UP";
+ }
+ leaf uri {
+ type string;
+ description
+ "A URI that points to the directory where the UP can be found.";
+ }
+ leaf user {
+ type string;
+ description
+ "Indicates the user.";
+ }
+ leaf password {
+ type string;
+ description
+ "Indicates the password.";
+ }
+ leaf user-label {
+ type string;
+ description
+ "Free-text description of the UP.";
+ }
+ }
+ }
+}
diff --git a/test/mocks/netconf-pnp-simulator/modules/pnf-sw-upgrade/startup.xml b/test/mocks/netconf-pnp-simulator/modules/pnf-sw-upgrade/startup.xml
new file mode 100644
index 000000000..4f1e7bceb
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/modules/pnf-sw-upgrade/startup.xml
@@ -0,0 +1,12 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<software-upgrade xmlns="http://onap.org/pnf-sw-upgrade">
+ <upgrade-package>
+ <id>sw-id-1</id>
+ <current-status>CREATED</current-status>
+ <software-version>test_software_1</software-version>
+ <uri>sftp://127.0.0.1/test_software_1.img</uri>
+ <user>test_user</user>
+ <password>test_password</password>
+ <user-label>trial software update</user-label>
+ </upgrade-package>
+</software-upgrade>
diff --git a/test/mocks/netconf-pnp-simulator/modules/pnf-sw-upgrade/subscriber.py b/test/mocks/netconf-pnp-simulator/modules/pnf-sw-upgrade/subscriber.py
new file mode 100755
index 000000000..0ebb2d654
--- /dev/null
+++ b/test/mocks/netconf-pnp-simulator/modules/pnf-sw-upgrade/subscriber.py
@@ -0,0 +1,209 @@
+#!/usr/bin/env python3
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Nordix Foundation.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+__author__ = "Eliezio Oliveira <eliezio.oliveira@est.tech>"
+__copyright__ = "Copyright (C) 2020 Nordix Foundation"
+__license__ = "Apache 2.0"
+
+import os
+import time
+from threading import Timer
+
+import sysrepo as sr
+from loguru import logger
+
+YANG_MODULE_NAME = 'pnf-sw-upgrade'
+
+XPATH_CTX = sr.Xpath_Ctx()
+PAUSE_TO_LOCK = 0.5
+
+#
+# ----- BEGIN Finite State Machine definitions -----
+#
+
+# Actions
+ACT_PRE_CHECK = 'PRE_CHECK'
+ACT_DOWNLOAD_NE_SW = 'DOWNLOAD_NE_SW'
+ACT_ACTIVATE_NE_SW = 'ACTIVATE_NE_SW'
+ACT_CANCEL = 'CANCEL'
+
+# States
+ST_CREATED = 'CREATED'
+ST_INITIALIZED = 'INITIALIZED'
+ST_DOWNLOAD_IN_PROGRESS = 'DOWNLOAD_IN_PROGRESS'
+ST_DOWNLOAD_COMPLETED = 'DOWNLOAD_COMPLETED'
+ST_ACTIVATION_IN_PROGRESS = 'ACTIVATION_IN_PROGRESS'
+ST_ACTIVATION_COMPLETED = 'ACTIVATION_COMPLETED'
+
+# Timeouts used for timed transitions
+SWUG_TIMED_TRANSITION_TO = int(os.environ.get("SWUG_TIMED_TRANSITION_TO", "7"))
+TO_DOWNLOAD = SWUG_TIMED_TRANSITION_TO
+TO_ACTIVATION = SWUG_TIMED_TRANSITION_TO
+
+
+def timestamper(sess, key_id):
+ xpath = xpath_of(key_id, 'state-change-time')
+ now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
+ state = sr.Val(now, sr.SR_STRING_T)
+ sess.set_item(xpath, state)
+
+
+def xpath_of(key_id, leaf_id):
+ selector = "[id='{0}']".format(key_id) if key_id else ''
+ return "/%s:software-upgrade/upgrade-package%s/%s" % (YANG_MODULE_NAME, selector, leaf_id)
+
+
+"""
+The finite state machine (FSM) is represented as a dictionary where the current state is the key, and its value is
+an object (also represented as a dictionary) with the following optional attributes:
+
+- on_enter: a function called when FSM enters this state;
+- transitions: a dictionary mapping every acceptable action to the target state;
+- timed_transition: a pair for a timed transition that will automatically occur after a given interval.
+"""
+STATE_MACHINE = {
+ ST_CREATED: {
+ 'transitions': {ACT_PRE_CHECK: ST_INITIALIZED}
+ },
+ ST_INITIALIZED: {
+ 'on_enter': timestamper,
+ 'transitions': {ACT_DOWNLOAD_NE_SW: ST_DOWNLOAD_IN_PROGRESS}
+ },
+ ST_DOWNLOAD_IN_PROGRESS: {
+ 'on_enter': timestamper,
+ 'timed_transition': (TO_DOWNLOAD, ST_DOWNLOAD_COMPLETED),
+ 'transitions': {ACT_CANCEL: ST_INITIALIZED}
+ },
+ ST_DOWNLOAD_COMPLETED: {
+ 'on_enter': timestamper,
+ 'transitions': {ACT_ACTIVATE_NE_SW: ST_ACTIVATION_IN_PROGRESS}
+ },
+ ST_ACTIVATION_IN_PROGRESS: {
+ 'on_enter': timestamper,
+ 'timed_transition': (TO_ACTIVATION, ST_ACTIVATION_COMPLETED),
+ 'transitions': {ACT_CANCEL: ST_DOWNLOAD_COMPLETED}
+ },
+ ST_ACTIVATION_COMPLETED: {
+ 'on_enter': timestamper,
+ 'transitions': {ACT_ACTIVATE_NE_SW: ST_ACTIVATION_IN_PROGRESS}
+ }
+}
+
+
+#
+# ----- END Finite State Machine definitions -----
+#
+
+
+def main():
+ try:
+ conn = sr.Connection(YANG_MODULE_NAME)
+ sess = sr.Session(conn)
+ subscribe = sr.Subscribe(sess)
+
+ subscribe.module_change_subscribe(YANG_MODULE_NAME, module_change_cb, conn)
+
+ try:
+ print_current_config(sess, YANG_MODULE_NAME)
+ except Exception as e:
+ logger.error(e)
+
+ sr.global_loop()
+
+ logger.info("Application exit requested, exiting.")
+ except Exception as e:
+ logger.error(e)
+
+
+# Function to be called for subscribed client of given session whenever configuration changes.
+def module_change_cb(sess, module_name, event, private_ctx):
+ if event == sr.SR_EV_APPLY:
+ try:
+ conn = private_ctx
+ change_path = xpath_of(None, 'action')
+ it = sess.get_changes_iter(change_path)
+ while True:
+ change = sess.get_change_next(it)
+ if change is None:
+ break
+ op = change.oper()
+ if op in (sr.SR_OP_CREATED, sr.SR_OP_MODIFIED):
+ handle_trigger_action(conn, sess, change.new_val())
+ except Exception as e:
+ logger.error(e)
+ return sr.SR_ERR_OK
+
+
+# Function to print current configuration state.
+# It does so by loading all the items of a session and printing them out.
+def print_current_config(session, module_name):
+ select_xpath = f"/{module_name}:*//*"
+ values = session.get_items(select_xpath)
+ if values:
+ logger.info("========== BEGIN CONFIG ==========")
+ for i in range(values.val_cnt()):
+ logger.info(values.val(i).to_string().strip())
+ logger.info("=========== END CONFIG ===========")
+
+
+def handle_trigger_action(conn, sess, action_val):
+ """
+ Handle individual changes on the model.
+ """
+ logger.info("CREATED/MODIFIED: %s" % action_val.to_string())
+ xpath = action_val.xpath()
+ last_node = XPATH_CTX.last_node(xpath)
+ # Warning: 'key_value' modifies 'xpath'!
+ key_id = XPATH_CTX.key_value(xpath, 'upgrade-package', 'id')
+ if key_id and last_node == 'action':
+ action = action_val.data().get_enum()
+ cur_state = sess.get_item(xpath_of(key_id, 'current-status')).data().get_enum()
+ next_state_str = STATE_MACHINE[cur_state]['transitions'].get(action, None)
+ if next_state_str:
+ Timer(PAUSE_TO_LOCK, try_change_state, (conn, key_id, next_state_str)).start()
+
+
+def try_change_state(conn, key_id, state_str):
+ sess = sr.Session(conn)
+ try:
+ try:
+ sess.lock_module(YANG_MODULE_NAME)
+ except RuntimeError:
+ logger.warning(f"Retrying after {PAUSE_TO_LOCK}s")
+ Timer(PAUSE_TO_LOCK, try_change_state, (conn, key_id, state_str)).start()
+ return
+ try:
+ state = sr.Val(state_str, sr.SR_ENUM_T)
+ sess.set_item(xpath_of(key_id, 'current-status'), state)
+ on_enter = STATE_MACHINE[state_str].get('on_enter', None)
+ if callable(on_enter):
+ on_enter(sess, key_id)
+ sess.commit()
+ finally:
+ sess.unlock_module(YANG_MODULE_NAME)
+ delay, next_state_str = STATE_MACHINE[state_str].get('timed_transition', [0, None])
+ if delay:
+ Timer(delay, try_change_state, (conn, key_id, next_state_str)).start()
+ finally:
+ sess.session_stop()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/FileReadyEvent.json b/test/mocks/pmsh-pnf-sim/docker-compose/FileReadyEvent.json
new file mode 100644
index 000000000..1da15044c
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/FileReadyEvent.json
@@ -0,0 +1,34 @@
+{
+ "event": {
+ "commonEventHeader": {
+ "version": "4.0.1",
+ "vesEventListenerVersion": "7.0.1",
+ "domain": "notification",
+ "eventName": "Noti_RnNode-Ericsson_FileReady",
+ "eventId": "FileReady_1797490e-10ae-4d48-9ea7-3d7d790b25e1",
+ "lastEpochMicrosec": 8745745764578,
+ "priority": "Normal",
+ "reportingEntityName": "otenb5309",
+ "sequence": 0,
+ "sourceName": "oteNB5309",
+ "startEpochMicrosec": 8745745764578,
+ "timeZoneOffset": "UTC+05.30"
+ },
+ "notificationFields": {
+ "changeIdentifier": "PM_MEAS_FILES",
+ "changeType": "FileReady",
+ "notificationFieldsVersion": "2.0",
+ "arrayOfNamedHashMap": [
+ {
+ "name": "Apmfilename.xml.gz",
+ "hashMap": {
+ "location": "sftp://bulkpm:bulkpm@sftpserver:22/upload/Apmfilename.xml.gz",
+ "compression": "gzip",
+ "fileFormatType": "org.3GPP.32.435#measCollec",
+ "fileFormatVersion": "V10"
+ }
+ }
+ ]
+ }
+ }
+}
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/app_config/logger_config.yaml b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/logger_config.yaml
new file mode 100644
index 000000000..d1422dc15
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/logger_config.yaml
@@ -0,0 +1,27 @@
+version: 1
+
+disable_existing_loggers: false
+
+loggers:
+ dev:
+ level: DEBUG
+ handlers: [console, file_handler]
+ propagate: false
+
+handlers:
+ console:
+ class: logging.StreamHandler
+ formatter: simple
+ file_handler:
+ class: logging.handlers.RotatingFileHandler
+ filename: config/modules/pnf-subscriptions/pmsh_sim.log
+ mode: a
+ maxBytes: 10000000
+ backupCount: 5
+ formatter: extended
+
+formatters:
+ simple:
+ format: "%(asctime)s %(name)s: %(message)s"
+ extended:
+ format: "%(asctime)s %(name)s %(levelname)s: %(message)s"
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/app_config/pnfconfig.py b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/pnfconfig.py
new file mode 100644
index 000000000..5a03489e4
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/pnfconfig.py
@@ -0,0 +1,5 @@
+VES_IP = '10.10.10.47'
+VES_PORT = '30417'
+VES_USER = 'sample1'
+VES_PASS = 'sample1'
+ROP = 60 # in seconds
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/ca.pem b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/ca.pem
new file mode 100644
index 000000000..62593ab7c
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/ca.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIID7TCCAtWgAwIBAgIJAMtE1NGAR5KoMA0GCSqGSIb3DQEBBQUAMIGMMQswCQYD
+VQQGEwJDWjEWMBQGA1UECAwNU291dGggTW9yYXZpYTENMAsGA1UEBwwEQnJubzEP
+MA0GA1UECgwGQ0VTTkVUMQwwCgYDVQQLDANUTUMxEzARBgNVBAMMCmV4YW1wbGUg
+Q0ExIjAgBgkqhkiG9w0BCQEWE2V4YW1wbGVjYUBsb2NhbGhvc3QwHhcNMTQwNzI0
+MTQxOTAyWhcNMjQwNzIxMTQxOTAyWjCBjDELMAkGA1UEBhMCQ1oxFjAUBgNVBAgM
+DVNvdXRoIE1vcmF2aWExDTALBgNVBAcMBEJybm8xDzANBgNVBAoMBkNFU05FVDEM
+MAoGA1UECwwDVE1DMRMwEQYDVQQDDApleGFtcGxlIENBMSIwIAYJKoZIhvcNAQkB
+FhNleGFtcGxlY2FAbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEArD3TDHPAMT2Z84orK4lMlarbgooIUCcRZyLe+QM+8KY8Hn+mGaxPEOTS
+L3ywszqefB/Utm2hPKLHX684iRC14ID9WDGHxPjvoPArhgFhfV+qnPfxKTgxZC12
+uOj4u1V9y+SkTCocFbRfXVBGpojrBuDHXkDMDEWNvr8/52YCv7bGaiBwUHolcLCU
+bmtKILCG0RNJyTaJpXQdAeq5Z1SJotpbfYFFtAXB32hVoLug1dzl2tjG9sb1wq3Q
+aDExcbC5w6P65qOkNoyym9ne6QlQagCqVDyFn3vcqkRaTjvZmxauCeUxXgJoXkyW
+cm0lM1KMHdoTArmchw2Dz0yHHSyDAQIDAQABo1AwTjAdBgNVHQ4EFgQUc1YQIqjZ
+sHVwlea0AB4N+ilNI2gwHwYDVR0jBBgwFoAUc1YQIqjZsHVwlea0AB4N+ilNI2gw
+DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAI/1KH60qnw9Xs2RGfi0/
+IKf5EynXt4bQX8EIyVKwSkYKe04zZxYfLIl/Q2HOPYoFmm3daj5ddr0ZS1i4p4fT
+UhstjsYWvXs3W/HhVmFUslakkn3PrswhP77fCk6eEJLxdfyJ1C7Uudq2m1isZbKi
+h+XF0mG1LxJaDMocSz4eAya7M5brwjy8DoOmA1TnLQFCVcpn+sCr7VC4wE/JqxyV
+hBCk/MuGqqM3B1j90bGFZ112ZOecyE0EDSr6IbiRBtmeNbEwOFjKXhNLYdxpBZ9D
+8A/368OckZkCrVLGuJNxK9UwCVTe8IhotHUqU9EqFDmxdV8oIdU/OzUwwNPA/Bd/
+9g==
+-----END CERTIFICATE-----
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/client_cert.pem b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/client_cert.pem
new file mode 100644
index 000000000..d129e4666
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/client_cert.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgIBBzANBgkqhkiG9w0BAQsFADCBjDELMAkGA1UEBhMCQ1ox
+FjAUBgNVBAgMDVNvdXRoIE1vcmF2aWExDTALBgNVBAcMBEJybm8xDzANBgNVBAoM
+BkNFU05FVDEMMAoGA1UECwwDVE1DMRMwEQYDVQQDDApleGFtcGxlIENBMSIwIAYJ
+KoZIhvcNAQkBFhNleGFtcGxlY2FAbG9jYWxob3N0MB4XDTE1MDczMDA3MjcxOFoX
+DTM1MDcyNTA3MjcxOFowgYUxCzAJBgNVBAYTAkNaMRYwFAYDVQQIDA1Tb3V0aCBN
+b3JhdmlhMQ8wDQYDVQQKDAZDRVNORVQxDDAKBgNVBAsMA1RNQzEXMBUGA1UEAwwO
+ZXhhbXBsZSBjbGllbnQxJjAkBgkqhkiG9w0BCQEWF2V4YW1wbGVjbGllbnRAbG9j
+YWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAueCQaNQWoNmF
+K6LKu1p8U8ZWdWg/PvDdLsJyzfzl/Qw4UA68SfFNaY06zZl8QB9W02nr5kWeeMY0
+VA3adrPgOlvfx3oWlFbkETnMaN4OT3WTQ0Wt6jAWZDzVfopwpJPAzRPxACDftIqF
+GagYcF32hZlVNqqnVdbXh0S0EViweqp/dbG4VDUHSNVbglc+u4UbEzNIFXMdEFsJ
+ZpkynOmSiTsIATqIhb+2srkVgLwhfkC2qkuHQwAHdubuB07ObM2z01UhyEdDvEYG
+HwtYAGDBL2TAcsI0oGeVkRyuOkV0QY0UN7UEFI1yTYw+xZ42HgFx3uGwApCImxhb
+j69GBYWFqwIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVu
+U1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUXGpLeLnh2cSDARAV
+A7KrBxGYpo8wHwYDVR0jBBgwFoAUc1YQIqjZsHVwlea0AB4N+ilNI2gwDQYJKoZI
+hvcNAQELBQADggEBAJPV3RTXFRtNyOU4rjPpYeBAIAFp2aqGc4t2J1c7oPp/1n+l
+ZvjnwtlJpZHxMM783e2ryDQ6dkvXDf8kpwKlg3U3mkJ3xKkDdWrM4QwghXdCN519
+aa9qmu0zdFL+jUAaWlQ5tsceOrvbusCcbMqiFGk/QfpHqPv52SVWbYyUx7IX7DE+
+UjgsLHycfV/tlcx4ZE6soTzl9VdgSL/zmzG3rjsr58J80rXckLgBhvijgBlIAJvW
+fC7D0vaouvBInSFXymdPVoUDZ30cdGLf+hI/i/TfsEMOinLrXVdkSGNo6FXAHKSv
+XeB9oFKSzhQ7OPyRyqvEPycUSw/qD6FVr80oDDc=
+-----END CERTIFICATE----- \ No newline at end of file
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/client_key.pem b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/client_key.pem
new file mode 100644
index 000000000..c85aa57d4
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/client_key.pem
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAueCQaNQWoNmFK6LKu1p8U8ZWdWg/PvDdLsJyzfzl/Qw4UA68
+SfFNaY06zZl8QB9W02nr5kWeeMY0VA3adrPgOlvfx3oWlFbkETnMaN4OT3WTQ0Wt
+6jAWZDzVfopwpJPAzRPxACDftIqFGagYcF32hZlVNqqnVdbXh0S0EViweqp/dbG4
+VDUHSNVbglc+u4UbEzNIFXMdEFsJZpkynOmSiTsIATqIhb+2srkVgLwhfkC2qkuH
+QwAHdubuB07ObM2z01UhyEdDvEYGHwtYAGDBL2TAcsI0oGeVkRyuOkV0QY0UN7UE
+FI1yTYw+xZ42HgFx3uGwApCImxhbj69GBYWFqwIDAQABAoIBAQCZN9kR8DGu6V7y
+t0Ax68asL8O5B/OKaHWKQ9LqpVrXmikZJOxkbzoGldow/CIFoU+q+Zbwu9aDa65a
+0wiP7Hoa4Py3q5XNNUrOQDyU/OYC7cI0I83WS0lJ2zOJGYj8wKae5Z81IeQFKGHK
+4lsy1OGPAvPRGh7RjUUgRavA2MCwe07rWRuDb/OJFe4Oh56UMEjwMiNBtMNtncog
+j1vr/qgRJdf9tf0zlJmLvUJ9+HSFFV9I/97LJyFhb95gAfHkjdVroLVgT3Cho+4P
+WtZaKCIGD0OwfOG2nLV4leXvRUk62/LMlB8NI9+JF7Xm+HCKbaWHNWC7mvWSLV58
+Zl4AbUWRAoGBANyJ6SFHFRHSPDY026SsdMzXR0eUxBAK7G70oSBKKhY+O1j0ocLE
+jI2krHJBhHbLlnvJVyMUaCUOTS5m0uDw9hgSsAqeSL3hL38kxVZw+KNG9Ouno1Fl
+KnE/xXHlPQyeGs/P8nAMzHZxQtEsQdQayJEhK2XXHTsy7Q3MxDisfVJ1AoGBANfD
+34gB+OMx6pwj7zk3qWbYXSX8xjCZMR0ciko+h4xeMP2N8B0oyoqC+v1ABMAtJ3wG
+sGZd0hV9gwM7OUM3SEwkn6oeg1GemWLcn4rlSmTnZc4aeVwrEWlnSNFX3s4g9l4u
+k8Ugu4MVJYqH8HuDQ5Ggl6/QAwPzMSEdCW0O+jOfAoGAIBRbegC5+t6m7Yegz4Ja
+dxV1g98K6f58x+MDsQu4tYWV4mmrQgaPH2dtwizvlMwmdpkh+LNWNtWuumowkJHc
+akIFo3XExQIFg6wYnGtQb4e5xrGa2xMpKlIJaXjb+YLiCYqJDG2ALFZrTrvuU2kV
+9a5qfqTc1qigvNolTM0iaaUCgYApmrZWhnLUdEKV2wP813PNxfioI4afxlpHD8LG
+sCn48gymR6E+Lihn7vuwq5B+8fYEH1ISWxLwW+RQUjIneNhy/jjfV8TgjyFqg7or
+0Sy4KjpiNI6kLBXOakELRNNMkeSPopGR2E7v5rr3bGD9oAD+aqX1G7oJH/KgPPYd
+Vl7+ZwKBgQDcHyWYrimjyUgKaQD2GmoO9wdcJYQ59ke9K+OuGlp4ti5arsi7N1tP
+B4f09aeELM2ASIuk8Q/Mx0jQFnm8lzRFXdewgvdPoZW/7VufM9O7dGPOc41cm2Dh
+yrTcXx/VmUBb+/fnXVEgCv7gylp/wtdTGHQBQJHR81jFBz0lnLj+gg==
+-----END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/docker-compose.yml b/test/mocks/pmsh-pnf-sim/docker-compose/docker-compose.yml
new file mode 100644
index 000000000..9133d5860
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/docker-compose.yml
@@ -0,0 +1,33 @@
+version: '3'
+
+services:
+ netopeer2:
+ image: nexus3.onap.org:10001/onap/integration/simulators/netconf-pnp-simulator:2.8.6
+ container_name: netconf-pnp-pmsh
+ restart: always
+ ports:
+ - "830:830"
+ - "6513:6513"
+ volumes:
+ - ./:/config/modules/pnf-subscriptions
+
+ sftp:
+ container_name: sftpserver
+ image: atmoz/sftp
+ ports:
+ - "2222:22"
+ volumes:
+ - /host/upload:/home/admin
+ command: admin:admin:1001
+
+ opendaylight:
+ image: blueonap/opendaylight:v0.12.1-1
+ container_name: opendaylight
+ ports:
+ - "8101:8101"
+ - "8181:8181"
+ - "6666:6666"
+ environment:
+ - KARAF_FEATURES_BOOT=odl-restconf-all,odl-netconf-connector-all
+ volumes:
+ - ./app_config/tls:/config/tls
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/pnf-subscriptions.yang b/test/mocks/pmsh-pnf-sim/docker-compose/pnf-subscriptions.yang
new file mode 100644
index 000000000..6adce57cc
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/pnf-subscriptions.yang
@@ -0,0 +1,47 @@
+module pnf-subscriptions {
+ namespace "http://onap.org/pnf-subscriptions";
+ prefix subscriptions;
+
+ revision "2019-11-22" {
+ description
+ "initial version";
+ }
+ container subscriptions {
+ list configuration{
+ key "subscriptionName";
+ leaf subscriptionName {
+ type string;
+ }
+ leaf administrativeState {
+ type string;
+ }
+ leaf fileBasedGP {
+ type int16;
+ }
+ leaf fileLocation {
+ type string;
+ }
+ list measurementGroups {
+ key "id";
+ leaf id{
+ type int16;
+ }
+ container measurementGroup {
+ list measurementTypes {
+ key "measurementType";
+ leaf measurementType {
+ type string;
+ }
+ }
+ list managedObjectDNsBasic {
+ key "DN";
+ leaf DN {
+ type string;
+ }
+ }
+ }
+
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/pnf.py b/test/mocks/pmsh-pnf-sim/docker-compose/pnf.py
new file mode 100644
index 000000000..a187ff76b
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/pnf.py
@@ -0,0 +1,113 @@
+import gzip
+import json
+import logging
+import os
+import shutil
+import time
+import xml.etree.ElementTree as ElementTree
+from random import randint
+
+import requests
+from requests.auth import HTTPBasicAuth
+
+from app_config import pnfconfig
+
+logger = logging.getLogger('dev')
+
+
+class PNF:
+ """ Handle updates to the XML PM file and send file ready events to the VES collector. """
+ def __init__(self):
+ pass
+
+ @staticmethod
+ def create_job_id(jobid, change_list):
+ """
+ Create a new measInfo tag and add new sub-elements to the existing XML.
+ :param jobid: unique job id embedded within the new XML sub-element.
+ :param change_list: list of changes used to create the sub-element items.
+ """
+ try:
+ measurement_type = []
+ meas_object_dn = []
+ for items in range(len(change_list)):
+ if "/measurementType =" in change_list[items]:
+ measurement_type.append(((change_list[items].rsplit('/', 1))[1].rsplit('=', 1))[1].strip())
+ if "/DN =" in change_list[items]:
+ meas_object_dn.append(((change_list[items].rsplit('/', 1))[1].rsplit('=', 1))[1].strip())
+ script_dir = os.path.dirname(__file__)
+ pm_rel_file_path = "sftp/"
+ pm_location = os.path.join(script_dir, pm_rel_file_path)
+ ElementTree.register_namespace('', "http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec")
+ tree = ElementTree.parse(pm_location + "pm.xml")
+ root = tree.getroot()
+ attrib = {}
+ measinfo = ElementTree.SubElement(root[1], 'measInfo', attrib)
+ attrib = {'jobId': jobid}
+ ElementTree.SubElement(measinfo, 'job', attrib)
+ ElementTree.SubElement(measinfo, 'granPeriod', {'duration': 'PT900S', 'endTime': '2000-03-01T14:14:30+02:00'})
+ ElementTree.SubElement(measinfo, 'repPeriod', {'duration': 'PT1800S'})
+ for items in range(len(measurement_type)):
+ meastype = ElementTree.SubElement(measinfo, 'measType', {'p': (items + 1).__str__()})
+ meastype.text = measurement_type[items]
+ for items in range(len(meas_object_dn)):
+ measvalue = ElementTree.SubElement(measinfo, 'measValue', {'measObjLdn': meas_object_dn[items]})
+ for item in range(len(measurement_type)):
+ value = ElementTree.SubElement(measvalue, 'r', {'p': (item + 1).__str__()})
+ value.text = randint(100, 900).__str__()
+ tree.write(pm_location + "pm.xml", encoding="utf-8", xml_declaration=True)
+ except Exception as error:
+ logger.debug(error)
+
+ @staticmethod
+ def delete_job_id(jobid):
+ """
+ Delete the measInfo tag matching the given jobid from the existing XML PM file.
+ :param jobid: job id identifying the measInfo tag to remove.
+ """
+ try:
+ script_dir = os.path.dirname(__file__)
+ pm_rel_file_path = "sftp/"
+ pm_location = os.path.join(script_dir, pm_rel_file_path)
+ ElementTree.register_namespace(
+ '', "http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec")
+ tree = ElementTree.parse(pm_location + "pm.xml")
+ root = tree.getroot()
+ for measinfo in root[1].findall(
+ '{http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec}measInfo'):
+ xml_id = measinfo.find(
+ '{http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec}job').attrib
+ if xml_id["jobId"] == jobid:
+ root[1].remove(measinfo)
+ tree.write(pm_location + "pm.xml", encoding="utf-8", xml_declaration=True)
+ except Exception as error:
+ logger.debug(error)
+
+ @staticmethod
+ def pm_job():
+ """
+ Create a timestamp-based gzipped XML file and send a file ready event to the VES collector.
+ """
+ try:
+ script_dir = os.path.dirname(__file__)
+ timestemp = time.time()
+ pm_location = os.path.join(script_dir, 'sftp/')
+ shutil.copy(pm_location + 'pm.xml', pm_location + f'A{timestemp}.xml')
+ with open(pm_location + f'A{timestemp}.xml', 'rb') as f_in:
+ with gzip.open(pm_location + f'A{timestemp}.xml.gz', 'wb') as f_out:
+ shutil.copyfileobj(f_in, f_out)
+ os.remove(pm_location + f'A{timestemp}.xml')
+ with open(os.path.join(script_dir, 'FileReadyEvent.json')) as json_file:
+ data = json_file.read().replace("pmfilename", str(timestemp))
+ eventdata = json.loads(data)
+ session = requests.Session()
+ url = f'https://{pnfconfig.VES_IP}:{pnfconfig.VES_PORT}/eventListener/v7'
+ logger.debug(f'Sending File Ready Event to VES Collector {url} -- data @{data}')
+ headers = {'content-type': 'application/json',
+ 'x-transactionid': '123456'}
+ response = session.post(url, json=eventdata, headers=headers,
+ auth=HTTPBasicAuth(pnfconfig.VES_USER, pnfconfig.VES_PASS),
+ verify=False)
+ response.raise_for_status()
+ except Exception as error:
+ logger.debug(f'Exception caught {error}', exc_info=True)
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/requirements.txt b/test/mocks/pmsh-pnf-sim/docker-compose/requirements.txt
new file mode 100644
index 000000000..c66dac877
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/requirements.txt
@@ -0,0 +1,3 @@
+schedule==0.6.0
+PyYAML==5.3.1
+requests==2.24.0
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/schedulepmjob.py b/test/mocks/pmsh-pnf-sim/docker-compose/schedulepmjob.py
new file mode 100644
index 000000000..2e916deb5
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/schedulepmjob.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python3
+import logging.config
+import os
+import sys
+import time
+
+import schedule
+import yaml
+
+from app_config import pnfconfig
+from pnf import PNF
+
+log_file_path = os.path.join(os.path.dirname(__file__), 'app_config/logger_config.yaml')
+with open(log_file_path, 'r') as f:
+ log_cfg = yaml.safe_load(f.read())
+logging.config.dictConfig(log_cfg)
+logger = logging.getLogger('dev')
+
+if __name__ == "__main__":
+ try:
+ schedule.every(pnfconfig.ROP).seconds.do(PNF.pm_job)
+ logger.info('Starting PM scheduling job')
+ while True:
+ schedule.run_pending()
+ time.sleep(1)
+ except Exception as error:
+ logger.debug(error)
+ sys.exit(1)
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/sftp/pm.xml b/test/mocks/pmsh-pnf-sim/docker-compose/sftp/pm.xml
new file mode 100644
index 000000000..41344f1e1
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/sftp/pm.xml
@@ -0,0 +1,41 @@
+<?xml version='1.0' encoding='utf-8'?>
+<measCollecFile xmlns="http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec">
+ <fileHeader dnPrefix="DC=a1.companyNN.com,SubNetwork=1,IRPAgent=1" fileFormatVersion="32.435 V7.0" vendorName="Company NN">
+ <fileSender elementType="RNC" localDn="SubNetwork=CountryNN,MeContext=MEC-Gbg-1,ManagedElement=RNC-Gbg-1" />
+ <measCollec beginTime="2000-03-01T14:00:00+02:00" />
+ </fileHeader>
+ <measData>
+ <managedElement localDn="SubNetwork=CountryNN,MeContext=MEC-Gbg-1,ManagedElement=RNC-Gbg-1" userLabel="RNC Telecomville" />
+ <measInfo>
+ <job jobId="sub0" />
+ <granPeriod duration="PT900S" endTime="2000-03-01T14:14:30+02:00" />
+ <repPeriod duration="PT1800S" />
+ <measType p="1">attTCHSeizures</measType>
+ <measType p="2">succTCHSeizures</measType>
+ <measType p="3">attImmediateAssignProcs</measType>
+ <measType p="4">succImmediateAssignProcs</measType>
+ <measValue measObjLdn="RncFunction=RF-1,UtranCell=Gbg-997">
+ <r p="1">234</r>
+ <r p="2">345</r>
+ <r p="3">567</r>
+ <r p="4">789</r>
+ </measValue>
+ <measValue measObjLdn="RncFunction=RF-1,UtranCell=Gbg-998">
+ <r p="1">890</r>
+ <r p="2">901</r>
+ <r p="3">123</r>
+ <r p="4">234</r>
+ </measValue>
+ <measValue measObjLdn="RncFunction=RF-1,UtranCell=Gbg-999">
+ <r p="1">456</r>
+ <r p="2">567</r>
+ <r p="3">678</r>
+ <r p="4">789</r>
+ <suspect>true</suspect>
+ </measValue>
+ </measInfo>
+ </measData>
+ <fileFooter>
+ <measCollec endTime="2000-03-01T14:15:00+02:00" />
+ </fileFooter>
+</measCollecFile> \ No newline at end of file
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/startup.xml b/test/mocks/pmsh-pnf-sim/docker-compose/startup.xml
new file mode 100644
index 000000000..7bd895093
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/startup.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<subscriptions xmlns="http://onap.org/pnf-subscriptions">
+ <configuration>
+ <subscriptionName>sub0</subscriptionName>
+ <administrativeState>UNLOCKED</administrativeState>
+ <fileBasedGP>15</fileBasedGP>
+ <fileLocation>c://PM</fileLocation>
+ <measurementGroups>
+ <id>1</id>
+ <measurementGroup>
+ <measurementTypes>
+ <measurementType>EutranCellRelation.pmCounter1</measurementType>
+ </measurementTypes>
+ <measurementTypes>
+ <measurementType>EutranCellRelation.pmCounter2</measurementType>
+ </measurementTypes>
+ <managedObjectDNsBasic>
+ <DN>ManagedElement=1,ENodeBFunction=1,EUtranCell=CityCenter1</DN>
+ </managedObjectDNsBasic>
+ <managedObjectDNsBasic>
+ <DN>ManagedElement=1,ENodeBFunction=1,EUtranCell=CityCenter1, EUtranCellRelation=CityCenter2</DN>
+ </managedObjectDNsBasic>
+ </measurementGroup>
+ </measurementGroups>
+ </configuration>
+</subscriptions> \ No newline at end of file
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/subscriber.py b/test/mocks/pmsh-pnf-sim/docker-compose/subscriber.py
new file mode 100755
index 000000000..cc2a24e46
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/subscriber.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python3
+
+import logging.config
+import os
+import re
+
+import sysrepo as sr
+import yaml
+
+from pnf import PNF
+
+log_file_path = os.path.join(os.path.dirname(__file__), 'app_config/logger_config.yaml')
+with open(log_file_path, 'r') as f:
+ log_cfg = yaml.safe_load(f.read())
+logging.config.dictConfig(log_cfg)
+logger = logging.getLogger('dev')
+
+
+def module_change_cb(sess, module_name, event, private_ctx):
+ """ Handle event change based on yang operation. """
+ try:
+ change_path = f'/{module_name}:*'
+ iterate = sess.get_changes_iter(change_path)
+ change = sess.get_change_next(iterate)
+ changelist = []
+ operation = change.oper()
+ pnf = PNF()
+ if event == sr.SR_EV_APPLY:
+ logger.info('------------------> Start Handle Change <------------------')
+ if operation == sr.SR_OP_CREATED:
+ create_sub(changelist, iterate, pnf, sess)
+ elif operation == sr.SR_OP_DELETED:
+ delete_sub(change, changelist, pnf)
+ elif operation == sr.SR_OP_MODIFIED:
+ edit_sub(change, changelist, module_name, pnf, sess)
+ else:
+ logger.info('Unknown Operation')
+ logger.info('------------------> End Handle Change <------------------')
+ except Exception as error:
+ logger.info(error, exc_info=True)
+ return sr.SR_ERR_OK
+
+
+def edit_sub(change, changelist, module_name, pnf, sess):
+ changelist.append(change.new_val().to_string())
+ element = changelist[0]
+ jobid = get_job_id(changelist)
+ administrative_state = ((element.rsplit('/', 1)[1]).split('=', 1))[1].strip()
+ if administrative_state == 'LOCKED':
+ pnf.delete_job_id(jobid)
+ pnf.pm_job()
+ elif administrative_state == 'UNLOCKED':
+ select_xpath = '/' + module_name + ':*//*'
+ values = sess.get_items(select_xpath)
+ if values is not None:
+ for i in range(values.val_cnt()):
+ if jobid in values.val(i).to_string():
+ changelist.append(values.val(i).to_string())
+ pnf.create_job_id(jobid, changelist)
+ pnf.pm_job()
+ logger.info(f'Subscription Modified : {element}')
+
+
+def create_sub(changelist, iterate, pnf, sess):
+ while True:
+ change = sess.get_change_next(iterate)
+ if change is None:
+ break
+ changelist.append(change.new_val().to_string())
+ jobid = get_job_id(changelist)
+ pnf.create_job_id(jobid, changelist)
+ pnf.pm_job()
+ logger.info(f'Subscription Created : {changelist[0]}')
+
+
+def delete_sub(change, changelist, pnf):
+ changelist.append(change.old_val().to_string())
+ jobid = get_job_id(changelist)
+ pnf.delete_job_id(jobid)
+ pnf.pm_job()
+ logger.info(f'Subscription Deleted : {changelist[0]}')
+
+
+def get_job_id(changelist):
+ result = re.findall(r'\'(.*?)\'', changelist[0])
+ jobid = result[0]
+ return jobid
+
+
+def start():
+ """ main function to create connection based on module name. """
+ try:
+ module_name = 'pnf-subscriptions'
+ conn = sr.Connection(module_name)
+ sess = sr.Session(conn)
+ subscribe = sr.Subscribe(sess)
+ subscribe.module_change_subscribe(module_name, module_change_cb)
+ sr.global_loop()
+ logger.info('Application exit requested, exiting.')
+ except Exception as error:
+ logger.error(error, exc_info=True)
+
+
+if __name__ == '__main__':
+ start()
diff --git a/test/mocks/pnf-onboarding/README.md b/test/mocks/pnf-onboarding/README.md
index b14b34d95..b75a77631 100644
--- a/test/mocks/pnf-onboarding/README.md
+++ b/test/mocks/pnf-onboarding/README.md
@@ -1,20 +1,22 @@
-PNF Package for Integration Test
-================================
+# PNF Package for Integration Test
**NOTE: Requires openssl to be preinstalled.**
This module builds 3 PNF packages based on the files in `/src/main/resources/csarContent/`
1. unsigned package:
- `sample-pnf-1.0.1-SNAPSHOT.csar`
+ `sample-pnf-1.0.1-SNAPSHOT.csar`
2. signed packages:
- A) `sample-signed-pnf-1.0.1-SNAPSHOT.zip`
- B) `sample-signed-pnf-cms-includes-cert-1.0.1-SNAPSHOT.zip`
- The signed packages are based on ETSI SOL004 Security Option 2. They contain csar, cert and cms files. In package B cms includes cert.
+ A) `sample-signed-pnf-1.0.1-SNAPSHOT.zip`
+ B) `sample-signed-pnf-cms-includes-cert-1.0.1-SNAPSHOT.zip`
+ The signed packages are based on ETSI SOL004 Security Option 2. They contain csar, cert and cms files. In package B cms includes cert.
The packages are generated by running the following command in the same directory as this readme file i.e. pnf-onboarding directory:
-> `$ mvn clean install`
+
+> ```
+> `$ mvn clean install`
+> ```
The packages will be stored in the maven generated `target` directory.
@@ -22,5 +24,7 @@ To be able to use the signed packages in SDC the `src/main/resources/securityCon
If SDC is running in containers locally then the following commands could be used to copy the root.cert to the default location in SDC Onboarding Container. It is assumed that the commands are executed from inside pnf-onboarding directory.
-> `$ docker exec -it <sdc-onboard-backend-container-id> mkdir -p /var/lib/jetty/cert`
-> `$ docker cp src/main/resources/securityContent/root.cert <sdc-onboard-backend-container-id>:/var/lib/jetty/cert` \ No newline at end of file
+> ```
+> `$ docker exec -it <sdc-onboard-backend-container-id> mkdir -p /var/lib/jetty/cert`
+> `$ docker cp src/main/resources/securityContent/root.cert <sdc-onboard-backend-container-id>:/var/lib/jetty/cert`
+> ```
diff --git a/test/mocks/pnf-onboarding/src/main/resources/csarContent/Definitions/etsi_nfv_sol001_pnfd_2_5_1_types.yaml b/test/mocks/pnf-onboarding/src/main/resources/csarContent/Definitions/etsi_nfv_sol001_pnfd_2_5_1_types.yaml
index f05d273a1..2eae43f8d 100644
--- a/test/mocks/pnf-onboarding/src/main/resources/csarContent/Definitions/etsi_nfv_sol001_pnfd_2_5_1_types.yaml
+++ b/test/mocks/pnf-onboarding/src/main/resources/csarContent/Definitions/etsi_nfv_sol001_pnfd_2_5_1_types.yaml
@@ -4,19 +4,19 @@ description: ETSI NFV SOL 001 pnfd types definitions version 2.5.1
imports:
- etsi_nfv_sol001_vnfd_2_5_1_types.yaml
-data_types:
+data_types:
tosca.datatypes.nfv.L2AddressData:
derived_from: tosca.datatypes.Root
- description: Describes the information on the MAC addresses to be assigned to a connection point.
+ description: Describes the information on the MAC addresses to be assigned to a connection point.
properties:
mac_address_assignment:
type: boolean
description: Specifies if the address assignment is the responsibility of management and orchestration function or not. If it is set to True, it is the management and orchestration function responsibility
- required: true
-
+ required: true
+
tosca.datatypes.nfv.L3AddressData:
derived_from: tosca.datatypes.Root
- description: Provides information about Layer 3 level addressing scheme and parameters applicable to a CP
+ description: Provides information about Layer 3 level addressing scheme and parameters applicable to a CP
properties:
ip_address_assignment:
type: boolean
@@ -24,7 +24,7 @@ data_types:
required: true
floating_ip_activated:
type: boolean
- description: Specifies if the floating IP scheme is activated on the Connection Point or not
+ description: Specifies if the floating IP scheme is activated on the Connection Point or not
required: true
ip_address_type:
type: string
@@ -34,14 +34,14 @@ data_types:
- valid_values: [ ipv4, ipv6 ]
number_of_ip_address:
type: integer
- description: Minimum number of IP addresses to be assigned
+ description: Minimum number of IP addresses to be assigned
required: false
constraints:
- greater_than: 0
tosca.datatypes.nfv.AddressData:
derived_from: tosca.datatypes.Root
- description: Describes information about the addressing scheme and parameters applicable to a CP
+ description: Describes information about the addressing scheme and parameters applicable to a CP
properties:
address_type:
type: string
@@ -55,9 +55,9 @@ data_types:
required: false
l3_address_data:
type: tosca.datatypes.nfv.L3AddressData
- description: Provides the information on the IP addresses to be assigned to a connection point
+ description: Provides the information on the IP addresses to be assigned to a connection point
required: false
-
+
tosca.datatypes.nfv.CpProtocolData:
derived_from: tosca.datatypes.Root
description: Describes and associates the protocol layer that a CP uses together with other protocol and connection point information
@@ -65,18 +65,18 @@ data_types:
associated_layer_protocol:
type: string
required: true
- description: One of the values of the property layer_protocols of the CP
+ description: One of the values of the property layer_protocols of the CP
constraints:
- valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
address_data:
type: list
description: Provides information on the addresses to be assigned to the CP
- entry_schema:
+ entry_schema:
type: tosca.datatypes.nfv.AddressData
-
+
tosca.datatypes.nfv.LocationInfo:
derived_from: tosca.datatypes.Root
- description: Represents geographical information on the location where a PNF is deployed.
+ description: Represents geographical information on the location where a PNF is deployed.
properties:
country_code:
type: string # two-letter ISO 3166 country code
@@ -91,7 +91,7 @@ data_types:
tosca.datatypes.nfv.CivicAddressElement:
derived_from: tosca.datatypes.Root
- description: Represents an element of a civic location as specified in IETF RFC 4776 [11].
+ description: Represents an element of a civic location as specified in IETF RFC 4776 [11].
properties:
ca_type:
type: string # RFC4776
@@ -106,21 +106,21 @@ capability_types:
tosca.capabilities.nfv.VirtualLinkable:
derived_from: tosca.capabilities.Node
description: A node type that includes the VirtualLinkable capability indicates that it can be pointed by tosca.relationships.nfv.VirtualLinksTo relationship type
-
+
relationship_types:
tosca.relationships.nfv.VirtualLinksTo:
derived_from: tosca.relationships.DependsOn
- description: Represents an association relationship between the VNF or PNF or Sap of a Nested NS and NsVirtualLink node types
- valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ]
-
+ description: Represents an association relationship between the VNF or PNF or Sap of a Nested NS and NsVirtualLink node types
+ valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ]
+
node_types:
tosca.nodes.nfv.Cp:
derived_from: tosca.nodes.Root
- description: Provides information regarding the purpose of the connection point
+ description: Provides information regarding the purpose of the connection point
properties:
layer_protocols:
type: list
- description: Identifies which protocol the connection point uses for connectivity purposes
+ description: Identifies which protocol the connection point uses for connectivity purposes
required: true
entry_schema:
type: string
@@ -128,17 +128,17 @@ node_types:
- valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
role: #Name in ETSI NFV IFA011 v0.7.3: cpRole
type: string
- description: Identifies the role of the port in the context of the traffic flow patterns in the VNF or parent NS
+ description: Identifies the role of the port in the context of the traffic flow patterns in the VNF or parent NS
required: false
constraints:
- valid_values: [ root, leaf ]
description:
type: string
- description: Provides human-readable information on the purpose of the connection point
+ description: Provides human-readable information on the purpose of the connection point
required: false
protocol:
type: list
- description: Provides information on the addresses to be assigned to the connection point(s) instantiated from this Connection Point Descriptor
+ description: Provides information on the addresses to be assigned to the connection point(s) instantiated from this Connection Point Descriptor
required: true
entry_schema:
type: tosca.datatypes.nfv.CpProtocolData
@@ -149,7 +149,7 @@ node_types:
tosca.nodes.nfv.PNF:
derived_from: tosca.nodes.Root
- properties:
+ properties:
descriptor_id: # instead of pnfd_id
type: string # GUID
required: true
@@ -168,7 +168,7 @@ node_types:
name:
type: string
required: true
- geographical_location_info:
+ geographical_location_info:
type: tosca.datatypes.nfv.LocationInfo
required: false
requirements:
@@ -184,9 +184,9 @@ node_types:
requirements:
- external_virtual_link:
capability: tosca.capabilities.nfv.VirtualLinkable
- relationship: tosca.relationships.nfv.VirtualLinksTo
-
-policy_types:
+ relationship: tosca.relationships.nfv.VirtualLinksTo
+
+policy_types:
tosca.policies.nfv.SecurityGroupRule:
derived_from: tosca.policies.Root
description: The SecurityGroupRule type is a policy type specified the matching criteria for the ingress and/or egress traffic to/from visited connection points as defined in ETSI GS NFV-IFA 011 [1].
@@ -232,4 +232,4 @@ policy_types:
- greater_or_equal: 0
- less_or_equal: 65535
default: 65535
- targets: [ tosca.nodes.nfv.PnfExtCp ]
+ targets: [ tosca.nodes.nfv.PnfExtCp ]
diff --git a/test/mocks/pnf-onboarding/src/main/resources/csarContent/Definitions/etsi_nfv_sol001_vnfd_2_5_1_types.yaml b/test/mocks/pnf-onboarding/src/main/resources/csarContent/Definitions/etsi_nfv_sol001_vnfd_2_5_1_types.yaml
index 7d8391219..63e8b2ec6 100644
--- a/test/mocks/pnf-onboarding/src/main/resources/csarContent/Definitions/etsi_nfv_sol001_vnfd_2_5_1_types.yaml
+++ b/test/mocks/pnf-onboarding/src/main/resources/csarContent/Definitions/etsi_nfv_sol001_vnfd_2_5_1_types.yaml
@@ -4,16 +4,16 @@ description: ETSI NFV SOL 001 vnfd types definitions version 2.5.1
data_types:
tosca.datatypes.nfv.L2AddressData:
derived_from: tosca.datatypes.Root
- description: Describes the information on the MAC addresses to be assigned to a connection point.
+ description: Describes the information on the MAC addresses to be assigned to a connection point.
properties:
mac_address_assignment:
type: boolean
description: Specifies if the address assignment is the responsibility of management and orchestration function or not. If it is set to True, it is the management and orchestration function responsibility
- required: true
-
+ required: true
+
tosca.datatypes.nfv.L3AddressData:
derived_from: tosca.datatypes.Root
- description: Provides information about Layer 3 level addressing scheme and parameters applicable to a CP
+ description: Provides information about Layer 3 level addressing scheme and parameters applicable to a CP
properties:
ip_address_assignment:
type: boolean
@@ -21,7 +21,7 @@ data_types:
required: true
floating_ip_activated:
type: boolean
- description: Specifies if the floating IP scheme is activated on the Connection Point or not
+ description: Specifies if the floating IP scheme is activated on the Connection Point or not
required: true
ip_address_type:
type: string
@@ -31,14 +31,14 @@ data_types:
- valid_values: [ ipv4, ipv6 ]
number_of_ip_address:
type: integer
- description: Minimum number of IP addresses to be assigned
+ description: Minimum number of IP addresses to be assigned
required: false
constraints:
- greater_than: 0
tosca.datatypes.nfv.AddressData:
derived_from: tosca.datatypes.Root
- description: Describes information about the addressing scheme and parameters applicable to a CP
+ description: Describes information about the addressing scheme and parameters applicable to a CP
properties:
address_type:
type: string
@@ -52,16 +52,16 @@ data_types:
required: false
l3_address_data:
type: tosca.datatypes.nfv.L3AddressData
- description: Provides the information on the IP addresses to be assigned to a connection point
+ description: Provides the information on the IP addresses to be assigned to a connection point
required: false
tosca.datatypes.nfv.VirtualNetworkInterfaceRequirements:
derived_from: tosca.datatypes.Root
- description: Describes requirements on a virtual network interface
+ description: Describes requirements on a virtual network interface
properties:
name:
type: string
- description: Provides a human readable name for the requirement.
+ description: Provides a human readable name for the requirement.
required: false
description:
type: string
@@ -76,7 +76,7 @@ data_types:
description: The network interface requirements. A map of strings that contain a set of key-value pairs that describes the hardware platform specific network interface deployment requirements.
required: true
entry_schema:
- type: string
+ type: string
nic_io_requirements:
type: tosca.datatypes.nfv.LogicalNodeData
description: references (couples) the CP with any logical node I/O requirements (for network devices) that may have been created. Linking these attributes is necessary so that so that I/O requirements that need to be articulated at the logical node level can be associated with the network interface requirements associated with the CP.
@@ -84,7 +84,7 @@ data_types:
tosca.datatypes.nfv.ConnectivityType:
derived_from: tosca.datatypes.Root
- description: describes additional connectivity information of a virtualLink
+ description: describes additional connectivity information of a virtualLink
properties:
layer_protocols:
type: list
@@ -133,7 +133,7 @@ data_types:
description: supports the specification of requirements related to virtual memory of a virtual compute resource
properties:
virtual_mem_size:
- type: scalar-unit.size
+ type: scalar-unit.size
description: Amount of virtual memory.
required: true
virtual_mem_oversubscription_policy:
@@ -145,7 +145,7 @@ data_types:
description: The hardware platform specific VDU memory requirements. A map of strings that contains a set of key-value pairs that describes hardware platform specific VDU memory requirements.
required: false
entry_schema:
- type: string
+ type: string
numa_enabled:
type: boolean
description: It specifies the memory allocation to be cognisant of the relevant process/core allocation.
@@ -179,7 +179,7 @@ data_types:
description: The hardware platform specific VDU CPU requirements. A map of strings that contains a set of key-value pairs describing VDU CPU specific hardware platform requirements.
required: false
entry_schema:
- type: string
+ type: string
virtual_cpu_pinning:
type: tosca.datatypes.nfv.VirtualCpuPinning
description: The virtual CPU pinning configuration for the virtualised compute resource.
@@ -204,12 +204,12 @@ data_types:
tosca.datatypes.nfv.VnfcConfigurableProperties:
derived_from: tosca.datatypes.Root
- description: Defines the configurable properties of a VNFC
+ description: Defines the configurable properties of a VNFC
# properties:
# additional_vnfc_configurable_properties:
- # type: tosca.datatypes.nfv.VnfcAdditionalConfigurableProperties
+ # type: tosca.datatypes.nfv.VnfcAdditionalConfigurableProperties
# description: Describes additional configuration for VNFC that
- # can be modified using the ModifyVnfInfo operation
+ # can be modified using the ModifyVnfInfo operation
# required: false
# derived types are expected to introduce
# additional_vnfc_configurable_properties with its type derived from
@@ -225,13 +225,13 @@ data_types:
properties:
min_number_of_instances:
type: integer
- description: Minimum number of instances of the VNFC based on this Vdu.Compute that is permitted to exist for a particular VNF deployment flavour.
+ description: Minimum number of instances of the VNFC based on this Vdu.Compute that is permitted to exist for a particular VNF deployment flavour.
required: true
constraints:
- greater_or_equal: 0
max_number_of_instances:
type: integer
- description: Maximum number of instances of the VNFC based on this Vdu.Compute that is permitted to exist for a particular VNF deployment flavour.
+ description: Maximum number of instances of the VNFC based on this Vdu.Compute that is permitted to exist for a particular VNF deployment flavour.
required: true
constraints:
- greater_or_equal: 0
@@ -264,19 +264,19 @@ data_types:
description: describes one protocol layer and associated protocol data for a given virtual link used in a specific VNF deployment flavour
properties:
associated_layer_protocol:
- type: string
- description: Identifies one of the protocols a virtualLink gives access to (ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire) as specified by the connectivity_type property.
- required: true
- constraints:
- - valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
+ type: string
+ description: Identifies one of the protocols a virtualLink gives access to (ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire) as specified by the connectivity_type property.
+ required: true
+ constraints:
+ - valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
l2_protocol_data:
- type: tosca.datatypes.nfv.L2ProtocolData
- description: Specifies the L2 protocol data for a virtual link. Shall be present when the associatedLayerProtocol attribute indicates a L2 protocol and shall be absent otherwise.
- required: false
+ type: tosca.datatypes.nfv.L2ProtocolData
+ description: Specifies the L2 protocol data for a virtual link. Shall be present when the associatedLayerProtocol attribute indicates a L2 protocol and shall be absent otherwise.
+ required: false
l3_protocol_data:
- type: tosca.datatypes.nfv.L3ProtocolData
- description: Specifies the L3 protocol data for this virtual link. Shall be present when the associatedLayerProtocol attribute indicates a L3 protocol and shall be absent otherwise.
- required: false
+ type: tosca.datatypes.nfv.L3ProtocolData
+ description: Specifies the L3 protocol data for this virtual link. Shall be present when the associatedLayerProtocol attribute indicates a L3 protocol and shall be absent otherwise.
+ required: false
tosca.datatypes.nfv.L2ProtocolData:
derived_from: tosca.datatypes.Root
@@ -302,7 +302,7 @@ data_types:
description: Specifies the maximum transmission unit (MTU) value for this L2 protocol.
required: false
constraints:
- - greater_than: 0
+ - greater_than: 0
tosca.datatypes.nfv.L3ProtocolData:
derived_from: tosca.datatypes.Root
@@ -321,7 +321,7 @@ data_types:
cidr:
type: string
description: Specifies the CIDR (Classless Inter-Domain Routing) of this L3 protocol. The value may be overridden at run-time.
- required: true
+ required: true
ip_allocation_pools:
type: list
description: Specifies the allocation pools with start and end IP addresses for this L3 protocol. The value may be overridden at run-time.
@@ -358,11 +358,11 @@ data_types:
tosca.datatypes.nfv.InstantiationLevel:
derived_from: tosca.datatypes.Root
- description: Describes the scale level for each aspect that corresponds to a given level of resources to be instantiated within a deployment flavour in term of the number VNFC instances
+ description: Describes the scale level for each aspect that corresponds to a given level of resources to be instantiated within a deployment flavour in term of the number VNFC instances
properties:
description:
type: string
- description: Human readable description of the level
+ description: Human readable description of the level
required: true
scale_info:
type: map # key: aspectId
@@ -373,50 +373,50 @@ data_types:
tosca.datatypes.nfv.VduLevel:
derived_from: tosca.datatypes.Root
- description: Indicates for a given Vdu.Compute in a given level the number of instances to deploy
+ description: Indicates for a given Vdu.Compute in a given level the number of instances to deploy
properties:
number_of_instances:
type: integer
- description: Number of instances of VNFC based on this VDU to deploy for this level.
+ description: Number of instances of VNFC based on this VDU to deploy for this level.
required: true
constraints:
- greater_or_equal: 0
tosca.datatypes.nfv.VnfLcmOperationsConfiguration:
derived_from: tosca.datatypes.Root
- description: Represents information to configure lifecycle management operations
+ description: Represents information to configure lifecycle management operations
properties:
instantiate:
type: tosca.datatypes.nfv.VnfInstantiateOperationConfiguration
- description: Configuration parameters for the InstantiateVnf operation
+ description: Configuration parameters for the InstantiateVnf operation
required: false
scale:
type: tosca.datatypes.nfv.VnfScaleOperationConfiguration
- description: Configuration parameters for the ScaleVnf operation
+ description: Configuration parameters for the ScaleVnf operation
required: false
scale_to_level:
type: tosca.datatypes.nfv.VnfScaleToLevelOperationConfiguration
- description: Configuration parameters for the ScaleVnfToLevel operation
+ description: Configuration parameters for the ScaleVnfToLevel operation
required: false
change_flavour:
type: tosca.datatypes.nfv.VnfChangeFlavourOperationConfiguration
- description: Configuration parameters for the changeVnfFlavourOpConfig operation
+ description: Configuration parameters for the changeVnfFlavourOpConfig operation
required: false
heal:
type: tosca.datatypes.nfv.VnfHealOperationConfiguration
- description: Configuration parameters for the HealVnf operation
+ description: Configuration parameters for the HealVnf operation
required: false
terminate:
type: tosca.datatypes.nfv.VnfTerminateOperationConfiguration
- description: Configuration parameters for the TerminateVnf operation
+ description: Configuration parameters for the TerminateVnf operation
required: false
operate:
type: tosca.datatypes.nfv.VnfOperateOperationConfiguration
- description: Configuration parameters for the OperateVnf operation
+ description: Configuration parameters for the OperateVnf operation
required: false
change_ext_connectivity:
- type: tosca.datatypes.nfv.VnfChangeExtConnectivityOperationConfiguration
- description: Configuration parameters for the changeExtVnfConnectivityOpConfig operation
+ type: tosca.datatypes.nfv.VnfChangeExtConnectivityOperationConfiguration
+ description: Configuration parameters for the changeExtVnfConnectivityOpConfig operation
required: false
tosca.datatypes.nfv.VnfInstantiateOperationConfiguration:
@@ -425,7 +425,7 @@ data_types:
tosca.datatypes.nfv.VnfScaleOperationConfiguration:
derived_from: tosca.datatypes.Root
- description: Represents information that affect the invocation of the ScaleVnf operation
+ description: Represents information that affect the invocation of the ScaleVnf operation
properties:
scaling_by_more_than_one_step_supported:
type: boolean
@@ -444,22 +444,22 @@ data_types:
tosca.datatypes.nfv.VnfHealOperationConfiguration:
derived_from: tosca.datatypes.Root
- description: represents information that affect the invocation of the HealVnf operation
+ description: represents information that affect the invocation of the HealVnf operation
properties:
causes:
type: list
- description: Supported "cause" parameter values
+ description: Supported "cause" parameter values
required: false
entry_schema:
type: string
tosca.datatypes.nfv.VnfTerminateOperationConfiguration:
derived_from: tosca.datatypes.Root
- description: represents information that affect the invocation of the TerminateVnf
+ description: represents information that affect the invocation of the TerminateVnf
properties:
min_graceful_termination_timeout:
type: scalar-unit.time
- description: Minimum timeout value for graceful termination of a VNF instance
+ description: Minimum timeout value for graceful termination of a VNF instance
required: true
max_recommended_graceful_termination_timeout:
type: scalar-unit.time
@@ -468,11 +468,11 @@ data_types:
tosca.datatypes.nfv.VnfOperateOperationConfiguration:
derived_from: tosca.datatypes.Root
- description: represents information that affect the invocation of the OperateVnf operation
+ description: represents information that affect the invocation of the OperateVnf operation
properties:
min_graceful_stop_timeout:
type: scalar-unit.time
- description: Minimum timeout value for graceful stop of a VNF instance
+ description: Minimum timeout value for graceful stop of a VNF instance
required: true
max_recommended_graceful_stop_timeout:
type: scalar-unit.time
@@ -481,11 +481,11 @@ data_types:
tosca.datatypes.nfv.ScaleInfo:
derived_from: tosca.datatypes.Root
- description: Indicates for a given scaleAspect the corresponding scaleLevel
+ description: Indicates for a given scaleAspect the corresponding scaleLevel
properties:
scale_level:
type: integer
- description: The scale level for a particular aspect
+ description: The scale level for a particular aspect
required: true
constraints:
- greater_or_equal: 0
@@ -512,7 +512,7 @@ data_types:
tosca.datatypes.nfv.LinkBitrateRequirements:
derived_from: tosca.datatypes.Root
- description: describes the requirements in terms of bitrate for a virtual link
+ description: describes the requirements in terms of bitrate for a virtual link
properties:
root:
type: integer # in bits per second
@@ -529,26 +529,26 @@ data_types:
tosca.datatypes.nfv.Qos:
derived_from: tosca.datatypes.Root
- description: describes QoS data for a given VL used in a VNF deployment flavour
+ description: describes QoS data for a given VL used in a VNF deployment flavour
properties:
latency:
type: scalar-unit.time #Number
- description: Specifies the maximum latency
+ description: Specifies the maximum latency
required: true
- constraints:
+ constraints:
- greater_than: 0 s
packet_delay_variation:
type: scalar-unit.time #Number
- description: Specifies the maximum jitter
+ description: Specifies the maximum jitter
required: true
- constraints:
+ constraints:
- greater_or_equal: 0 s
packet_loss_ratio:
type: float
- description: Specifies the maximum packet loss ratio
+ description: Specifies the maximum packet loss ratio
required: false
constraints:
- - in_range: [ 0.0, 1.0 ]
+ - in_range: [ 0.0, 1.0 ]
tosca.datatypes.nfv.VnfConfigurableProperties:
derived_from: tosca.datatypes.Root
@@ -556,15 +556,15 @@ data_types:
properties:
is_autoscale_enabled:
type: boolean
- description: It permits to enable (TRUE)/disable (FALSE) the auto-scaling functionality. If the properties is not present for configuring, then VNF property is not supported
+ description: It permits to enable (TRUE)/disable (FALSE) the auto-scaling functionality. If the properties is not present for configuring, then VNF property is not supported
required: false
is_autoheal_enabled:
type: boolean
- description: It permits to enable (TRUE)/disable (FALSE) the auto-healing functionality. If the properties is not present for configuring, then VNF property is not supported
+ description: It permits to enable (TRUE)/disable (FALSE) the auto-healing functionality. If the properties is not present for configuring, then VNF property is not supported
required: false
# additional_configurable_properties:
# description: It provides VNF specific configurable properties that
- # can be modified using the ModifyVnfInfo operation
+ # can be modified using the ModifyVnfInfo operation
# required: false
# type: tosca.datatypes.nfv.VnfAdditionalConfigurableProperties
# derived types are expected to introduce
@@ -572,30 +572,30 @@ data_types:
# tosca.datatypes.nfv.VnfAdditionalConfigurableProperties
tosca.datatypes.nfv.VnfAdditionalConfigurableProperties:
- derived_from: tosca.datatypes.Root
+ derived_from: tosca.datatypes.Root
description: is an empty base type for deriving data types for describing additional configurable properties for a given VNF
tosca.datatypes.nfv.VnfInfoModifiableAttributes:
derived_from: tosca.datatypes.Root
- description: Describes VNF-specific extension and metadata for a given VNF
+ description: Describes VNF-specific extension and metadata for a given VNF
#properties:
#extensions:
#type: tosca.datatypes.nfv.VnfInfoModifiableAttributesExtensions
- #description: "Extension" properties of VnfInfo that are writeable
+ #description: "Extension" properties of VnfInfo that are writeable
#required: false
# derived types are expected to introduce
# extensions with its type derived from
# tosca.datatypes.nfv.VnfInfoModifiableAttributesExtensions
#metadata:
#type: tosca.datatypes.nfv.VnfInfoModifiableAttributesMetadata
- #description: "Metadata" properties of VnfInfo that are writeable
+ #description: "Metadata" properties of VnfInfo that are writeable
#required: false
# derived types are expected to introduce
# metadata with its type derived from
# tosca.datatypes.nfv.VnfInfoModifiableAttributesMetadata
tosca.datatypes.nfv.VnfInfoModifiableAttributesExtensions:
- derived_from: tosca.datatypes.Root
+ derived_from: tosca.datatypes.Root
description: is an empty base type for deriving data types for describing VNF-specific extension
tosca.datatypes.nfv.VnfInfoModifiableAttributesMetadata:
@@ -609,13 +609,13 @@ data_types:
associated_layer_protocol:
type: string
required: true
- description: One of the values of the property layer_protocols of the CP
+ description: One of the values of the property layer_protocols of the CP
constraints:
- valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
address_data:
type: list
description: Provides information on the addresses to be assigned to the CP
- entry_schema:
+ entry_schema:
type: tosca.datatypes.nfv.AddressData
required: false
@@ -632,51 +632,51 @@ data_types:
tosca.datatypes.nfv.SwImageData:
derived_from: tosca.datatypes.Root
- description: describes information related to a software image artifact
+ description: describes information related to a software image artifact
properties: # in SOL001 v0.8.0: "properties or metadata:"
name:
type: string
- description: Name of this software image
+ description: Name of this software image
required: true
version:
type: string
- description: Version of this software image
+ description: Version of this software image
required: true
checksum:
type: string
- description: Checksum of the software image file
+ description: Checksum of the software image file
required: true
container_format:
type: string
- description: The container format describes the container file format in which software image is provided
+ description: The container format describes the container file format in which software image is provided
required: true
constraints:
- valid_values: [ aki, ami, ari, bare, docker, ova, ovf ]
disk_format:
type: string
- description: The disk format of a software image is the format of the underlying disk image
+ description: The disk format of a software image is the format of the underlying disk image
required: true
constraints:
- - valid_values: [ aki, ami, ari, iso, qcow2, raw, vdi, vhd, vhdx, vmdk ]
+ - valid_values: [ aki, ami, ari, iso, qcow2, raw, vdi, vhd, vhdx, vmdk ]
min_disk:
type: scalar-unit.size # Number
- description: The minimal disk size requirement for this software image
+ description: The minimal disk size requirement for this software image
required: true
min_ram:
type: scalar-unit.size # Number
- description: The minimal RAM requirement for this software image
+ description: The minimal RAM requirement for this software image
required: false
size:
type: scalar-unit.size # Number
- description: The size of this software image
+ description: The size of this software image
required: true
operating_system:
type: string
- description: Identifies the operating system used in the software image
+ description: Identifies the operating system used in the software image
required: false
supported_virtualisation_environments:
type: list
- description: Identifies the virtualisation environments (e.g. hypervisor) compatible with this software image
+ description: Identifies the virtualisation environments (e.g. hypervisor) compatible with this software image
required: false
entry_schema:
type: string
@@ -692,60 +692,60 @@ data_types:
vdu_storage_requirements:
type: map
description: The hardware platform specific storage requirements. A map of strings that contains a set of key-value pairs that represents the hardware platform specific storage deployment requirements.
- required: false
+ required: false
entry_schema:
type: string
rdma_enabled:
type: boolean
- description: Indicates if the storage support RDMA
+ description: Indicates if the storage support RDMA
required: false
default: false
tosca.datatypes.nfv.VirtualObjectStorageData:
- derived_from: tosca.datatypes.Root
- description: VirtualObjectStorageData describes object storage requirements associated with compute resources in a particular VDU
- properties:
- max_size_of_storage:
- type: scalar-unit.size
- description: Maximum size of virtualized storage resource
- required: false
+ derived_from: tosca.datatypes.Root
+ description: VirtualObjectStorageData describes object storage requirements associated with compute resources in a particular VDU
+ properties:
+ max_size_of_storage:
+ type: scalar-unit.size
+ description: Maximum size of virtualized storage resource
+ required: false
tosca.datatypes.nfv.VirtualFileStorageData:
- derived_from: tosca.datatypes.Root
- description: VirtualFileStorageData describes file storage requirements associated with compute resources in a particular VDU
- properties:
- size_of_storage:
- type: scalar-unit.size
- description: Size of virtualized storage resource
- required: true
- file_system_protocol:
- type: string
- description: The shared file system protocol (e.g. NFS, CIFS)
- required: true
+ derived_from: tosca.datatypes.Root
+ description: VirtualFileStorageData describes file storage requirements associated with compute resources in a particular VDU
+ properties:
+ size_of_storage:
+ type: scalar-unit.size
+ description: Size of virtualized storage resource
+ required: true
+ file_system_protocol:
+ type: string
+ description: The shared file system protocol (e.g. NFS, CIFS)
+ required: true
tosca.datatypes.nfv.VirtualLinkBitrateLevel:
derived_from: tosca.datatypes.Root
- description: Describes bitrate requirements applicable to the virtual link instantiated from a particicular VnfVirtualLink
+ description: Describes bitrate requirements applicable to the virtual link instantiated from a particicular VnfVirtualLink
properties:
bitrate_requirements:
type: tosca.datatypes.nfv.LinkBitrateRequirements
- description: Virtual link bitrate requirements for an instantiation level or bitrate delta for a scaling step
+ description: Virtual link bitrate requirements for an instantiation level or bitrate delta for a scaling step
required: true
tosca.datatypes.nfv.VnfOperationAdditionalParameters:
derived_from: tosca.datatypes.Root
- description: Is an empty base type for deriving data type for describing VNF-specific parameters to be passed when invoking lifecycle management operations
+ description: Is an empty base type for deriving data type for describing VNF-specific parameters to be passed when invoking lifecycle management operations
#properties:
tosca.datatypes.nfv.VnfChangeFlavourOperationConfiguration:
derived_from: tosca.datatypes.Root
- description: represents information that affect the invocation of the ChangeVnfFlavour operation
+ description: represents information that affect the invocation of the ChangeVnfFlavour operation
#properties:
tosca.datatypes.nfv.VnfChangeExtConnectivityOperationConfiguration:
derived_from: tosca.datatypes.Root
- description: represents information that affect the invocation of the ChangeExtVnfConnectivity operation
- #properties:
+ description: represents information that affect the invocation of the ChangeExtVnfConnectivity operation
+ #properties:
tosca.datatypes.nfv.VnfMonitoringParameter:
derived_from: tosca.datatypes.Root
@@ -802,7 +802,7 @@ data_types:
description: Identifies a performance metric derived from those defined in ETSI GS NFV-IFA 027.The packetOutgoingVirtualLink and packetIncomingVirtualLink metrics shall be obtained by aggregation the PacketOutgoing and PacketIncoming measurements defined in clause 7.1 of GS NFV-IFA 027 of all virtual link ports attached to the virtual link to which the metrics apply.
required: true
constraints:
- - valid_values: [ packet_outgoing_virtual_link, packet_incoming_virtual_link ]
+ - valid_values: [ packet_outgoing_virtual_link, packet_incoming_virtual_link ]
collection_period:
type: scalar-unit.time
description: Describes the recommended periodicity at which to collect the performance information.
@@ -833,7 +833,7 @@ data_types:
type: string # shall comply with IETF RFC3986
description: scheme component of a URI.
required: true
- authority:
+ authority:
type: tosca.datatypes.nfv.UriAuthority
description: Authority component of a URI
required: false
@@ -891,11 +891,11 @@ data_types:
artifact_types:
tosca.artifacts.nfv.SwImage:
derived_from: tosca.artifacts.Deployment.Image
- description: describes the software image which is directly loaded on the virtualisation container realizing of the VDU or is to be loaded on a virtual storage resource.
+ description: describes the software image which is directly loaded on the virtualisation container realizing of the VDU or is to be loaded on a virtual storage resource.
tosca.artifacts.Implementation.nfv.Mistral:
derived_from: tosca.artifacts.Implementation
- description: artifacts for Mistral workflows
+ description: artifacts for Mistral workflows
mime_type: application/x-yaml
file_ext: [ yaml ]
@@ -910,61 +910,61 @@ capability_types:
tosca.capabilities.nfv.VirtualCompute:
derived_from: tosca.capabilities.Node
- description: Describes the capabilities related to virtual compute resources
+ description: Describes the capabilities related to virtual compute resources
properties:
logical_node:
type: map
- description: Describes the Logical Node requirements
+ description: Describes the Logical Node requirements
required: false
entry_schema:
- type: tosca.datatypes.nfv.LogicalNodeData
+ type: tosca.datatypes.nfv.LogicalNodeData
requested_additional_capabilities:
type: map
- description: Describes additional capability for a particular VDU
+ description: Describes additional capability for a particular VDU
required: false
entry_schema:
- type: tosca.datatypes.nfv.RequestedAdditionalCapability
+ type: tosca.datatypes.nfv.RequestedAdditionalCapability
compute_requirements:
type: map
- required: false
+ required: false
entry_schema:
- type: string
+ type: string
virtual_memory:
type: tosca.datatypes.nfv.VirtualMemory
- description: Describes virtual memory of the virtualized compute
+ description: Describes virtual memory of the virtualized compute
required: true
virtual_cpu:
type: tosca.datatypes.nfv.VirtualCpu
- description: Describes virtual CPU(s) of the virtualized compute
+ description: Describes virtual CPU(s) of the virtualized compute
required: true
virtual_local_storage:
type: list
- description: A list of virtual system disks created and destroyed as part of the VM lifecycle
+ description: A list of virtual system disks created and destroyed as part of the VM lifecycle
required: false
entry_schema:
type: tosca.datatypes.nfv.VirtualBlockStorageData
- description: virtual system disk definition
+ description: virtual system disk definition
tosca.capabilities.nfv.VirtualStorage:
derived_from: tosca.capabilities.Root
description: Describes the attachment capabilities related to Vdu.Storage
-
+
relationship_types:
tosca.relationships.nfv.VirtualBindsTo:
derived_from: tosca.relationships.DependsOn
- description: Represents an association relationship between Vdu.Compute and VduCp node types
+ description: Represents an association relationship between Vdu.Compute and VduCp node types
valid_target_types: [ tosca.capabilities.nfv.VirtualBindable ]
tosca.relationships.nfv.VirtualLinksTo:
derived_from: tosca.relationships.DependsOn
- description: Represents an association relationship between the VduCp and VnfVirtualLink node types
+ description: Represents an association relationship between the VduCp and VnfVirtualLink node types
valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ]
tosca.relationships.nfv.AttachesTo:
derived_from: tosca.relationships.Root
description: Represents an association relationship between the Vdu.Compute and one of the node types, Vdu.VirtualBlockStorage, Vdu.VirtualObjectStorage or Vdu.VirtualFileStorage
valid_target_types: [ tosca.capabilities.nfv.VirtualStorage ]
-
+
interface_types:
tosca.interfaces.nfv.Vnflcm:
derived_from: tosca.interfaces.Root
@@ -976,7 +976,7 @@ interface_types:
# type: tosca.datatypes.nfv.VnfOperationAdditionalParameters
# required: false
# derived types are expected to introduce additional_parameters with
- # its type derived from
+ # its type derived from
# tosca.datatypes.nfv.VnfOperationAdditionalParameters
instantiate_start:
description: Invoked before instantiate
@@ -1007,8 +1007,8 @@ interface_types:
# additional_parameters:
# type: tosca.datatypes.nfv.VnfOperationAdditionalParameters
# required: false
- # derived types are expected to introduce additional_parameters with
- # its type derived from
+ # derived types are expected to introduce additional_parameters with
+ # its type derived from
# tosca.datatypes.nfv.VnfOperationAdditionalParameters
change_flavour_start:
description: Invoked before change_flavour
@@ -1020,8 +1020,8 @@ interface_types:
# additional_parameters:
# type: tosca.datatypes.nfv.VnfOperationAdditionalParameters
# required: false
- # derived types are expected to introduce additional_parameters with
- # its type derived from
+ # derived types are expected to introduce additional_parameters with
+ # its type derived from
# tosca.datatypes.nfv.VnfOperationAdditionalParameters
change_external_connectivity_start:
description: Invoked before change_external_connectivity
@@ -1033,8 +1033,8 @@ interface_types:
# additional_parameters:
# type: tosca.datatypes.nfv.VnfOperationAdditionalParameters
# required: false
- # derived types are expected to introduce additional_parameters with
- # its type derived from
+ # derived types are expected to introduce additional_parameters with
+ # its type derived from
# tosca.datatypes.nfv.VnfOperationAdditionalParameters
operate_start:
description: Invoked before operate
@@ -1046,8 +1046,8 @@ interface_types:
# additional_parameters:
# type: tosca.datatypes.nfv.VnfOperationAdditionalParameters
# required: false
- # derived types are expected to introduce additional_parameters with
- # its type derived from
+ # derived types are expected to introduce additional_parameters with
+ # its type derived from
# tosca.datatypes.nfv.VnfOperationAdditionalParameters
heal_start:
description: Invoked before heal
@@ -1060,12 +1060,12 @@ interface_types:
# type: tosca.datatypes.nfv.VnfOperationAdditionalParameters
# required: false
# derived types are expected to introduce additional_parameters with
- # its type derived from
+ # its type derived from
# tosca.datatypes.nfv.VnfOperationAdditionalParameters
scale_start:
description: Invoked before scale
scale_end:
- description: Invoked after scale
+ description: Invoked after scale
scale_to_level:
description: Invoked upon receipt of a Scale VNF to Level request
# inputs:
@@ -1079,7 +1079,7 @@ interface_types:
description: Invoked before scale_to_level
scale_to_level_end:
description: Invoked after scale_to_level
-
+
node_types:
tosca.nodes.nfv.VNF:
derived_from: tosca.nodes.Root
@@ -1133,13 +1133,13 @@ node_types:
#type: tosca.datatypes.nfv.VnfConfigurableProperties
#description: Describes the configurable properties of the VNF
#required: false
- # derived types are expected to introduce configurable_properties
- # with its type derived from
+ # derived types are expected to introduce configurable_properties
+ # with its type derived from
# tosca.datatypes.nfv.VnfConfigurableProperties
#modifiable_attributes:
#type: tosca.datatypes.nfv.VnfInfoModifiableAttributes
#description: Describes the modifiable attributes of the VNF
- #required: false
+ #required: false
# derived types are expected to introduce modifiable_attributes
# with its type derived from
# tosca.datatypes.nfv.VnfInfoModifiableAttributes
@@ -1177,11 +1177,11 @@ node_types:
tosca.nodes.nfv.VnfExtCp:
derived_from: tosca.nodes.nfv.Cp
- description: Describes a logical external connection point, exposed by the VNF enabling connection with an external Virtual Link
+ description: Describes a logical external connection point, exposed by the VNF enabling connection with an external Virtual Link
properties:
virtual_network_interface_requirements:
type: list
- description: The actual virtual NIC requirements that is been assigned when instantiating the connection point
+ description: The actual virtual NIC requirements that is been assigned when instantiating the connection point
required: false
entry_schema:
type: tosca.datatypes.nfv.VirtualNetworkInterfaceRequirements
@@ -1195,25 +1195,25 @@ node_types:
tosca.nodes.nfv.Vdu.Compute:
derived_from: tosca.nodes.Root
- description: Describes the virtual compute part of a VDU which is a construct supporting the description of the deployment and operational behavior of a VNFC
+ description: Describes the virtual compute part of a VDU which is a construct supporting the description of the deployment and operational behavior of a VNFC
properties:
name:
type: string
- description: Human readable name of the VDU
+ description: Human readable name of the VDU
required: true
description:
type: string
- description: Human readable description of the VDU
+ description: Human readable description of the VDU
required: true
boot_order:
type: list # explicit index (boot index) not necessary, contrary to IFA011
- description: References a node template name from which a valid boot device is created
+ description: References a node template name from which a valid boot device is created
required: false
entry_schema:
type: string
nfvi_constraints:
type: list
- description: Describes constraints on the NFVI for the VNFC instance(s) created from this VDU
+ description: Describes constraints on the NFVI for the VNFC instance(s) created from this VDU
required: false
entry_schema:
type: string
@@ -1221,21 +1221,21 @@ node_types:
type: list
description: Describes monitoring parameters applicable to a VNFC instantiated from this VDU
required: false
- entry_schema:
+ entry_schema:
type: tosca.datatypes.nfv.VnfcMonitoringParameter
#configurable_properties:
#type: tosca.datatypes.nfv.VnfcConfigurableProperties
- #required: false
+ #required: false
# derived types are expected to introduce
# configurable_properties with its type derived from
# tosca.datatypes.nfv.VnfcConfigurableProperties
vdu_profile:
type: tosca.datatypes.nfv.VduProfile
- description: Defines additional instantiation data for the VDU.Compute node
+ description: Defines additional instantiation data for the VDU.Compute node
required: true
sw_image_data:
type: tosca.datatypes.nfv.SwImageData
- description: Defines information related to a SwImage artifact used by this Vdu.Compute node
+ description: Defines information related to a SwImage artifact used by this Vdu.Compute node
required: false # property is required when the node template has an associated artifact of type tosca.artifacts.nfv.SwImage and not required otherwise
boot_data:
type: string
@@ -1260,18 +1260,18 @@ node_types:
properties:
virtual_block_storage_data:
type: tosca.datatypes.nfv.VirtualBlockStorageData
- description: Describes the block storage characteristics.
+ description: Describes the block storage characteristics.
required: true
sw_image_data:
type: tosca.datatypes.nfv.SwImageData
- description: Defines information related to a SwImage artifact used by this Vdu.Compute node.
+ description: Defines information related to a SwImage artifact used by this Vdu.Compute node.
required: false # property is required when the node template has an associated artifact of type tosca.artifacts.nfv.SwImage and not required otherwise
capabilities:
virtual_storage:
type: tosca.capabilities.nfv.VirtualStorage
description: Defines the capabilities of virtual_storage.
- tosca.nodes.nfv.Vdu.VirtualObjectStorage:
+ tosca.nodes.nfv.Vdu.VirtualObjectStorage:
derived_from: tosca.nodes.Root
description: This node type describes the specifications of requirements related to virtual object storage resources
properties:
@@ -1284,7 +1284,7 @@ node_types:
type: tosca.capabilities.nfv.VirtualStorage
description: Defines the capabilities of virtual_storage.
- tosca.nodes.nfv.Vdu.VirtualFileStorage:
+ tosca.nodes.nfv.Vdu.VirtualFileStorage:
derived_from: tosca.nodes.Root
description: This node type describes the specifications of requirements related to virtual file storage resources
properties:
@@ -1297,18 +1297,18 @@ node_types:
type: tosca.capabilities.nfv.VirtualStorage
description: Defines the capabilities of virtual_storage.
requirements:
- - virtual_link:
- capability: tosca.capabilities.nfv.VirtualLinkable
- relationship: tosca.relationships.nfv.VirtualLinksTo
- #description: Describes the requirements for linking to virtual link
+ - virtual_link:
+ capability: tosca.capabilities.nfv.VirtualLinkable
+ relationship: tosca.relationships.nfv.VirtualLinksTo
+ #description: Describes the requirements for linking to virtual link
tosca.nodes.nfv.Cp:
derived_from: tosca.nodes.Root
- description: Provides information regarding the purpose of the connection point
+ description: Provides information regarding the purpose of the connection point
properties:
layer_protocols:
type: list
- description: Identifies which protocol the connection point uses for connectivity purposes
+ description: Identifies which protocol the connection point uses for connectivity purposes
required: true
entry_schema:
type: string
@@ -1316,17 +1316,17 @@ node_types:
- valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
role: #Name in ETSI NFV IFA011 v0.7.3: cpRole
type: string
- description: Identifies the role of the port in the context of the traffic flow patterns in the VNF or parent NS
+ description: Identifies the role of the port in the context of the traffic flow patterns in the VNF or parent NS
required: false
constraints:
- valid_values: [ root, leaf ]
description:
type: string
- description: Provides human-readable information on the purpose of the connection point
+ description: Provides human-readable information on the purpose of the connection point
required: false
protocol:
type: list
- description: Provides information on the addresses to be assigned to the connection point(s) instantiated from this Connection Point Descriptor
+ description: Provides information on the addresses to be assigned to the connection point(s) instantiated from this Connection Point Descriptor
required: false
entry_schema:
type: tosca.datatypes.nfv.CpProtocolData
@@ -1337,17 +1337,17 @@ node_types:
tosca.nodes.nfv.VduCp:
derived_from: tosca.nodes.nfv.Cp
- description: describes network connectivity between a VNFC instance based on this VDU and an internal VL
+ description: describes network connectivity between a VNFC instance based on this VDU and an internal VL
properties:
bitrate_requirement:
type: integer # in bits per second
- description: Bitrate requirement in bit per second on this connection point
+ description: Bitrate requirement in bit per second on this connection point
required: false
- constraints:
+ constraints:
- greater_or_equal: 0
virtual_network_interface_requirements:
type: list
- description: Specifies requirements on a virtual network interface realising the CPs instantiated from this CPD
+ description: Specifies requirements on a virtual network interface realising the CPs instantiated from this CPD
required: false
entry_schema:
type: tosca.datatypes.nfv.VirtualNetworkInterfaceRequirements
@@ -1357,7 +1357,7 @@ node_types:
required: false
constraints:
- greater_or_equal: 0
- vnic_type:
+ vnic_type:
type: string
description: Describes the type of the virtual network interface realizing the CPs instantiated from this CPD
required: false
@@ -1374,19 +1374,19 @@ node_types:
tosca.nodes.nfv.VnfVirtualLink:
derived_from: tosca.nodes.Root
- description: Describes the information about an internal VNF VL
+ description: Describes the information about an internal VNF VL
properties:
connectivity_type:
type: tosca.datatypes.nfv.ConnectivityType
- description: Specifies the protocol exposed by the VL and the flow pattern supported by the VL
+ description: Specifies the protocol exposed by the VL and the flow pattern supported by the VL
required: true
description:
type: string
- description: Provides human-readable information on the purpose of the VL
+ description: Provides human-readable information on the purpose of the VL
required: false
test_access:
type: list
- description: Test access facilities available on the VL
+ description: Test access facilities available on the VL
required: false
entry_schema:
type: string
@@ -1394,7 +1394,7 @@ node_types:
- valid_values: [ passive_monitoring, active_loopback ]
vl_profile:
type: tosca.datatypes.nfv.VlProfile
- description: Defines additional data for the VL
+ description: Defines additional data for the VL
required: true
monitoring_parameters:
type: list
@@ -1405,7 +1405,7 @@ node_types:
capabilities:
virtual_linkable:
type: tosca.capabilities.nfv.VirtualLinkable
-
+
group_types:
tosca.groups.nfv.PlacementGroup:
derived_from: tosca.groups.Root
@@ -1416,7 +1416,7 @@ group_types:
description: Human readable description of the group
required: true
members: [ tosca.nodes.nfv.Vdu.Compute, tosca.nodes.nfv.VnfVirtualLink ]
-
+
policy_types:
tosca.policies.nfv.InstantiationLevels:
derived_from: tosca.policies.Root
@@ -1435,7 +1435,7 @@ policy_types:
tosca.policies.nfv.VduInstantiationLevels:
derived_from: tosca.policies.Root
- description: The VduInstantiationLevels type is a policy type representing all the instantiation levels of resources to be instantiated within a deployment flavour in term of the number of VNFC instances to be created from each vdu.Compute. as defined in ETSI GS NFV-IFA 011 [1]
+ description: The VduInstantiationLevels type is a policy type representing all the instantiation levels of resources to be instantiated within a deployment flavour in term of the number of VNFC instances to be created from each vdu.Compute. as defined in ETSI GS NFV-IFA 011 [1]
properties:
levels:
type: map # key: levelId
@@ -1447,7 +1447,7 @@ policy_types:
tosca.policies.nfv.VirtualLinkInstantiationLevels:
derived_from: tosca.policies.Root
- description: The VirtualLinkInstantiationLevels type is a policy type representing all the instantiation levels of virtual link resources to be instantiated within a deployment flavour as defined in ETSI GS NFV-IFA 011 [1].
+ description: The VirtualLinkInstantiationLevels type is a policy type representing all the instantiation levels of virtual link resources to be instantiated within a deployment flavour as defined in ETSI GS NFV-IFA 011 [1].
properties:
levels:
type: map # key: levelId
@@ -1589,11 +1589,11 @@ policy_types:
- greater_or_equal: 0
- less_or_equal: 65535
default: 65535
- targets: [ tosca.nodes.nfv.VduCp, tosca.nodes.nfv.VnfExtCp ]
+ targets: [ tosca.nodes.nfv.VduCp, tosca.nodes.nfv.VnfExtCp ]
tosca.policies.nfv.SupportedVnfInterface:
derived_from: tosca.policies.Root
- description: this policy type represents interfaces produced by a VNF, the details to access them and the applicable connection points to use to access these interfaces
+ description: this policy type represents interfaces produced by a VNF, the details to access them and the applicable connection points to use to access these interfaces
properties:
interface_name:
type: string
@@ -1604,6 +1604,5 @@ policy_types:
details:
type: tosca.datatypes.nfv.InterfaceDetails
description: Provide additional data to access the interface endpoint
- required: false
+ required: false
targets: [ tosca.nodes.nfv.VnfExtCp, tosca.nodes.nfv.VduCp ]
-
diff --git a/test/mocks/pnf-onboarding/src/main/resources/csarContent/Definitions/pnf_main_descriptor.yaml b/test/mocks/pnf-onboarding/src/main/resources/csarContent/Definitions/pnf_main_descriptor.yaml
index 400efea52..f0064dee1 100644
--- a/test/mocks/pnf-onboarding/src/main/resources/csarContent/Definitions/pnf_main_descriptor.yaml
+++ b/test/mocks/pnf-onboarding/src/main/resources/csarContent/Definitions/pnf_main_descriptor.yaml
@@ -16,7 +16,7 @@ topology_template:
version: 1.0
descriptor_invariant_id: 1111-2222-ccaa-bbdd
name: Acme PNF
-
+
pnfExtCp_1:
type: tosca.nodes.nfv.PnfExtCp
properties:
@@ -24,5 +24,5 @@ topology_template:
layer_protocols: [ ipv4 ]
role: leaf
description: External connection point to access Acme myPnf
- requirements:
- - dependency: myPnf \ No newline at end of file
+ requirements:
+ - dependency: myPnf
diff --git a/test/mocks/pnf-onboarding/src/main/resources/csarContent/Files/Events/MyPnf_Pnf_v1.yaml b/test/mocks/pnf-onboarding/src/main/resources/csarContent/Files/Events/MyPnf_Pnf_v1.yaml
index 83f1d3e8e..8d3002b36 100644
--- a/test/mocks/pnf-onboarding/src/main/resources/csarContent/Files/Events/MyPnf_Pnf_v1.yaml
+++ b/test/mocks/pnf-onboarding/src/main/resources/csarContent/Files/Events/MyPnf_Pnf_v1.yaml
@@ -1,10 +1,10 @@
---
event:
presence: required
- structure:
+ structure:
commonEventHeader:
presence: required
- structure:
+ structure:
domain: {presence: required, value: notification}
eventName: {presence: required, value: Noti_MyPnf-Acme_FileReady}
priority: {presence: required, value: Normal}
@@ -20,14 +20,14 @@ event:
lastEpochMicrosec: {presence: required}
notificationFields:
presence: required
- structure:
+ structure:
changeIdentifier: {presence: required, value: PM_MEAS_FILES}
changeType: {presence: required, value: fileReady}
notificationFieldsVersion: {presence: required, value: 2.0}
arrayOfNamedHashMap:
presence: required
array:
- - name: {presence: required}
+ - name: {presence: required}
hashMap: {presence: required, structure: {
keyValuePair: {presence: required, structure: {key: {presence: required, value: location}, value: {presence: required}}},
keyValuePair: {presence: required, structure: {key: {presence: required, value: compression}, value: {presence: required, value: gzip}}},
@@ -71,7 +71,7 @@ event:
event:
presence: required
action: [ any, any, null, null, null ]
- comment: "
+ comment: "
ALARM NAME: CertMAutomaticEnrollmentFailed,
ALARM DESCRIPTION: ‘See alarm OPI x/1111-ABC 123 4567/1 Uen’,
ALARM EFFECT: 'See alarm OPI x/2222-ABC 123 4567/1 Uen',
@@ -205,9 +205,9 @@ event:
eventSeverity: {presence: required}
vfStatus: {presence: required, value: Active}
alarmAdditionalInformation: {presence: required, structure: {
- keyValuePair: {presence: required, structure: {key: {presence: required, value: source},value: {presence: required}}},
- keyValuePair: {presence: required, structure: {key: {presence: required, value: probableCause},value: {presence: required, value: 'FileError'}}},
- keyValuePair: {presence: required, structure: {key: {presence: required, value: additionalText},value: {presence: optional}}},
- keyValuePair: {presence: required, structure: {key: {presence: required, value: additionalInfo},value: {presence: optional}}}}
+ keyValuePair: {presence: required, structure: {key: {presence: required, value: source}, value: {presence: required}}},
+ keyValuePair: {presence: required, structure: {key: {presence: required, value: probableCause}, value: {presence: required, value: 'FileError'}}},
+ keyValuePair: {presence: required, structure: {key: {presence: required, value: additionalText}, value: {presence: optional}}},
+ keyValuePair: {presence: required, structure: {key: {presence: required, value: additionalInfo}, value: {presence: optional}}}}
}
-... \ No newline at end of file
+...
diff --git a/test/mocks/pnf-onboarding/src/main/resources/csarContent/Files/Measurements/PM_Dictionary.yaml b/test/mocks/pnf-onboarding/src/main/resources/csarContent/Files/Measurements/PM_Dictionary.yaml
index 858951f7b..d00a5330f 100644
--- a/test/mocks/pnf-onboarding/src/main/resources/csarContent/Files/Measurements/PM_Dictionary.yaml
+++ b/test/mocks/pnf-onboarding/src/main/resources/csarContent/Files/Measurements/PM_Dictionary.yaml
@@ -39,148 +39,148 @@ pmDictionary:
vendor: Acme
pmDictionaryMeasurements:
- - measType: DRB.UEThpDl
- measDescription: Average DL UE throughput in gNB
- measCondition: See 3GPP TS 28.552
- measResultType: integer
- measResultRange: 0-4294967295
- measResultUnits: kbps
- measObjClass: NRCellDU
- measCollectionMethod: DER
- measLastChange: 1.0
- measChangeType: added
- measInfoId: "Data Radio Bearer"
- measFamily: DRB
- measAdditionalFields: {
- "measurementStatus": "USED",
- "initialValue": 0,
- "acmeParameter1": 0,
- "acmeParameter2": true,
- "acmeParameter3": "acmeParameterValue3"}
-
- - measType: VS.ifInDiscards
- measDescription: The number of inbound packets which were chosen to be discarded
- measCondition: The number of inbound packets which were chosen to be
- discarded even though no errors had been detected to prevent
- their being deliverable to a higher-layer protocol. One
- possible reason for discarding such a packet could be to
- free up buffer space.
- Discontinuities in the value of this counter can occur at
- re-initialization of the management system, and at other
- times as indicated by the value of
- ifCounterDiscontinuityTime.
- measResultType: integer
- measResultRange: 0-4294967295
- measResultUnits: number
- measObjClass: EthernetPort
- measCollectionMethod: CC
- measLastChange: 1.0
- measChangeType: added
- measInfoId: "IP Management"
- measFamily: IP
- measAdditionalFields: {
- "measurementStatus": "USED",
- "initialValue": 0,
- "acmeParameter1": 0,
- "acmeParameter2": true,
- "acmeParameter3": "acmeParameterValue3"}
+ - measType: DRB.UEThpDl
+ measDescription: Average DL UE throughput in gNB
+ measCondition: See 3GPP TS 28.552
+ measResultType: integer
+ measResultRange: 0-4294967295
+ measResultUnits: kbps
+ measObjClass: NRCellDU
+ measCollectionMethod: DER
+ measLastChange: 1.0
+ measChangeType: added
+ measInfoId: "Data Radio Bearer"
+ measFamily: DRB
+ measAdditionalFields: {
+ "measurementStatus": "USED",
+ "initialValue": 0,
+ "acmeParameter1": 0,
+ "acmeParameter2": true,
+ "acmeParameter3": "acmeParameterValue3"}
- - measType: VS.ifInErrors
- measDescription: Number of inbound packets that contained errors
- measCondition: For packet-oriented interfaces, the number of inbound
- packets that contained errors preventing them from being
- deliverable to a higher-layer protocol. For character-
- oriented or fixed-length interfaces, the number of inbound
- transmission units that contained errors preventing them
- from being deliverable to a higher-layer protocol.
- measResultType: integer
- measResultRange: 0-4294967295
- measResultUnits: number
- measObjClass: EthernetPort
- measCollectionMethod: Gauge
- measLastChange: 1.0
- measChangeType: added
- measInfoId: "IP Management"
- measFamily: IP
- measAdditionalFields: {
- "measurementStatus": "USED",
- "initialValue": 0,
- "acmeParameter1": 0,
- "acmeParameter3": "acmeParameterValue3"}
+ - measType: VS.ifInDiscards
+ measDescription: The number of inbound packets which were chosen to be discarded
+ measCondition: The number of inbound packets which were chosen to be
+ discarded even though no errors had been detected to prevent
+ their being deliverable to a higher-layer protocol. One
+ possible reason for discarding such a packet could be to
+ free up buffer space.
+ Discontinuities in the value of this counter can occur at
+ re-initialization of the management system, and at other
+ times as indicated by the value of
+ ifCounterDiscontinuityTime.
+ measResultType: integer
+ measResultRange: 0-4294967295
+ measResultUnits: number
+ measObjClass: EthernetPort
+ measCollectionMethod: CC
+ measLastChange: 1.0
+ measChangeType: added
+ measInfoId: "IP Management"
+ measFamily: IP
+ measAdditionalFields: {
+ "measurementStatus": "USED",
+ "initialValue": 0,
+ "acmeParameter1": 0,
+ "acmeParameter2": true,
+ "acmeParameter3": "acmeParameterValue3"}
- - measType: VS.ifInUnknownProtos
- measDescription: Number of inbound packets received via an unknown or usupported protocol
- measCondition: For packet-oriented interfaces, the number of packets
- received via the interface which were discarded because of
- an unknown or unsupported protocol. For character-oriented
- or fixed-length interfaces that support protocol
- multiplexing the number of transmission units received via
- the interface which were discarded because of an unknown or
- unsupported protocol. For any interface that does not
- support protocol multiplexing, this counter will always be
- 0.
- measResultType: integer
- measResultRange: 0-4294967295
- measResultUnits: number
- measObjClass: EthernetPort
- measCollectionMethod: CC
- measLastChange: 1.0
- measChangeType: added
- measInfoId: "IP Management"
- measFamily: IP
- measAdditionalFields: {
- "measurementStatus": "USED",
- "initialValue": 0,
- "acmeParameter1": 0,
- "acmeParameter2": true}
+ - measType: VS.ifInErrors
+ measDescription: Number of inbound packets that contained errors
+ measCondition: For packet-oriented interfaces, the number of inbound
+ packets that contained errors preventing them from being
+ deliverable to a higher-layer protocol. For character-
+ oriented or fixed-length interfaces, the number of inbound
+ transmission units that contained errors preventing them
+ from being deliverable to a higher-layer protocol.
+ measResultType: integer
+ measResultRange: 0-4294967295
+ measResultUnits: number
+ measObjClass: EthernetPort
+ measCollectionMethod: Gauge
+ measLastChange: 1.0
+ measChangeType: added
+ measInfoId: "IP Management"
+ measFamily: IP
+ measAdditionalFields: {
+ "measurementStatus": "USED",
+ "initialValue": 0,
+ "acmeParameter1": 0,
+ "acmeParameter3": "acmeParameterValue3"}
- - measType: VS.ifHCInBroadcastPkts
- measDescription: Number of the broadcasted inbound packets delivered to the higher (sub-)layer
- measCondition: The number of packets, delivered by this sub-layer to a
- higher (sub-)layer, which were addressed to a broadcast
- address at this sub-layer. This object is a 64-bit version
- of ifInBroadcastPkts.
- Discontinuities in the value of this counter can occur at
- re-initialization of the management system, and at other
- times as indicated by the value of
- ifCounterDiscontinuityTime.
- measResultType: integer
- measResultRange: 0-4294967295
- measResultUnits: number
- measObjClass: EthernetPort
- measCollectionMethod: CC
- measLastChange: 1.0
- measChangeType: added
- measInfoId: "IP Management"
- measFamily: IP
- measAdditionalFields: {
- "measurementStatus": "USED",
- "initialValue": 0,
- "acmeParameter1": 0}
+ - measType: VS.ifInUnknownProtos
+ measDescription: Number of inbound packets received via an unknown or usupported protocol
+ measCondition: For packet-oriented interfaces, the number of packets
+ received via the interface which were discarded because of
+ an unknown or unsupported protocol. For character-oriented
+ or fixed-length interfaces that support protocol
+ multiplexing the number of transmission units received via
+ the interface which were discarded because of an unknown or
+ unsupported protocol. For any interface that does not
+ support protocol multiplexing, this counter will always be
+ 0.
+ measResultType: integer
+ measResultRange: 0-4294967295
+ measResultUnits: number
+ measObjClass: EthernetPort
+ measCollectionMethod: CC
+ measLastChange: 1.0
+ measChangeType: added
+ measInfoId: "IP Management"
+ measFamily: IP
+ measAdditionalFields: {
+ "measurementStatus": "USED",
+ "initialValue": 0,
+ "acmeParameter1": 0,
+ "acmeParameter2": true}
- - measType: VS.ifHCOutBroadcastPkts
- measDescription: Number of the broadcasted outsbound packets delivered to the higher (sub-)layer
- measCondition: The total number of packets that higher-level protocols
- requested be transmitted, and which were addressed to a
- broadcast address at this sub-layer, including those that
- were discarded or not sent. This object is a 64-bit version
- of ifOutBroadcastPkts.
- Discontinuities in the value of this counter can occur at
- re-initialization of the management system, and at other
- times as indicated by the value of
- ifCounterDiscontinuityTime.
- measResultType: integer
- measResultRange: 0-4294967295
- measResultUnits: number
- measObjClass: EthernetPort
- measCollectionMethod: CC
- measLastChange: 1.0
- measChangeType: added
- measInfoId: "IP Management"
- measFamily: IP
- measAdditionalFields: {
- "measurementStatus": "USED",
- "initialValue": 0,
- "acmeParameter1": 0,
- "acmeParameter2": true,
- "acmeParameter3": "acmeParameterValue3"}
+ - measType: VS.ifHCInBroadcastPkts
+ measDescription: Number of the broadcasted inbound packets delivered to the higher (sub-)layer
+ measCondition: The number of packets, delivered by this sub-layer to a
+ higher (sub-)layer, which were addressed to a broadcast
+ address at this sub-layer. This object is a 64-bit version
+ of ifInBroadcastPkts.
+ Discontinuities in the value of this counter can occur at
+ re-initialization of the management system, and at other
+ times as indicated by the value of
+ ifCounterDiscontinuityTime.
+ measResultType: integer
+ measResultRange: 0-4294967295
+ measResultUnits: number
+ measObjClass: EthernetPort
+ measCollectionMethod: CC
+ measLastChange: 1.0
+ measChangeType: added
+ measInfoId: "IP Management"
+ measFamily: IP
+ measAdditionalFields: {
+ "measurementStatus": "USED",
+ "initialValue": 0,
+ "acmeParameter1": 0}
+
+ - measType: VS.ifHCOutBroadcastPkts
+ measDescription: Number of the broadcasted outsbound packets delivered to the higher (sub-)layer
+ measCondition: The total number of packets that higher-level protocols
+ requested be transmitted, and which were addressed to a
+ broadcast address at this sub-layer, including those that
+ were discarded or not sent. This object is a 64-bit version
+ of ifOutBroadcastPkts.
+ Discontinuities in the value of this counter can occur at
+ re-initialization of the management system, and at other
+ times as indicated by the value of
+ ifCounterDiscontinuityTime.
+ measResultType: integer
+ measResultRange: 0-4294967295
+ measResultUnits: number
+ measObjClass: EthernetPort
+ measCollectionMethod: CC
+ measLastChange: 1.0
+ measChangeType: added
+ measInfoId: "IP Management"
+ measFamily: IP
+ measAdditionalFields: {
+ "measurementStatus": "USED",
+ "initialValue": 0,
+ "acmeParameter1": 0,
+ "acmeParameter2": true,
+ "acmeParameter3": "acmeParameterValue3"}
diff --git a/test/mocks/pnfsimulator/.gitignore b/test/mocks/pnfsimulator/.gitignore
deleted file mode 100644
index daf6cf225..000000000
--- a/test/mocks/pnfsimulator/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-**/*.iml
-**/.idea
-**/target \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/checkstyle-suppressions.xml b/test/mocks/pnfsimulator/checkstyle-suppressions.xml
deleted file mode 100644
index 8d9560789..000000000
--- a/test/mocks/pnfsimulator/checkstyle-suppressions.xml
+++ /dev/null
@@ -1,46 +0,0 @@
-<?xml version="1.0"?>
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-
-<!DOCTYPE suppressions PUBLIC
- "-//Checkstyle//DTD SuppressionFilter Configuration 1.0//EN"
- "https://checkstyle.org/dtds/suppressions_1_0.dtd">
-
-<suppressions>
- <suppress checks=".*" files="\.java" lines="1-20"
- />
- <suppress checks=".*" files="\.properties"
- />
- <suppress checks="javadoc" files="\.java"
- />
- <suppress checks="LineLength" files="\.java"
- />
- <suppress checks="HiddenField" files="\.java"
- />
- <suppress checks="FinalClass" files="\.java"
- />
- <suppress checks="FinalParameters" files="\.java"
- />
- <suppress checks="AvoidInlineConditionals" files="\.java"
- />
- <suppress checks="DesignForExtension" files="\.java"
- />
-</suppressions>
diff --git a/test/mocks/pnfsimulator/deployment/assembly.xml b/test/mocks/pnfsimulator/deployment/assembly.xml
deleted file mode 100644
index a0609efd3..000000000
--- a/test/mocks/pnfsimulator/deployment/assembly.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
- <id>resources</id>
- <includeBaseDirectory>false</includeBaseDirectory>
- <formats>
- <format>zip</format>
- </formats>
- <fileSets>
-
- <fileSet>
- <directory>src</directory>
- <outputDirectory>.</outputDirectory>
- </fileSet>
- </fileSets>
-</assembly> \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/deployment/pom.xml b/test/mocks/pnfsimulator/deployment/pom.xml
deleted file mode 100644
index 25fe08b49..000000000
--- a/test/mocks/pnfsimulator/deployment/pom.xml
+++ /dev/null
@@ -1,52 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>org.onap.simulator</groupId>
- <artifactId>simulator-parent</artifactId>
- <version>5.0.0-SNAPSHOT</version>
- </parent>
- <artifactId>onboarding-package</artifactId>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-assembly-plugin</artifactId>
- <configuration>
- <descriptor>assembly.xml</descriptor>
- <finalName>pnf-pnp-simulators</finalName>
- <outputDirectory>.</outputDirectory>
- </configuration>
- <executions>
- <execution>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-</project>
diff --git a/test/mocks/pnfsimulator/deployment/src/MANIFEST.json b/test/mocks/pnfsimulator/deployment/src/MANIFEST.json
deleted file mode 100644
index fecdda996..000000000
--- a/test/mocks/pnfsimulator/deployment/src/MANIFEST.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
- "name": "",
- "description": "",
- "data": [
- {
- "file": "simulators_heat_template.yaml",
- "type": "HEAT",
- "isBase": "true",
- "data": [
- {
- "file": "simulators_heat_template.env",
- "type": "HEAT_ENV"
- }
- ]
- }
- ]
-}
diff --git a/test/mocks/pnfsimulator/deployment/src/simulators_heat_template.env b/test/mocks/pnfsimulator/deployment/src/simulators_heat_template.env
deleted file mode 100644
index cf2db52bf..000000000
--- a/test/mocks/pnfsimulator/deployment/src/simulators_heat_template.env
+++ /dev/null
@@ -1,10 +0,0 @@
-parameters:
- image_name: ubuntu-14-04-cloud-amd64
- flavor_name: m1.medium
- public_net_id: 41eae12a-27f7-4ace-9fda-3cd55c7c0651
- private_net_id: 41eae12a-27f7-4ace-9fda-3cd55c7c0651
- private_subnet_id: 41eae12a-27f7-4ace-9fda-3cd55c7c0651
- proxy: http://10.10.10.10:8080
- vnf_id: simulators_vnf
- vf_module_id: simulators_vsp
- key_name: onap-dev \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/deployment/src/simulators_heat_template.yaml b/test/mocks/pnfsimulator/deployment/src/simulators_heat_template.yaml
deleted file mode 100644
index 253fcc735..000000000
--- a/test/mocks/pnfsimulator/deployment/src/simulators_heat_template.yaml
+++ /dev/null
@@ -1,147 +0,0 @@
-description: Heat template that deploys PnP PNF simulators
-heat_template_version: '2013-05-23'
-parameters:
- flavor_name: {description: Type of instance (flavor) to be used, label: Flavor,
- type: string}
- image_name: {description: Image to be used for compute instance, label: Image name
- or ID, type: string}
- key_name: {description: Public/Private key pair name, label: Key pair name, type: string}
- public_net_id: {description: Public network that enables remote connection to VNF,
- label: Public network name or ID, type: string}
- private_net_id: {type: string, description: Private network id, label: Private network name or ID}
- private_subnet_id: {type: string, description: Private subnet id, label: Private subnetwork name or ID}
- proxy: {type: string, description: Proxy, label: Proxy, default: ""}
- vnf_id: {type: string, label: VNF ID, description: The VNF ID is provided by ONAP}
- vf_module_id: {type: string, label: vBase module ID, description: The vBase Module ID is provided by ONAP}
-resources:
- PNF_PnP_simulator:
- type: OS::Nova::Server
- properties:
- key_name: { get_param: key_name }
- image: { get_param: image_name }
- flavor: { get_param: flavor_name }
- networks:
- - port: { get_resource: PNF_PnP_simulator_port0 }
- user_data_format: RAW
- user_data:
- str_replace:
- template: |
- #!/bin/bash
-
- set_versions () {
- DOCKER_VERSION=17.03
- DOCKER_COMPOSE_VERSION=1.22.0
- }
-
- set_proxy () {
- HTTP_PROXY=$proxy
- HTTPS_PROXY=$proxy
- http_proxy=$proxy
- https_proxy=$proxy
- export HTTP_PROXY=$proxy
- export HTTPS_PROXY=$proxy
- export http_proxy=$proxy
- export https_proxy=$proxy
- }
-
- enable_root_ssh () {
- sed -i 's/PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config
- sed -i 's/PasswordAuthentication.*/PasswordAuthentication yes/' /etc/ssh/sshd_config
- service sshd restart
- echo -e "arthur\narthur" | passwd root
- }
-
- update_os () {
- rm -rf /var/lib/apt/lists/*
- apt-get clean
- apt-get update
- }
-
- set_apt_get_proxy () {
- cat > /etc/apt/apt.conf.d/proxy.conf << EOF
- Acquire {
- HTTP::proxy "$proxy";
- HTTPS::proxy "$proxy";
- }
- EOF
- }
-
- docker_remove () {
- dnf -y remove docker \
- docker-client \
- docker-client-latest \
- docker-common \
- docker-latest \
- docker-latest-logrotate \
- docker-logrotate \
- docker-selinux \
- docker-engine-selinux \
- docker-engine
- }
-
- docker_install_and_configure () {
- curl "https://releases.rancher.com/install-docker/$DOCKER_VERSION.sh" | sh
- mkdir -p /etc/systemd/system/docker.service.d/
- cat > /etc/systemd/system/docker.service.d/docker.conf << EOF
- [Service]
- ExecStart=
- ExecStart=/usr/bin/dockerd -H fd:// --insecure-registry=nexus3.onap.org:10003
- Environment="HTTP_PROXY=$proxy"
- Environment="HTTPS_PROXY=$proxy"
- EOF
- systemctl daemon-reload
- systemctl restart docker
- apt-mark hold docker-ce
- docker login -u docker -p docker nexus3.onap.org:10003
- }
- docker_compose_install () {
- curl -L https://github.com/docker/compose/releases/download/$DOCKER_COMPOSE_VERSION/docker-compose-$(uname -s)-$(uname -m) -o /usr/local/bin/docker-compose
- chmod +x /usr/local/bin/docker-compose
- }
- pnf_sim_file_checkout () {
- cd /root; git clone https://gerrit.onap.org/r/integration
- }
-
- start_pnf_simulator () {
- docker login -u docker -p docker nexus3.onap.org:10003
- cd ~/integration/test/mocks/pnfsimulator/pnfsimulator
- docker-compose up -d
- }
-
- start_netconf_simulator () {
- docker login -u docker -p docker nexus3.onap.org:10003
- cd ~/integration/test/mocks/pnfsimulator/netconfsimulator
- docker-compose up -d
- }
-
- set_versions
- set_proxy
- enable_root_ssh
- set_apt_get_proxy
- docker_install_and_configure
- docker_compose_install
- pnf_sim_file_checkout
- start_pnf_simulator
- start_netconf_simulator
- params:
- $proxy: { get_param: proxy }
- PNF_PnP_simulator_port0:
- type: OS::Neutron::Port
- properties:
- network_id: { get_param: private_net_id }
- security_groups:
- - default
- fixed_ips:
- - subnet_id: { get_param: private_subnet_id }
- PNF_PnP_simulator_public:
- type: OS::Neutron::FloatingIP
- properties:
- floating_network_id: { get_param: public_net_id }
- port_id: { get_resource: PNF_PnP_simulator_port0 }
-outputs:
- PNF_PnP_simulator_private_ip:
- description: IP address of PNF_PnP_simulator in private network
- value: { get_attr: [ PNF_PnP_simulator, first_address ] }
- PNF_PnP_simulator_public_ip:
- description: Floating IP address of PNF_PnP_simulator in public network
- value: { get_attr: [ PNF_PnP_simulator_public, floating_ip_address ] }
diff --git a/test/mocks/pnfsimulator/netconfsimulator/README.md b/test/mocks/pnfsimulator/netconfsimulator/README.md
deleted file mode 100644
index 94bcd760f..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/README.md
+++ /dev/null
@@ -1,276 +0,0 @@
-# Netconf Simulator
-A simulator that is able to receive and print history of CM configurations.
-
-## Required software
-To run the simulator, the following software should be installed:
-- JDK 1.8
-- Maven
-- docker
-- docker-compose
-
-### API
-Simulator exposes both HTTP and native netconf interface.
-
-### Running simulator
-In order to run simulator, invoke *mvn clean install docker:build* to build required images.
-Add executable permission to initialize_netopeer.sh (by executing `sudo chmod +x netconf/initialize_netopeer.sh`)
-and then invoke *docker-compose up* command.
-In case of copying simulator files to another location, keep in mind to copy also *docker-compose.yml* and directories: *config, templates, netopeer-change-saver-native and netconf*.
-
-#### Restarting
-Restarting simulator can be done by first typing *docker-compose restart* in terminal.
-
-#### Shutting down
-The command *docker-compose down* can be used to shut the simulator down.
-
-## Usage of simulator
-
-### Netconf TLS support
-Embedded netconf server supports connections over TLS on port 6513. Default server and CA certificate have been taken from Netopeer2 repository: https://github.com/CESNET/Netopeer2/tree/master/server/configuration/tls
-
-Mentioned Github repository contains sample client certificate, which works out of the box.
-#### Replacing server certificates
-In order to replace TLS certificates with third-party ones, the following naming schema must be followed:
-* CA certificate file should be named 'ca.crt'
-* Netconf server certificate file should be named 'server_cert.crt'
-* Netconf server keyfile file should be named 'server_key.pem'
-
-Certificates and keys should follow PEM formatting guidelines.
-Prepared files should be placed under _tls/_ directory (existing files must be overwritten).
-After copying, it is necessary to restart the Netconf Simulator (please refer to [restarting simulator](restarting) guide).
-
-This is a sample curl command to test client connection (the example assumes that Netconf Simulator runs on 127.0.0.1):
-```
-curl -k -v https://127.0.0.1:6513 --cacert ca.crt --key client.key --cert client.crt
-```
-
-
-### Capturing netconf configuration changes
-
-The netconfsimulator tool will intercept changes in netconf configuration, done by edit-config command (invoked through simulator's edit-configuration endpoint or directly through exposed netconf-compliant interface). The following changes are intercepted:
-- creating new item
-- moving an item
-- modifying an item
-- deleting an item
-
-Each captured change contains fully qualified parameter name (including xpath - namespace and container name)
-
-#### REST API usage with examples
-
-Application of native netconf operations on YANG model is covered by REST API layer.
-Example invocation of operations with its requests and results are presented below.
-For basic edit-config and get config actions, response is in plain XML format, whereas stored data that can be accessed via API is returned in JSON format.
-
-**Load new YANG model**
-http method: POST
-```
-URL: http:<simulator_ip>:9000/netconf/model/<moduleName>
-```
-request: file content to be sent as multipart (form data)
-```
-module pnf-simulator {
- namespace "http://onap.org/pnf-simulator";
- prefix config;
- container config {
- config true;
- leaf itemValue1 {type uint32;}
- leaf itemValue2 {type uint32;}
- leaf itemValue3 {type uint32;}
- leaf-list allow-user {
- type string;
- ordered-by user;
- description "A sample list of user names.";
- }
- }
-}
-```
-
-**Delete existing YANG model**
-http method: DELETE
-```
-URL: http:<simulator_ip>:9000/netconf/model/<moduleName>
-```
-request body should be empty.
-response: a HTTP 200 code indicating successful operation or 400/500 in case of errors.
-
-**Get all running configurations**
-http method: GET
-```
-URL: http:<simulator_ip>:9000/netconf/get
-```
-response: plain XML
-```
-<config xmlns="http://onap.org/pnf-simulator" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
- <itemValue1>2781</itemValue1>
- <itemValue2>3782</itemValue2>
- <itemValue3>3333</itemValue3>
-</config>
-<config2 xmlns="http://onap.org/pnf-simulator2" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
- <itemValue1>2781</itemValue1>
- <itemValue2>3782</itemValue2>
- <itemValue3>3333</itemValue3>
-</config2>
-```
-
-**Get running configuration**
-http method: GET
-```
-URL: http:<simulator_ip>:9000/netconf/get/'moduleName'/'container'
-```
-response: plain XML
-```
-<config xmlns="http://onap.org/pnf-simulator" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
- <itemValue1>2781</itemValue1>
- <itemValue2>3782</itemValue2>
- <itemValue3>3333</itemValue3>
-</config>
-```
-
-**Edit configuration**
-To edit configuration XML file must be prepared. No plain request body is used here,
-request content must be passed as multipart file (form data) with file name/key='editConfigXml' and file content in XML format
-
-http method: POST
-```
-URL: http:<simulator_ip>:9000/netconf/edit-config
-```
-request: file content to be sent as multipart (form data)
-```
-<config xmlns="http://onap.org/pnf-simulator">
- <itemValue1>2781</itemValue1>
- <itemValue2>3782</itemValue2>
- <itemValue3>3333</itemValue3>
-</config>
-```
-
-response: actual, running configuration after editing config:
-```
-<config xmlns="http://onap.org/pnf-simulator" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
- <itemValue1>2781</itemValue1>
- <itemValue2>3782</itemValue2>
- <itemValue3>3333</itemValue3>
-</config>"
-```
-
-Captured change, that can be obtained from db also via REST API:
-
-http method: GET
-```
-URL: http://<simulator_ip>:9000/store/less?offset=1
-```
-response:
-```
-[{"timestamp": 1542877413979, "configuration": "CREATED: /pnf-simulator:config/itemValue3 = 3333"}]
-```
-
-Notice: if new value is the same as the old one, the change won’t be intercepted (because there is no state change). This is a limitation of used netconf implementation (Netopeer2).
-
-**Modify request**
-http method: POST
-```
-URL: http:<simulator_ip>:9000/netconf/edit-config
-```
-file content to be sent as multipart (form data):
-```
-<config xmlns="http://onap.org/pnf-simulator" >
- <itemValue1>111</itemValue1>
- <itemValue2>222</itemValue2>
-</config>
-```
-
-response: actual, running configuration after editing config:
-```
-<config xmlns="http://onap.org/pnf-simulator" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
- <itemValue1>111</itemValue1>
- <itemValue2>222</itemValue2>
-</config>"
-```
-
-Captured change:
-http method: GET
-```
-URL: http://<simulator_ip>:9000/store/less?offset=2
-```
-```
-[{"timestamp": 1542877413979, "configuration": "MODIFIED: : old value: /pnf-simulator:config/itemValue1 = 2781, new value: /pnf-simulator:config/itemValue1 = 111",
- {"timestamp": 1542877413979, "configuration": "MODIFIED: : old value: /pnf-simulator:config/itemValue2 = 3782, new value: /pnf-simulator:config/itemValue2 = 222"}]
-```
-
-**Move request** (inserting a value into leaf-list which in turn rearranges remaining elements)
-http method: POST
-```
-URL: http:<simulator_ip>:9000/netconf/edit-config
-```
-file content to be sent as multipart (form data):
-```
-<config xmlns="http://onap.org/pnf-simulator" xmlns:yang="urn:ietf:params:xml:ns:yang:1" xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0">
- <allow-user xc:operation="create" yang:insert="before" yang:value="bob">mike</allow-user>
-</config>
-```
-
-Captured change:
-http method: GET
-```
-URL: http://<simulator_ip>:9000/store/less?offset=2
-```
-```
-[{"timestamp": 1542877413979, "configuration": "CREATED: /pnf-simulator:config/allow-user = mike"},
- {"timestamp": 1542877413979, "configuration": "MOVED: /pnf-simulator:config/allow-user = mike after /pnf-simulator:config/allow-user = alice"}]
-```
-
-**Delete request**
-http method: POST
-```
-URL: http:<simulator_ip>:9000/netconf/edit-config
-```
-file content to be sent as multipart (form data):
-```
-<config xmlns="http://onap.org/pnf-simulator">
- <itemValue1>1111</itemValue1>
- <itemValue2 xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0" xc:operation="delete"/>
-</config>
-```
-
-Captured change:
-http method: GET
-```
-URL: http://<simulator_ip>:9000/store/less?offset=1
-```
-```
-[{"timestamp": 1542877413979, "configuration": "DELETED: /pnf-simulator:config/itemValue2 = 222"}]
-```
-
-Getting all configuration changes:
-http method: GET
-```
-URL: http://<simulator_ip>:9000/store/cm-history
-```
-response:
-```
-[{"timestamp":1542877413979,"configuration":"MODIFIED: : old value: /pnf-simulator:config/itemValue1 = 2781, new value: /pnf-simulator:config/itemValue1 = 111"},
- {"timestamp":1542877413979,"configuration":"MODIFIED: : old value: /pnf-simulator:config/itemValue2 = 3782, new value: /pnf-simulator:config/itemValue2 = 222"},
- {"timestamp":1542877414000,"configuration":"CREATED: : /pnf-simulator:config/itemValue3 = 3333"},
- {"timestamp":1542877414104,"configuration":"CREATED: : CREATED: /pnf-simulator:config/allow-user = mike"}
- {"timestamp":1542877414107,"configuration":"MOVED: /pnf-simulator:config/allow-user = mike after /pnf-simulator:config/allow-user = alice"},
- {"timestamp":1542877414275,"configuration":"DELETED: /pnf-simulator:config/itemValue2 = 222"}]
-```
-
-### Logging
-
-### Swagger
-
-## Developers Guide
-
-### Integration tests
-Integration tests use docker-compose for setting up cluster with all services.
-Those tests are not part of build pipeline, but can be run manually by invoking *mvn verify -DskipITs=false* from project command line.
-Tests can be found in netconfsimulator project in src/integration directory.
-
-## Troubleshooting
-Q: Simulator throws errors after shutting down with *docker-compose down* or *docker-compose restart*
-
-A: Remove docker containers that were left after stopping the simulator with the following commands:
-```
-docker stop $(docker ps | grep netconfsimulator | awk '{print $1;}')
-docker rm $(docker ps -a | grep netconfsimulator | awk '{print $1;}')
-```
diff --git a/test/mocks/pnfsimulator/netconfsimulator/config/netconf.env b/test/mocks/pnfsimulator/netconfsimulator/config/netconf.env
deleted file mode 100644
index 6cf310a27..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/config/netconf.env
+++ /dev/null
@@ -1,5 +0,0 @@
-NETCONF_ADDRESS=netopeer
-NETCONF_PORT=830
-NETCONF_MODEL=pnf-simulator
-NETCONF_MAIN_CONTAINER=config
-TZ=Europe/Warsaw
diff --git a/test/mocks/pnfsimulator/netconfsimulator/docker-compose.yml b/test/mocks/pnfsimulator/netconfsimulator/docker-compose.yml
deleted file mode 100644
index 201f7c4de..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/docker-compose.yml
+++ /dev/null
@@ -1,96 +0,0 @@
-version: '3'
-
-services:
- zookeeper:
- image: wurstmeister/zookeeper
- ports:
- - "2181:2181"
- networks:
- - netconfnetwork
-
- kafka1:
- image: wurstmeister/kafka:1.1.0
- ports:
- - "9092:9092"
- hostname: kafka1
- networks:
- - netconfnetwork
- environment:
- KAFKA_ADVERTISED_PORT: 9092
- KAFKA_ADVERTISED_HOST_NAME: kafka1
- KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
- KAFKA_CREATE_TOPICS: "config:1:1"
- KAFKA_DELETE_RETENTION_MS: 604800000
- KAFKA_LOG_CLEANER_DELETE_RETENTION_MS: 604800000
- depends_on:
- - zookeeper
-
- netconf-simulator:
- image: nexus3.onap.org:10003/onap/netconfsimulator
- ports:
- - "9000:8080"
- restart: on-failure
- hostname: netconf-simulator
- networks:
- - netconfnetwork
- depends_on:
- - zookeeper
- - kafka1
- - netopeer
-
- netopeer:
- image: sysrepo/sysrepo-netopeer2:latest
- ports:
- - "830:830"
- - "5002:5002"
- - "6513:6513"
- volumes:
- - ./netconf:/netconf
- - ./netopeer-change-saver-native:/netopeer-change-saver
- - ./tls:/tls
- env_file:
- - ./config/netconf.env
- restart: on-failure
- networks:
- - netconfnetwork
- depends_on:
- - sftp-server
- - ftpes-server
- environment:
- http_proxy: ${http_proxy}
- https_proxy: ${https_proxy}
- command:
- - /netconf/initialize_netopeer.sh
-
- sftp-server:
- image: atmoz/sftp:alpine
- ports:
- - "2222:22"
- volumes:
- - ./sftp:/home/sftp-user/sftp
- - ./ssh/ssh_host_rsa_key.pub:/home/sftp-user/.ssh/keys/ssh_host_rsa_key.pub
- networks:
- - netconfnetwork
- restart: on-failure
- command: sftp-user::1001
-
- ftpes-server:
- image: stilliard/pure-ftpd:latest
- ports:
- - "2221:21"
- - "30000-30009:30000-30009"
- volumes:
- - ./ftpes/files:/home/ftpusers/onap
- - ./ftpes/userpass/:/etc/pure-ftpd/passwd/
- - ./ftpes/tls/:/etc/ssl/private/
- networks:
- - netconfnetwork
- environment:
- PUBLICHOST: localhost
- ADDED_FLAGS: --tls=2
- FTP_USER_HOME: onap
- restart: on-failure
-
-networks:
- netconfnetwork:
- driver: bridge
diff --git a/test/mocks/pnfsimulator/netconfsimulator/docker/Dockerfile b/test/mocks/pnfsimulator/netconfsimulator/docker/Dockerfile
deleted file mode 100644
index 0e25fd310..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/docker/Dockerfile
+++ /dev/null
@@ -1,4 +0,0 @@
-FROM openjdk:8-jre-alpine
-ADD libs /app/libs
-ADD netconfsimulator-5.0.0-SNAPSHOT.jar /app/netconf-simulator.jar
-CMD java -cp /app/libs/*:/app/netconf-simulator.jar org.onap.netconfsimulator.Main
diff --git a/test/mocks/pnfsimulator/netconfsimulator/ftpes/tls/pure-ftpd.pem b/test/mocks/pnfsimulator/netconfsimulator/ftpes/tls/pure-ftpd.pem
deleted file mode 100755
index 0ce676efa..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/ftpes/tls/pure-ftpd.pem
+++ /dev/null
@@ -1,49 +0,0 @@
------BEGIN PRIVATE KEY-----
-MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDHbSk5/cABTpCt
-q54QyTkhuhb84nEz5ztKL0hY56fsVtAA2gSAde+qV9YwUIuReOUhAF4RVVt2Lkn/
-1R0yX+0IjrXnO7jYzDj6QhgyqYKb3iQlvShZMMQ7qihn8qBxobk7+O10x6VLS2L8
-WYpQxGXu7T1qXbw10RhrqG8nbXYX+aHMsv9zMt9OYqKSI073OZR2vk3K49Uqcurj
-sXuRJOa10MRsxgA726pr8OLWAWejsoFaqP2fQS3HeT2RnAqPyAgPc0P6n7gxo0JU
-U5dPnrPbsvfdegIFxfc57oZXrLz7nYXkJEcjYTBFSQ+JAaRfx9kNXZ7Gft7EAMyF
-BLemY/0VAgMBAAECggEARD9bSHlKaCgW4xhEM8JpRt2EWG62BukvJSghPiupD/x1
-mpUBzWSO7GC68DXgTZxt7WlOx+fKMRuOP3sTTtX9LFyKa+PIUokxRpOv7EaOaAER
-pciiMkO6JCELSueBeOG7noaF3N0l+CqIaYvLBfDwYV/XELubWV+BV/aAc6HGNFWi
-4bjM+BOBLQstrEeJh2jVylzv4CTtlTs2pwiHFSyrHhudTk5nnATAHn1gi+X42v1A
-zk3UfqADZJmMI0/Roup4YPZ3+6zUzDN2i+qasHexL0OKIjRcSqpgqQoIeKEbKKfw
-sOgiWIR2Xvj7EJmhzJlWgKjk8OLs/7U4QpnD+s0agQKBgQDu3ojqKNWnPHy0Nupm
-tmAs28WLK76R0iZeAd2nwsU2K6lnm9z5o2ab3ffTJVB9kAetKJa3UerKskF/qF9C
-MtjlEn6F++uYFitpLjQevnyrKSqFqbzytDXrQlk+gZLglmi6YylT5k9qLSREAu55
-XS/wbm9XU2Q7sl8oTnZHXptT7QKBgQDVunvqdDn1FaNU9EwQCGPS3QGu+go22xkM
-4Rs2CoHWfqmhGOo8lJKBElDqsXvxggrZLWJe/1lgnELT/9aXS8QLWBnZxpTj9wfd
-igH+CJc3mWnLThmUGdSV/tuHon2IdQ8/1CiGSwIr9kYCnStidUtOXjIbgc6kUTTi
-5wtIGHh4yQKBgQDXJ/0dJbDklRgiX4CdCdLxNPfnlnxt7mN+s6GK1WY7l/JcD8ln
-1qW66aGrP2YT42L2tqOi9hdNgmh66xb6ksBI/XKXjsWz1Ow/Lk3mD2BN76OMh8pY
-trgGc1ndcmrw/qnQkTcNilqn4YdT92wER0rB/0cs2kFjgBQ0QxBI0s+INQKBgA6Y
-2fW9UmgGvk0DEl7V89tm9MJ6mU/9zswuY6lhNlTr+bHi/bx9eTQPiC8/R/PKqesD
-SoCqd/Q9N+M6yfEzX4RW1A0nnuui54qd7lznQUyu0abtApo22WoVKfEti91SAWSe
-nNXvMYrHGyj6iwgCcs47aLiwOOjIExCcLw0RfsjhAoGAc1zaRbrtjjh66FJYjLiJ
-Q6EXfm31ptaQQUn5rQyHMD2VRlajCYV+fv75tezf2dQvJcqHYWrEuY8U+OTbB1TB
-IEqN8ETUeLegl5RgvWoyWinqdbv/0d9LtwVBdtiEQLoYumD934mshEDgzCOOjrBe
-Salcd1vc6y6NiFooPlvloXQ=
------END PRIVATE KEY-----
------BEGIN CERTIFICATE-----
-MIIDYDCCAkigAwIBAgIJAMH2upKd2yAJMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV
-BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX
-aWRnaXRzIFB0eSBMdGQwHhcNMTgwOTEwMTI1ODE2WhcNMzgwOTA1MTI1ODE2WjBF
-MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50
-ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
-CgKCAQEAx20pOf3AAU6QraueEMk5IboW/OJxM+c7Si9IWOen7FbQANoEgHXvqlfW
-MFCLkXjlIQBeEVVbdi5J/9UdMl/tCI615zu42Mw4+kIYMqmCm94kJb0oWTDEO6oo
-Z/KgcaG5O/jtdMelS0ti/FmKUMRl7u09al28NdEYa6hvJ212F/mhzLL/czLfTmKi
-kiNO9zmUdr5NyuPVKnLq47F7kSTmtdDEbMYAO9uqa/Di1gFno7KBWqj9n0Etx3k9
-kZwKj8gID3ND+p+4MaNCVFOXT56z27L33XoCBcX3Oe6GV6y8+52F5CRHI2EwRUkP
-iQGkX8fZDV2exn7exADMhQS3pmP9FQIDAQABo1MwUTAdBgNVHQ4EFgQUt51lQ+ab
-MTq+w2U/knCsIPb3wrkwHwYDVR0jBBgwFoAUt51lQ+abMTq+w2U/knCsIPb3wrkw
-DwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAQ69AktYLQ+VRbojz
-zC0XQ2M1FAkfJI2P0LvPoYxZgId2CnZW3sMIdnJdF+KjvOqeGyFmw+hn8WkoKiWj
-0sxuGmrWt5t+5bF2wcq0CtTeF1/o6DsRhRiJBzmcLe81ItrN6emZSg96xCKzkHBZ
-3nF4fG88vtiYgD932lMStDqQzSTx0FsCGpGaKh9xDmKvlP24NWdM9gyOEsRbDvqd
-vS1Q45Jx0jzkp7X5d0casqBWIZak3z0EVdK7c8Y/GxxTcWfIMINCl9+F9kpTA/ZX
-uARYzrPWaBfDBi2r5acWi/AHJM3U+LgzO5nCKa+38vtjNw3NtbslA4InQ5cU2B8X
-QN8NlQ==
------END CERTIFICATE-----
diff --git a/test/mocks/pnfsimulator/netconfsimulator/ftpes/userpass/pureftpd.passwd b/test/mocks/pnfsimulator/netconfsimulator/ftpes/userpass/pureftpd.passwd
deleted file mode 100755
index 7961e710d..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/ftpes/userpass/pureftpd.passwd
+++ /dev/null
@@ -1 +0,0 @@
-onap:$6$Guq6OMhBdNZ6nTk0$7dLt6hOrAv.in36jzWGd5UgWeDqN3CuKjrzJ.izRTdgZRTszeNYbT2dk7UDh9CLD7pohnB0.k1NSZmRIUB/ID/:1001:1001::/home/ftpusers/onap/./::::::::::::
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netconf/initialize_netopeer.sh b/test/mocks/pnfsimulator/netconfsimulator/netconf/initialize_netopeer.sh
deleted file mode 100755
index 550a64ff4..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netconf/initialize_netopeer.sh
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/bin/bash
-
-###
-# ============LICENSE_START=======================================================
-# Simulator
-# ================================================================================
-# Copyright (C) 2019 Nokia. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-###
-
-cat > /etc/apt/apt.conf << EOF
-Acquire::http {
- No-Cache "true";
- No-Store "true";
- Pipeline-Depth "0";
-};
-EOF
-
-NETOPEER_CHANGE_SAVER=netopeer-change-saver
-
-cp /tls/* /usr/local/etc/keystored/keys/
-cp /netconf/*.xml /tmp/
-
-chmod +x /netconf/set-up-xmls.py
-/netconf/set-up-xmls.py /tls ca.crt server_cert.crt server_key.pem /tmp/load_server_certs.xml /tmp/tls_listen.xml
-
-/usr/bin/supervisord -c /etc/supervisord.conf &
-sysrepoctl --install --yang=/netconf/pnf-simulator.yang --owner=netconf:nogroup --permissions=777
-sysrepocfg --import=/netconf/pnf-simulator.data.xml --datastore=startup --format=xml --level=3 pnf-simulator
-sysrepocfg --merge=/tmp/load_server_certs.xml --format=xml --datastore=startup ietf-keystore
-sysrepocfg --merge=/tmp/tls_listen.xml --format=xml --datastore=startup ietf-netconf-server
-
-apt-get update
-apt-get install -y python3 python3-pip librdkafka-dev
-pip3 install flask flask_restful
-nohup python3 /netconf/yang_loader_server.py &
-
-cd /opt/dev/sysrepo && cmake .
-cd /opt/dev/sysrepo && make -j2
-
-/bin/cp -R /$NETOPEER_CHANGE_SAVER /opt/dev/
-cp /opt/dev/sysrepo/swig/libSysrepo-cpp.so /opt/dev/$NETOPEER_CHANGE_SAVER/
-ln -s /opt/dev/sysrepo/build/src/libsysrepo.so /opt/dev/$NETOPEER_CHANGE_SAVER/libsysrepo.so
-
-cd /opt/dev/$NETOPEER_CHANGE_SAVER && cmake .
-cd /opt/dev/$NETOPEER_CHANGE_SAVER && make
-/opt/dev/$NETOPEER_CHANGE_SAVER/bin/netopeer-change-saver pnf-simulator kafka1 config
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netconf/load_server_certs.xml b/test/mocks/pnfsimulator/netconfsimulator/netconf/load_server_certs.xml
deleted file mode 100644
index 2524e08b0..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netconf/load_server_certs.xml
+++ /dev/null
@@ -1,40 +0,0 @@
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-<keystore xmlns="urn:ietf:params:xml:ns:yang:ietf-keystore">
- <private-keys>
- <private-key>
- <name>SERVER_KEY_NAME</name>
- <certificate-chains>
- <certificate-chain>
- <name>SERVER_CERT_NAME</name>
- <certificate>SERVER_CERTIFICATE_HERE</certificate>
- </certificate-chain>
- </certificate-chains>
- </private-key>
- </private-keys>
- <trusted-certificates>
- <name>test_trusted_ca_list</name>
- <trusted-certificate>
- <name>CA_CERT_NAME</name>
- <certificate>CA_CERTIFICATE_HERE</certificate>
- </trusted-certificate>
- </trusted-certificates>
-</keystore>
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netconf/newmodel.xml b/test/mocks/pnfsimulator/netconfsimulator/netconf/newmodel.xml
deleted file mode 100644
index 90a3451d4..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netconf/newmodel.xml
+++ /dev/null
@@ -1,24 +0,0 @@
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-<config2 xmlns="http://onap.org/pnf-simulator2">
- <item1>500</item1>
- <item2>1000</item2>
-</config2>
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netconf/newmodel.yang b/test/mocks/pnfsimulator/netconfsimulator/netconf/newmodel.yang
deleted file mode 100644
index 544f46725..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netconf/newmodel.yang
+++ /dev/null
@@ -1,9 +0,0 @@
-module newmodel {
- namespace "http://onap.org/pnf-simulator2";
- prefix config2;
- container config2 {
- config true;
- leaf item1 {type uint32;}
- leaf item2 {type uint32;}
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netconf/pnf-simulator.data.xml b/test/mocks/pnfsimulator/netconfsimulator/netconf/pnf-simulator.data.xml
deleted file mode 100644
index c235f6405..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netconf/pnf-simulator.data.xml
+++ /dev/null
@@ -1,24 +0,0 @@
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-<config xmlns="http://onap.org/pnf-simulator">
- <itemValue1>42</itemValue1>
- <itemValue2>35</itemValue2>
-</config>
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netconf/pnf-simulator.yang b/test/mocks/pnfsimulator/netconfsimulator/netconf/pnf-simulator.yang
deleted file mode 100644
index ba1158560..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netconf/pnf-simulator.yang
+++ /dev/null
@@ -1,9 +0,0 @@
-module pnf-simulator {
- namespace "http://onap.org/pnf-simulator";
- prefix config;
- container config {
- config true;
- leaf itemValue1 {type uint32;}
- leaf itemValue2 {type uint32;}
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netconf/set-up-xmls.py b/test/mocks/pnfsimulator/netconfsimulator/netconf/set-up-xmls.py
deleted file mode 100755
index d46ff91f9..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netconf/set-up-xmls.py
+++ /dev/null
@@ -1,153 +0,0 @@
-#!/usr/bin/env python
-
-###
-# ============LICENSE_START=======================================================
-# Simulator
-# ================================================================================
-# Copyright (C) 2019 Nokia. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-###
-
-import os
-import sys
-import logging
-import logging.config
-
-logging.basicConfig()
-logger = logging.getLogger()
-logger.setLevel(logging.INFO)
-
-# Placeholders definition - this needs to match placeholders in
-# load_server_certs_xml_file and tls_listen_xml_file
-SERVER_KEY_NAME = "SERVER_KEY_NAME"
-SERVER_CERT_NAME = "SERVER_CERT_NAME"
-SERVER_CERTIFICATE_HERE = "SERVER_CERTIFICATE_HERE"
-CA_CERT_NAME = "CA_CERT_NAME"
-CA_CERTIFICATE_HERE = "CA_CERTIFICATE_HERE"
-CA_FINGERPRINT_HERE = "CA_FINGERPRINT_HERE"
-CA_FINGERPRINT_ENV = "CA_FINGERPRINT"
-SERVER_CERTIFICATE_ENV = "SERVER_CERTIFICATE_ENV"
-CA_CERTIFICATE_ENV = "CA_CERTIFICATE_ENV"
-
-
-class FileHelper(object):
- @classmethod
- def get_file_contents(cls, filename):
- with open(filename, "r") as f:
- return f.read()
-
- @classmethod
- def write_file_contents(cls, filename, data):
- with open(filename, "w+") as f:
- f.write(data)
-
-
-class CertHelper(object):
- @classmethod
- def get_pem_content_stripped(cls, pem_dir, pem_filename):
- cmd = "cat {}/{} | grep -v '^-'".format(pem_dir, pem_filename)
- content = CertHelper.system(cmd)
- return content
-
- @classmethod
- def get_cert_fingerprint(cls, directory, cert_filename):
- cmd = "openssl x509 -fingerprint -noout -in {}/{} | sed -e " \
- "'s/SHA1 Fingerprint//; s/=//; s/=//p'" \
- .format(directory, cert_filename)
- fingerprint = CertHelper.system(cmd)
- return fingerprint
-
- @classmethod
- def print_certs_info(cls, ca_cert, ca_fingerprint, server_cert):
- logger.info("Will use server certificate: " + server_cert)
- logger.info("Will use CA certificate: " + ca_cert)
- logger.info("CA certificate fingerprint: " + ca_fingerprint)
-
- @classmethod
- def system(cls, cmd):
- return os.popen(cmd).read().replace("\n", "")
-
-
-class App(object):
- @classmethod
- def patch_server_certs(cls, data, server_key_filename_noext,
- server_cert_filename_noext, ca_cert_filename_noext,
- server_cert, ca_cert):
- data = data.replace(SERVER_KEY_NAME, server_key_filename_noext)
- data = data.replace(SERVER_CERT_NAME, server_cert_filename_noext)
- data = data.replace(CA_CERT_NAME, ca_cert_filename_noext)
- data = data.replace(SERVER_CERTIFICATE_HERE, server_cert)
- data = data.replace(CA_CERTIFICATE_HERE, ca_cert)
- return data
-
- @classmethod
- def patch_tls_listen(cls, data, server_cert_filename_noext, ca_fingerprint,
- server_cert, ca_cert):
- data = data.replace(SERVER_CERT_NAME, server_cert_filename_noext)
- data = data.replace(CA_FINGERPRINT_HERE, ca_fingerprint)
- data = data.replace(SERVER_CERTIFICATE_HERE, server_cert)
- data = data.replace(CA_CERTIFICATE_HERE, ca_cert)
- return data
-
- @classmethod
- def run(cls):
- # name things
- cert_dir = sys.argv[1]
- ca_cert_filename = sys.argv[2]
- server_cert_filename = sys.argv[3]
- server_key_filename = sys.argv[4]
- load_server_certs_xml_file = sys.argv[5]
- tls_listen_xml_file = sys.argv[6]
-
- # strip extensions
- ca_cert_filename_noext = ca_cert_filename.replace(".crt", "")
- server_cert_filename_noext = server_cert_filename.replace(".crt", "")
- server_key_filename_noext = server_key_filename.replace(".pem", "")
-
- # get certificates from files
- server_cert = CertHelper.get_pem_content_stripped(cert_dir,
- server_cert_filename)
- ca_cert = CertHelper.get_pem_content_stripped(cert_dir,
- ca_cert_filename)
- ca_fingerprint = CertHelper.get_cert_fingerprint(cert_dir,
- ca_cert_filename)
- CertHelper.print_certs_info(ca_cert, ca_fingerprint, server_cert)
-
- # patch TLS configuration files
- data_srv = FileHelper.get_file_contents(load_server_certs_xml_file)
- patched_srv = App.patch_server_certs(data_srv, server_key_filename_noext,
- server_cert_filename_noext,
- ca_cert_filename_noext,
- server_cert, ca_cert)
- FileHelper.write_file_contents(load_server_certs_xml_file, patched_srv)
-
- data_tls = FileHelper.get_file_contents(tls_listen_xml_file)
- patched_tls = App.patch_tls_listen(data_tls, server_cert_filename_noext,
- ca_fingerprint, server_cert, ca_cert)
- FileHelper.write_file_contents(tls_listen_xml_file, patched_tls)
-
-
-def main():
- if len(sys.argv) is not 7:
- print("Usage: {1} <cert_dir> <ca_cert_filename> <server_cert_filename> "
- "<server_key_filename> <load_server_certs_xml_full_path> "
- "<tls_listen_full_path>", sys.argv[0])
- return 1
- App.run()
- logger.info("XML files patched successfully")
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netconf/test_yang_loader_server.py b/test/mocks/pnfsimulator/netconfsimulator/netconf/test_yang_loader_server.py
deleted file mode 100644
index f282517b2..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netconf/test_yang_loader_server.py
+++ /dev/null
@@ -1,121 +0,0 @@
-###
-# ============LICENSE_START=======================================================
-# Simulator
-# ================================================================================
-# Copyright (C) 2019 Nokia. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-###
-
-import unittest
-
-from unittest import mock
-from werkzeug.datastructures import FileStorage
-
-from yang_loader_server import YangLoaderHelper, YangModelServer
-
-
-class TestYangLoaderHelper(unittest.TestCase):
-
- def test_should_save_file_and_return_path(self):
- helper = YangLoaderHelper()
- mocked_file = mock.Mock(FileStorage)
- mocked_file.filename = "sample"
-
- path = helper.save_file(mocked_file)
-
- self.assertEqual(path, "/tmp/sample")
- mocked_file.save.assert_called_once_with("/tmp/sample")
-
- @mock.patch('yang_loader_server.check_output')
- def test_should_install_new_yang_model(self, mocked_output):
- helper = YangLoaderHelper()
-
- helper.install_new_model("path")
-
- mocked_output.assert_called_with(
- ['sysrepoctl', '--install', '--yang=path',
- '--owner=netconf:nogroup', '--permissions=777'],
- stderr=-2, universal_newlines=True)
-
- @mock.patch('yang_loader_server.check_output')
- def test_should_delete_yang_model(self, mocked_output):
- helper = YangLoaderHelper()
-
- helper.uninstall_a_model("modelName")
-
- mocked_output.assert_called_with(
- ['sysrepoctl', '--uninstall', '--module=modelName'],
- stderr=-2, universal_newlines=True)
-
- @mock.patch('yang_loader_server.check_output')
- def test_should_set_default_configuration(self, mocked_output):
- helper = YangLoaderHelper()
-
- helper.set_default_configuration("samplePath", "sampleModuleName")
-
- mocked_output.assert_called_with(
- ['sysrepocfg', '--import=samplePath', '--datastore=startup',
- '--format=xml', '--level=3', 'sampleModuleName'],
- stderr=-2, universal_newlines=True)
-
- @mock.patch('yang_loader_server.subprocess.Popen')
- @mock.patch('yang_loader_server.check_output')
- def test_should_verify_change_listener_for_model_properly(self, mocked_output, mocked_popen):
- helper = YangLoaderHelper()
-
- helper.start_change_listener_for_model("sampleModule")
-
- mocked_output.assert_called_with(
- ['pgrep', '-f', '/opt/dev/netopeer-change-saver/bin/netopeer-change-saver sampleModule kafka1 config'],
- stderr=-2, universal_newlines=True)
-
- @mock.patch('yang_loader_server.check_output')
- def test_should_raise_exception_when_error_occurred_in_output(self,
- mocked_output):
- helper = YangLoaderHelper()
- mocked_output.return_value = "abcd ERR"
- with self.assertRaises(RuntimeError) as context:
- helper._run_bash_command("sample command")
-
- self.assertEqual('abcd ERR', str(context.exception))
-
-
-class TestYangModelServer(unittest.TestCase):
-
- def __init__(self, methodName='runTest'):
- super().__init__(methodName)
- self._mocked_file = mock.Mock(FileStorage)
-
- def test_should_properly_apply_and_start_new_model(self):
- with mock.patch.object(YangModelServer, '_parse_request',
- new=self._mock_request):
- helper = mock.Mock(YangLoaderHelper)
- helper.save_file.return_value = "sampleFile"
- server = YangModelServer(helper)
-
- server.post()
-
- self.assertEqual(helper.save_file.call_count, 2)
- helper.install_new_model.assert_called_once_with('sampleFile')
- helper.set_default_configuration.assert_called_once_with(
- 'sampleFile', 'sampleModuleName')
- helper.start_change_listener_for_model.assert_called_once_with('sampleModuleName')
-
- def _mock_request(self):
- return {
- 'yangModel': self._mocked_file,
- 'initialConfig': self._mocked_file,
- 'moduleName': "sampleModuleName"
- }
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netconf/tls_listen.xml b/test/mocks/pnfsimulator/netconfsimulator/netconf/tls_listen.xml
deleted file mode 100644
index 4f45b28a2..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netconf/tls_listen.xml
+++ /dev/null
@@ -1,48 +0,0 @@
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-<netconf-server xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-server">
- <listen>
- <endpoint>
- <name>test_tls_listen_endpt</name>
- <tls>
- <address>0.0.0.0</address>
- <port>6513</port>
- <certificates>
- <certificate>
- <name>SERVER_CERT_NAME</name>
- </certificate>
- </certificates>
- <client-auth>
- <trusted-ca-certs>test_trusted_ca_list</trusted-ca-certs>
- <cert-maps>
- <cert-to-name>
- <id>1</id>
- <!-- This is not a typo - 0x02 should stay there -->
- <fingerprint>02:CA_FINGERPRINT_HERE</fingerprint>
- <map-type xmlns:x509c2n="urn:ietf:params:xml:ns:yang:ietf-x509-cert-to-name">x509c2n:specified</map-type>
- <name>test</name>
- </cert-to-name>
- </cert-maps>
- </client-auth>
- </tls>
- </endpoint>
- </listen>
-</netconf-server>
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netconf/yang_loader_server.py b/test/mocks/pnfsimulator/netconfsimulator/netconf/yang_loader_server.py
deleted file mode 100644
index 716d0712e..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netconf/yang_loader_server.py
+++ /dev/null
@@ -1,172 +0,0 @@
-###
-# ============LICENSE_START=======================================================
-# Simulator
-# ================================================================================
-# Copyright (C) 2019 Nokia. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-###
-
-import logging
-import subprocess
-import os
-from subprocess import check_output, CalledProcessError
-from flask import Flask
-from flask_restful import Resource, Api, reqparse
-from werkzeug.datastructures import FileStorage
-import time
-
-app = Flask(__name__)
-api = Api(app)
-logger = logging.getLogger("yang-loader")
-logger.addHandler(logging.StreamHandler())
-KAFKA_BROKER_NAME="kafka1"
-KAFKA_TOPIC_NAME="config"
-
-
-class YangLoaderHelper(object):
-
- @classmethod
- def save_file(cls, yang_model_file: FileStorage) -> str:
- path = "/tmp/" + yang_model_file.filename
- yang_model_file.save(path)
- return path
-
- @classmethod
- def install_new_model(cls, yang_model_path: str):
- logger.info("Installing new model: %s", yang_model_path)
- command = "sysrepoctl --install --yang={} --owner=netconf:nogroup --permissions=777" \
- .format(yang_model_path)
- cls._run_bash_command(command)
-
- @classmethod
- def uninstall_a_model(cls, yang_model_name: str):
- logger.info("Uninstalling a model: %s", yang_model_name)
- command = "sysrepoctl --uninstall --module={}" \
- .format(yang_model_name)
- cls._run_bash_command(command)
-
-
- @classmethod
- def set_default_configuration(cls, init_conf_path: str, module_name: str):
- logger.info("Attempting to set default configuration %s for module %s", init_conf_path, module_name)
- command = "sysrepocfg --import={} --datastore=startup --format=xml --level=3 {}" \
- .format(init_conf_path, module_name)
- cls._run_bash_command(command)
-
- @classmethod
- def start_change_listener_for_model(cls, module_name: str):
- logger.info("Starting listener for model: %s", module_name)
- command = "/opt/dev/netopeer-change-saver/bin/netopeer-change-saver {} {} {}" \
- .format(module_name, KAFKA_BROKER_NAME, KAFKA_TOPIC_NAME)
- try:
- check_output(["pgrep", "-f" , command], stderr=subprocess.STDOUT, universal_newlines=True)
- logger.info("Change listener for {} already exist.".format(module_name))
- except CalledProcessError:
- subprocess.Popen(command.split(), stdout=subprocess.PIPE)
-
- @classmethod
- def stop_change_listener_for_model(cls, model_name):
- logger.info("Stopping listener for model %s", model_name)
- pid = cls.get_pid_by_name(model_name)
- logger.info("pid is %s", pid)
- command = "kill -2 {}".format(pid)
- cls._run_bash_command(command)
-
- @classmethod
- def _run_bash_command(cls, command: str):
- try:
- logger.info("Attempts to invoke %s", command)
- output = check_output(command.split(), stderr=subprocess.STDOUT,
- universal_newlines=True)
- logger.info("Output: %s", output)
- if "ERR" in output:
- raise RuntimeError(str(output))
- except subprocess.CalledProcessError as e:
- raise RuntimeError(e, str(e.stdout))
-
- @classmethod
- def get_pid_by_name(cls, name):
- for dirname in os.listdir('/proc'):
- if not dirname.isdigit():
- continue
- try:
- with open('/proc/{}/cmdline'.format(dirname), mode='rb') as fd:
- content = fd.read().decode().split('\x00')
- except Exception as e:
- print(e)
- continue
-
- if name in content:
- return dirname
-
-
-class YangModelServer(Resource):
- logger = logging.getLogger('YangModelServer')
-
- def __init__(self, yang_loader_helper: YangLoaderHelper = YangLoaderHelper()):
- self._yang_loader_helper = yang_loader_helper
-
- def post(self):
- args = self._parse_request()
- yang_model_file = args['yangModel']
- initial_config_file = args['initialConfig']
- module_name = args['moduleName']
- model_path = self._yang_loader_helper.save_file(yang_model_file)
- conf_path = self._yang_loader_helper.save_file(initial_config_file)
-
- try:
- self._yang_loader_helper.install_new_model(model_path)
- self._yang_loader_helper.set_default_configuration(conf_path,
- module_name)
- self._yang_loader_helper.start_change_listener_for_model(module_name)
- except RuntimeError as e:
- self.logger.error(e.args, exc_info=True)
- return str(e.args), 400
- return "Successfully started"
-
- def delete(self):
- args = self._parse_request()
- yang_model_name = args['yangModelName']
-
- try:
- self._yang_loader_helper.stop_change_listener_for_model(yang_model_name)
- time.sleep(5)
- self._yang_loader_helper.uninstall_a_model(yang_model_name)
- except RuntimeError as e:
- self.logger.error(e.args, exc_info=True)
- return str(e.args), 400
- return "Successfully deleted"
-
- @classmethod
- def _parse_request(cls) -> reqparse.Namespace:
- parse = reqparse.RequestParser()
- parse.add_argument('yangModel',
- type=FileStorage,
- location='files')
- parse.add_argument('initialConfig',
- type=FileStorage,
- location='files')
- parse.add_argument('moduleName', type=str)
- parse.add_argument('yangModelName', type=str)
- return parse.parse_args()
-
-
-api.add_resource(YangModelServer, '/model')
-
-if __name__ == '__main__':
- logging.basicConfig(filename=os.path.dirname(__file__) + "/yang_loader.log",
- filemode="w",
- level=logging.DEBUG)
- app.run(host='0.0.0.0', port='5002')
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/Application.cpp b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/Application.cpp
deleted file mode 100644
index 56c33f0de..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/Application.cpp
+++ /dev/null
@@ -1,74 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-#include "Application.h"
-#include <cstdio>
-#include <unistd.h>
-#include "sysrepo/Session.hpp"
-#include "SysrepoCallback.h"
-
-Application::~Application() {
- this->subscriber->unsubscribe();
- this->session->session_stop();
- sr_disconnect(this->connection->_conn);
- std::cout << "Application closed correctly " << std::endl;
-}
-
-void Application::run() {
- /*create kafka wrapper object*/
- auto kafkaWrapper = std::make_shared<KafkaWrapper>(this->brokers,this->topic_name);
-
- std::cout << "Application will watch for changes in " << module_name << std::endl;
- /* connect to sysrepo */
- this->connection = new sysrepo::Connection("example_application");
- sysrepo::S_Connection conn(new sysrepo::Connection("example_application"));
-
- /* start session */
- sysrepo::S_Session sess(new sysrepo::Session(conn));
-
- this->session = sess;
- /* subscribe for changes in running config */
- sysrepo::S_Subscribe subscribe(new sysrepo::Subscribe(sess));
- std::shared_ptr<SysrepoCallback> cb(new SysrepoCallback(kafkaWrapper));
-
- subscribe->module_change_subscribe(module_name, cb);
- this->subscriber = subscribe;
-
- /* read startup config */
- std::cout << "\n ========== READING STARTUP CONFIG: ==========\n" << std::endl;
-
- cb->print_current_config(sess, module_name);
-
- std::cout << "\n ========== STARTUP CONFIG APPLIED AS RUNNING ==========\n" << std::endl;
-
- /* loop until ctrl-c is pressed / SIGINT is received */
- while (!exit_application) {
- sleep(1000); /* or do some more useful work... */
- }
-
- std::cout << "Application exit requested, exiting." << std::endl;
-
-}
-
-Application::Application(const char *module_name, const char *brokers, const char *topic_name) {
- this->module_name = module_name;
- this->brokers = brokers;
- this->topic_name = topic_name;
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/Application.h b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/Application.h
deleted file mode 100644
index c41f7e28b..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/Application.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-#ifndef NETOPEER_CHANGE_SAVER_CPP_APPLICATION_H
-#define NETOPEER_CHANGE_SAVER_CPP_APPLICATION_H
-#include "sysrepo/Sysrepo.hpp"
-
-extern volatile int exit_application;
-
-class Application {
-private:
- const char *module_name;
- const char *brokers;
- const char *topic_name;
- sysrepo::S_Session session;
- sysrepo::S_Subscribe subscriber;
- sysrepo::Connection *connection;
-
-public:
- Application(const char *module_name, const char *brokers, const char *topic_name);
- ~Application();
- void run();
-
-};
-
-#endif //NETOPEER_CHANGE_SAVER_CPP_APPLICATION_H
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/CMakeLists.txt b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/CMakeLists.txt
deleted file mode 100755
index f21576d00..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/CMakeLists.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-cmake_minimum_required(VERSION 3.7)
-project(netopeer-change-saver)
-
-set(CMAKE_CXX_STANDARD 11)
-set(THREADS_PREFER_PTHREAD_FLAG ON)
-set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
-set (CMAKE_EXE_LINKER_FLAGS "-Wl,--unresolved-symbols=ignore-all")
-link_directories(.)
-
-find_package(Threads REQUIRED)
-
-add_executable(netopeer-change-saver main.cpp sysrepo.h Application.cpp Application.h KafkaWrapper.cpp KafkaWrapper.h
- SysrepoCallback.cpp SysrepoCallback.h)
-file(COPY ${CMAKE_CURRENT_SOURCE_DIR}/libSysrepo-cpp.so
- DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/bin)
-
-target_link_libraries(netopeer-change-saver libSysrepo-cpp.so)
-target_link_libraries(netopeer-change-saver libsysrepo.so)
-target_link_libraries(netopeer-change-saver Threads::Threads)
-target_link_libraries(netopeer-change-saver librdkafka.so) \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/KafkaWrapper.cpp b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/KafkaWrapper.cpp
deleted file mode 100644
index cd018a33f..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/KafkaWrapper.cpp
+++ /dev/null
@@ -1,105 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-#include "KafkaWrapper.h"
-#include <cstdlib>
-#include <cinttypes>
-#include <iostream>
-
-extern "C" {
- rd_kafka_resp_err_t rd_kafka_last_error (void);
- rd_kafka_resp_err_t rd_kafka_flush (rd_kafka_t *rk, int timeout_ms);
-}
-
-extern "C" {
-void kafka_delivery_report_callback(rd_kafka_t *rk, const rd_kafka_message_t *rkmessage, void *opaque) {
-#ifdef DEBUG
- if (rkmessage->err)
- std::cout<<"%% Message delivery failed: %s\n"<<rd_kafka_err2str(rkmessage->err)<<std::endl;
- else
- std::cout<<
- "%% Message delivered ("<<rkmessage->len <<" bytes, partition " << rkmessage->partition <<")" << std::endl;
- /* The rkmessage is destroyed automatically by librdkafka */
-#endif
-}
-}
-
-KafkaWrapper::KafkaWrapper(const char *brokers, const char *topic_name) {
- this->brokers = brokers;
- this->topic_name = topic_name;
-
- init();
-}
-
-KafkaWrapper::~KafkaWrapper() {
- std::cerr<<"%% Flushing final messages..."<<std::endl;
- rd_kafka_flush(rk, 10 * 1000);
- rd_kafka_destroy(rk);
-}
-
-void KafkaWrapper::init() {
- /*Kafka stuff*/
- conf = rd_kafka_conf_new();
- if (rd_kafka_conf_set(conf, "bootstrap.servers", brokers, errstr, sizeof(errstr)) != RD_KAFKA_CONF_OK) {
- perror(errstr);
- exit(1);
- }
-
- rd_kafka_conf_set_dr_msg_cb(conf, kafka_delivery_report_callback);
- rk = rd_kafka_new(RD_KAFKA_PRODUCER, conf, errstr, sizeof(errstr));
- if (!rk) {
- std::cerr<<"%% Failed to create new producer: %s\n"<<errstr<<std::endl;
- exit(1);
- }
-
- rkt = rd_kafka_topic_new(rk, topic_name, nullptr);
- if (!rkt) {
- std::cerr<<"%% Failed to create topic object: %s\n"<<
- rd_kafka_err2str(rd_kafka_last_error())<<std::endl;
- rd_kafka_destroy(rk);
- exit(1);
- }
-}
-
-void KafkaWrapper::kafka_send_message(std::string message) {
- size_t len = message.length();
- int retry = 1;
- while (retry) {
-#ifdef DEBUG
- std::cout<<"Sending the message to topic...\n"<<std::endl;
-#endif
- if (rd_kafka_produce(rkt, RD_KAFKA_PARTITION_UA, RD_KAFKA_MSG_F_COPY, (void *) message.c_str(), len, nullptr, 0,
- nullptr)) {
- retry = 1;
- rd_kafka_resp_err_t last_error = rd_kafka_last_error();
- std::cerr<<"%% Failed to produce to topic %s: %s\n"<<topic_name<<rd_kafka_err2str(last_error)<<std::endl;
- if (last_error == RD_KAFKA_RESP_ERR__QUEUE_FULL) {
- rd_kafka_poll(rk, 1000);
- } else {
- std::cerr<<"%% Enqueued message (%zd bytes) for topic %s\n"<<len<<topic_name<<std::endl;
- }
- } else {
- retry = 0;
- }
- }
- rd_kafka_poll(rk, 0/*non-blocking*/);
-}
-
-
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/KafkaWrapper.h b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/KafkaWrapper.h
deleted file mode 100644
index 804afa758..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/KafkaWrapper.h
+++ /dev/null
@@ -1,44 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-#ifndef NETOPEER_CHANGE_SAVER_CPP_KAFKAWRAPPER_H
-#define NETOPEER_CHANGE_SAVER_CPP_KAFKAWRAPPER_H
-#include "librdkafka/rdkafka.h"
-#include <string>
-
-class KafkaWrapper {
-private:
- char errstr[512];
- const char *brokers;
- const char *topic_name;
- rd_kafka_t *rk;
- rd_kafka_topic_t *rkt;
- rd_kafka_conf_t *conf;
-
- void init();
-
-public:
- KafkaWrapper(const char *brokers, const char *topic_name);
- ~KafkaWrapper();
- void kafka_send_message(std::string message);
-};
-
-
-#endif //NETOPEER_CHANGE_SAVER_CPP_KAFKAWRAPPER_H
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/SysrepoCallback.cpp b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/SysrepoCallback.cpp
deleted file mode 100644
index 225fe03a4..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/SysrepoCallback.cpp
+++ /dev/null
@@ -1,108 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-#include "SysrepoCallback.h"
-#define CREATED "CREATED"
-#define DELETED "DELETED"
-#define MODIFIED "MODIFIED"
-#define MOVED "MOVED"
-#define XPATH_MAX_LEN 100
-
-
-int SysrepoCallback::module_change(sysrepo::S_Session sess, const char *module_name, sr_notif_event_t event, void *private_ctx) {
- {
- if (event == SR_EV_APPLY) {
- char change_path[XPATH_MAX_LEN];
-
- try {
-#ifdef DEBUG
- std::cout << "\n ========== CHANGES: =============================================\n" << std::endl;
-#endif
- snprintf(change_path, XPATH_MAX_LEN, "/%s:*", module_name);
- auto it = sess->get_changes_iter(&change_path[0]);
- while (auto change = sess->get_change_next(it)) {
- std::string message = create_message(change);
- std::cout<<message<<std::endl;
- kafkaWrapper->kafka_send_message(message);
- }
-#ifdef DEBUG
- std::cout << "\n ========== END OF CHANGES =======================================\n" << std::endl;
-#endif
- } catch( const std::exception& e ) {
- std::cerr << e.what() << std::endl;
- }
- }
- return SR_ERR_OK;
- }
-}
-
-SysrepoCallback::SysrepoCallback(std::shared_ptr<KafkaWrapper> wrapper) {
- this->kafkaWrapper = wrapper;
-}
-
-std::string SysrepoCallback::create_message(sysrepo::S_Change change) {
- std::string change_details;
- sysrepo::S_Val new_val = change->new_val();
- sysrepo::S_Val old_val = change->old_val();
-
- switch (change->oper()) {
- case SR_OP_CREATED:
- if (nullptr != new_val) {
- change_details.append(CREATED).append(": ").append(new_val->to_string());
- }
- break;
- case SR_OP_DELETED:
- if (nullptr != old_val) {
- change_details.append(DELETED).append(": ").append(old_val->to_string());
- }
- break;
- case SR_OP_MODIFIED:
- if (nullptr != old_val && nullptr != new_val) {
- change_details.append(MODIFIED).append(": ").append(": old value: ").append(old_val->to_string())
- .append(", new value: ").append(new_val->to_string());
- }
- break;
- case SR_OP_MOVED:
- if (nullptr != old_val && nullptr != new_val) {
- change_details.append(MOVED).append(": ").append(new_val->to_string())
- .append(" after ").append(old_val->to_string());
- } else if (nullptr != new_val) {
- change_details.append(MOVED).append(": ").append(new_val->xpath()).append(" last");
- }
- break;
- }
- return change_details;
-}
-
-void SysrepoCallback::print_current_config(sysrepo::S_Session session, const char *module_name) {
- char select_xpath[XPATH_MAX_LEN];
- try {
- snprintf(select_xpath, XPATH_MAX_LEN, "/%s:*//*", module_name);
-
- auto values = session->get_items(&select_xpath[0]);
- if (values == nullptr)
- return;
-
- for(unsigned int i = 0; i < values->val_cnt(); i++)
- std::cout << values->val(i)->to_string();
- } catch( const std::exception& e ) {
- std::cout << e.what() << std::endl;
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/SysrepoCallback.h b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/SysrepoCallback.h
deleted file mode 100644
index 7d2cd7221..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/SysrepoCallback.h
+++ /dev/null
@@ -1,42 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-#ifndef NETOPEER_CHANGE_SAVER_CPP_SYSREPOCALLBACK_H
-#define NETOPEER_CHANGE_SAVER_CPP_SYSREPOCALLBACK_H
-#include "KafkaWrapper.h"
-#include "sysrepo/Session.hpp"
-#include <memory>
-
-class SysrepoCallback: public sysrepo::Callback {
-private:
- std::shared_ptr<KafkaWrapper> kafkaWrapper;
-
-public:
- explicit SysrepoCallback(std::shared_ptr<KafkaWrapper> wrapper);
- void print_current_config(sysrepo::S_Session session, const char *module_name);
-
-private:
- std::string create_message(sysrepo::S_Change change);
- int module_change(sysrepo::S_Session sess, const char *module_name, sr_notif_event_t event, void *private_ctx);
-
-};
-
-
-#endif //NETOPEER_CHANGE_SAVER_CPP_SYSREPOCALLBACK_H
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/main.cpp b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/main.cpp
deleted file mode 100644
index 0329f0552..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/main.cpp
+++ /dev/null
@@ -1,48 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-#include <iostream>
-#include <csignal>
-#include "Application.h"
-
-volatile int exit_application = 0;
-
-void sigint_handler(int signum) {
- std::cout << "Interrupt signal (" << signum << ") received." << std::endl;
- exit_application = 1;
-}
-
-int main(int argc, char *argv[]) {
- if (argc != 4) {
- std::cerr<<"Usage: "<<argv[0]<<" <module_name> <broker> <topic> "<<std::endl;
- return 1;
- }
-
- signal(SIGINT, sigint_handler);
-
- const char *module_name = argv[1];
- const char *brokers = argv[2];
- const char *topic_name = argv[3];
-
- Application application(module_name, brokers, topic_name);
- application.run();
-
- return 0;
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo.h b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo.h
deleted file mode 100644
index 9d541d1c0..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo.h
+++ /dev/null
@@ -1,2015 +0,0 @@
-/**
- * @file sysrepo.h
- * @author Rastislav Szabo <raszabo@cisco.com>, Lukas Macko <lmacko@cisco.com>
- * @brief Sysrepo Client Library public API.
- *
- * @copyright
- * Copyright 2015 Cisco Systems, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SYSREPO_H_
-#define SYSREPO_H_
-
-/**
- * @defgroup cl Client Library
- * @{
- *
- * @brief Provides the public API towards applications using sysrepo to store
- * their configuration data, or towards management agents.
- *
- * Communicates with Sysrepo Engine (@ref cm), which is running either inside
- * of dedicated sysrepo daemon, or within this library if daemon is not alive.
- *
- * Access to the sysrepo datastore is connection- and session- oriented. Before
- * calling any data access/manipulation API, one needs to connect to the datastore
- * via ::sr_connect and open a session via ::sr_session_start. One connection
- * can serve multiple sessions.
- *
- * Each data access/manipulation request call is blocking - blocks the connection
- * until the response from Sysrepo Engine comes, or until an error occurs. It is
- * safe to call multiple requests on the same session (or different session that
- * belongs to the same connection) from multiple threads at the same time,
- * however it is not effective, since each call is blocked until previous one
- * finishes. If you need fast multi-threaded access to sysrepo, use a dedicated
- * connection for each thread.
- *
- * @see
- * See @ref main_page "Sysrepo Introduction" for details about sysrepo architecture.
- * @see
- * @ref xp_page "XPath Addressing" is used for node identification in data-related calls.
- */
-
-#include <stdbool.h>
-#include <stdint.h>
-#include <stdlib.h>
-#include <time.h>
-#ifdef __APPLE__
- #include <sys/types.h>
-#endif
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-////////////////////////////////////////////////////////////////////////////////
-// Common typedefs and API
-////////////////////////////////////////////////////////////////////////////////
-
-/**
- * @brief Sysrepo connection context used to identify a connection to sysrepo datastore.
- */
-typedef struct sr_conn_ctx_s sr_conn_ctx_t;
-
-/**
- * @brief Sysrepo session context used to identify a configuration session.
- */
-typedef struct sr_session_ctx_s sr_session_ctx_t;
-
-/**
- * @brief Memory context used for efficient memory management for values, trees and GPB messages.
- */
-typedef struct sr_mem_ctx_s sr_mem_ctx_t;
-
-/**
- * @brief Possible types of an data element stored in the sysrepo datastore.
- */
-typedef enum sr_type_e {
- /* special types that does not contain any data */
- SR_UNKNOWN_T, /**< Element unknown to sysrepo (unsupported element). */
- SR_TREE_ITERATOR_T, /**< Special type of tree node used to store all data needed for iterative tree loading. */
-
- SR_LIST_T, /**< List instance. ([RFC 6020 sec 7.8](http://tools.ietf.org/html/rfc6020#section-7.8)) */
- SR_CONTAINER_T, /**< Non-presence container. ([RFC 6020 sec 7.5](http://tools.ietf.org/html/rfc6020#section-7.5)) */
- SR_CONTAINER_PRESENCE_T, /**< Presence container. ([RFC 6020 sec 7.5.1](http://tools.ietf.org/html/rfc6020#section-7.5.1)) */
- SR_LEAF_EMPTY_T, /**< A leaf that does not hold any value ([RFC 6020 sec 9.11](http://tools.ietf.org/html/rfc6020#section-9.11)) */
- SR_NOTIFICATION_T, /**< Notification instance ([RFC 7095 sec 7.16](https://tools.ietf.org/html/rfc7950#section-7.16)) */
-
- /* types containing some data */
- SR_BINARY_T, /**< Base64-encoded binary data ([RFC 6020 sec 9.8](http://tools.ietf.org/html/rfc6020#section-9.8)) */
- SR_BITS_T, /**< A set of bits or flags ([RFC 6020 sec 9.7](http://tools.ietf.org/html/rfc6020#section-9.7)) */
- SR_BOOL_T, /**< A boolean value ([RFC 6020 sec 9.5](http://tools.ietf.org/html/rfc6020#section-9.5)) */
- SR_DECIMAL64_T, /**< 64-bit signed decimal number ([RFC 6020 sec 9.3](http://tools.ietf.org/html/rfc6020#section-9.3)) */
- SR_ENUM_T, /**< A string from enumerated strings list ([RFC 6020 sec 9.6](http://tools.ietf.org/html/rfc6020#section-9.6)) */
- SR_IDENTITYREF_T, /**< A reference to an abstract identity ([RFC 6020 sec 9.10](http://tools.ietf.org/html/rfc6020#section-9.10)) */
- SR_INSTANCEID_T, /**< References a data tree node ([RFC 6020 sec 9.13](http://tools.ietf.org/html/rfc6020#section-9.13)) */
- SR_INT8_T, /**< 8-bit signed integer ([RFC 6020 sec 9.2](http://tools.ietf.org/html/rfc6020#section-9.2)) */
- SR_INT16_T, /**< 16-bit signed integer ([RFC 6020 sec 9.2](http://tools.ietf.org/html/rfc6020#section-9.2)) */
- SR_INT32_T, /**< 32-bit signed integer ([RFC 6020 sec 9.2](http://tools.ietf.org/html/rfc6020#section-9.2)) */
- SR_INT64_T, /**< 64-bit signed integer ([RFC 6020 sec 9.2](http://tools.ietf.org/html/rfc6020#section-9.2)) */
- SR_STRING_T, /**< Human-readable string ([RFC 6020 sec 9.4](http://tools.ietf.org/html/rfc6020#section-9.4)) */
- SR_UINT8_T, /**< 8-bit unsigned integer ([RFC 6020 sec 9.2](http://tools.ietf.org/html/rfc6020#section-9.2)) */
- SR_UINT16_T, /**< 16-bit unsigned integer ([RFC 6020 sec 9.2](http://tools.ietf.org/html/rfc6020#section-9.2)) */
- SR_UINT32_T, /**< 32-bit unsigned integer ([RFC 6020 sec 9.2](http://tools.ietf.org/html/rfc6020#section-9.2)) */
- SR_UINT64_T, /**< 64-bit unsigned integer ([RFC 6020 sec 9.2](http://tools.ietf.org/html/rfc6020#section-9.2)) */
- SR_ANYXML_T, /**< Unknown chunk of XML ([RFC 6020 sec 7.10](https://tools.ietf.org/html/rfc6020#section-7.10)) */
- SR_ANYDATA_T, /**< Unknown set of nodes, encoded in XML ([RFC 7950 sec 7.10](https://tools.ietf.org/html/rfc7950#section-7.10)) */
-} sr_type_t;
-
-/**
- * @brief Data of an element (if applicable), properly set according to the type.
- */
-typedef union sr_data_u {
- char *binary_val; /**< Base64-encoded binary data ([RFC 6020 sec 9.8](http://tools.ietf.org/html/rfc6020#section-9.8)) */
- char *bits_val; /**< A set of bits or flags ([RFC 6020 sec 9.7](http://tools.ietf.org/html/rfc6020#section-9.7)) */
- bool bool_val; /**< A boolean value ([RFC 6020 sec 9.5](http://tools.ietf.org/html/rfc6020#section-9.5)) */
- double decimal64_val; /**< 64-bit signed decimal number ([RFC 6020 sec 9.3](http://tools.ietf.org/html/rfc6020#section-9.3)) */
- char *enum_val; /**< A string from enumerated strings list ([RFC 6020 sec 9.6](http://tools.ietf.org/html/rfc6020#section-9.6)) */
- char *identityref_val; /**< A reference to an abstract identity ([RFC 6020 sec 9.10](http://tools.ietf.org/html/rfc6020#section-9.10)) */
- char *instanceid_val; /**< References a data tree node ([RFC 6020 sec 9.13](http://tools.ietf.org/html/rfc6020#section-9.13)) */
- int8_t int8_val; /**< 8-bit signed integer ([RFC 6020 sec 9.2](http://tools.ietf.org/html/rfc6020#section-9.2)) */
- int16_t int16_val; /**< 16-bit signed integer ([RFC 6020 sec 9.2](http://tools.ietf.org/html/rfc6020#section-9.2)) */
- int32_t int32_val; /**< 32-bit signed integer ([RFC 6020 sec 9.2](http://tools.ietf.org/html/rfc6020#section-9.2)) */
- int64_t int64_val; /**< 64-bit signed integer ([RFC 6020 sec 9.2](http://tools.ietf.org/html/rfc6020#section-9.2)) */
- char *string_val; /**< Human-readable string ([RFC 6020 sec 9.4](http://tools.ietf.org/html/rfc6020#section-9.4)) */
- uint8_t uint8_val; /**< 8-bit unsigned integer ([RFC 6020 sec 9.2](http://tools.ietf.org/html/rfc6020#section-9.2)) */
- uint16_t uint16_val; /**< 16-bit unsigned integer ([RFC 6020 sec 9.2](http://tools.ietf.org/html/rfc6020#section-9.2)) */
- uint32_t uint32_val; /**< 32-bit unsigned integer ([RFC 6020 sec 9.2](http://tools.ietf.org/html/rfc6020#section-9.2)) */
- uint64_t uint64_val; /**< 64-bit unsigned integer ([RFC 6020 sec 9.2](http://tools.ietf.org/html/rfc6020#section-9.2)) */
- char *anyxml_val; /**< Unknown chunk of XML ([RFC 6020 sec 7.10](https://tools.ietf.org/html/rfc6020#section-7.10)) */
- char *anydata_val; /**< Unknown set of nodes, encoded in XML ([RFC 7950 sec 7.10](https://tools.ietf.org/html/rfc7950#section-7.10)) */
-} sr_data_t;
-
-/**
- * @brief Structure that contains value of an data element stored in the sysrepo datastore.
- */
-typedef struct sr_val_s {
- /**
- * Memory context used internally by Sysrepo for efficient storage
- * and conversion of this structure.
- */
- sr_mem_ctx_t *_sr_mem;
-
- /**
- * XPath identifier of the data element, as defined in
- * @ref xp_page "Path Addressing" documentation
- */
- char *xpath;
-
- /** Type of an element. */
- sr_type_t type;
-
- /**
- * Flag for node with default value (applicable only for leaves).
- * It is set to TRUE only if the value was *implicitly* set by the datastore as per
- * module schema. Explicitly set/modified data element (through the sysrepo API) always
- * has this flag unset regardless of the entered value.
- */
- bool dflt;
-
- /** Data of an element (if applicable), properly set according to the type. */
- sr_data_t data;
-
-} sr_val_t;
-
-/**
- * @brief A data element stored in the sysrepo datastore represented as a tree node.
- *
- * @note Can be safely casted to ::sr_val_t, only *xpath* member will point to node name rather
- * than to an actual xpath.
- */
-typedef struct sr_node_s {
- /**
- * Memory context used internally by Sysrepo for efficient storage
- * and conversion of this structure.
- */
- sr_mem_ctx_t *_sr_mem;
-
- /** Name of the node. */
- char *name;
-
- /** Type of an element. */
- sr_type_t type;
-
- /** Flag for default node (applicable only for leaves). */
- bool dflt;
-
- /** Data of an element (if applicable), properly set according to the type. */
- sr_data_t data;
-
- /**
- * Name of the module that defines scheme of this node.
- * NULL if it is the same as that of the predecessor.
- */
- char *module_name;
-
- /** Pointer to the parent node (NULL in case of root node). */
- struct sr_node_s *parent;
-
- /** Pointer to the next sibling node (NULL if there is no one). */
- struct sr_node_s *next;
-
- /** Pointer to the previous sibling node (NULL if there is no one). */
- struct sr_node_s *prev;
-
- /** Pointer to the first child node (NULL if this is a leaf). */
- struct sr_node_s *first_child;
-
- /** Pointer to the last child node (NULL if this is a leaf). */
- struct sr_node_s *last_child;
-} sr_node_t;
-
-/**
- * @brief Sysrepo error codes.
- */
-typedef enum sr_error_e {
- SR_ERR_OK = 0, /**< No error. */
- SR_ERR_INVAL_ARG, /**< Invalid argument. */
- SR_ERR_NOMEM, /**< Not enough memory. */
- SR_ERR_NOT_FOUND, /**< Item not found. */
- SR_ERR_INTERNAL, /**< Other internal error. */
- SR_ERR_INIT_FAILED, /**< Sysrepo infra initialization failed. */
- SR_ERR_IO, /**< Input/Output error. */
- SR_ERR_DISCONNECT, /**< The peer disconnected. */
- SR_ERR_MALFORMED_MSG, /**< Malformed message. */
- SR_ERR_UNSUPPORTED, /**< Unsupported operation requested. */
- SR_ERR_UNKNOWN_MODEL, /**< Request includes unknown schema */
- SR_ERR_BAD_ELEMENT, /**< Unknown element in existing schema */
- SR_ERR_VALIDATION_FAILED, /**< Validation of the changes failed. */
- SR_ERR_OPERATION_FAILED, /**< An operation failed. */
- SR_ERR_DATA_EXISTS, /**< Item already exists. */
- SR_ERR_DATA_MISSING, /**< Item does not exists. */
- SR_ERR_UNAUTHORIZED, /**< Operation not authorized. */
- SR_ERR_INVAL_USER, /**< Invalid username. */
- SR_ERR_LOCKED, /**< Requested resource is already locked. */
- SR_ERR_TIME_OUT, /**< Time out has expired. */
- SR_ERR_RESTART_NEEDED, /**< Sysrepo Engine restart is needed. */
- SR_ERR_VERSION_MISMATCH, /**< Incompatible client library used to communicate with sysrepo. */
-} sr_error_t;
-
-/**
- * @brief Detailed sysrepo error information.
- */
-typedef struct sr_error_info_s {
- const char *message; /**< Error message. */
- const char *xpath; /**< XPath to the node where the error has been discovered. */
-} sr_error_info_t;
-
-/**
- * @brief Returns the error message corresponding to the error code.
- *
- * @param[in] err_code Error code.
- *
- * @return Error message (statically allocated, do not free).
- */
-const char *sr_strerror(int err_code);
-
-/**
- * @brief Log levels used to determine if message of certain severity should be printed.
- */
-typedef enum {
- SR_LL_NONE, /**< Do not print any messages. */
- SR_LL_ERR, /**< Print only error messages. */
- SR_LL_WRN, /**< Print error and warning messages. */
- SR_LL_INF, /**< Besides errors and warnings, print some other informational messages. */
- SR_LL_DBG, /**< Print all messages including some development debug messages. */
-} sr_log_level_t;
-
-/**
- * @brief Enables / disables / changes log level (verbosity) of logging to
- * standard error output.
- *
- * By default, logging to stderr is disabled. Setting log level to any value
- * other than SR_LL_NONE enables the logging to stderr. Setting log level
- * back to SR_LL_NONE disables the logging to stderr.
- *
- * @param[in] log_level requested log level (verbosity).
- */
-void sr_log_stderr(sr_log_level_t log_level);
-
-/**
- * @brief Enables / disables / changes log level (verbosity) of logging to system log.
- *
- * By default, logging into syslog is disabled. Setting log level to any value
- * other than SR_LL_NONE enables the logging into syslog. Setting log level
- * back to SR_LL_NONE disables the logging into syslog.
- *
- * @note Please note that enabling logging into syslog will overwrite your syslog
- * connection settings (calls openlog), if you are connected to syslog already.
- *
- * @param[in] log_level requested log level (verbosity).
- */
-void sr_log_syslog(sr_log_level_t log_level);
-
-/**
- * @brief Sets callback that will be called when a log entry would be populated.
- *
- * @param[in] level Verbosity level of the log entry.
- * @param[in] message Message of the log entry.
- */
-typedef void (*sr_log_cb)(sr_log_level_t level, const char *message);
-
-/**
- * @brief Sets callback that will be called when a log entry would be populated.
- * Callback will be called for each message with any log level.
- *
- * @param[in] log_callback Callback to be called when a log entry would populated.
- */
-void sr_log_set_cb(sr_log_cb log_callback);
-
-
-////////////////////////////////////////////////////////////////////////////////
-// Connection / Session Management
-////////////////////////////////////////////////////////////////////////////////
-
-/**
- * @brief Flags used to override default connection handling by ::sr_connect call.
- */
-typedef enum sr_conn_flag_e {
- SR_CONN_DEFAULT = 0, /**< Default behavior - instantiate library-local Sysrepo Engine if
- the connection to sysrepo daemon is not possible. */
- SR_CONN_DAEMON_REQUIRED = 1, /**< Require daemon connection - do not instantiate library-local Sysrepo Engine
- if the library cannot connect to the sysrepo daemon (and return an error instead). */
- SR_CONN_DAEMON_START = 2, /**< If sysrepo daemon is not running, and SR_CONN_DAEMON_REQUIRED was specified,
- start it (only if the process calling ::sr_connect is running under root privileges). */
-} sr_conn_flag_t;
-
-/**
- * @brief Options overriding default connection handling by ::sr_connect call,
- * it is supposed to be bitwise OR-ed value of any ::sr_conn_flag_t flags.
- */
-typedef uint32_t sr_conn_options_t;
-
-/**
- * @brief Flags used to override default session handling (used by ::sr_session_start
- * and ::sr_session_start_user calls).
- */
-typedef enum sr_session_flag_e {
- SR_SESS_DEFAULT = 0, /**< Default (normal) session behavior. */
- SR_SESS_CONFIG_ONLY = 1, /**< Session will process only configuration data (e.g. sysrepo won't
- return any state data by ::sr_get_items / ::sr_get_items_iter calls). */
- SR_SESS_ENABLE_NACM = 2, /**< Enable NETCONF access control for this session (disabled by default). */
-
- SR_SESS_MUTABLE_OPTS = 3 /**< Bit-mask of options that can be set by the user
- (immutable flags are defined in sysrepo.proto file). */
-} sr_session_flag_t;
-
-/**
- * @brief Options overriding default connection session handling,
- * it is supposed to be bitwise OR-ed value of any ::sr_session_flag_t flags.
- */
-typedef uint32_t sr_sess_options_t;
-
-/**
- * @brief Data stores that sysrepo supports. Both are editable via implicit candidate.
- * To make changes permanent in edited datastore ::sr_commit must be issued.
- * @see @ref ds_page "Datastores & Sessions" information page.
- */
-typedef enum sr_datastore_e {
- SR_DS_STARTUP = 0, /**< Contains configuration data that should be loaded by the controlled application when it starts. */
- SR_DS_RUNNING = 1, /**< Contains currently applied configuration and state data of a running application.
- @note This datastore is supported only by applications that subscribe for notifications
- about the changes made in the datastore (e.g. ::sr_module_change_subscribe). */
- SR_DS_CANDIDATE = 2, /**< Contains configuration that can be manipulated without impacting the current configuration.
- Its content is set to the content of running datastore by default. Changes made within
- the candidate can be later committed to the running datastore or copied to any datastore.
-
- @note The main difference between working with running and candidate datastore is in commit
- operation - commit of candidate session causes the content of running configuration to be set
- the value of the candidate configuration (running datastore is overwritten), whereas commit of
- runnnig session merges the changes made within the session with the actual state of running. */
-} sr_datastore_t;
-
-/**
- * @brief Connects to the sysrepo datastore (Sysrepo Engine).
- *
- * @note If the client library loses connection to the Sysrepo Engine during
- * the lifetime of the application, all Sysrepo API calls will start returning
- * ::SR_ERR_DISCONNECT error on active sessions. In this case, the application is supposed to reconnect
- * with another ::sr_connect call and restart all lost sessions.
- *
- * @param[in] app_name Name of the application connecting to the datastore
- * (can be a static string). Used only for accounting purposes.
- * @param[in] opts Options overriding default connection handling by this call.
- * @param[out] conn_ctx Connection context that can be used for subsequent API calls
- * (automatically allocated, it is supposed to be released by the caller using ::sr_disconnect).
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_connect(const char *app_name, const sr_conn_options_t opts, sr_conn_ctx_t **conn_ctx);
-
-/**
- * @brief Disconnects from the sysrepo datastore (Sysrepo Engine).
- *
- * Cleans up and frees connection context allocated by ::sr_connect. All sessions
- * started within the connection will be automatically stopped and cleaned up too.
- *
- * @param[in] conn_ctx Connection context acquired with ::sr_connect call.
- */
-void sr_disconnect(sr_conn_ctx_t *conn_ctx);
-
-/**
- * @brief Starts a new configuration session.
- *
- * @see @ref ds_page "Datastores & Sessions" for more information about datastores and sessions.
- *
- * @param[in] conn_ctx Connection context acquired with ::sr_connect call.
- * @param[in] datastore Datastore on which all sysrepo functions within this
- * session will operate. Later on, datastore can be later changed using
- * ::sr_session_switch_ds call. Functionality of some sysrepo calls does not depend on
- * datastore. If your session will contain just calls like these, you can pass
- * any valid value (e.g. SR_RUNNING).
- * @param[in] opts Options overriding default session handling.
- * @param[out] session Session context that can be used for subsequent API
- * calls (automatically allocated, can be released by calling ::sr_session_stop).
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_session_start(sr_conn_ctx_t *conn_ctx, const sr_datastore_t datastore,
- const sr_sess_options_t opts, sr_session_ctx_t **session);
-
-/**
- * @brief Starts a new configuration session on behalf of a different user.
- *
- * This call is intended for northbound access to sysrepo from management
- * applications, that need sysrepo to authorize the operations not only
- * against the user under which the management application is running, but
- * also against another user (e.g. user that connected to the management application).
- *
- * @note Be aware that authorization of specified user may fail with unexpected
- * errors in case that the client library uses its own Sysrepo Engine at the
- * moment and your process in not running under root privileges. To prevent
- * this situation, consider specifying SR_CONN_DAEMON_REQUIRED flag by
- * ::sr_connect call or using ::sr_session_start instead of this function.
- *
- * @see @ref ds_page "Datastores & Sessions" for more information about datastores and sessions.
- *
- * @param[in] conn_ctx Connection context acquired with ::sr_connect call.
- * @param[in] user_name Effective user name used to authorize the access to
- * datastore (in addition to automatically-detected real user name).
- * @param[in] datastore Datastore on which all sysrepo functions within this
- * session will operate. Functionality of some sysrepo calls does not depend on
- * datastore. If your session will contain just calls like these, you can pass
- * any valid value (e.g. SR_RUNNING).
- * @param[in] opts Options overriding default session handling.
- * @param[out] session Session context that can be used for subsequent API calls
- * (automatically allocated, it is supposed to be released by caller using ::sr_session_stop).
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_session_start_user(sr_conn_ctx_t *conn_ctx, const char *user_name, const sr_datastore_t datastore,
- const sr_sess_options_t opts, sr_session_ctx_t **session);
-
-/**
- * @brief Stops current session and releases resources tied to the session.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_session_stop(sr_session_ctx_t *session);
-
-/**
- * @brief Refreshes configuration data cached within the session and starts
- * operating on fresh data loaded from the datastore.
- *
- * Call this function in case that you leave session open for longer time period
- * and you expect that the data in the datastore may have been changed since
- * last data (re)load (which occurs by ::sr_session_start, ::sr_commit and
- * ::sr_discard_changes).
- *
- * @see @ref ds_page "Datastores & Sessions" for information about session data caching.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_session_refresh(sr_session_ctx_t *session);
-
-/**
- * @brief Checks aliveness and validity of the session & connection tied to it.
- *
- * If the connection to the Sysrepo Engine has been lost in the meantime, returns SR_ERR_DICONNECT.
- * In this case, the application is supposed to stop the session (::sr_session_stop), disconnect (::sr_disconnect)
- * and then reconnect (::sr_connect) and start a new session (::sr_session_start).
- *
- * @note If the client library loses connection to the Sysrepo Engine during the lifetime of the application,
- * all Sysrepo API calls will start returning SR_ERR_DISCONNECT error on active sessions. This is the primary
- * mechanism that can be used to detect connection issues, ::sr_session_check is just an addition to it. Since
- * ::sr_session_check sends a message to the Sysrepo Engine and waits for the response, it costs some extra overhead
- * in contrast to catching SR_ERR_DISCONNECT error.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- *
- * @return Error code (SR_ERR_OK in case that the session is healthy,
- * SR_ERR_DICONNECT in case that connection to the Sysrepo Engine has been lost).
- */
-int sr_session_check(sr_session_ctx_t *session);
-
-/**
- * @brief Changes datastore to which the session is tied to. All subsequent
- * calls will be issued on the chosen datastore.
- *
- * @param [in] session
- * @param [in] ds
- * @return Error code (SR_ERR_OK on success)
- */
-int sr_session_switch_ds(sr_session_ctx_t *session, sr_datastore_t ds);
-
-/**
- * @brief Alter the session options. E.g.: set/unset SR_SESS_CONFIG_ONLY flag.
- *
- * @param [in] session
- * @param [in] opts - new value for session options
- * @return Error code (SR_ERR_OK on success)
- */
-int sr_session_set_options(sr_session_ctx_t *session, const sr_sess_options_t opts);
-
-/**
- * @brief Retrieves detailed information about the error that has occurred
- * during the last operation executed within provided session.
- *
- * If multiple errors has occurred within the last operation, only the first
- * one is returned. This call is sufficient for all data retrieval and data
- * manipulation functions that operate on single-item basis. For operations
- * such as ::sr_validate or ::sr_commit where multiple errors can occur,
- * use ::sr_get_last_errors instead.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[out] error_info Detailed error information. Be aware that
- * returned pointer may change by the next API call executed within the provided
- * session, so it's not safe to use this function by concurrent access to the
- * same session within multiple threads. Do not free or modify returned values.
- *
- * @return Error code of the last operation executed within provided session.
- */
-int sr_get_last_error(sr_session_ctx_t *session, const sr_error_info_t **error_info);
-
-/**
- * @brief Retrieves detailed information about all errors that have occurred
- * during the last operation executed within provided session.
- *
- * Use this call instead of ::sr_get_last_error by operations where multiple
- * errors can occur, such as ::sr_validate or ::sr_commit.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[out] error_info Array of detailed error information. Be aware that
- * returned pointer may change by the next API call executed within the provided
- * session, so it's not safe to use this function by concurrent access to the
- * same session within multiple threads. Do not free or modify returned values.
- * @param[out] error_cnt Number of errors returned in the error_info array.
- *
- * @return Error code of the last operation executed within provided session.
- */
-int sr_get_last_errors(sr_session_ctx_t *session, const sr_error_info_t **error_info, size_t *error_cnt);
-
-/**
- * @brief Sets detailed error information into provided session. Used to notify
- * the client library about errors that occurred in application code.
- *
- * @note Intended for commit verifiers (notification session) - the call has no
- * impact on any other sessions.
- *
- * @param[in] session Session context passed into notification callback.
- * @param[in] message Human-readable error message.
- * @param[in] xpath XPath to the node where the error has occurred. NULL value
- * is also accepted.
- *
- * @return Error code (SR_ERR_OK on success)
- */
-int sr_set_error(sr_session_ctx_t *session, const char *message, const char *xpath);
-
-/**
- * @brief Returns the assigned id of the session. Can be used to pair the session with
- * netconf-config-change notification initiator.
- * @param [in] session
- * @return session id or 0 in case of error
- */
-uint32_t sr_session_get_id(sr_session_ctx_t *session);
-
-////////////////////////////////////////////////////////////////////////////////
-// Data Retrieval API (get / get-config functionality)
-////////////////////////////////////////////////////////////////////////////////
-
-/**
- * @brief Structure that contains information about one particular schema file installed in sysrepo.
- */
-typedef struct sr_sch_revision_s {
- const char *revision; /**< Revision of the module/submodule. */
- const char *file_path_yang; /**< Absolute path to file where the module/submodule is stored (YANG format). */
- const char *file_path_yin; /**< Absolute path to file where the module/submodule is stored (.yin format). */
-} sr_sch_revision_t;
-
-/**
- * @brief Structure that contains information about submodules of a module installed in sysrepo.
- */
-typedef struct sr_sch_submodule_s {
- const char *submodule_name; /**< Submodule name. */
- sr_sch_revision_t revision; /**< Revision of the submodule. */
-} sr_sch_submodule_t;
-
-/**
- * @brief Structure that contains information about a module installed in sysrepo.
- */
-typedef struct sr_schema_s {
- /**
- * Memory context used internally by Sysrepo for efficient storage
- * and conversion of this structure.
- */
- sr_mem_ctx_t *_sr_mem;
-
- const char *module_name; /**< Name of the module. */
- const char *ns; /**< Namespace of the module used in @ref xp_page "XPath". */
- const char *prefix; /**< Prefix of the module. */
- bool installed; /**< TRUE if the module was explicitly installed. */
- bool implemented; /**< TRUE if the module is implemented (does not have to be installed),
- not just imported. */
-
- sr_sch_revision_t revision; /**< Revision the module. */
-
- sr_sch_submodule_t *submodules; /**< Array of all installed submodules of the module. */
- size_t submodule_count; /**< Number of module's submodules. */
-
- char **enabled_features; /**< Array of enabled features */
- size_t enabled_feature_cnt; /**< Number of enabled feature */
-} sr_schema_t;
-
-/**
- * @brief Format types of ::sr_get_schema result
- */
-typedef enum sr_schema_format_e {
- SR_SCHEMA_YANG, /**< YANG format */
- SR_SCHEMA_YIN /**< YIN format */
-} sr_schema_format_t;
-
-/**
- * @brief Iterator used for accessing data nodes via ::sr_get_items_iter call.
- */
-typedef struct sr_val_iter_s sr_val_iter_t;
-
-/**
- * @brief Retrieves list of schemas installed in the sysrepo datastore.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[out] schemas Array of installed schemas information (allocated by
- * the function, it is supposed to be freed by caller using ::sr_free_schemas call).
- * @param[out] schema_cnt Number of schemas returned in the array.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_list_schemas(sr_session_ctx_t *session, sr_schema_t **schemas, size_t *schema_cnt);
-
-/**
- * @brief Retrieves the content of specified schema file. If the module
- * can not be found SR_ERR_NOT_FOUND is returned.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] module_name Name of the requested module.
- * @param[in] revision Requested revision of the module. If NULL
- * is passed, the latest revision will be returned.
- * @param[in] submodule_name Name of the requested submodule. Pass NULL if you are
- * requesting the content of the main module.
- * @param[in] format of the returned schema
- * @param[out] schema_content Content of the specified schema file. Automatically
- * allocated by the function, should be freed by the caller.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_get_schema(sr_session_ctx_t *session, const char *module_name, const char *revision,
- const char *submodule_name, sr_schema_format_t format, char **schema_content);
-
-/**
- * @brief Retrieves the content of the specified submodule schema file. If the submodule
- * cannot be found, SR_ERR_NOT_FOUND is returned.
- *
- * @param[in] session Session context acquired from ::sr_session_start call.
- * @param[in] submodule_name Name of the requested submodule.
- * @param[in] submodule_revision Requested revision of the submodule. If NULL
- * is passed, the latest revision will be returned.
- * @param[in] format of the returned schema.
- * @param[out] schema_content Content of the specified schema file. Automatically
- * allocated by the function, should be freed by the caller.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_get_submodule_schema(sr_session_ctx_t *session, const char *submodule_name, const char *submodule_revision,
- sr_schema_format_t format, char **schema_content);
-
-/**
- * @brief Retrieves a single data element stored under provided XPath. If multiple
- * nodes matches the xpath SR_ERR_INVAL_ARG is returned.
- *
- * If the xpath identifies an empty leaf, a list or a container, the value
- * has no data filled in and its type is set properly (SR_LEAF_EMPTY_T / SR_LIST_T / SR_CONTAINER_T / SR_CONTAINER_PRESENCE_T).
- *
- * @see @ref xp_page "Path Addressing" documentation, or
- * https://tools.ietf.org/html/draft-ietf-netmod-yang-json#section-6.11
- * for XPath syntax used for identification of yang nodes in sysrepo calls.
- *
- * @see Use ::sr_get_items or ::sr_get_items_iter for retrieving larger chunks
- * of data from the datastore. Since they retrieve the data from datastore in
- * larger chunks, they can work much more efficiently than multiple ::sr_get_item calls.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifier of the data element to be retrieved.
- * @param[out] value Structure containing information about requested element
- * (allocated by the function, it is supposed to be freed by the caller using ::sr_free_val).
- *
- * @return Error code (SR_ERR_OK on success)
- */
-int sr_get_item(sr_session_ctx_t *session, const char *xpath, sr_val_t **value);
-
-/**
- * @brief Retrieves an array of data elements matching provided XPath
- *
- * All data elements are transferred within one message from the datastore,
- * which is much more efficient that calling multiple ::sr_get_item calls.
- *
- * If the user does not have read permission to access certain nodes, these
- * won't be part of the result. SR_ERR_NOT_FOUND will be returned if there are
- * no nodes matching xpath in the data tree, or the user does not have read permission to access them.
- *
- * If the response contains too many elements time out may be exceeded, SR_ERR_TIME_OUT
- * will be returned, use ::sr_get_items_iter.
- *
- * @see @ref xp_page "Path Addressing" documentation
- * for Path syntax used for identification of yang nodes in sysrepo calls.
- *
- * @see ::sr_get_items_iter can be used for the same purpose as ::sr_get_items
- * call if you expect that ::sr_get_items could return too large data sets.
- * Since ::sr_get_items_iter also retrieves the data from datastore in larger chunks,
- * in can still work very efficiently for large datasets.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifier of the data element to be retrieved.
- * @param[out] values Array of structures containing information about requested data elements
- * (allocated by the function, it is supposed to be freed by the caller using ::sr_free_values).
- * @param[out] value_cnt Number of returned elements in the values array.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_get_items(sr_session_ctx_t *session, const char *xpath, sr_val_t **values, size_t *value_cnt);
-
-/**
- * @brief Creates an iterator for retrieving of the data elements stored under provided xpath.
- *
- * Requested data elements are transferred from the datastore in larger chunks
- * of pre-defined size, which is much more efficient that calling multiple
- * ::sr_get_item calls, and may be less memory demanding than calling ::sr_get_items
- * on very large datasets.
- *
- * @see @ref xp_page "Path Addressing" documentation, or
- * https://tools.ietf.org/html/draft-ietf-netmod-yang-json#section-6.11
- * for XPath syntax used for identification of yang nodes in sysrepo calls.
- *
- * @see ::sr_get_item_next for iterating over returned data elements.
- * @note Iterator allows to iterate through the values once. To start iteration
- * from the beginning new iterator must be created.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifier of the data element / subtree to be retrieved.
- * @param[out] iter Iterator context that can be used to retrieve individual data
- * elements via ::sr_get_item_next calls. Allocated by the function, should be
- * freed with ::sr_free_val_iter.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_get_items_iter(sr_session_ctx_t *session, const char *xpath, sr_val_iter_t **iter);
-
-/**
- * @brief Returns the next item from the dataset of provided iterator created
- * by ::sr_get_items_iter call. If there is no item left SR_ERR_NOT_FOUND is returned.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in,out] iter Iterator acquired with ::sr_get_items_iter call.
- * @param[out] value Structure containing information about requested element
- * (allocated by the function, it is supposed to be freed by the caller using ::sr_free_val).
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_get_item_next(sr_session_ctx_t *session, sr_val_iter_t *iter, sr_val_t **value);
-
-/**
- * @brief Flags used to customize the behaviour of ::sr_get_subtree and ::sr_get_subtrees calls.
- */
-typedef enum sr_get_subtree_flag_e {
- /**
- * Default get-subtree(s) behaviour.
- * All matched subtrees are sent with all their content in one message.
- */
- SR_GET_SUBTREE_DEFAULT = 0,
-
- /**
- * The iterative get-subtree(s) behaviour.
- * The matched subtrees are sent in chunks and only as needed while they are iterated
- * through using functions ::sr_node_get_child, ::sr_node_get_next_sibling and
- * ::sr_node_get_parent from "sysrepo/trees.h". This behaviour gives much better
- * performance than the default one if only a small portion of matched subtree(s) is
- * actually iterated through.
- * @note It is considered a programming error to access \p next, \p prev, \p parent,
- * \p first_child and \p last_child data members of ::sr_node_t on a partially loaded tree.
- */
- SR_GET_SUBTREE_ITERATIVE = 1
-} sr_get_subtree_flag_t;
-
-/**
- * @brief Options for get-subtree and get-subtrees operations.
- * It is supposed to be bitwise OR-ed value of any ::sr_get_subtree_flag_t flags.
- */
-typedef uint32_t sr_get_subtree_options_t;
-
-/**
- * @brief Retrieves a single subtree whose root node is stored under the provided XPath.
- * If multiple nodes matches the xpath SR_ERR_INVAL_ARG is returned.
- *
- * The functions returns values and all associated information stored under the root node and
- * all its descendants. While the same data can be obtained using ::sr_get_items in combination
- * with the expressive power of XPath addressing, the recursive nature of the output data type
- * also preserves the hierarchical relationships between data elements.
- *
- * Values of internal nodes of the subtree have no data filled in and their type is set properly
- * (SR_LIST_T / SR_CONTAINER_T / SR_CONTAINER_PRESENCE_T), whereas leaf nodes are carrying actual
- * data (apart from SR_LEAF_EMPTY_T).
- *
- * @see @ref xp_page "Path Addressing" documentation
- * for XPath syntax used for identification of yang nodes in sysrepo calls.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifier referencing the root node of the subtree to be retrieved.
- * @param[in] opts Options overriding default behavior of this operation.
- * @param[out] subtree Nested structure storing all data of the requested subtree
- * (allocated by the function, it is supposed to be freed by the caller using ::sr_free_tree).
- *
- * @return Error code (SR_ERR_OK on success)
- */
-int sr_get_subtree(sr_session_ctx_t *session, const char *xpath, sr_get_subtree_options_t opts,
- sr_node_t **subtree);
-
-/**
- * @brief Retrieves an array of subtrees whose root nodes match the provided XPath.
- *
- * If the user does not have read permission to access certain nodes, these together with
- * their descendants won't be part of the result. SR_ERR_NOT_FOUND will be returned if there are
- * no nodes matching xpath in the data tree, or the user does not have read permission to access them.
- *
- * Subtrees that match the provided XPath are not merged even if they overlap. This significantly
- * simplifies the implementation and decreases the cost of this operation. The downside is that
- * the user must choose the XPath carefully. If the subtree selection process results in too many
- * node overlaps, the cost of the operation may easily outshine the benefits. As an example,
- * a common XPath expression "//." is normally used to select all nodes in a data tree, but for this
- * operation it would result in an excessive duplication of transfered data elements.
- * Since you get all the descendants of each matched node implicitly, you probably should not need
- * to use XPath wildcards deeper than on the top-level.
- * (i.e. "/." is preferred alternative to "//." for get-subtrees operation).
- *
- * If the response contains too many elements time out may be exceeded, SR_ERR_TIME_OUT
- * will be returned.
- *
- * @see @ref xp_page "Path Addressing" documentation, or
- * https://tools.ietf.org/html/draft-ietf-netmod-yang-json#section-6.11
- * for XPath syntax used for identification of yang nodes in sysrepo calls.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifier referencing root nodes of subtrees to be retrieved.
- * @param[in] opts Options overriding default behavior of this operation.
- * @param[out] subtrees Array of nested structures storing all data of the requested subtrees
- * (allocated by the function, it is supposed to be freed by the caller using ::sr_free_trees).
- * @param[out] subtree_cnt Number of returned trees in the subtrees array.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_get_subtrees(sr_session_ctx_t *session, const char *xpath, sr_get_subtree_options_t opts,
- sr_node_t **subtrees, size_t *subtree_cnt);
-
-
-////////////////////////////////////////////////////////////////////////////////
-// Data Manipulation API (edit-config functionality)
-////////////////////////////////////////////////////////////////////////////////
-
-/**
- * @brief Flags used to override default behavior of data manipulation calls.
- */
-typedef enum sr_edit_flag_e {
- SR_EDIT_DEFAULT = 0, /**< Default behavior - recursive and non-strict. */
- SR_EDIT_NON_RECURSIVE = 1, /**< Non-recursive behavior:
- by ::sr_set_item, all preceding nodes (parents) of the identified element must exist,
- by ::sr_delete_item xpath must not identify an non-empty list or non-empty container. */
- SR_EDIT_STRICT = 2 /**< Strict behavior:
- by ::sr_set_item the identified element must not exist (similar to netconf create operation),
- by ::sr_delete_item the identified element must exist (similar to netconf delete operation). */
-} sr_edit_flag_t;
-
-/**
- * @brief Options overriding default behavior of data manipulation calls,
- * it is supposed to be bitwise OR-ed value of any ::sr_edit_flag_t flags.
- */
-typedef uint32_t sr_edit_options_t;
-
-/**
- * @brief Options for specifying move direction of ::sr_move_item call.
- */
-typedef enum sr_move_position_e {
- SR_MOVE_BEFORE = 0, /**< Move the specified item before the selected sibling. */
- SR_MOVE_AFTER = 1, /**< Move the specified item after the selected. */
- SR_MOVE_FIRST = 2, /**< Move the specified item to the position of the first child. */
- SR_MOVE_LAST = 3, /**< Move the specified item to the position of the last child. */
-} sr_move_position_t;
-
-/**
- * @brief Sets the value of the leaf, leaf-list, list or presence container.
- *
- * With default options it recursively creates all missing nodes (containers and
- * lists including their key leaves) in the xpath to the specified node (can be
- * turned off with SR_EDIT_NON_RECURSIVE option). If SR_EDIT_STRICT flag is set,
- * the node must not exist (otherwise an error is returned).
- *
- * To create a list use xpath with key values included and pass NULL as value argument.
- *
- * Setting of a leaf-list value appends the value at the end of the leaf-list.
- * A value of leaf-list can be specified either by predicate in xpath or by value argument.
- * If both are present, value argument is ignored and xpath predicate is used.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifier of the data element to be set.
- * @param[in] value Value to be set on specified xpath. xpath member of the
- * ::sr_val_t structure can be NULL. Value will be copied - can be allocated on stack.
- * @param[in] opts Options overriding default behavior of this call.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_set_item(sr_session_ctx_t *session, const char *xpath, const sr_val_t *value, const sr_edit_options_t opts);
-
-
-/**
- * @brief Functions is similar to ::sr_set_item with the difference that the value to be set
- * is provided as string.
- * @param [in] session Session context acquired with ::sr_session_start call.
- * @param [in] xpath @ref xp_page "Data Path" identifier of the data element to be set.
- * @param [in] value string representation of the value to be set
- * @param [in] opts same as for ::sr_set_item
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_set_item_str(sr_session_ctx_t *session, const char *xpath, const char *value, const sr_edit_options_t opts);
-/**
- * @brief Deletes the nodes under the specified xpath.
- *
- * To delete non-empty lists or containers SR_EDIT_NON_RECURSIVE flag must not be set.
- * If SR_EDIT_STRICT flag is set the specified node must must exist in the datastore.
- * If the xpath includes the list keys, the specified list instance is deleted.
- * If the xpath to list does not include keys, all instances of the list are deleted.
- * SR_ERR_UNAUTHORIZED will be returned if the user does not have write permission to any affected node.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifier of the data element to be deleted.
- * @param[in] opts Options overriding default behavior of this call.
- *
- * @return Error code (SR_ERR_OK on success).
- **/
-int sr_delete_item(sr_session_ctx_t *session, const char *xpath, const sr_edit_options_t opts);
-
-/**
- * @brief Move the instance of an user-ordered list or leaf-list to the specified position.
- *
- * Item can be move to the first or last position or positioned relatively to its sibling.
- * @note To determine current order, you can issue a ::sr_get_items call
- * (without specifying keys of the list in question).
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifier of the data element to be moved.
- * @param[in] position Requested move direction.
- * @param[in] relative_item xpath Identifier of the data element that is used
- * to determine relative position, used only if position argument is SR_MOVE_BEFORE or SR_MOVE_AFTER.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_move_item(sr_session_ctx_t *session, const char *xpath, const sr_move_position_t position, const char *relative_item);
-
-/**
- * @brief Perform the validation of changes made in current session, but do not
- * commit nor discard them.
- *
- * Provides only YANG validation, commit verify subscribers won't be notified in this case.
- *
- * @see Use ::sr_get_last_errors to retrieve error information if the validation
- * returned with an error.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_validate(sr_session_ctx_t *session);
-
-/**
- * @brief Apply changes made in current session.
- *
- * @note Note that in case that you are committing to the running datstore, you also
- * need to copy the config to startup to make changes permanent after restart.
- *
- * @see Use ::sr_get_last_errors to retrieve error information if the commit
- * operation returned with an error.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_commit(sr_session_ctx_t *session);
-
-/**
- * @brief Discard non-committed changes made in current session.
- *
- * @note Since the function effectively clears all the cached data within the session,
- * the next operation will operate on fresh data loaded from the datastore
- * (i.e. no need to call ::sr_session_refresh afterwards).
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_discard_changes(sr_session_ctx_t *session);
-
-/**
- * @brief Replaces an entire configuration datastore with the contents of
- * another complete configuration datastore. If the module is specified, limits
- * the copy operation only to one specified module. If it's not specified,
- * the operation is performed on all modules that are currently active in the
- * source datastore.
- *
- * If the target datastore exists, it is overwritten. Otherwise, a new one is created.
- *
- * @note ::sr_session_refresh is needed to see the result of a copy-config operation
- * in a session apart from the case when SR_DS_CANDIDATE is the destination datastore.
- * Since the candidate is not shared among sessions, data trees are copied only to the
- * canidate in the session issuing the copy-config operation.
- *
- * @note Operation may fail, if it tries to copy a not enabled configuration to the
- * running datastore.
- *
- * @note \p session \p dst_datastore uncommitted changes will get discarded.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] module_name If specified, only limits the copy operation only to
- * one specified module.
- * @param[in] src_datastore Source datastore.
- * @param[in] dst_datastore Destination datastore.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_copy_config(sr_session_ctx_t *session, const char *module_name,
- sr_datastore_t src_datastore, sr_datastore_t dst_datastore);
-
-
-////////////////////////////////////////////////////////////////////////////////
-// Locking API
-////////////////////////////////////////////////////////////////////////////////
-
-/**
- * @brief Locks the datastore which the session is tied to. If there is
- * a module locked by the other session SR_ERR_LOCKED is returned.
- * Operation fails if there is a modified data tree in session.
- *
- * All data models within the datastore will be locked for writing until
- * ::sr_unlock_datastore is called or until the session is stopped or terminated
- * for any reason.
- *
- * The lock operation will not be allowed if the user does not have sufficient
- * permissions for writing into each of the data models in the datastore.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_lock_datastore(sr_session_ctx_t *session);
-
-/**
- * @brief Unlocks the datastore which the session is tied to.
- *
- * All data models within the datastore will be unlocked if they were locked
- * by this session.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_unlock_datastore(sr_session_ctx_t *session);
-
-/**
- * @brief Locks specified data module within the datastore which the session
- * is tied to. Operation fails if the data tree has been modified.
- *
- * Specified data module will be locked for writing in the datastore until
- * ::sr_unlock_module is called or until the session is stopped or terminated
- * for any reason.
- *
- * The lock operation will not be allowed if the user does not have sufficient
- * permissions for writing into the specified data module.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] module_name Name of the module to be locked.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_lock_module(sr_session_ctx_t *session, const char *module_name);
-
-/**
- * @brief Unlocks specified data module within the datastore which the session
- * is tied to.
- *
- * Specified data module will be unlocked if was locked in the datastore
- * by this session.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] module_name Name of the module to be unlocked.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_unlock_module(sr_session_ctx_t *session, const char *module_name);
-
-
-////////////////////////////////////////////////////////////////////////////////
-// Change Notifications API
-////////////////////////////////////////////////////////////////////////////////
-
-/**
- * @brief Flags used to override default handling of subscriptions.
- */
-typedef enum sr_subscr_flag_e {
- /**
- * @brief Default behavior of the subscription. In case of ::sr_module_change_subscribe and
- * ::sr_subtree_change_subscribe calls it means that:
- *
- * - the subscriber is the "owner" of the subscribed data tree and and the data tree will be enabled in the running
- * datastore while this subscription is alive (if not already, can be changed using ::SR_SUBSCR_PASSIVE flag),
- * - configuration data of the subscribed module or subtree is copied from startup to running datastore
- * (only if the module was not enabled before),
- * - the callback will be called twice, once with ::SR_EV_VERIFY event and once with ::SR_EV_APPLY / ::SR_EV_ABORT
- * event passed in (can be changed with ::SR_SUBSCR_APPLY_ONLY flag).
- */
- SR_SUBSCR_DEFAULT = 0,
-
- /**
- * @brief This option enables the application to re-use an already existing subscription context previously returned
- * from any sr_*_subscribe call instead of requesting the creation of a new one. In that case a single
- * ::sr_unsubscribe call unsubscribes from all subscriptions filed within the context.
- */
- SR_SUBSCR_CTX_REUSE = 1,
-
- /**
- * @brief The subscriber is not the "owner" of the subscribed data tree, just a passive watcher for changes.
- * When this option is passed in to ::sr_module_change_subscribe or ::sr_subtree_change_subscribe,
- * the subscription will have no effect on the presence of the subtree in the running datastore.
- */
- SR_SUBSCR_PASSIVE = 2,
-
- /**
- * @brief The subscriber does not support verification of the changes and wants to be notified only after
- * the changes has been applied in the datastore, without the possibility to deny them
- * (it will receive only ::SR_EV_APPLY events).
- */
- SR_SUBSCR_APPLY_ONLY = 4,
-
- /**
- * @brief The subscriber wants ::SR_EV_ENABLED notifications to be sent to them.
- */
- SR_SUBSCR_EV_ENABLED = 8,
-
- /**
- * @brief The subscriber will not receive ::SR_EV_ABORT if he returns an error in verify phase
- * (if the commit is refused by other verifier ::SR_EV_ABORT will be delivered).
- */
- SR_SUBSCR_NO_ABORT_FOR_REFUSED_CFG = 16,
-
- /**
- * @brief No real-time notifications will be delivered until ::sr_event_notif_replay is called
- * and replay has finished (::SR_EV_NOTIF_T_REPLAY_COMPLETE is delivered).
- */
- SR_SUBSCR_NOTIF_REPLAY_FIRST = 32,
-} sr_subscr_flag_t;
-
-/**
- * @brief Type of the notification event that has occurred (passed to notification callbacks).
- *
- * @note Each change is normally notified twice: first as ::SR_EV_VERIFY event and then as ::SR_EV_APPLY or ::SR_EV_ABORT
- * event. If the subscriber does not support verification, it can subscribe only to ::SR_EV_APPLY event by providing
- * ::SR_SUBSCR_APPLY_ONLY subscription flag.
- */
-typedef enum sr_notif_event_e {
- SR_EV_VERIFY, /**< Occurs just before the changes are committed to the datastore,
- the subscriber is supposed to verify that the changes are valid and can be applied
- and prepare all resources required for the changes. The subscriber can still deny the changes
- in this phase by returning an error from the callback. */
- SR_EV_APPLY, /**< Occurs just after the changes have been successfully committed to the datastore,
- the subscriber is supposed to apply the changes now, but it cannot deny the changes in this
- phase anymore (any returned errors are just logged and ignored). */
- SR_EV_ABORT, /**< Occurs in case that the commit transaction has failed (possibly because one of the verifiers
- has denied the change / returned an error). The subscriber is supposed to return the managed
- application to the state before the commit. Any returned errors are just logged and ignored. */
- SR_EV_ENABLED, /**< Occurs just after the subscription. Subscriber gets notified about configuration that was copied
- from startup to running. This allows to reuse the callback for applying changes made in running to
- reflect the changes when the configuration is copied from startup to running during subscription process */
-} sr_notif_event_t;
-
-/**
- * @brief Type of the operation made on an item, used by changeset retrieval in ::sr_get_change_next.
- */
-typedef enum sr_change_oper_e {
- SR_OP_CREATED, /**< The item has been created by the change. */
- SR_OP_MODIFIED, /**< The value of the item has been modified by the change. */
- SR_OP_DELETED, /**< The item has been deleted by the change. */
- SR_OP_MOVED, /**< The item has been moved in the subtree by the change (applicable for leaf-lists and user-ordered lists). */
-} sr_change_oper_t;
-
-/**
- * @brief State of a module as returned by the ::sr_module_install_cb callback.
- */
-typedef enum sr_module_state_e {
- SR_MS_UNINSTALLED, /**< The module is not installed in the sysrepo repository. */
- SR_MS_IMPORTED, /**< The module has been implicitly installed into the sysrepo repository
- as it is imported by another implemented/imported module. */
- SR_MS_IMPLEMENTED /**< The module has been explicitly installed into the sysrepo repository by the user. */
-} sr_module_state_t;
-
-/**
- * @brief Sysrepo subscription context returned from sr_*_subscribe calls,
- * it is supposed to be released by the caller using ::sr_unsubscribe call.
- */
-typedef struct sr_subscription_ctx_s sr_subscription_ctx_t;
-
-/**
- * @brief Iterator used for retrieval of a changeset using ::sr_get_changes_iter call.
- */
-typedef struct sr_change_iter_s sr_change_iter_t;
-
-/**
- * @brief Options overriding default behavior of subscriptions,
- * it is supposed to be a bitwise OR-ed value of any ::sr_subscr_flag_t flags.
- */
-typedef uint32_t sr_subscr_options_t;
-
-/**
- * @brief Callback to be called by the event of changing any running datastore
- * content within the specified module. Subscribe to it by ::sr_module_change_subscribe call.
- *
- * @param[in] session Automatically-created session that can be used for obtaining changed data
- * (e.g. by ::sr_get_changes_iter call ot ::sr_get_item -like calls). Do not stop this session.
- * @param[in] module_name Name of the module where the change has occurred.
- * @param[in] event Type of the notification event that has occurred.
- * @param[in] private_ctx Private context opaque to sysrepo, as passed to
- * ::sr_module_change_subscribe call.
- */
-typedef int (*sr_module_change_cb)(sr_session_ctx_t *session, const char *module_name,
- sr_notif_event_t event, void *private_ctx);
-
-/**
- * @brief Callback to be called by the event of changing any running datastore
- * content within the specified subtree. Subscribe to it by ::sr_subtree_change_subscribe call.
- *
- * @param[in] session Automatically-created session that can be used for obtaining changed data
- * (e.g. by ::sr_get_changes_iter call or ::sr_get_item -like calls). Do not stop this session.
- * @param[in] xpath @ref xp_page "Data Path" of the subtree where the change has occurred.
- * @param[in] event Type of the notification event that has occurred.
- * @param[in] private_ctx Private context opaque to sysrepo, as passed to
- * ::sr_subtree_change_subscribe call.
- */
-typedef int (*sr_subtree_change_cb)(sr_session_ctx_t *session, const char *xpath,
- sr_notif_event_t event, void *private_ctx);
-
-/**
- * @brief Callback to be called by the event of installation / uninstallation
- * of a new module into sysrepo. Subscribe to it by ::sr_module_install_subscribe call.
- *
- * @param[in] module_name Name of the newly installed / uinstalled module.
- * @param[in] revision Revision of the newly installed module (if specified
- * within the YANG model).
- * @param[in] state The new state of the module (uninstalled vs. imported vs. implemented).
- * @param[in] private_ctx Private context opaque to sysrepo, as passed to
- * ::sr_module_install_subscribe call.
- */
-typedef void (*sr_module_install_cb)(const char *module_name, const char *revision, sr_module_state_t state,
- void *private_ctx);
-
-/**
- * @brief Callback to be called by the event of enabling / disabling of
- * a YANG feature within a module. Subscribe to it by ::sr_feature_enable_subscribe call.
- *
- * @param[in] module_name Name of the module where the feature has been enabled / disabled.
- * @param[in] feature_name Name of the feature that has been enabled / disabled.
- * @param[in] enabled TRUE if the feature has been enabled, FALSE if disabled.
- * @param[in] private_ctx Private context opaque to sysrepo, as passed to
- * ::sr_feature_enable_subscribe call.
- */
-typedef void (*sr_feature_enable_cb)(const char *module_name, const char *feature_name, bool enabled, void *private_ctx);
-
-/**
- * @brief Subscribes for notifications about the changes made within specified
- * module in running datastore.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] module_name Name of the module of interest for change notifications.
- * @param[in] callback Callback to be called when the change in the datastore occurs.
- * @param[in] private_ctx Private context passed to the callback function, opaque to sysrepo.
- * @param[in] priority Specifies the order in which the callbacks will be called (callbacks with higher
- * priority will be called sooner, callbacks with the priority of 0 will be called at the end).
- * @param[in] opts Options overriding default behavior of the subscription, it is supposed to be
- * a bitwise OR-ed value of any ::sr_subscr_flag_t flags.
- * @param[in,out] subscription Subscription context that is supposed to be released by ::sr_unsubscribe.
- * @note An existing context may be passed in in case that SR_SUBSCR_CTX_REUSE option is specified.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_module_change_subscribe(sr_session_ctx_t *session, const char *module_name, sr_module_change_cb callback,
- void *private_ctx, uint32_t priority, sr_subscr_options_t opts, sr_subscription_ctx_t **subscription);
-
-/**
- * @brief Subscribes for notifications about the changes made within specified
- * subtree in running datastore.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifier of the subtree of the interest for change notifications.
- * @param[in] callback Callback to be called when the change in the datastore occurs.
- * @param[in] private_ctx Private context passed to the callback function, opaque to sysrepo.
- * @param[in] priority Specifies the order in which the callbacks will be called (callbacks with higher
- * priority will be called sooner, callbacks with the priority of 0 will be called at the end).
- * @param[in] opts Options overriding default behavior of the subscription, it is supposed to be
- * a bitwise OR-ed value of any ::sr_subscr_flag_t flags.
- * @param[in,out] subscription Subscription context that is supposed to be released by ::sr_unsubscribe.
- * @note An existing context may be passed in in case that SR_SUBSCR_CTX_REUSE option is specified.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_subtree_change_subscribe(sr_session_ctx_t *session, const char *xpath, sr_subtree_change_cb callback,
- void *private_ctx, uint32_t priority, sr_subscr_options_t opts, sr_subscription_ctx_t **subscription);
-
-/**
- * @brief Subscribes for notifications about installation / uninstallation
- * of a new module into sysrepo.
- *
- * Mainly intended for northbound management applications that need to be
- * always aware of all active modules installed in sysrepo.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] callback Callback to be called when the event occurs.
- * @param[in] private_ctx Private context passed to the callback function, opaque to sysrepo.
- * @param[in] opts Options overriding default behavior of the subscription, it is supposed to be
- * a bitwise OR-ed value of any ::sr_subscr_flag_t flags.
- * @param[in,out] subscription Subscription context that is supposed to be released by ::sr_unsubscribe.
- * @note An existing context may be passed in in case that SR_SUBSCR_CTX_REUSE option is specified.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_module_install_subscribe(sr_session_ctx_t *session, sr_module_install_cb callback, void *private_ctx,
- sr_subscr_options_t opts, sr_subscription_ctx_t **subscription);
-
-/**
- * @brief Subscribes for notifications about enabling / disabling of
- * a YANG feature within a module.
- *
- * Mainly intended for northbound management applications that need to be
- * always aware of all active features within the modules installed in sysrepo.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] callback Callback to be called when the event occurs.
- * @param[in] private_ctx Private context passed to the callback function, opaque to sysrepo.
- * @param[in] opts Options overriding default behavior of the subscription, it is supposed to be
- * a bitwise OR-ed value of any ::sr_subscr_flag_t flags.
- * @param[in,out] subscription Subscription context that is supposed to be released by ::sr_unsubscribe.
- * @note An existing context may be passed in in case that SR_SUBSCR_CTX_REUSE option is specified.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_feature_enable_subscribe(sr_session_ctx_t *session, sr_feature_enable_cb callback, void *private_ctx,
- sr_subscr_options_t opts, sr_subscription_ctx_t **subscription);
-
-/**
- * @brief Unsubscribes from a subscription acquired by any of sr_*_subscribe
- * calls and releases all subscription-related data.
- *
- * @note In case that the same subscription context was used to subscribe for
- * multiple subscriptions, unsubscribes from all of them.
- *
- * @param[in] session Session context acquired with ::sr_session_start call. Does not
- * need to be the same as used for subscribing. NULL can be passed too, in that case
- * a temporary session used for unsubscribe will be automatically created by sysrepo.
- * @param[in] subscription Subscription context acquired by any of sr_*_subscribe calls.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_unsubscribe(sr_session_ctx_t *session, sr_subscription_ctx_t *subscription);
-
-/**
- * @brief Creates an iterator for retrieving of the changeset (list of newly
- * added / removed / modified nodes) in notification callbacks.
- *
- * @see ::sr_get_change_next for iterating over the changeset using this iterator.
- *
- * @param[in] session Session context as passed to notication the callbacks (e.g.
- * ::sr_module_change_cb or ::sr_subtree_change_cb). Will not work with any other sessions.
- * @param[in] xpath @ref xp_page "Data Path" identifier of the subtree from which the changeset
- * should be obtained. Only XPaths that would be accepted by ::sr_subtree_change_subscribe are allowed.
- * @param[out] iter Iterator context that can be used to retrieve individual changes using
- * ::sr_get_change_next calls. Allocated by the function, should be freed with ::sr_free_change_iter.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_get_changes_iter(sr_session_ctx_t *session, const char *xpath, sr_change_iter_t **iter);
-
-/**
- * @brief Returns the next change from the changeset of provided iterator created
- * by ::sr_get_changes_iter call. If there is no item left, SR_ERR_NOT_FOUND is returned.
- *
- * @note If the operation is ::SR_OP_MOVED the meaning of new_value and old value argument is
- * as follows - the value pointed by new_value was moved after the old_value. If the
- * old value is NULL it was moved to the first position.
- *
- * @param[in] session Session context as passed to notication the callbacks (e.g.
- * ::sr_module_change_cb or ::sr_subtree_change_cb). Will not work with any other sessions.
- * @param[in,out] iter Iterator acquired with ::sr_get_changes_iter call.
- * @param[out] operation Type of the operation made on the returned item.
- * @param[out] old_value Old value of the item (the value before the change).
- * NULL in case that the item has been just created (operation == SR_OP_CREATED).
- * @param[out] new_value New (modified) value of the the item. NULL in case that
- * the item has been just deleted (operation == SR_OP_DELETED).
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_get_change_next(sr_session_ctx_t *session, sr_change_iter_t *iter, sr_change_oper_t *operation,
- sr_val_t **old_value, sr_val_t **new_value);
-
-
-////////////////////////////////////////////////////////////////////////////////
-// RPC (Remote Procedure Calls) API
-////////////////////////////////////////////////////////////////////////////////
-
-/**
- * @brief Check if the owner of this session is authorized by NACM to invoke the protocol
- * operation defined in a (installed) YANG module under the given xpath (as RPC or Action).
- *
- * This call is intended for northbound management applications that need to implement
- * the NETCONF Access Control Model (RFC 6536) to restrict the protocol operations that
- * each user is authorized to execute.
- *
- * NETCONF access control is already included in the processing of ::sr_rpc_send,
- * ::sr_rpc_send_tree, ::sr_action_send and ::sr_action_send_tree and thus it should be
- * sufficient to call this function only prior to executing any of the NETCONF standard
- * protocol operations as they cannot be always directly translated to a single sysrepo
- * API call.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifying the protocol operation.
- * @param[out] permitted TRUE if the user is permitted to execute the given operation, FALSE otherwise.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_check_exec_permission(sr_session_ctx_t *session, const char *xpath, bool *permitted);
-
-/**
- * @brief Callback to be called by the delivery of RPC specified by xpath.
- * Subscribe to it by ::sr_rpc_subscribe call.
- *
- * @param[in] xpath @ref xp_page "Data Path" identifying the RPC.
- * @param[in] input Array of input parameters.
- * @param[in] input_cnt Number of input parameters.
- * @param[out] output Array of output parameters. Should be allocated on heap,
- * will be freed by sysrepo after sending of the RPC response.
- * @param[out] output_cnt Number of output parameters.
- * @param[in] private_ctx Private context opaque to sysrepo, as passed to ::sr_rpc_subscribe call.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-typedef int (*sr_rpc_cb)(const char *xpath, const sr_val_t *input, const size_t input_cnt,
- sr_val_t **output, size_t *output_cnt, void *private_ctx);
-
-/**
- * @brief Callback to be called by the delivery of RPC specified by xpath.
- * This RPC callback variant operates with sysrepo trees rather than with sysrepo values,
- * use it with ::sr_rpc_subscribe_tree and ::sr_rpc_send_tree.
- *
- * @param[in] xpath @ref xp_page "Data Path" identifying the RPC.
- * @param[in] input Array of input parameters (represented as trees).
- * @param[in] input_cnt Number of input parameters.
- * @param[out] output Array of output parameters (represented as trees). Should be allocated on heap,
- * will be freed by sysrepo after sending of the RPC response.
- * @param[out] output_cnt Number of output parameters.
- * @param[in] private_ctx Private context opaque to sysrepo, as passed to ::sr_rpc_subscribe_tree call.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-typedef int (*sr_rpc_tree_cb)(const char *xpath, const sr_node_t *input, const size_t input_cnt,
- sr_node_t **output, size_t *output_cnt, void *private_ctx);
-
-/**
- * @brief Subscribes for delivery of RPC specified by xpath.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Schema Path" identifying the RPC.
- * @param[in] callback Callback to be called when the RPC is called.
- * @param[in] private_ctx Private context passed to the callback function, opaque to sysrepo.
- * @param[in] opts Options overriding default behavior of the subscription, it is supposed to be
- * a bitwise OR-ed value of any ::sr_subscr_flag_t flags.
- * @param[in,out] subscription Subscription context that is supposed to be released by ::sr_unsubscribe.
- * @note An existing context may be passed in case that SR_SUBSCR_CTX_REUSE option is specified.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_rpc_subscribe(sr_session_ctx_t *session, const char *xpath, sr_rpc_cb callback, void *private_ctx,
- sr_subscr_options_t opts, sr_subscription_ctx_t **subscription);
-
-/**
- * @brief Subscribes for delivery of RPC specified by xpath. Unlike ::sr_rpc_subscribe, this
- * function expects callback of type ::sr_rpc_tree_cb, therefore use this version if you prefer
- * to manipulate with RPC input and output data organized in a list of trees rather than as a flat
- * enumeration of all values.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Schema Path" identifying the RPC.
- * @param[in] callback Callback to be called when the RPC is called.
- * @param[in] private_ctx Private context passed to the callback function, opaque to sysrepo.
- * @param[in] opts Options overriding default behavior of the subscription, it is supposed to be
- * a bitwise OR-ed value of any ::sr_subscr_flag_t flags.
- * @param[in,out] subscription Subscription context that is supposed to be released by ::sr_unsubscribe.
- * @note An existing context may be passed in case that SR_SUBSCR_CTX_REUSE option is specified.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_rpc_subscribe_tree(sr_session_ctx_t *session, const char *xpath, sr_rpc_tree_cb callback,
- void *private_ctx, sr_subscr_options_t opts, sr_subscription_ctx_t **subscription);
-
-/**
- * @brief Sends a RPC specified by xpath and waits for the result.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifying the RPC.
- * @param[in] input Array of input parameters (array of all nodes that hold some
- * data in RPC input subtree - same as ::sr_get_items would return).
- * @param[in] input_cnt Number of input parameters.
- * @param[out] output Array of output parameters (all nodes that hold some data
- * in RPC output subtree). Will be allocated by sysrepo and should be freed by
- * caller using ::sr_free_values.
- * @param[out] output_cnt Number of output parameters.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_rpc_send(sr_session_ctx_t *session, const char *xpath,
- const sr_val_t *input, const size_t input_cnt, sr_val_t **output, size_t *output_cnt);
-
-/**
- * @brief Sends a RPC specified by xpath and waits for the result. Input and output data
- * are represented as arrays of subtrees reflecting the scheme of RPC arguments.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifying the RPC.
- * @param[in] input Array of input parameters (organized in trees).
- * @param[in] input_cnt Number of input parameters.
- * @param[out] output Array of output parameters (organized in trees).
- * Will be allocated by sysrepo and should be freed by caller using ::sr_free_trees.
- * @param[out] output_cnt Number of output parameters.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_rpc_send_tree(sr_session_ctx_t *session, const char *xpath,
- const sr_node_t *input, const size_t input_cnt, sr_node_t **output, size_t *output_cnt);
-
-
-////////////////////////////////////////////////////////////////////////////////
-// Action API
-////////////////////////////////////////////////////////////////////////////////
-
-/**
- * @brief Callback to be called by the delivery of Action (operation connected to a specific data node)
- * specified by xpath. Subscribe to it by ::sr_action_subscribe call.
- * @see This type is an alias for @ref sr_rpc_cb "the RPC callback type"
- */
-typedef sr_rpc_cb sr_action_cb;
-
-/**
- * @brief Callback to be called by the delivery of Action (operation connected to a specific data node)
- * specified by xpath.
- * This callback variant operates with sysrepo trees rather than with sysrepo values,
- * use it with ::sr_action_subscribe_tree and ::sr_action_send_tree.
- * @see This type is an alias for tree variant of @ref sr_rpc_tree_cb "the RPC callback "
- */
-typedef sr_rpc_tree_cb sr_action_tree_cb;
-
-/**
- * @brief Subscribes for delivery of Action specified by xpath.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Schema Path" identifying the Action.
- * @param[in] callback Callback to be called when the Action is called.
- * @param[in] private_ctx Private context passed to the callback function, opaque to sysrepo.
- * @param[in] opts Options overriding default behavior of the subscription, it is supposed to be
- * a bitwise OR-ed value of any ::sr_subscr_flag_t flags.
- * @param[in,out] subscription Subscription context that is supposed to be released by ::sr_unsubscribe.
- * @note An existing context may be passed in case that SR_SUBSCR_CTX_REUSE option is specified.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_action_subscribe(sr_session_ctx_t *session, const char *xpath, sr_action_cb callback, void *private_ctx,
- sr_subscr_options_t opts, sr_subscription_ctx_t **subscription);
-
-/**
- * @brief Subscribes for delivery of Action specified by xpath. Unlike ::sr_action_subscribe, this
- * function expects callback of type ::sr_action_tree_cb, therefore use this version if you prefer
- * to manipulate with Action input and output data organized in a list of trees rather than as a flat
- * enumeration of all values.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Schema Path" identifying the Action.
- * @param[in] callback Callback to be called when the Action is called.
- * @param[in] private_ctx Private context passed to the callback function, opaque to sysrepo.
- * @param[in] opts Options overriding default behavior of the subscription, it is supposed to be
- * a bitwise OR-ed value of any ::sr_subscr_flag_t flags.
- * @param[in,out] subscription Subscription context that is supposed to be released by ::sr_unsubscribe.
- * @note An existing context may be passed in case that SR_SUBSCR_CTX_REUSE option is specified.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_action_subscribe_tree(sr_session_ctx_t *session, const char *xpath, sr_action_tree_cb callback,
- void *private_ctx, sr_subscr_options_t opts, sr_subscription_ctx_t **subscription);
-
-/**
- * @brief Executes an action specified by xpath and waits for the result.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifying the Action.
- * @param[in] input Array of input parameters (array of all nodes that hold some
- * data in Action input subtree - same as ::sr_get_items would return).
- * @param[in] input_cnt Number of input parameters.
- * @param[out] output Array of output parameters (all nodes that hold some data
- * in Action output subtree). Will be allocated by sysrepo and should be freed by
- * caller using ::sr_free_values.
- * @param[out] output_cnt Number of output parameters.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_action_send(sr_session_ctx_t *session, const char *xpath,
- const sr_val_t *input, const size_t input_cnt, sr_val_t **output, size_t *output_cnt);
-
-/**
- * @brief Executes an action specified by xpath and waits for the result. Input and output data
- * are represented as arrays of subtrees reflecting the scheme of Action arguments.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifying the Action.
- * @param[in] input Array of input parameters (organized in trees).
- * @param[in] input_cnt Number of input parameters.
- * @param[out] output Array of output parameters (organized in trees).
- * Will be allocated by sysrepo and should be freed by caller using ::sr_free_trees.
- * @param[out] output_cnt Number of output parameters.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_action_send_tree(sr_session_ctx_t *session, const char *xpath,
- const sr_node_t *input, const size_t input_cnt, sr_node_t **output, size_t *output_cnt);
-
-
-////////////////////////////////////////////////////////////////////////////////
-// Event Notifications API
-////////////////////////////////////////////////////////////////////////////////
-
-/**
- * @brief Type of the notification passed to the ::sr_event_notif_cb and ::sr_event_notif_tree_cb callbacks.
- */
-typedef enum sr_ev_notif_type_e {
- SR_EV_NOTIF_T_REALTIME, /**< Real-time notification. The only possible type if you don't use ::sr_event_notif_replay. */
- SR_EV_NOTIF_T_REPLAY, /**< Replayed notification. */
- SR_EV_NOTIF_T_REPLAY_COMPLETE, /**< Not a real notification, just a signal that the notification replay has completed
- (all the stored notifications from the given time interval have been delivered). */
- SR_EV_NOTIF_T_REPLAY_STOP, /**< Not a real notification, just a signal that replay stop time has been reached
- (delivered only if stop_time was specified to ::sr_event_notif_replay). */
-} sr_ev_notif_type_t;
-
-/**
- * @brief Flags used to override default notification handling i the datastore.
- */
-typedef enum sr_ev_notif_flag_e {
- SR_EV_NOTIF_DEFAULT = 0, /**< Notification will be handled normally. */
- SR_EV_NOTIF_EPHEMERAL = 1, /**< Notification will not be stored in the notification store
- (and therefore will be also delivered faster). */
-} sr_ev_notif_flag_t;
-
-/**
- * @brief Callback to be called by the delivery of event notification specified by xpath.
- * Subscribe to it by ::sr_event_notif_subscribe call.
- *
- * @param[in] notif_type Type of the notification.
- * @param[in] xpath @ref xp_page "Data Path" identifying the event notification.
- * @param[in] values Array of all nodes that hold some data in event notification subtree.
- * @param[in] values_cnt Number of items inside the values array.
- * @param[in] timestamp Time when the notification was generated
- * @param[in] private_ctx Private context opaque to sysrepo,
- * as passed to ::sr_event_notif_subscribe call.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-typedef void (*sr_event_notif_cb)(const sr_ev_notif_type_t notif_type, const char *xpath,
- const sr_val_t *values, const size_t values_cnt, time_t timestamp, void *private_ctx);
-
-/**
- * @brief Callback to be called by the delivery of event notification specified by xpath.
- * This callback variant operates with sysrepo trees rather than with sysrepo values,
- * use it with ::sr_event_notif_subscribe_tree and ::sr_event_notif_send_tree.
- *
- * @param[in] notif_type Type of the notification.
- * @param[in] xpath @ref xp_page "Data Path" identifying the event notification.
- * @param[in] trees Array of subtrees carrying event notification data.
- * @param[in] tree_cnt Number of subtrees with data.
- * @param[in] timestamp Time when the notification was generated
- * @param[in] private_ctx Private context opaque to sysrepo, as passed to ::sr_event_notif_subscribe_tree call.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-typedef void (*sr_event_notif_tree_cb)(const sr_ev_notif_type_t notif_type, const char *xpath,
- const sr_node_t *trees, const size_t tree_cnt, time_t timestamp, void *private_ctx);
-
-/**
- * @brief Subscribes for delivery of an event notification specified by xpath.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Schema Path" identifying one event notification or special
- * path in the form of a module name in which the whole module is subscribed to.
- * @param[in] callback Callback to be called when the event notification is send.
- * @param[in] private_ctx Private context passed to the callback function, opaque to sysrepo.
- * @param[in] opts Options overriding default behavior of the subscription, it is supposed to be
- * a bitwise OR-ed value of any ::sr_subscr_flag_t flags.
- * @param[in,out] subscription Subscription context that is supposed to be released by ::sr_unsubscribe.
- * @note An existing context may be passed in case that SR_SUBSCR_CTX_REUSE option is specified.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_event_notif_subscribe(sr_session_ctx_t *session, const char *xpath,
- sr_event_notif_cb callback, void *private_ctx, sr_subscr_options_t opts,
- sr_subscription_ctx_t **subscription);
-
-/**
- * @brief Subscribes for delivery of event notification specified by xpath.
- * Unlike ::sr_event_notif_subscribe, this function expects callback of type ::sr_event_notif_tree_cb,
- * therefore use this version if you prefer to manipulate with event notification data organized
- * in a list of trees rather than as a flat enumeration of all values.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Schema Path" identifying one event notification or special
- * path in the form of a module name in which the whole module is subscribed to.
- * @param[in] callback Callback to be called when the event notification is called.
- * @param[in] private_ctx Private context passed to the callback function, opaque to sysrepo.
- * @param[in] opts Options overriding default behavior of the subscription, it is supposed to be
- * a bitwise OR-ed value of any ::sr_subscr_flag_t flags.
- * @param[in,out] subscription Subscription context that is supposed to be released by ::sr_unsubscribe.
- * @note An existing context may be passed in case that SR_SUBSCR_CTX_REUSE option is specified.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_event_notif_subscribe_tree(sr_session_ctx_t *session, const char *xpath,
- sr_event_notif_tree_cb callback, void *private_ctx, sr_subscr_options_t opts,
- sr_subscription_ctx_t **subscription);
-
-/**
- * @brief Sends an event notification specified by xpath and waits for the result.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifying the event notification.
- * @param[in] values Array of all nodes that hold some data in event notification subtree
- * (same as ::sr_get_items would return).
- * @param[in] values_cnt Number of items inside the values array.
- * @param[in] opts Options overriding default handling of the notification, it is supposed to be
- * a bitwise OR-ed value of any ::sr_ev_notif_flag_t flags.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_event_notif_send(sr_session_ctx_t *session, const char *xpath, const sr_val_t *values,
- const size_t values_cnt, sr_ev_notif_flag_t opts);
-
-/**
- * @brief Sends an event notification specified by xpath and waits for the result.
- * The notification data are represented as arrays of subtrees reflecting the scheme
- * of the event notification.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifying the RPC.
- * @param[in] trees Array of subtrees carrying event notification data.
- * @param[in] tree_cnt Number of subtrees with data.
- * @param[in] opts Options overriding default handling of the notification, it is supposed to be
- * a bitwise OR-ed value of any ::sr_ev_notif_flag_t flags.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_event_notif_send_tree(sr_session_ctx_t *session, const char *xpath, const sr_node_t *trees,
- const size_t tree_cnt, sr_ev_notif_flag_t opts);
-
-/**
- * @brief Replays already generated notifications stored in the notification store related to
- * the provided notification subscription (or subscriptions, in case that ::SR_SUBSCR_CTX_REUSE
- * was used). Notification callbacks of the given susbscriptions will be called with the type set to
- * ::SR_EV_NOTIF_T_REPLAY, ::SR_EV_NOTIF_T_REPLAY_COMPLETE or ::SR_EV_NOTIF_T_REPLAY_STOP.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] subscription Session context acquired with ::sr_session_start call.
- * @param[in] start_time Starting time of the desired time window for notification replay.
- * @param[in] stop_time End time of the desired time window for notification replay. If set to 0,
- * no stop time will be applied (all notifications up to the current time will be delivered,
- * ::SR_EV_NOTIF_T_REPLAY_STOP notification won't be delivered).
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_event_notif_replay(sr_session_ctx_t *session, sr_subscription_ctx_t *subscription,
- time_t start_time, time_t stop_time);
-
-
-////////////////////////////////////////////////////////////////////////////////
-// Operational Data API
-////////////////////////////////////////////////////////////////////////////////
-
-/**
- * @brief Callback to be called when operational data at the selected level is requested.
- * Subscribe to it by ::sr_dp_get_items_subscribe call.
- *
- * Callback handler is supposed to provide data of all nodes at the level selected by the xpath argument:
- *
- * - If the xpath identifies a container, the provider is supposed to return all leaves and leaf-lists values within it.
- * Nested lists and containers should not be provided - sysrepo will ask for them in subsequent calls.
- * - If the xpath identifies a list, the provider is supposed to return all leaves (except for keys!) and
- * leaf-lists values within all instances of the list. Nested lists and containers should not be provided - sysrepo
- * will ask for them in subsequent calls.
- * - If the xpath identifies a leaf-list, the provider is supposed to return all leaf-list values.
- * - If the xpath identifies a leaf, the provider is supposed to return just the leaf in question.
- *
- * The xpath argument passed to callback can be only the xpath that was used for the subscription, or xpath of
- * any nested lists or containers.
- *
- * @param[in] xpath @ref xp_page "Data Path" identifying the level under which the nodes are requested.
- * @param[out] values Array of values at the selected level (allocated by the provider).
- * @param[out] values_cnt Number of values returned.
- * @param[in] request_id An ID identifying the originating request.
- * @param[in] private_ctx Private context opaque to sysrepo, as passed to ::sr_dp_get_items_subscribe call.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-typedef int (*sr_dp_get_items_cb)(const char *xpath, sr_val_t **values, size_t *values_cnt, uint64_t request_id, void *private_ctx);
-
-/**
- * @brief Registers for providing of operational data under given xpath.
- *
- * @note The XPath must be generic - must not include any list key values.
- * @note This API works only for operational data (subtrees marked in YANG as "config false").
- * Subscribing as a data provider for configuration data does not have any effect.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] xpath @ref xp_page "Data Path" identifying the subtree under which the provider is able to provide
- * operational data.
- * @param[in] callback Callback to be called when the operational data nder given xpat is needed.
- * @param[in] private_ctx Private context passed to the callback function, opaque to sysrepo.
- * @param[in] opts Options overriding default behavior of the subscription, it is supposed to be
- * a bitwise OR-ed value of any ::sr_subscr_flag_t flags.
- * @param[in,out] subscription Subscription context that is supposed to be released by ::sr_unsubscribe.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_dp_get_items_subscribe(sr_session_ctx_t *session, const char *xpath, sr_dp_get_items_cb callback, void *private_ctx,
- sr_subscr_options_t opts, sr_subscription_ctx_t **subscription);
-
-
-////////////////////////////////////////////////////////////////////////////////
-// Application-local File Descriptor Watcher API
-////////////////////////////////////////////////////////////////////////////////
-
-/**
- * @brief Event that has occurred on a monitored file descriptor.
- */
-typedef enum sr_fd_event_e {
- SR_FD_INPUT_READY = 1, /**< File descriptor is now readable without blocking. */
- SR_FD_OUTPUT_READY = 2, /**< File descriptor is now writable without blocking. */
-} sr_fd_event_t;
-
-/**
- * @brief Action that needs to be taken on a file descriptor.
- */
-typedef enum sr_fd_action_s {
- SR_FD_START_WATCHING, /**< Start watching for the specified event on the file descriptor. */
- SR_FD_STOP_WATCHING, /**< Stop watching for the specified event on the file descriptor. */
-} sr_fd_action_t;
-
-/**
- * @brief Structure representing a change in the set of file descriptors monitored by the application.
- */
-typedef struct sr_fd_change_s {
- int fd; /**< File descriptor whose monitored state should be changed. */
- int events; /**< Monitoring events tied to the change (or-ed value of ::sr_fd_event_t). */
- sr_fd_action_t action; /**< Action that is supposed to be performed by application-local file descriptor watcher. */
-} sr_fd_change_t;
-
-/**
- * @brief Callback when the subscription manager is terminated
- */
-typedef void (*sr_fd_sm_terminated_cb)();
-
-/**
- * @brief Initializes application-local file descriptor watcher.
- *
- * This can be used in those applications that subscribe for changes or providing data in sysrepo, which have their
- * own event loop that is capable of monitoring of the events on provided file descriptors. In case that the
- * application-local file descriptor watcher is initialized, sysrepo client library won't use a separate thread
- * for the delivery of the notifications and for calling the callbacks - they will be called from the main thread of the
- * application's event loop (inside of ::sr_fd_event_process calls).
- *
- * @note Calling this function has global consequences on the behavior of the sysrepo client library within the process
- * that called it. It is supposed to be called as the first sysrepo API call within the application.
- *
- * @param[out] fd Initial file descriptor that is supposed to be monitored for readable events by the application.
- * Once there is an event detected on this file descriptor, the application is supposed to call ::sr_fd_event_process.
- *
- * @param[in] sm_terminate_cb Function to be called when the subscription manager is terminated. If this callback is provided,
- * it shall block until all pending events on any file descriptor associated with sysrepo have been handled. I.e., ensure that
- * the event loop has called sr_fd_event_process() for all pending events before returning from this callback. If this callback
- * doesn't block, errors will be shown in the log.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_fd_watcher_init(int *fd, sr_fd_sm_terminated_cb sm_terminate_cb);
-
-/**
- * @brief Cleans-up the application-local file descriptor watcher previously initiated by ::sr_fd_watcher_init.
- * It is supposed to be called as the last sysrepo API within the application.
- */
-void sr_fd_watcher_cleanup();
-
-/**
- * @brief Processes an event that has occurred on one of the file descriptors that the application is monitoring for
- * sysrepo client library purposes. As a result of this event, another file descriptors may need to be started or
- * stopped monitoring by the application. These are returned as \p fd_change_set array.
- *
- * @param[in] fd File descriptor where an event occurred.
- * @param[in] event Type of the event that occurred on the given file descriptor.
- * @param[out] fd_change_set Array of file descriptors that need to be started or stopped monitoring for specified event
- * by the application. The application is supposed to free this array after it processes it.
- * @param[out] fd_change_set_cnt Count of the items in the \p fd_change_set array.
- *
- * @return Error code (SR_ERR_OK on success).
- */
-int sr_fd_event_process(int fd, sr_fd_event_t event, sr_fd_change_t **fd_change_set, size_t *fd_change_set_cnt);
-
-
-////////////////////////////////////////////////////////////////////////////////
-// Cleanup Routines
-////////////////////////////////////////////////////////////////////////////////
-
-/**
- * @brief Frees ::sr_val_t structure and all memory allocated within it.
- *
- * @param[in] value Value to be freed.
- */
-void sr_free_val(sr_val_t *value);
-
-/**
- * @brief Frees array of ::sr_val_t structures (and all memory allocated
- * within of each array element).
- *
- * @param[in] values Array of values to be freed.
- * @param[in] count Number of elements stored in the array.
- */
-void sr_free_values(sr_val_t *values, size_t count);
-
-/**
- * @brief Frees ::sr_val_iter_t iterator and all memory allocated within it.
- *
- * @param[in] iter Iterator to be freed.
- */
-void sr_free_val_iter(sr_val_iter_t *iter);
-
-/**
- * @brief Frees ::sr_change_iter_t iterator and all memory allocated within it.
- *
- * @param[in] iter Iterator to be freed.
- */
-void sr_free_change_iter(sr_change_iter_t *iter);
-
-/**
- * @brief Frees array of ::sr_schema_t structures (and all memory allocated
- * within of each array element).
- *
- * @param [in] schemas Array of schemas to be freed.
- * @param [in] count Number of elements stored in the array.
- */
-void sr_free_schemas(sr_schema_t *schemas, size_t count);
-
-/**
- * @brief Frees sysrepo tree data.
- *
- * @param[in] tree Tree data to be freed.
- */
-void sr_free_tree(sr_node_t *tree);
-
-/**
- * @brief Frees array of sysrepo trees. For each tree, the ::sr_free_tree is called too.
- *
- * @param[in] trees
- * @param[in] count length of array
- */
-void sr_free_trees(sr_node_t *trees, size_t count);
-
-/**@} cl */
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* SYSREPO_H_ */
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Connection.hpp b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Connection.hpp
deleted file mode 100644
index eb944f72c..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Connection.hpp
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * @file Connection.h
- * @author Mislav Novakovic <mislav.novakovic@sartura.hr>
- * @brief Sysrepo Connection class header.
- *
- * @copyright
- * Copyright 2016 Deutsche Telekom AG.
- * Modifications Copyright (C) 2019 Nokia. All rights reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CONNECTION_H
-#define CONNECTION_H
-
-#include <iostream>
-
-#include "Sysrepo.hpp"
-#include "Internal.hpp"
-
-extern "C" {
-#include "../sysrepo.h"
-}
-
-namespace sysrepo {
-
-/**
- * @defgroup classes C++/Python
- * @{
- */
-
-/**
- * @brief Class for wrapping sr_conn_ctx_t.
- * @class Connection
- */
-class Connection
-{
-public:
- /** Wrapper for [sr_connect](@ref sr_connect) */
- Connection(const char *app_name, const sr_conn_options_t opts = CONN_DEFAULT);
- ~Connection();
-
- sr_conn_ctx_t *_conn;
- friend class Session;
-
-private:
- sr_conn_options_t _opts;
-};
-
-/**@} */
-}
-
-#endif
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Internal.hpp b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Internal.hpp
deleted file mode 100644
index aec62f9f1..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Internal.hpp
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * @file Internal.h
- * @author Mislav Novakovic <mislav.novakovic@sartura.hr>
- * @brief Sysrepo class header for internal C++ classes.
- *
- * @copyright
- * Copyright 2016 Deutsche Telekom AG.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef INTERNAL_H
-#define INTERNAL_H
-
-#include <iostream>
-#include <memory>
-
-extern "C" {
-#include "../sysrepo.h"
-#include "../sysrepo/trees.h"
-}
-
-namespace sysrepo {
-
-enum class Free_Type {
- VAL,
- VALS,
- VALS_POINTER,
- TREE,
- TREES,
- TREES_POINTER,
- SCHEMAS,
- SESSION,
-};
-
-typedef union value_e {
- sr_val_t *_val;
- sr_val_t **p_vals;
- sr_node_t *_tree;
- sr_node_t **p_trees;
- sr_schema_t *_sch;
- sr_session_ctx_t *_sess;
-} value_t;
-
-typedef union count_e {
- size_t _cnt;
- size_t *p_cnt;
-} count_t;
-
-class Deleter
-{
-public:
- Deleter(sr_val_t *val);
- Deleter(sr_val_t *vals, size_t cnt);
- Deleter(sr_val_t **vals, size_t *cnt);
- Deleter(sr_node_t *tree);
- Deleter(sr_node_t *trees, size_t cnt);
- Deleter(sr_node_t **trees, size_t *cnt);
- Deleter(sr_schema_t *sch, size_t cnt);
- Deleter(sr_session_ctx_t *sess);
- ~Deleter();
-
-private:
- count_t c;
- value_t v;
- Free_Type _t;
-};
-
-}
-#endif
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Session.hpp b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Session.hpp
deleted file mode 100644
index 02d03ed38..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Session.hpp
+++ /dev/null
@@ -1,245 +0,0 @@
-/**
- * @file Session.h
- * @author Mislav Novakovic <mislav.novakovic@sartura.hr>
- * @brief Sysrepo Session class header.
- *
- * @copyright
- * Copyright 2016 Deutsche Telekom AG.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SESSION_H
-#define SESSION_H
-
-#include <iostream>
-#include <memory>
-#include <map>
-#include <vector>
-
-#include "Sysrepo.hpp"
-#include "Internal.hpp"
-#include "Struct.hpp"
-#include "Tree.hpp"
-#include "Connection.hpp"
-#include "Session.hpp"
-
-extern "C" {
-#include "../sysrepo.h"
-}
-
-namespace sysrepo {
-
-/**
- * @defgroup classes C++/Python
- * @{
- */
-
-/**
- * @brief Class for wrapping sr_session_ctx_t.
- * @class Session
- */
-class Session
-{
-
-public:
- /** Wrapper for [sr_session_start](@ref sr_session_start) and [sr_session_start_user](@ref sr_session_start_user)
- * if user_name is set.*/
- Session(S_Connection conn, sr_datastore_t datastore = (sr_datastore_t) DS_RUNNING, \
- const sr_sess_options_t opts = SESS_DEFAULT, const char *user_name = nullptr);
- /** Wrapper for [sr_session_ctx_t](@ref sr_session_ctx_t), for internal use only.*/
- Session(sr_session_ctx_t *sess, sr_sess_options_t opts = SESS_DEFAULT, S_Deleter deleter = nullptr);
- /** Wrapper for [sr_session_stop](@ref sr_session_stop) */
- void session_stop();
- /** Wrapper for [sr_session_switch_ds](@ref sr_session_switch_ds) */
- void session_switch_ds(sr_datastore_t ds);
- /** Wrapper for [sr_get_last_error](@ref sr_get_last_error) */
- S_Error get_last_error();
- /** Wrapper for [sr_get_last_errors](@ref sr_get_last_errors) */
- S_Errors get_last_errors();
- /** Wrapper for [sr_list_schemas](@ref sr_list_schemas) */
- S_Yang_Schemas list_schemas();
- /** Wrapper for [sr_get_schema](@ref sr_get_schema) */
- std::string get_schema(const char *module_name, const char *revision,
- const char *submodule_name, sr_schema_format_t format);
- /** Wrapper for [sr_get_item](@ref sr_get_item) */
- S_Val get_item(const char *xpath);
- /** Wrapper for [sr_get_items](@ref sr_get_items) */
- S_Vals get_items(const char *xpath);
- /** Wrapper for [sr_get_items_iter](@ref sr_get_items_iter) */
- S_Iter_Value get_items_iter(const char *xpath);
- /** Wrapper for [sr_get_item_next](@ref sr_get_item_next) */
- S_Val get_item_next(S_Iter_Value iter);
- /** Wrapper for [sr_get_subtree](@ref sr_get_subtree) */
- S_Tree get_subtree(const char *xpath, sr_get_subtree_options_t opts = GET_SUBTREE_DEFAULT);
- /** Wrapper for [sr_get_subtrees](@ref sr_get_subtrees) */
- S_Trees get_subtrees(const char *xpath, sr_get_subtree_options_t opts = GET_SUBTREE_DEFAULT);
-
- /** Wrapper for [sr_node_get_child](@ref sr_node_get_child) */
- S_Tree get_child(S_Tree in_tree);
- /** Wrapper for [sr_node_get_next_sibling](@ref sr_node_get_next_sibling) */
- S_Tree get_next_sibling(S_Tree in_tree);
- /** Wrapper for [sr_node_get_parent](@ref sr_node_get_parent) */
- S_Tree get_parent(S_Tree in_tree);
-
- /** Wrapper for [sr_set_item](@ref sr_set_item) */
- void set_item(const char *xpath, S_Val value = nullptr, const sr_edit_options_t opts = EDIT_DEFAULT);
- /** Wrapper for [sr_set_item_str](@ref sr_set_item_str) */
- void set_item_str(const char *xpath, const char *value, const sr_edit_options_t opts = EDIT_DEFAULT);
- /** Wrapper for [sr_delete_item](@ref sr_delete_item) */
- void delete_item(const char *xpath, const sr_edit_options_t opts = EDIT_DEFAULT);
- /** Wrapper for [sr_move_item](@ref sr_move_item) */
- void move_item(const char *xpath, const sr_move_position_t position, const char *relative_item = nullptr);
- /** Wrapper for [sr_session_refresh](@ref sr_session_refresh) */
- void refresh();
- /** Wrapper for [sr_validate](@ref sr_validate) */
- void validate();
- /** Wrapper for [sr_commit](@ref sr_commit) */
- void commit();
- /** Wrapper for [sr_lock_datastore](@ref sr_lock_datastore) */
- void lock_datastore();
- /** Wrapper for [sr_unlock_datastore](@ref sr_unlock_datastore) */
- void unlock_datastore();
- /** Wrapper for [sr_lock_module](@ref sr_lock_module) */
- void lock_module(const char *module_name);
- /** Wrapper for [sr_unlock_module](@ref sr_unlock_module) */
- void unlock_module(const char *module_name);
- /** Wrapper for [sr_discard_changes](@ref sr_discard_changes) */
- void discard_changes();
- /** Wrapper for [sr_copy_config](@ref sr_copy_config) */
- void copy_config(const char *module_name, sr_datastore_t src_datastore, sr_datastore_t dst_datastore);
- /** Wrapper for [sr_session_set_options](@ref sr_session_set_options) */
- void set_options(const sr_sess_options_t opts);
- /** Wrapper for [sr_get_changes_iter](@ref sr_get_changes_iter) */
- S_Iter_Change get_changes_iter(const char *xpath);
- /** Wrapper for [sr_get_change_next](@ref sr_get_change_next) */
- S_Change get_change_next(S_Iter_Change iter);
- ~Session();
-
- /** Wrapper for [sr_rpc_send](@ref sr_rpc_send) */
- S_Vals rpc_send(const char *xpath, S_Vals input);
- /** Wrapper for [sr_rpc_send_tree](@ref sr_rpc_send_tree) */
- S_Trees rpc_send(const char *xpath, S_Trees input);
- /** Wrapper for [sr_action_send](@ref sr_action_send) */
- S_Vals action_send(const char *xpath, S_Vals input);
- /** Wrapper for [sr_action_send_tree](@ref sr_action_send_tree) */
- S_Trees action_send(const char *xpath, S_Trees input);
- /** Wrapper for [sr_event_notif_send](@ref sr_event_notif_send) */
- void event_notif_send(const char *xpath, S_Vals values, const sr_ev_notif_flag_t options = SR_EV_NOTIF_DEFAULT);
- /** Wrapper for [sr_event_notif_send_tree](@ref sr_event_notif_send_tree) */
- void event_notif_send(const char *xpath, S_Trees trees, const sr_ev_notif_flag_t options = SR_EV_NOTIF_DEFAULT);
-
- friend class Subscribe;
-
-private:
- sr_session_ctx_t *_sess;
- sr_datastore_t _datastore;
- sr_sess_options_t _opts;
- S_Connection _conn;
- S_Deleter _deleter;
-};
-
-/**
- * @brief Helper class for calling C callbacks, C++ only.
- * @class Callback
- */
-class Callback
-{
-public:
- Callback();
- virtual ~Callback();
-
- /** Wrapper for [sr_module_change_cb](@ref sr_module_change_cb) callback.*/
- virtual int module_change(S_Session session, const char *module_name, sr_notif_event_t event, void *private_ctx) {return SR_ERR_OK;};
- /** Wrapper for [sr_subtree_change_cb](@ref sr_subtree_change_cb) callback.*/
- virtual int subtree_change(S_Session session, const char *xpath, sr_notif_event_t event, void *private_ctx) {return SR_ERR_OK;};
- /** Wrapper for [sr_module_install_cb](@ref sr_module_install_cb) callback.*/
- virtual void module_install(const char *module_name, const char *revision, sr_module_state_t state, void *private_ctx) {return;};
- /** Wrapper for [sr_feature_enable_cb](@ref sr_feature_enable_cb) callback.*/
- virtual void feature_enable(const char *module_name, const char *feature_name, bool enabled, void *private_ctx) {return;};
- /** Wrapper for [sr_rpc_cb](@ref sr_rpc_cb) callback.*/
- virtual int rpc(const char *xpath, const S_Vals input, S_Vals_Holder output, void *private_ctx) {return SR_ERR_OK;};
- /** Wrapper for [sr_action_cb](@ref sr_action_cb) callback.*/
- virtual int action(const char *xpath, const S_Vals input, S_Vals_Holder output, void *private_ctx) {return SR_ERR_OK;};
- /** Wrapper for [sr_rpc_tree_cb](@ref sr_rpc_tree_cb) callback.*/
- virtual int rpc_tree(const char *xpath, const S_Trees input, S_Trees_Holder output, void *private_ctx) {return SR_ERR_OK;};
- /** Wrapper for [sr_action_tree_cb](@ref sr_action_tree_cb) callback.*/
- virtual int action_tree(const char *xpath, const S_Trees input, S_Trees_Holder output, void *private_ctx) {return SR_ERR_OK;};
- /** Wrapper for [sr_dp_get_items_cb](@ref sr_dp_get_items_cb) callback.*/
- virtual int dp_get_items(const char *xpath, S_Vals_Holder vals, uint64_t request_id, void *private_ctx) {return SR_ERR_OK;};
- /** Wrapper for [sr_event_notif_cb](@ref sr_event_notif_cb) callback.*/
- virtual void event_notif(const sr_ev_notif_type_t notif_type, const char *xpath, S_Vals vals, time_t timestamp, void *private_ctx) {return;};
- /** Wrapper for [sr_event_notif_tree_cb](@ref sr_event_notif_tree_cb) callback.*/
- virtual void event_notif_tree(const sr_ev_notif_type_t notif_type, const char *xpath, S_Trees trees, time_t timestamp, void *private_ctx) {return;};
- Callback *get() {return this;};
-
- std::map<const char *, void*> private_ctx;
-};
-
-/**
- * @brief Class for wrapping sr_subscription_ctx_t.
- * @class Subscribe
- */
-class Subscribe
-{
-
-public:
- /** Wrapper for [sr_subscription_ctx_t](@ref sr_subscription_ctx_t), for internal use only.*/
- Subscribe(S_Session sess);
-
- /** Wrapper for [sr_module_change_subscribe](@ref sr_module_change_subscribe) */
- void module_change_subscribe(const char *module_name, S_Callback callback, void *private_ctx = nullptr, uint32_t priority = 0, sr_subscr_options_t opts = SUBSCR_DEFAULT);
- /** Wrapper for [sr_subtree_change_subscribe](@ref sr_subtree_change_subscribe) */
- void subtree_change_subscribe(const char *xpath, S_Callback callback, void *private_ctx = nullptr, uint32_t priority = 0, sr_subscr_options_t opts = SUBSCR_DEFAULT);
- /** Wrapper for [sr_module_install_subscribe](@ref sr_module_install_subscribe) */
- void module_install_subscribe(S_Callback callback, void *private_ctx = nullptr, sr_subscr_options_t opts = SUBSCR_DEFAULT);
- /** Wrapper for [sr_feature_enable_subscribe](@ref sr_feature_enable_subscribe) */
- void feature_enable_subscribe(S_Callback callback, void *private_ctx = nullptr, sr_subscr_options_t opts = SUBSCR_DEFAULT);
- /** Wrapper for [sr_rpc_subscribe](@ref sr_rpc_subscribe) */
- void rpc_subscribe(const char *xpath, S_Callback callback, void *private_ctx = nullptr, sr_subscr_options_t opts = SUBSCR_DEFAULT);
- /** Wrapper for [sr_action_subscribe](@ref sr_action_subscribe) */
- void action_subscribe(const char *xpath, S_Callback callback, void *private_ctx = nullptr, sr_subscr_options_t opts = SUBSCR_DEFAULT);
- /** Wrapper for [sr_event_notif_subscribe_tree](@ref sr_event_notif_subscribe_tree) */
- void event_notif_subscribe_tree(const char *xpath, S_Callback callback, void *private_ctx = nullptr, sr_subscr_options_t opts = SUBSCR_DEFAULT);
- /** Wrapper for [sr_event_notif_subscribe](@ref sr_event_notif_subscribe) */
- void event_notif_subscribe(const char *xpath, S_Callback callback, void *private_ctx = nullptr, sr_subscr_options_t opts = SUBSCR_DEFAULT);
- /** Wrapper for [sr_rpc_subscribe_tree](@ref sr_rpc_subscribe_tree) */
- void rpc_subscribe_tree(const char *xpath, S_Callback callback, void *private_ctx = nullptr, sr_subscr_options_t opts = SUBSCR_DEFAULT);
- /** Wrapper for [sr_action_subscribe_tree](@ref sr_action_subscribe_tree) */
- void action_subscribe_tree(const char *xpath, S_Callback callback, void *private_ctx = nullptr, sr_subscr_options_t opts = SUBSCR_DEFAULT);
- /** Wrapper for [sr_dp_get_items_subscribe](@ref sr_dp_get_items_subscribe) */
- void dp_get_items_subscribe(const char *xpath, S_Callback callback, void *private_ctx = nullptr, sr_subscr_options_t opts = SUBSCR_DEFAULT);
- std::vector<S_Callback > cb_list;
-
- /** Wrapper for [sr_unsubscribe](@ref sr_unsubscribe) */
- void unsubscribe();
- ~Subscribe();
-
- /** SWIG specific, internal use only.*/
- sr_subscription_ctx_t **swig_sub() { return &_sub;};
- /** SWIG specific, internal use only.*/
- sr_session_ctx_t *swig_sess() {return _sess->_sess;};
- /** SWIG specific, internal use only.*/
- std::vector<void*> wrap_cb_l;
- /** SWIG specific, internal use only.*/
- void additional_cleanup(void *private_ctx) {return;};
-
-private:
- sr_subscription_ctx_t *_sub;
- S_Session _sess;
- S_Deleter sess_deleter;
-};
-
-/**@} */
-}
-#endif
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Struct.hpp b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Struct.hpp
deleted file mode 100644
index 7f48d562d..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Struct.hpp
+++ /dev/null
@@ -1,514 +0,0 @@
-/**
- * @file Struct.h
- * @author Mislav Novakovic <mislav.novakovic@sartura.hr>
- * @brief Sysrepo class header for C struts.
- *
- * @copyright
- * Copyright 2016 Deutsche Telekom AG.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef STRUCT_H
-#define STRUCT_H
-
-#include <iostream>
-#include <memory>
-
-#include "Sysrepo.hpp"
-#include "Internal.hpp"
-
-extern "C" {
-#include "../sysrepo.h"
-}
-
-namespace sysrepo {
-
-/**
- * @defgroup classes C++/Python
- * @{
- */
-
-/**
- * @brief Class for wrapping sr_data_t.
- * @class Data
- */
-class Data
-{
-public:
- /** Wrapper for [sr_data_t](@ref sr_data_t), for internal use only.*/
- Data(sr_data_t data, sr_type_t type, S_Deleter deleter);
- ~Data();
- /** Getter for binary data.*/
- char *get_binary() const;
- /** Getter for bits.*/
- char *get_bits() const;
- /** Getter for bool.*/
- bool get_bool() const;
- /** Getter for decimal64.*/
- double get_decimal64() const;
- /** Getter for enum.*/
- char *get_enum() const;
- /** Getter for identityref.*/
- char *get_identityref() const;
- /** Getter for instanceid.*/
- char *get_instanceid() const;
- /** Getter for int8.*/
- int8_t get_int8() const;
- /** Getter for int16.*/
- int16_t get_int16() const;
- /** Getter for int32.*/
- int32_t get_int32() const;
- /** Getter for int64.*/
- int64_t get_int64() const;
- /** Getter for string.*/
- char *get_string() const;
- /** Getter for uint8.*/
- uint8_t get_uint8() const;
- /** Getter for uint16.*/
- uint16_t get_uint16() const;
- /** Getter for uint32.*/
- uint32_t get_uint32() const;
- /** Getter for uint64.*/
- uint64_t get_uint64() const;
-
-private:
- sr_data_t _d;
- sr_type_t _t;
- S_Deleter _deleter;
-};
-
-/**
- * @brief Class for wrapping sr_val_t.
- * @class Val
- */
-class Val
-{
-public:
- /** Constructor for an empty value.*/
- Val();
- /** Wrapper for [sr_val_t](@ref sr_val_t).*/
- Val(sr_val_t *val, S_Deleter deleter);
- /** Constructor for string value, type can be SR_STRING_T, SR_BINARY_T, SR_BITS_T, SR_ENUM_T,
- * SR_IDENTITYREF_T and SR_INSTANCEID_T.*/
- Val(const char *val, sr_type_t type = SR_STRING_T);
- /** Constructor for bool value.*/
- Val(bool bool_val, sr_type_t type = SR_BOOL_T);
- /** Constructor for decimal64 value.*/
- Val(double decimal64_val);
- /** Constructor for int8 value, C++ only.*/
- Val(int8_t int8_val, sr_type_t type);
- /** Constructor for int16 value, C++ only.*/
- Val(int16_t int16_val, sr_type_t type);
- /** Constructor for int32 value, C++ only.*/
- Val(int32_t int32_val, sr_type_t type);
- /** Constructor for int64 value, type can be SR_INT8_T, SR_INT16_T, SR_INT32_T,
- * SR_INT64_T, SR_UINT8_T, SR_UINT16_T and SR_UINT32_T,*/
- Val(int64_t int64_val, sr_type_t type);
- /** Constructor for uint8 value, C++ only.*/
- Val(uint8_t uint8_val, sr_type_t type);
- /** Constructor for uint16 value, C++ only.*/
- Val(uint16_t uint16_val, sr_type_t type);
- /** Constructor for uint32 value, C++ only.*/
- Val(uint32_t uint32_val, sr_type_t type);
- /** Constructor for uint64 value, C++ only.*/
- Val(uint64_t uint64_val, sr_type_t type);
- ~Val();
- /** Setter for string value, type can be SR_STRING_T, SR_BINARY_T, SR_BITS_T, SR_ENUM_T,
- * SR_IDENTITYREF_T and SR_INSTANCEID_T.*/
- void set(const char *xpath, const char *val, sr_type_t type = SR_STRING_T);
- /** Setter for bool value.*/
- void set(const char *xpath, bool bool_val, sr_type_t type = SR_BOOL_T);
- /** Setter for decimal64 value.*/
- void set(const char *xpath, double decimal64_val);
- /** Setter for int8 value, C++ only.*/
- void set(const char *xpath, int8_t int8_val, sr_type_t type);
- /** Setter for int16 value, C++ only.*/
- void set(const char *xpath, int16_t int16_val, sr_type_t type);
- /** Setter for int32 value, C++ only.*/
- void set(const char *xpath, int32_t int32_val, sr_type_t type);
- /** Setter for int64 value, type can be SR_INT8_T, SR_INT16_T, SR_INT32_T,
- * SR_INT64_T, SR_UINT8_T, SR_UINT16_T and SR_UINT32_T,*/
- void set(const char *xpath, int64_t int64_val, sr_type_t type);
- /** Setter for uint8 value, C++ only.*/
- void set(const char *xpath, uint8_t uint8_val, sr_type_t type);
- /** Setter for uint16 value, C++ only.*/
- void set(const char *xpath, uint16_t uint16_val, sr_type_t type);
- /** Setter for uint32 value, C++ only.*/
- void set(const char *xpath, uint32_t uint32_val, sr_type_t type);
- /** Setter for uint64 value, C++ only.*/
- void set(const char *xpath, uint64_t uint64_val, sr_type_t type);
- /** Getter for xpath.*/
- char *xpath() {return _val->xpath;};
- /** Setter for xpath.*/
- void xpath_set(char *xpath);
- /** Getter for type.*/
- sr_type_t type() {return _val->type;};
- /** Getter for dflt.*/
- bool dflt() {return _val->dflt;};
- /** Setter for dflt.*/
- void dflt_set(bool data) {_val->dflt = data;};
- /** Getter for data.*/
- S_Data data() {S_Data data(new Data(_val->data, _val->type, _deleter)); return data;};
- /** Wrapper for [sr_print_val_mem](@ref sr_print_val_mem) */
- std::string to_string();
- /** Wrapper for [sr_val_to_string](@ref sr_val_to_string) */
- std::string val_to_string();
- /** Wrapper for [sr_dup_val](@ref sr_dup_val) */
- S_Val dup();
-
- friend class Session;
- friend class Subscribe;
-
-private:
- sr_val_t *_val;
- S_Deleter _deleter;
-};
-
-/**
- * @brief Class for wrapping sr_val_t array.
- * @class Vals
- */
-class Vals
-{
-public:
- /** Wrapper for [sr_val_t](@ref sr_val_t) array, internal use only.*/
- Vals(const sr_val_t *vals, const size_t cnt, S_Deleter deleter = nullptr);
- /** Wrapper for [sr_val_t](@ref sr_val_t) array, internal use only.*/
- Vals(sr_val_t **vals, size_t *cnt, S_Deleter deleter = nullptr);
- /** Wrapper for [sr_val_t](@ref sr_val_t) array, create n-array.*/
- Vals(size_t cnt);
- /** Constructor for an empty [sr_val_t](@ref sr_val_t) array.*/
- Vals();
- ~Vals();
- /** Getter for [sr_val_t](@ref sr_val_t), get the n-th element in array.*/
- S_Val val(size_t n);
- /** Getter for array size */
- size_t val_cnt() {return _cnt;};
- /** Wrapper for [sr_dup_values](@ref sr_dup_values) */
- S_Vals dup();
-
- friend class Session;
- friend class Subscribe;
-
-private:
- size_t _cnt;
- sr_val_t *_vals;
- S_Deleter _deleter;
-};
-
-/**
- * @brief Class for wrapping sr_val_t in callbacks.
- * @class Vals_holder
- */
-class Vals_Holder
-{
-public:
- /** Wrapper for [sr_val_t](@ref sr_val_t) array, used only in callbacks.*/
- Vals_Holder(sr_val_t **vals, size_t *cnt);
- /** Create [sr_val_t](@ref sr_val_t) array of n size.*/
- S_Vals allocate(size_t n);
- ~Vals_Holder();
-
-private:
- size_t *p_cnt;
- sr_val_t **p_vals;
- bool _allocate;
-};
-
-/**
- * @brief Class for wrapping sr_val_iter_t.
- * @class Val_Iter
- */
-class Val_Iter
-{
-public:
- /** Wrapper for [sr_val_iter_t](@ref sr_val_iter_t).*/
- Val_Iter(sr_val_iter_t *iter = nullptr);
- ~Val_Iter();
- /** Getter for [sr_val_iter_t](@ref sr_val_iter_t).*/
- sr_val_iter_t *iter() {return _iter;};
-
-private:
- sr_val_iter_t *_iter;
-};
-
-/**
- * @brief Class for wrapping sr_change_iter_t.
- * @class Change_Iter
- */
-class Change_Iter
-{
-public:
- /** Wrapper for [sr_change_iter_t](@ref sr_change_iter_t).*/
- Change_Iter(sr_change_iter_t *iter = nullptr);
- ~Change_Iter();
- /** Getter for [sr_change_iter_t](@ref sr_change_iter_t).*/
- sr_change_iter_t *iter() {return _iter;};
-
-private:
- sr_change_iter_t *_iter;
-};
-
-/**
- * @brief Class for wrapping sr_error_info_t.
- * @class Error
- */
-class Error
-{
-public:
- /** Constructor for an empty [sr_error_info_t](@ref sr_error_info_t).*/
- Error();
- /** Wrapper for [sr_error_info_t](@ref sr_error_info_t).*/
- Error(const sr_error_info_t *info);
- ~Error();
- /** Getter for message.*/
- const char *message() const {if (_info) return _info->message; else return nullptr;};
- /** Getter for xpath.*/
- const char *xpath() const {if (_info) return _info->xpath; else return nullptr;};
-
- friend class Session;
-
-private:
- const sr_error_info_t *_info;
-};
-
-/**
- * @brief Class for wrapping sr_error_info_t array.
- * @class Errors
- */
-class Errors
-{
-public:
- /** Constructor for an empty [sr_error_info_t](@ref sr_error_info_t) array.*/
- Errors();
- ~Errors();
- /** Getter for [sr_error_info_t](@ref sr_error_info_t), get the n-th element in array.*/
- S_Error error(size_t n);
- /** Getter for array size.*/
- size_t error_cnt() {return _cnt;};
-
- friend class Session;
-
-private:
- size_t _cnt;
- const sr_error_info_t *_info;
-};
-
-/**
- * @brief Class for wrapping sr_sch_revision_t array.
- * @class Schema_Revision
- */
-class Schema_Revision
-{
-public:
- /** Wrapper for [sr_sch_revision_t](@ref sr_sch_revision_t).*/
- Schema_Revision(sr_sch_revision_t rev);
- ~Schema_Revision();
- /** Getter for revision.*/
- const char *revision() const {return _rev.revision;};
- /** Getter for file_path_yang.*/
- const char *file_path_yang() const {return _rev.file_path_yang;};
- /** Getter for file_path_yin.*/
- const char *file_path_yin() const {return _rev.file_path_yin;};
-
-private:
- sr_sch_revision_t _rev;
-};
-
-/**
- * @brief Class for wrapping sr_sch_submodule_t.
- * @class Schema_Submodule
- */
-class Schema_Submodule
-{
-public:
- /** Wrapper for [sr_sch_submodule_t](@ref sr_sch_submodule_t).*/
- Schema_Submodule(sr_sch_submodule_t sub, S_Deleter deleter);
- ~Schema_Submodule();
- /** Getter for submodule_name.*/
- const char *submodule_name() const {return _sub.submodule_name;};
- /** Getter for revision.*/
- S_Schema_Revision revision();
-
-private:
- sr_sch_submodule_t _sub;
- S_Deleter _deleter;
-};
-
-/**
- * @brief Class for wrapping sr_schema_t.
- * @class Yang_Schema
- */
-class Yang_Schema
-{
-public:
- /** Wrapper for [sr_schema_t](@ref sr_schema_t).*/
- Yang_Schema(sr_schema_t *sch, S_Deleter deleter);
- ~Yang_Schema();
- /** Getter for module_name.*/
- const char *module_name() const {return _sch->module_name;};
- /** Getter for ns.*/
- const char *ns() const {return _sch->ns;};
- /** Getter for prefix.*/
- const char *prefix() const {return _sch->prefix;};
- /** Getter for implemented.*/
- bool implemented() const {return _sch->implemented;};
- /** Getter for revision.*/
- S_Schema_Revision revision();
- /** Getter for submodule.*/
- S_Schema_Submodule submodule(size_t n);
- /** Getter for submodule_cnt.*/
- size_t submodule_cnt() const {return _sch->submodule_count;};
- /** Getter for enabled_features.*/
- char *enabled_features(size_t n);
- /** Getter for enabled_features_cnt.*/
- size_t enabled_feature_cnt() const {return _sch->enabled_feature_cnt;};
-
- friend class Session;
-
-private:
- sr_schema_t *_sch;
- S_Deleter _deleter;
-};
-
-/**
- * @brief Class for wrapping sr_schema_t array.
- * @class Yang_Schemas
- */
-class Yang_Schemas
-{
-public:
- /** Constructor for an empty [sr_schema_t](@ref sr_schema_t) array.*/
- Yang_Schemas();
- ~Yang_Schemas();
- /** Getter for [sr_schema_t](@ref sr_schema_t) array, get the n-th element in array.*/
- S_Yang_Schema schema(size_t n);
- /** Getter for array size.*/
- size_t schema_cnt() const {return _cnt;};
-
- friend class Session;
-
-private:
- size_t _cnt;
- sr_schema_t *_sch;
- S_Deleter _deleter;
-};
-
-/**
- * @brief Class for wrapping sr_fd_change_t.
- * @class Fd_Change
- */
-class Fd_Change
-{
-public:
- /** Wrapper for [sr_fd_change_t](@ref sr_fd_change_t).*/
- Fd_Change(sr_fd_change_t *ch);
- ~Fd_Change();
- /** Getter for fd.*/
- int fd() const {return _ch->fd;};
- /** Getter for events.*/
- int events() const {return _ch->events;};
- /** Getter for action.*/
- sr_fd_action_t action() const {return _ch->action;};
-
-private:
- sr_fd_change_t *_ch;
-};
-
-/**
- * @brief Class for wrapping sr_fd_change_t array.
- * @class Fd_Changes
- */
-class Fd_Changes
-{
-public:
- /** Wrapper for [sr_fd_change_t](@ref sr_fd_change_t) array.*/
- Fd_Changes(sr_fd_change_t *ch, size_t cnt);
- ~Fd_Changes();
- /** Getter for [sr_fd_change_t](@ref sr_fd_change_t) array, get the n-th element in array.*/
- S_Fd_Change fd_change(size_t n);
-
-private:
- sr_fd_change_t *_ch;
- size_t _cnt;
-};
-
-/**
- * @brief Class for wrapping sr_val_iter_t.
- * @class Fd_Changes
- */
-class Iter_Value
-{
-
-public:
- /** Wrapper for [sr_val_iter_t](@ref sr_val_iter_t).*/
- Iter_Value(sr_val_iter_t *iter = nullptr);
- ~Iter_Value();
- /** Setter for [sr_val_iter_t](@ref sr_val_iter_t).*/
- void Set(sr_val_iter_t *iter);
-
- friend class Session;
-
-private:
- sr_val_iter_t *_iter;
-};
-
-/**
- * @brief Class for wrapping sr_change_iter_t.
- * @class Iter_Change
- */
-class Iter_Change
-{
-
-public:
- /** Wrapper for [sr_change_iter_t](@ref sr_change_iter_t).*/
- Iter_Change(sr_change_iter_t *iter = nullptr);
- ~Iter_Change();
-
- friend class Session;
-
-private:
- sr_change_iter_t *_iter;
-};
-
-/**
- * @brief Class for wrapping sr_change_oper_t.
- * @class Change
- */
-class Change
-{
-public:
- /** Constructor for an empty [sr_change_oper_t](@ref sr_change_oper_t).*/
- Change();
- ~Change();
- /** Getter for sr_change_oper_t.*/
- sr_change_oper_t oper() {return _oper;};
- /** Getter for new sr_val_t.*/
- S_Val new_val();
- /** Getter for old sr_val_t.*/
- S_Val old_val();
-
- friend class Session;
-
-private:
- sr_change_oper_t _oper;
- sr_val_t *_new;
- sr_val_t *_old;
- S_Deleter _deleter_new;
- S_Deleter _deleter_old;
-};
-
-/**@} */
-}
-#endif
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Sysrepo.hpp b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Sysrepo.hpp
deleted file mode 100644
index d3b76483f..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Sysrepo.hpp
+++ /dev/null
@@ -1,177 +0,0 @@
-/**
- * @file Sysrepo.h
- * @author Mislav Novakovic <mislav.novakovic@sartura.hr>
- * @brief Sysrepo Sysrepo class header.
- *
- * @copyright
- * Copyright 2016 Deutsche Telekom AG.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SYSREPO_H
-#define SYSREPO_H
-
-#include <iostream>
-#include <memory>
-#include <stdexcept>
-
-#include "Internal.hpp"
-
-extern "C" {
-#include "../sysrepo.h"
-}
-
-namespace sysrepo {
-
-/**
- * @defgroup classes C++/Python
- * @{
- */
-
-class Iter_Value;
-class Iter_Change;
-class Session;
-class Subscribe;
-class Connection;
-class Operation;
-class Schema_Content;
-class Error;
-class Errors;
-class Data;
-class Schema_Revision;
-class Schema_Submodule;
-class Yang_Schema;
-class Yang_Schemas;
-class Fd_Change;
-class Fd_Changes;
-class Val;
-class Vals_Holder;
-class Vals;
-class Tree;
-class Trees;
-class Trees_Holder;
-class Xpath_Ctx;
-class Logs;
-class Change;
-class Counter;
-class Callback;
-class Deleter;
-
-#ifdef SWIGLUA
-using S_Iter_Value = Iter_Value*;
-using S_Iter_Change = Iter_Change*;
-using S_Session = Session*;
-using S_Subscribe = Subscribe*;
-using S_Connection = Connection*;
-using S_Operation = Operation*;
-using S_Schema_Content = Schema_Content*;
-using S_Error = Error*;
-using S_Errors = Errors*;
-using S_Data = Data*;
-using S_Schema_Revision = Schema_Revision*;
-using S_Schema_Submodule = Schema_Submodule*;
-using S_Yang_Schema = Yang_Schema*;
-using S_Yang_Schemas = Yang_Schemas*;
-using S_Fd_Change = Fd_Change*;
-using S_Fd_Changes = Fd_Changes*;
-using S_Val = Val*;
-using S_Vals_Holder = Vals_Holder*;
-using S_Vals = Vals*;
-using S_Tree = Tree*;
-using S_Trees = Trees*;
-using S_Trees_Holder = Trees_Holder*;
-using S_Xpath_Ctx = Xpath_Ctx*;
-using S_Logs = Logs*;
-using S_Change = Change*;
-using S_Counter = Counter*;
-using S_Callback = Callback*;
-#else
-using S_Iter_Value = std::shared_ptr<Iter_Value>;
-using S_Iter_Change = std::shared_ptr<Iter_Change>;
-using S_Session = std::shared_ptr<Session>;
-using S_Subscribe = std::shared_ptr<Subscribe>;
-using S_Connection = std::shared_ptr<Connection>;
-using S_Operation = std::shared_ptr<Operation>;
-using S_Schema_Content = std::shared_ptr<Schema_Content>;
-using S_Error = std::shared_ptr<Error>;
-using S_Errors = std::shared_ptr<Errors>;
-using S_Data = std::shared_ptr<Data>;
-using S_Schema_Revision = std::shared_ptr<Schema_Revision>;
-using S_Schema_Submodule = std::shared_ptr<Schema_Submodule>;
-using S_Yang_Schema = std::shared_ptr<Yang_Schema>;
-using S_Yang_Schemas = std::shared_ptr<Yang_Schemas>;
-using S_Fd_Change = std::shared_ptr<Fd_Change>;
-using S_Fd_Changes = std::shared_ptr<Fd_Changes>;
-using S_Val = std::shared_ptr<Val>;
-using S_Vals_Holder = std::shared_ptr<Vals_Holder>;
-using S_Vals = std::shared_ptr<Vals>;
-using S_Tree = std::shared_ptr<Tree>;
-using S_Trees = std::shared_ptr<Trees>;
-using S_Trees_Holder = std::shared_ptr<Trees_Holder>;
-using S_Xpath_Ctx = std::shared_ptr<Xpath_Ctx>;
-using S_Logs = std::shared_ptr<Logs>;
-using S_Change = std::shared_ptr<Change>;
-using S_Counter = std::shared_ptr<Counter>;
-using S_Callback = std::shared_ptr<Callback>;
-using S_Deleter = std::shared_ptr<Deleter>;
-#endif
-
-/* this is a workaround for python not recognizing
- * enum's in function default values */
-static const int SESS_DEFAULT = SR_SESS_DEFAULT;
-static const int DS_RUNNING = SR_DS_RUNNING;
-static const int EDIT_DEFAULT = SR_EDIT_DEFAULT;
-static const int CONN_DEFAULT = SR_CONN_DEFAULT;
-static const int GET_SUBTREE_DEFAULT = SR_GET_SUBTREE_DEFAULT;
-static const int SUBSCR_DEFAULT = SR_SUBSCR_DEFAULT;
-
-#ifdef SWIG
-// https://github.com/swig/swig/issues/1158
-void throw_exception (int error);
-#else
-void throw_exception [[noreturn]] (int error);
-#endif
-
-/**
- * @brief Class for wrapping sr_error_t.
- * @class sysrepo_exception
- */
-class sysrepo_exception : public std::runtime_error
-{
-public:
- explicit sysrepo_exception(const sr_error_t error_code);
- virtual ~sysrepo_exception() override;
- sr_error_t error_code() const;
-private:
- sr_error_t m_error_code;
-};
-
-/**
- * @brief Class for wrapping ref sr_log_level_t.
- * @class Logs
- */
-class Logs
-{
-public:
- Logs();
- ~Logs();
- /** Wrapper for [sr_log_stderr](@ref sr_log_stderr) */
- void set_stderr(sr_log_level_t log_level);
- /** Wrapper for [sr_log_syslog](@ref sr_log_syslog) */
- void set_syslog(sr_log_level_t log_level);
-};
-
-/**@} */
-}
-#endif
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Tree.hpp b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Tree.hpp
deleted file mode 100644
index 31f3abd47..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Tree.hpp
+++ /dev/null
@@ -1,176 +0,0 @@
-/**
- * @file Trees.h
- * @author Mislav Novakovic <mislav.novakovic@sartura.hr>
- * @brief Sysrepo class header for C header trees.h.
- *
- * @copyright
- * Copyright 2016 Deutsche Telekom AG.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef TREE_H
-#define TREE_H
-
-#include "Sysrepo.hpp"
-#include "Struct.hpp"
-
-extern "C" {
-#include "../sysrepo.h"
-#include "../sysrepo/trees.h"
-}
-
-namespace sysrepo {
-
-/**
- * @defgroup classes C++/Python
- * @{
- */
-
-/**
- * @brief Class for wrapping sr_node_t.
- * @class Tree
- */
-class Tree
-{
-public:
- /** Constructor for an empty [sr_node_t](@ref sr_node_t).*/
- Tree();
- /** Wrapper for [sr_new_tree](@ref sr_new_tree).*/
- Tree(const char *root_name, const char *root_module_name);
- /** Wrapper for [sr_node_t](@ref sr_node_t).*/
- Tree(sr_node_t *tree, S_Deleter deleter);
- /** Wrapper for [sr_dup_tree](@ref sr_dup_tree).*/
- S_Tree dup();
- /** Get the node value.*/
- S_Tree node();
- /** Getter for name.*/
- char *name() {return _node->name;};
- /** Getter for type.*/
- sr_type_t type() {return _node->type;};
- /** Getter for dflt.*/
- bool dflt() {return _node->dflt;};
- /** Getter for data.*/
- S_Data data() {S_Data data(new Data(_node->data, _node->type, _deleter)); return data;};
- /** Getter for module_name.*/
- char *module_name() {return _node->module_name;};
- /** Getter for parent.*/
- S_Tree parent();
- /** Getter for next.*/
- S_Tree next();
- /** Getter for prev.*/
- S_Tree prev();
- /** Getter for first_child.*/
- S_Tree first_child();
- /** Getter for last_child.*/
- S_Tree last_child();
- /** Wrapper for [sr_print_tree_mem](@ref sr_print_tree_mem).*/
- std::string to_string(int depth_limit);
- /** Wrapper for [sr_print_val_mem](@ref sr_print_val_mem).*/
- std::string value_to_string();
- /** Wrapper for [sr_node_set_name](@ref sr_node_set_name).*/
- void set_name(const char *name);
- /** Wrapper for [sr_node_set_module](@ref sr_node_set_module).*/
- void set_module(const char *module_name);
- /** Wrapper for [sr_node_set_str_data](@ref sr_node_set_str_data).*/
- void set_str_data(sr_type_t type, const char *string_val);
- /** Wrapper for [sr_node_add_child](@ref sr_node_add_child).*/
- void add_child(const char *child_name, const char *child_module_name, S_Tree child);
- /** Setter for string value, type can be SR_STRING_T, SR_BINARY_T, SR_BITS_T, SR_ENUM_T,
- * SR_IDENTITYREF_T and SR_INSTANCEID_T.*/
- void set(const char *val, sr_type_t type = SR_STRING_T);
- /** Setter for bool value.*/
- void set(bool bool_val, sr_type_t type = SR_BOOL_T);
- /** Setter for decimal64 value.*/
- void set(double decimal64_val);
- /** Setter for int8 value, C++ only.*/
- void set(int8_t int8_val, sr_type_t type);
- /** Setter for int16 value, C++ only.*/
- void set(int16_t int16_val, sr_type_t type);
- /** Setter for int32 value, C++ only.*/
- void set(int32_t int32_val, sr_type_t type);
- /** Setter for int64 value, type can be SR_INT8_T, SR_INT16_T, SR_INT32_T,
- * SR_INT64_T, SR_UINT8_T, SR_UINT16_T and SR_UINT32_T,*/
- void set(int64_t int64_val, sr_type_t type);
- /** Setter for uint8 value, C++ only.*/
- void set(uint8_t uint8_val, sr_type_t type);
- /** Setter for uint16 value, C++ only.*/
- void set(uint16_t uint16_val, sr_type_t type);
- /** Setter for uint32 value, C++ only.*/
- void set(uint32_t uint32_val, sr_type_t type);
- /** Setter for uint64 value, C++ only.*/
- void set(uint64_t uint64_val, sr_type_t type);
- ~Tree();
-
- friend class Session;
- friend class Subscribe;
-
-private:
- sr_node_t *_node;
- S_Deleter _deleter;
-};
-
-/**
- * @brief Class for wrapping sr_node_t array.
- * @class Trees
- */
-class Trees
-{
-public:
- /** Constructor for an empty [sr_node_t](@ref sr_node_t) array.*/
- Trees();
- /** Wrapper for [sr_node_t](@ref sr_node_t) array, create n-array.*/
- Trees(size_t n);
- /** Wrapper for [sr_node_t](@ref sr_node_t) array, internal use only.*/
- Trees(sr_node_t **trees, size_t *cnt, S_Deleter deleter = nullptr);
- /** Wrapper for [sr_node_t](@ref sr_node_t) array, internal use only.*/
- Trees(const sr_node_t *trees, const size_t n, S_Deleter deleter = nullptr);
- /** Getter for [sr_node_t](@ref sr_node_t), get the n-th element in array.*/
- S_Tree tree(size_t n);
- /** Wrapper for [sr_dup_trees](@ref sr_dup_trees) */
- S_Trees dup();
- /** Getter for array size */
- size_t tree_cnt() {return _cnt;};
- ~Trees();
-
- friend class Session;
- friend class Subscribe;
-
-private:
- size_t _cnt;
- sr_node_t *_trees;
- S_Deleter _deleter;
-};
-
-/**
- * @brief Class for wrapping sr_node_t in callbacks.
- * @class Trees_Holder
- */
-class Trees_Holder
-{
-public:
- /** Wrapper for [sr_node_t](@ref sr_node_t) array, used only in callbacks.*/
- Trees_Holder(sr_node_t **trees, size_t *cnt);
- /** Create [sr_node_t](@ref sr_node_t) array of n size.*/
- S_Trees allocate(size_t n);
- ~Trees_Holder();
-
-private:
- size_t *p_cnt;
- sr_node_t **p_trees;
- bool _allocate;
-};
-
-/**@} */
-}
-#endif
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Xpath.hpp b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Xpath.hpp
deleted file mode 100644
index 4406134da..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/Xpath.hpp
+++ /dev/null
@@ -1,97 +0,0 @@
-/**
- * @file Xpath.h
- * @author Mislav Novakovic <mislav.novakovic@sartura.hr>
- * @brief Sysrepo class header for C header xpath_utils.h.
- *
- * @copyright
- * Copyright 2016 Deutsche Telekom AG.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef XPATH_H
-#define XPATH_H
-
-#include <iostream>
-
-extern "C" {
-#include "../sysrepo/xpath.h"
-}
-
-namespace sysrepo {
-
-/**
- * @defgroup classes C++/Python
- * @{
- */
-
-/**
- * @brief Class for wrapping sr_xpath_ctx_t.
- * @class Xpath_Ctx
- */
-class Xpath_Ctx
-{
-public:
- /** Constructor for an empty [sr_xpath_ctx_t](@ref sr_xpath_ctx_t).*/
- Xpath_Ctx();
- /** Getter for begining.*/
- char *begining() {if (_state != nullptr) return _state->begining; return nullptr;};
- /** Getter for current_node.*/
- char *current_node() {if (_state != nullptr) return _state->current_node; return nullptr;};
- /** Getter for replaced_position.*/
- char *replaced_position() {if (_state != nullptr) return _state->replaced_position; return nullptr;};
- /** Getter for replaced_char.*/
- char replaced_char() {if (_state != nullptr) return _state->replaced_char; return (char) 0;};
- ~Xpath_Ctx();
- /** Wrapper for [sr_xpath_next_node](@ref sr_xpath_next_node).*/
- char *next_node(char *xpath) {return sr_xpath_next_node(xpath, _state);};
- /** Wrapper for [sr_xpath_next_node_with_ns](@ref sr_xpath_next_node_with_ns).*/
- char *next_node_with_ns(char *xpath) {return sr_xpath_next_node_with_ns(xpath, _state);};
- /** Wrapper for [sr_xpath_next_key_name](@ref sr_xpath_next_key_name).*/
- char *next_key_name(char *xpath) {return sr_xpath_next_key_name(xpath, _state);};
- /** Wrapper for [sr_xpath_next_key_value](@ref sr_xpath_next_key_value).*/
- char *next_key_value(char *xpath) {return sr_xpath_next_key_value(xpath, _state);};
- /** Wrapper for [sr_xpath_node](@ref sr_xpath_node).*/
- char *node(char *xpath, const char *node_name) {return sr_xpath_node(xpath, node_name, _state);};
- /** Wrapper for [sr_xpath_node_rel](@ref sr_xpath_node_rel).*/
- char *node_rel(char *xpath, const char *node_name) {return sr_xpath_node_rel(xpath, node_name, _state);};
- /** Wrapper for [sr_xpath_node_idx](@ref sr_xpath_node_idx).*/
- char *node_idx(char *xpath, size_t index) {return sr_xpath_node_idx(xpath, index, _state);};
- /** Wrapper for [sr_xpath_node_idx_rel](@ref sr_xpath_node_idx_rel).*/
- char *node_idx_rel(char *xpath, size_t index) {return sr_xpath_node_idx_rel(xpath, index, _state);};
- /** Wrapper for [sr_xpath_node_key_value](@ref sr_xpath_node_key_value).*/
- char *node_key_value(char *xpath, const char *key) {return sr_xpath_node_key_value(xpath, key, _state);};
- /** Wrapper for [sr_xpath_node_key_value_idx](@ref sr_xpath_node_key_value_idx).*/
- char *node_key_value_idx(char *xpath, size_t index) {return sr_xpath_node_key_value_idx(xpath, index, _state);};
- /** Wrapper for [sr_xpath_key_value](@ref sr_xpath_key_value).*/
- char *key_value(char *xpath, const char *node_name, const char *key_name) {
- return sr_xpath_key_value(xpath, node_name, key_name, _state);};
- /** Wrapper for [sr_xpath_key_value_idx](@ref sr_xpath_key_value_idx).*/
- char *key_value_idx(char *xpath, size_t node_index, size_t key_index) {
- return sr_xpath_key_value_idx(xpath, node_index, key_index, _state);};
- /** Wrapper for [sr_xpath_last_node](@ref sr_xpath_last_node).*/
- char *last_node(char *xpath) {return sr_xpath_last_node(xpath, _state);};
- /** Wrapper for [sr_xpath_node_name](@ref sr_xpath_node_name).*/
- char *node_name(const char *xpath) {return sr_xpath_node_name(xpath);};
- /** Wrapper for [sr_xpath_node_name_eq](@ref sr_xpath_node_name_eq).*/
- bool node_name_eq(const char *xpath, const char *node_str) {return sr_xpath_node_name_eq(xpath, node_str);};
- /** Wrapper for [sr_xpath_recover](@ref sr_xpath_recover).*/
- void recover() {return sr_xpath_recover(_state);};
-
-private:
- sr_xpath_ctx_t *_state;
-};
-
-/**@} */
-}
-#endif
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/plugins.h b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/plugins.h
deleted file mode 100644
index 3c4efc9a9..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/plugins.h
+++ /dev/null
@@ -1,139 +0,0 @@
-/**
- * @file plugins.h
- * @author Rastislav Szabo <raszabo@cisco.com>, Lukas Macko <lmacko@cisco.com>
- * @brief Sysrepo helpers for plugin integrations.
- *
- * @copyright
- * Copyright 2016 Cisco Systems, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SYSREPO_PLUGINS_H_
-#define SYSREPO_PLUGINS_H_
-
-#include <stdio.h>
-#include <stdint.h>
-#include <errno.h>
-#include <string.h>
-#include <syslog.h>
-
-#include <sysrepo.h>
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/**
- * @defgroup plugin_utils Plugin Utilities
- * @{
- *
- * @brief Utilities that expand sysrepo API aimed for sysrepo plugins.
- *
- * The provided features are: logging macros.
- */
-
-/** Prints an error message (with format specifiers). */
-#define SRP_LOG_ERR(MSG, ...) SRP_LOG__INTERNAL(SR_LL_ERR, MSG, __VA_ARGS__)
-/** Prints an error message. */
-#define SRP_LOG_ERR_MSG(MSG) SRP_LOG__INTERNAL(SR_LL_ERR, MSG "%s", "")
-
-/** Prints a warning message (with format specifiers). */
-#define SRP_LOG_WRN(MSG, ...) SRP_LOG__INTERNAL(SR_LL_WRN, MSG, __VA_ARGS__)
-/** Prints a warning message. */
-#define SRP_LOG_WRN_MSG(MSG) SRP_LOG__INTERNAL(SR_LL_WRN, MSG "%s", "")
-
-/** Prints an informational message (with format specifiers). */
-#define SRP_LOG_INF(MSG, ...) SRP_LOG__INTERNAL(SR_LL_INF, MSG, __VA_ARGS__)
-/** Prints an informational message. */
-#define SRP_LOG_INF_MSG(MSG) SRP_LOG__INTERNAL(SR_LL_INF, MSG "%s", "")
-
-/** Prints a development debug message (with format specifiers). */
-#define SRP_LOG_DBG(MSG, ...) SRP_LOG__INTERNAL(SR_LL_DBG, MSG, __VA_ARGS__)
-/** Prints a development debug message. */
-#define SRP_LOG_DBG_MSG(MSG) SRP_LOG__INTERNAL(SR_LL_DBG, MSG "%s", "")
-
-/**@} plugin_utils */
-
-
-////////////////////////////////////////////////////////////////////////////////
-// Internal macros (not intended to be used directly)
-////////////////////////////////////////////////////////////////////////////////
-
-#ifdef NDEBUG
- #define SRP_LOG_PRINT_FUNCTION_NAMES (0) /**< Do not print function names in messages */
-#else
- #define SRP_LOG_PRINT_FUNCTION_NAMES (1) /**< Every message will include the function that generated the output */
-#endif
-
-extern volatile uint8_t sr_ll_stderr; /**< Holds current level of stderr debugs. */
-extern volatile uint8_t sr_ll_syslog; /**< Holds current level of syslog debugs. */
-
-/**
- * @brief Matching log level to message beginning
- */
-#define SRP_LOG__LL_STR(LL) \
- ((SR_LL_DBG == LL) ? "DBG" : \
- (SR_LL_INF == LL) ? "INF" : \
- (SR_LL_WRN == LL) ? "WRN" : \
- "ERR")
-
-/**
- * @brief Matching log level to message macros
- */
-#define SRP_LOG__LL_FACILITY(LL) \
- ((SR_LL_DBG == LL) ? LOG_DEBUG : \
- (SR_LL_INF == LL) ? LOG_INFO : \
- (SR_LL_WRN == LL) ? LOG_WARNING : \
- LOG_ERR)
-
-#if SRP_LOG_PRINT_FUNCTION_NAMES
-/**
- * @brief Syslog output macro with function names.
- */
-#define SRP_LOG__SYSLOG(LL, MSG, ...) \
- syslog(SRP_LOG__LL_FACILITY(LL), "[%s] (%s:%d) " MSG, SRP_LOG__LL_STR(LL), __func__, __LINE__, __VA_ARGS__);
-/**
- * @brief Stderr output macro with function names.
- */
-#define SRP_LOG__STDERR(LL, MSG, ...) \
- fprintf(stderr, "[%s] (%s:%d) " MSG "\n", SRP_LOG__LL_STR(LL), __func__, __LINE__, __VA_ARGS__);
-#else
-/**
- * @brief Syslog output macro without function names.
- */
-#define SRP_LOG__SYSLOG(LL, MSG, ...) \
- syslog(SRP_LOG__LL_FACILITY(LL), "[%s] " MSG, SRP_LOG__LL_STR(LL), __VA_ARGS__);
-/**
- * @brief Stderr output macro without function names.
- */
-#define SRP_LOG__STDERR(LL, MSG, ...) \
- fprintf(stderr, "[%s] " MSG "\n", SRP_LOG__LL_STR(LL), __VA_ARGS__);
-#endif
-
-/**
- * @brief Internal outptu macro.
- */
-#define SRP_LOG__INTERNAL(LL, MSG, ...) \
- do { \
- if (sr_ll_stderr >= LL) \
- SRP_LOG__STDERR(LL, MSG, __VA_ARGS__) \
- if (sr_ll_syslog >= LL) \
- SRP_LOG__SYSLOG(LL, MSG, __VA_ARGS__) \
- } while(0)
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* SYSREPO_PLUGINS_H_ */
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/trees.h b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/trees.h
deleted file mode 100644
index 8db1602e6..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/trees.h
+++ /dev/null
@@ -1,226 +0,0 @@
-/**
- * @file trees.h
- * @author Rastislav Szabo <raszabo@cisco.com>, Lukas Macko <lmacko@cisco.com>,
- * Milan Lenco <milan.lenco@pantheon.tech>
- * @brief Functions for simplified manipulation with Sysrepo trees.
- *
- * @copyright
- * Copyright 2016 Cisco Systems, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SYSREPO_TREES_H_
-#define SYSREPO_TREES_H_
-
-#include <stdio.h>
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/**
- * @defgroup trees Tree Manipulation Utilities
- * @{
- *
- * @brief Set of functions facilitating simplified manipulation and traversal
- * of Sysrepo trees. As there are many connections between the tree nodes
- * and also some internal attributes associated with each node, it is actually
- * recommended to use these function rather than to allocate and initialize trees
- * manually, which is very likely to lead to time-wasting and hard-to-debug programming
- * errors.
- * Iterative tree loading (@see SR_GET_SUBTREE_ITERATIVE) even requires to use
- * designated functions for tree traversal -- ::sr_node_get_child and ::sr_node_get_next_sibling.
- *
- * Another added benefit of using these function is that the trees created using
- * ::sr_new_tree and ::sr_new_trees will be allocated using the Sysrepo's own memory management
- * (if enabled) which was proven to be more efficient for larger data sets
- * (far less copying, quicker conversion to/from google protocol buffer messages,
- * stable memory footprint, etc.).
- */
-
-/**
- * @brief Allocate an instance of Sysrepo tree. The newly allocated tree has only
- * one node -- the tree root -- and can be expanded to its full desired size
- * through a repeated use of the function ::sr_node_add_child.
- *
- * @param [in] root_name Name for the newly allocated tree root. Can be NULL.
- * @param [in] root_module_name Name of the module that defines scheme of the tree root.
- * Can be NULL.
- * @param [out] tree Returned newly allocated Sysrepo tree.
- */
-int sr_new_tree(const char *root_name, const char *root_module_name, sr_node_t **tree);
-
-/**
- * @brief Allocate an array of sysrepo trees (uninitialized tree roots).
- *
- * @param [in] tree_cnt Length of the array to allocate.
- * @param [out] trees Returned newly allocated array of trees.
- */
-int sr_new_trees(size_t tree_cnt, sr_node_t **trees);
-
-/**
- * @brief Reallocate an array of sysrepo trees (uninitialized tree roots).
- *
- * @param [in] old_tree_cnt Current length of the tree array.
- * @param [in] new_tree_cnt Desired length of the tree array.
- * @param [in,out] trees Returned newly allocated/enlarged array of trees.
- */
-int sr_realloc_trees(size_t old_tree_cnt, size_t new_tree_cnt, sr_node_t **trees);
-
-/**
- * @brief Set/change name of a Sysrepo node.
- *
- * @param [in] node Sysrepo node to change the name of.
- * @param [in] name Name to set.
- */
-int sr_node_set_name(sr_node_t *node, const char *name);
-
-/**
- * @brief Set/change module of a Sysrepo node.
- *
- * @param [in] node Sysrepo node to change the module of.
- * @param [in] module_name Module name to set.
- */
-int sr_node_set_module(sr_node_t *node, const char *module_name);
-
-/**
- * @brief Store data of string type into the Sysrepo node data.
- *
- * @param [in] node Sysrepo node to edit.
- * @param [in] type Exact type of the data.
- * @param [in] string_val String value to set.
- */
-int sr_node_set_str_data(sr_node_t *node, sr_type_t type, const char *string_val);
-
-/**
- * @brief Store data of string type into the Sysrepo node data. The actual data
- * will be built from the a format string and a variable arguments list.
- *
- * @param [in] node Sysrepo node to edit.
- * @param [in] type Exact type of the data.
- * @param [in] format Format string used to build the data.
- */
-int sr_node_build_str_data(sr_node_t *node, sr_type_t type, const char *format, ...);
-
-/**
- * @brief Create a new child for a given Sysrepo node.
- *
- * @param [in] parent Sysrepo node that should be parent of the newly created node.
- * @param [in] child_name Name of the newly created child node. Can be NULL.
- * @param [in] child_module_name Name of the module that defines scheme of the newly created
- * child node. Can be NULL.
- * @param [out] child Returned newly allocated child node.
- */
-int sr_node_add_child(sr_node_t *parent, const char *child_name, const char *child_module_name,
- sr_node_t **child);
-
-/**
- * @brief Duplicate node and all its descendants (with or without Sysrepo memory context)
- * into a new instance of Sysrepo tree with memory context.
- *
- * @param [in] tree Sysrepo tree to duplicate.
- * @param [out] tree_dup Returned duplicate of the input tree.
- */
-int sr_dup_tree(const sr_node_t *tree, sr_node_t **tree_dup);
-
-/**
- * @brief Duplicate an array of trees (with or without Sysrepo memory context) into a new
- * array of trees with memory context.
- *
- * @param [in] trees Array of sysrepo trees to duplicate.
- * @param [in] count Size of the array to duplicate.
- * @param [out] trees_dup Returned duplicate of the input array.
- */
-int sr_dup_trees(const sr_node_t *trees, size_t count, sr_node_t **trees_dup);
-
-/**
- * @brief Print sysrepo tree to STDOUT.
- *
- * @param [in] tree Sysrepo tree to print.
- * @param [in] depth_limit Maximum number of tree levels to print.
- */
-int sr_print_tree(const sr_node_t *tree, int depth_limit);
-
-/**
- * @brief Print sysrepo tree to the specified file descriptor.
- *
- * @param [in] fd File descriptor to print the tree into.
- * @param [in] tree Sysrepo tree to print.
- * @param [in] depth_limit Maximum number of tree levels to print.
- */
-int sr_print_tree_fd(int fd, const sr_node_t *tree, int depth_limit);
-
-/**
- * @brief Print sysrepo tree to the specified output file stream.
- *
- * @param [in] stream Output file stream to print the tree into.
- * @param [in] tree Sysrepo tree to print.
- * @param [in] depth_limit Maximum number of tree levels to print.
- */
-int sr_print_tree_stream(FILE *stream, const sr_node_t *tree, int depth_limit);
-
-/**
- * @brief Print sysrepo tree into a newly allocated memory buffer.
- * The caller is expected to eventually free the returned string.
- *
- * @param [in] mem_p Pointer to store the resulting dump.
- * @param [in] tree Sysrepo tree to print.
- * @param [in] depth_limit Maximum number of tree levels to print.
- */
-int sr_print_tree_mem(char **mem_p, const sr_node_t *tree, int depth_limit);
-
-/**
- * @brief Returns pointer to the first child (based on the schema) of a given node.
- * For a fully loaded tree it is equivalent to "node->first_child". For a partially
- * loaded tree (@see SR_GET_SUBTREE_ITERATIVE) it may internally issue a sysrepo
- * get-subtree-chunk request in order to obtain the data of the child
- * (and the data of some surrounding nodes with it for efficiency).
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] node Node to get the child of.
- * @return Pointer to a child node. NULL if there is none or an error occured.
- */
-sr_node_t *sr_node_get_child(sr_session_ctx_t *session, sr_node_t *node);
-
-/**
- * @brief Returns pointer to the next sibling (based on the schema) of a given node.
- * For a fully loaded tree it is equivalent to "node->next". For a partially
- * loaded tree (@see SR_GET_SUBTREE_ITERATIVE) it may internally issue a sysrepo
- * get-subtree-chunk request in order to obtain the data of the next sibling
- * (and the data of some surrounding nodes with it for efficiency).
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] node Node to get the next sibling of.
- * @return Pointer to the next sibling. NULL if this is the last sibling or an error occured.
- */
-sr_node_t *sr_node_get_next_sibling(sr_session_ctx_t *session, sr_node_t *node);
-
-/**
- * @brief Get the parent of a given node. It is equivalent to "node->parent", but for
- * a partially loaded tree it is preferred to use this function rather than to access
- * the pointer directly just for the sake of code cleanliness.
- *
- * @param[in] session Session context acquired with ::sr_session_start call.
- * @param[in] node Node to get the parent of.
- * @return Pointer to the node's parent or NULL if the node is a root of a (sub)tree.
- */
-sr_node_t *sr_node_get_parent(sr_session_ctx_t *session, sr_node_t *node);
-
-/**@} trees */
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* SYSREPO_TREES_H_ */
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/values.h b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/values.h
deleted file mode 100644
index 049c82f19..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/values.h
+++ /dev/null
@@ -1,196 +0,0 @@
-/**
- * @file values.h
- * @author Rastislav Szabo <raszabo@cisco.com>, Lukas Macko <lmacko@cisco.com>,
- * Milan Lenco <milan.lenco@pantheon.tech>
- * @brief Functions for simplified manipulation with Sysrepo values.
- *
- * @copyright
- * Copyright 2016 Cisco Systems, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SYSREPO_VALUES_H_
-#define SYSREPO_VALUES_H_
-
-#include <stdio.h>
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/**
- * @defgroup values Value Manipulation Utilities
- * @{
- *
- * @brief Set of functions facilitating simplified manipulation with sysrepo
- * values. It is not necessary to use these functions in any scenario, values
- * can be allocated and initialized manually (just remember to set all uninitialized
- * members to zero!).
- *
- * Using these utilities, however, has several benefits. Firstly, all the memory
- * allocations associated with creating values and setting their attributes get
- * hidden behind these functions. The "old-way" was (and still is) to set xpath
- * and string values using strdup, which may repeat in applications communicating
- * with sysrepo very often and becomes very annoying to write.
- * Secondly, the programmer may actually forget to copy or give-up on the ownership
- * of a string passed to sysrepo value which will then get unexpectedly deallocated
- * in ::sr_free_val or ::sr_free_values.
- * The third benefit is that the values created using ::sr_new_val
- * and ::sr_new_values will be allocated using the Sysrepo's own memory management
- * (if enabled) which was proven to be more efficient for larger data sets
- * (far less copying, quicker conversion to/from google protocol buffer messages,
- * stable memory footprint, etc.).
- */
-
-/**
- * @brief Allocate an instance of Sysrepo value.
- *
- * @param [in] xpath Xpath to set for the newly allocated value. Can be NULL.
- * @param [out] value Returned newly allocated value.
- */
-int sr_new_val(const char *xpath, sr_val_t **value);
-
-/**
- * @brief Allocate an array of sysrepo values.
- *
- * @param [in] value_cnt Length of the array to allocate.
- * @param [out] values Returned newly allocated array of values.
- */
-int sr_new_values(size_t value_cnt, sr_val_t **values);
-
-/**
- * @brief Reallocate an array of sysrepo values.
- *
- * @param [in] old_value_cnt Current length of the value array.
- * @param [in] new_value_cnt Desired length of the value array.
- * @param [in,out] values Returned newly allocated/enlarged array of values.
- */
-int sr_realloc_values(size_t old_value_cnt, size_t new_value_cnt, sr_val_t **values);
-
-/**
- * @brief Set/change xpath of a Sysrepo value.
- *
- * @param [in] value Sysrepo value to change the xpath of.
- * @param [in] xpath XPath to set.
- */
-int sr_val_set_xpath(sr_val_t *value, const char *xpath);
-
-/**
- * @brief Set/change xpath of a Sysrepo value to a new one, built from
- * a format string and a variable arguments list.
- *
- * @param [in] value Sysrepo value to change the xpath of.
- * @param [in] format Format string used to build XPath.
- */
-int sr_val_build_xpath(sr_val_t *value, const char *format, ...);
-
-/**
- * @brief Store data of string type into the Sysrepo value data.
- *
- * @param [in] value Sysrepo value to edit.
- * @param [in] type Exact type of the data.
- * @param [in] string_val String value to set.
- */
-int sr_val_set_str_data(sr_val_t *value, sr_type_t type, const char *string_val);
-
-/**
- * @brief Store data of string type into the Sysrepo value data. The actual data
- * will be built from the a format string and a variable arguments list.
- *
- * @param [in] value Sysrepo value to edit.
- * @param [in] type Exact type of the data.
- * @param [in] format Format string used to build the data.
- */
-int sr_val_build_str_data(sr_val_t *value, sr_type_t type, const char *format, ...);
-
-/**
- * @brief Duplicate value (with or without Sysrepo memory context) into a new
- * instance with memory context.
- *
- * @param [in] value Sysrepo value to duplicate
- * @param [out] value_dup Returned duplicate of the input value.
- */
-int sr_dup_val(const sr_val_t *value, sr_val_t **value_dup);
-
-/**
- * @brief Duplicate values (with or without Sysrepo memory context) into a new
- * array with memory context.
- *
- * @param [in] values Array of sysrepo values to duplicate
- * @param [in] count Size of the array to duplicate.
- * @param [out] values_dup Returned duplicate of the input array.
- */
-int sr_dup_values(const sr_val_t *values, size_t count, sr_val_t **values_dup);
-
-/**
- * @brief Print sysrepo value to STDOUT.
- *
- * @param [in] value Sysrepo value to print.
- */
-int sr_print_val(const sr_val_t *value);
-
-/**
- * @brief Print sysrepo value to the specified file descriptor.
- *
- * @param [in] fd File descriptor to print the value into.
- * @param [in] value Sysrepo value to print.
- */
-int sr_print_val_fd(int fd, const sr_val_t *value);
-
-/**
- * @brief Print sysrepo value to the specified output file stream.
- *
- * @param [in] stream Output file stream to print the value into.
- * @param [in] value Sysrepo value to print.
- */
-int sr_print_val_stream(FILE *stream, const sr_val_t *value);
-
-/**
- * @brief Print sysrepo value into a newly allocated memory buffer.
- * The caller is expected to eventually free the returned string.
- *
- * @param [in] mem_p Pointer to store the resulting dump.
- * @param [in] value Sysrepo value to print.
- */
-int sr_print_val_mem(char **mem_p, const sr_val_t *value);
-
-/**
- * @brief Converts value to string representation
- * @param [in] value
- * @return allocated string representation of value (must be freed by caller), NULL in case of error
- * @note In case of SR_DECIMAL64_T type, number of fraction digits doesn't have to
- * correspond to schema.
- */
-char *sr_val_to_str(const sr_val_t *value);
-
-/**
- * @brief Converts value to string and prints it to the provided buffer including
- * terminating NULL byte
- * @param [in] value
- * @param [in] buffer - buffer provided by caller where the data will be printed
- * @param [in] size - the size of the buffer
- * @return number of characters that was written in case of success, otherwise number of characters which would have been
- * written if enough space had been available (excluding terminating NULL byte)
- * @note In case of SR_DECIMAL64_T type, number of fraction digits doesn't have to
- * correspond to schema.
- */
-int sr_val_to_buff(const sr_val_t *value, char buffer[], size_t size);
-
-/**@} values */
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* SYSREPO_VALUES_H_ */
diff --git a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/xpath.h b/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/xpath.h
deleted file mode 100644
index 7eca41e57..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/netopeer-change-saver-native/sysrepo/xpath.h
+++ /dev/null
@@ -1,232 +0,0 @@
-/**
- * @file xpath.h
- * @author Rastislav Szabo <raszabo@cisco.com>, Lukas Macko <lmacko@cisco.com>
- * @brief Sysrepo helpers for node's address manipulation.
- *
- * @copyright
- * Copyright 2015 Cisco Systems, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SYSREPO_XPATH_H_
-#define SYSREPO_XPATH_H_
-
-#include <stdbool.h>
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/**
- * @defgroup xpath_utils XPath Processing Utilities
- * @{
- *
- * @brief Set of helpers working on a subset of xpath expressions used of node identification
- * Functions modify inputs arguments by placing termination zero at appropriate places to save up
- * string duplication. The state of processing is stored in ::sr_xpath_ctx_t opaque for user.
- * It allows to continue in processing where the processing stopped or recover processed input.
- *
- * Similarly to strtok function in all subsequent calls that is supposed to work with the same
- * input xpath must be NULL.
- */
-
-/**
- * @brief State of xpath parsing. User must not modify nor rely on the content
- * of the structure.
- */
-typedef struct sr_xpath_ctx_s {
- char *begining; /**< Pointer to the begining of the processed string */
- char *current_node; /**< Pointer to the currently processed node, used as a context for key search */
- char *replaced_position; /**< Pointer to the posistion where the last terminating 0 by was written */
- char replaced_char; /**< Character that was overwritten by the last termination 0 */
-} sr_xpath_ctx_t;
-
-/**
- * @brief The function returns a pointer to the following node. If xpath is
- * not NULL returns the first node name, otherwise returns the subsequent node
- * according to the state.
- *
- * The state is modified upon function successful return from function, so the subsequent
- * calls can continue in processing or xpath can be recovered by calling ::sr_xpath_recover.
- *
- * @note It writes terminating zero at the and of the node name.
- *
- * @note Skips the namespace if it is present to get node name qualified by namespace use ::sr_xpath_next_node_with_ns
- *
- * @param [in] xpath - xpath to be processed, can be NULL
- * @param [in] state
- * @return Pointer to the node name, NULL if there are no more node names
- */
-char *sr_xpath_next_node(char *xpath, sr_xpath_ctx_t *state);
-
-/**
- * @brief Returns pointer to the last node.
- * @param [in] xpath
- * @param [in] state
- * @return Pointer to the last node
- */
-char *sr_xpath_last_node(char *xpath, sr_xpath_ctx_t *state);
-
-/**
- * @brief Same as ::sr_xpath_next_node with the difference that namespace is included in result if present in xpath
- *
- * @param [in] xpath - xpath to be processed, can be NULL if the user wants to continue in processing of previous input
- * @param [in] state
- * @return Pointer to the node name including namespace, NULL if there are no more node names
- */
-char *sr_xpath_next_node_with_ns(char *xpath, sr_xpath_ctx_t *state);
-
-/**
- * @brief Returns the name of the next key at the current level in processed xpath.
- *
- * @param [in] xpath
- * @param [in] state
- * @return Pointer to the key name, NULL if there are no more keys at the current level
- */
-char *sr_xpath_next_key_name(char *xpath, sr_xpath_ctx_t *state);
-
-/**
- * @brief Returns the value of the next key at the current level in processed xpath.
- *
- * @param [in] xpath
- * @param [in] state
- * @return Pointer to the key value, NULL if there are no more keys at the current level
- */
-char *sr_xpath_next_key_value(char *xpath, sr_xpath_ctx_t *state);
-
-/**
- * @brief Returns a pointer to the node specified by name. It searches from the beginning of the xpath, returns first match.
- * Can be used to jump at the desired node xpath and subsequent analysis of key values.
- *
- * @param [in] xpath
- * @param [in] node_name
- * @param [in] state
- * @return Pointer to the node, NULL if the node with the specified name is not found
- */
-char *sr_xpath_node(char *xpath, const char *node_name, sr_xpath_ctx_t *state);
-
-/**
- * @brief Similar to ::sr_xpath_node. The difference is that search start at current node
- * according to the state.
- *
- * @param [in] xpath
- * @param [in] node_name
- * @param [in] state
- * @return Pointer to the node, NULL if the node with the specified name is not found
- */
-char *sr_xpath_node_rel(char *xpath, const char *node_name, sr_xpath_ctx_t *state);
-
-/**
- * @brief Returns node specified by index starting at the begin of expression.
- * First node has index 0.
- *
- * @param [in] xpath
- * @param [in] index
- * @param [in] state
- * @return Pointer to the specified node, NULL if the index is out of bounds
- */
-char *sr_xpath_node_idx(char *xpath, size_t index, sr_xpath_ctx_t *state);
-
-/**
- * @brief Return node specified by index. Following node has index zero.
- *
- * @param [in] xpath
- * @param [in] index
- * @param [in] state
- * @return Pointer to the specified node, NULL if the index is out of bounds
- */
-char *sr_xpath_node_idx_rel(char *xpath, size_t index, sr_xpath_ctx_t *state);
-
-/**
- * @brief Looks up the value for the key at the current level in xpath.
- *
- * @param [in] xpath
- * @param [in] key - key name to be looked up
- * @param [in] state
- * @return Key value, NULL if the key with the specified name is not found
- */
-char *sr_xpath_node_key_value(char *xpath, const char *key, sr_xpath_ctx_t *state);
-
-/**
- * @brief Looks up the value for the key at the current level in xpath specified by index.
- * First key has index zero.
- *
- * @param [in] xpath
- * @param [in] index
- * @param [in] state
- * @return Key value, NULL if the index is out of bound
- */
-char *sr_xpath_node_key_value_idx(char *xpath, size_t index, sr_xpath_ctx_t *state);
-
-/**
- * @brief Looks up the value of the key in a node specified by name.
- *
- * @param [in] xpath
- * @param [in] node_name
- * @param [in] key_name
- * @param [in] state
- * @return Pointer to the key value, NULL if not found
- */
-char *sr_xpath_key_value(char *xpath, const char *node_name, const char *key_name, sr_xpath_ctx_t *state);
-
-/**
- * @brief Looks up the value of the key in a node specified by index. First node has index zero.
- *
- * @param [in] xpath
- * @param [in] node_index
- * @param [in] key_index
- * @param [in] state
- * @return Pointer to the key value, NULL if not found or index out of bound
- */
-char *sr_xpath_key_value_idx(char *xpath, size_t node_index, size_t key_index, sr_xpath_ctx_t *state);
-
-/**
- * @brief Returns pointer to the string after the last slash in xpath (node name).
- *
- * @note The returned string can also contain namespace and/or key values
- * if they were specified for the last node in xpath.
- *
- * @param [in] xpath
- * @return Result, NULL in case of the slash was not found
- */
-char *sr_xpath_node_name(const char *xpath);
-
-/**
- * @brief Compares string after the last slash in xpath (node name) with provided string.
- *
- * @note The returned string can also contain namespace and/or key values
- * if they were specified for the last node in xpath.
- *
- * @param [in] xpath
- * @param [in] node_str String to test for equality.
- * @return true in case that the Node names are equal, false otherwise
- */
-bool sr_xpath_node_name_eq(const char *xpath, const char *node_str);
-
-/**
- * @brief Recovers the xpath string to the original state (puts back the character
- * that was replaced by termination zero).
- *
- * @param [in] state
- */
-void sr_xpath_recover(sr_xpath_ctx_t *state);
-
-/**@} xpath_utils */
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* SYSREPO_XPATH_H_ */
-
diff --git a/test/mocks/pnfsimulator/netconfsimulator/pom.xml b/test/mocks/pnfsimulator/netconfsimulator/pom.xml
deleted file mode 100644
index d83c5b397..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/pom.xml
+++ /dev/null
@@ -1,276 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>org.onap.simulator</groupId>
- <artifactId>simulator-parent</artifactId>
- <version>5.0.0-SNAPSHOT</version>
- </parent>
-
- <artifactId>netconfsimulator</artifactId>
- <version>5.0.0-SNAPSHOT</version>
-
- <properties>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <maven.compiler.source>1.8</maven.compiler.source>
- <maven.compiler.target>1.8</maven.compiler.target>
- <maven.build.timestamp.format>yyyyMMdd'T'HHmmss</maven.build.timestamp.format>
- <docker.registry>nexus3.onap.org:10003</docker.registry>
- <docker.image.tag>latest</docker.image.tag>
- <docker.image.name>onap/${project.artifactId}</docker.image.name>
- <spring.boot.version>2.1.6.RELEASE</spring.boot.version>
- <spring.kafka.version>2.2.7.RELEASE</spring.kafka.version>
- <apache.httpclient.version>4.5.6</apache.httpclient.version>
- <dependency.directory.name>libs</dependency.directory.name>
- <dependency.directory.location>${project.build.directory}/${dependency.directory.name}
- </dependency.directory.location>
- <netopeer-saver-project-name>netopeer-change-saver</netopeer-saver-project-name>
- <netopeer-saver-source-dir>${project.basedir}/netopeer-change-saver-native</netopeer-saver-source-dir>
- <netopeer-saver-build-dir>${project.build.directory}/cmake</netopeer-saver-build-dir>
- <netopeer-saver-executable-dir>${netopeer-saver-build-dir}/bin</netopeer-saver-executable-dir>
- <skipITs>true</skipITs>
- </properties>
-
- <dependencies>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter</artifactId>
- <version>${spring.boot.version}</version>
- </dependency>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-web</artifactId>
- <version>${spring.boot.version}</version>
- </dependency>
- <dependency>
- <groupId>org.projectlombok</groupId>
- <artifactId>lombok</artifactId>
- <version>1.18.2</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.onosproject</groupId>
- <artifactId>jnc</artifactId>
- <version>1.0</version>
- </dependency>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-websocket</artifactId>
- <version>${spring.boot.version}</version>
- </dependency>
- <dependency>
- <groupId>javax.websocket</groupId>
- <artifactId>javax.websocket-api</artifactId>
- <version>1.1</version>
- </dependency>
-
- <!-- Kafka -->
-
- <dependency>
- <groupId>org.springframework.kafka</groupId>
- <artifactId>spring-kafka</artifactId>
- <version>${spring.kafka.version}</version>
- </dependency>
-
- <!-- TEST DEPENDENCIES -->
-
- <dependency>
- <groupId>org.assertj</groupId>
- <artifactId>assertj-core</artifactId>
- <version>3.9.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.mockito</groupId>
- <artifactId>mockito-core</artifactId>
- <version>2.18.3</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.junit.jupiter</groupId>
- <artifactId>junit-jupiter-engine</artifactId>
- <version>5.3.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.junit.vintage</groupId>
- <artifactId>junit-vintage-engine</artifactId>
- <version>5.3.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>4.12</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-test</artifactId>
- <version>${spring.boot.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.springframework.kafka</groupId>
- <artifactId>spring-kafka-test</artifactId>
- <version>${spring.kafka.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.bitbucket.radistao.test</groupId>
- <artifactId>before-after-spring-test-runner</artifactId>
- <version>0.1.0</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.palantir.docker.compose</groupId>
- <artifactId>docker-compose-rule-junit4</artifactId>
- <version>0.29.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.httpcomponents</groupId>
- <artifactId>httpclient</artifactId>
- <version>4.5.6</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.httpcomponents</groupId>
- <artifactId>httpmime</artifactId>
- <version>4.5.6</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.httpcomponents</groupId>
- <artifactId>httpclient</artifactId>
- <version>${apache.httpclient.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.httpcomponents</groupId>
- <artifactId>httpmime</artifactId>
- <version>${apache.httpclient.version}</version>
- </dependency>
-
- <dependency>
- <groupId>io.springfox</groupId>
- <artifactId>springfox-swagger2</artifactId>
- <version>2.9.2</version>
- </dependency>
- <dependency>
- <groupId>io.springfox</groupId>
- <artifactId>springfox-swagger-ui</artifactId>
- <version>2.9.2</version>
- </dependency>
-
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-dependency-plugin</artifactId>
- <configuration>
- <outputDirectory>${dependency.directory.location}</outputDirectory>
- <includeScope>runtime</includeScope>
- <silent>true</silent>
- </configuration>
- <executions>
- <execution>
- <id>copy-external-dependencies</id>
- <phase>package</phase>
- <goals>
- <goal>copy-dependencies</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <artifactId>maven-surefire-plugin</artifactId>
- <version>2.19</version>
- <dependencies>
- <dependency>
- <groupId>org.junit.platform</groupId>
- <artifactId>junit-platform-surefire-provider</artifactId>
- <version>1.1.1</version>
- </dependency>
- </dependencies>
- <configuration>
- <detail>true</detail>
- <printSummary>true</printSummary>
- <useSystemClassLoader>false</useSystemClassLoader>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-failsafe-plugin</artifactId>
- <version>2.19.1</version>
- <configuration>
- <skipITs>${skipITs}</skipITs>
- </configuration>
- <executions>
- <execution>
- <goals>
- <goal>integration-test</goal>
- <goal>verify</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>com.spotify</groupId>
- <artifactId>docker-maven-plugin</artifactId>
- <version>1.0.0</version>
- <configuration>
- <imageName>${docker.registry}/${docker.image.name}</imageName>
- <dockerDirectory>${project.basedir}/docker</dockerDirectory>
- <forceTags>true</forceTags>
- <registryUrl>${docker.registry}</registryUrl>
- <imageTags>
- <imageTag>latest</imageTag>
- </imageTags>
- <resources>
- <resource>
- <targetPath>${dependency.directory.name}</targetPath>
- <directory>${dependency.directory.location}</directory>
- </resource>
- <resource>
- <targetPath>/</targetPath>
- <directory>${project.build.directory}</directory>
- <include>${project.build.finalName}.jar</include>
- </resource>
- </resources>
- <forceTags>true</forceTags>
- </configuration>
- </plugin>
- </plugins>
- </build>
- <repositories>
- <repository>
- <id>Palantir</id>
- <url>https://dl.bintray.com/palantir/releases/</url>
- </repository>
- </repositories>
-</project>
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/Configuration.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/Configuration.java
deleted file mode 100644
index 92e5b2327..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/Configuration.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator;
-
-import org.apache.http.client.HttpClient;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.springframework.context.annotation.Bean;
-
-@org.springframework.context.annotation.Configuration
-public class Configuration {
-
- @Bean
- public HttpClient httpClient() {
- return HttpClientBuilder.create().build();
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/Main.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/Main.java
deleted file mode 100644
index e2a0ed0c0..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/Main.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.netconfsimulator;
-
-import org.springframework.boot.SpringApplication;
-import org.springframework.boot.autoconfigure.SpringBootApplication;
-
-@SpringBootApplication
-public class Main {
-
- public static void main(String[] args) {
- SpringApplication.run(Main.class, args);
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/SwaggerConfig.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/SwaggerConfig.java
deleted file mode 100644
index 2e9df997e..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/SwaggerConfig.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator;
-
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import springfox.documentation.builders.PathSelectors;
-import springfox.documentation.builders.RequestHandlerSelectors;
-import springfox.documentation.spi.DocumentationType;
-import springfox.documentation.spring.web.plugins.Docket;
-import springfox.documentation.swagger2.annotations.EnableSwagger2;
-
-@EnableSwagger2
-@Configuration
-class SwaggerConfig {
-
- @Bean
- Docket api() {
- return new Docket(DocumentationType.SWAGGER_2)
- .select()
- .apis(RequestHandlerSelectors.basePackage("org.onap.netconfsimulator"))
- .paths(PathSelectors.any())
- .build();
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/Config.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/Config.java
deleted file mode 100644
index 9ae564103..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/Config.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.kafka;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.kafka.clients.consumer.ConsumerConfig;
-import org.apache.kafka.common.serialization.StringDeserializer;
-import org.onap.netconfsimulator.kafka.listener.KafkaListenerHandler;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.kafka.annotation.EnableKafka;
-import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
-import org.springframework.kafka.core.ConsumerFactory;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
-
-@Configuration
-@EnableKafka
-class Config {
-
- @Value("${spring.kafka.bootstrap-servers}")
- private String bootstrapServer;
-
- @Value("${spring.kafka.consumer.auto-offset-reset}")
- private String offsetReset;
-
- @Bean
- ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory(ConsumerFactory<String, String> consumerFactory) {
- ConcurrentKafkaListenerContainerFactory<String, String> containerFactory = new ConcurrentKafkaListenerContainerFactory<>();
- containerFactory.setConsumerFactory(consumerFactory);
- return containerFactory;
- }
-
- @Bean
- ConsumerFactory<String, String> consumerFactory() {
- Map<String, Object> props = new HashMap<>();
- props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
- props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
- props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
- props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, offsetReset);
- return new DefaultKafkaConsumerFactory<>(props);
- }
-
-
- @Bean
- KafkaListenerHandler kafkaListenerHandler(ConsumerFactory<String, String> consumerFactory) {
- return new KafkaListenerHandler(consumerFactory);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/MessageDTO.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/MessageDTO.java
deleted file mode 100644
index 4311cd61f..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/MessageDTO.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.kafka;
-
-import lombok.AllArgsConstructor;
-import lombok.Getter;
-
-@Getter
-@AllArgsConstructor
-class MessageDTO {
- private long timestamp;
- private String configuration;
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/StoreController.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/StoreController.java
deleted file mode 100644
index 33bbdf7cf..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/StoreController.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.kafka;
-
-import java.util.List;
-
-import lombok.extern.slf4j.Slf4j;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.web.bind.annotation.GetMapping;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RequestParam;
-import org.springframework.web.bind.annotation.RestController;
-
-@RestController
-@Slf4j
-@RequestMapping("/store")
-public class StoreController {
-
- private StoreService service;
-
- @Autowired
- public StoreController(StoreService service) {
- this.service = service;
- }
-
- @GetMapping("/ping")
- String ping() {
- return "pong";
- }
-
- @GetMapping("cm-history")
- List<MessageDTO> getAllConfigurationChanges() {
- return service.getAllMessages();
- }
-
- @GetMapping("/less")
- List<MessageDTO> less(@RequestParam(value = "offset", required = false, defaultValue = "${spring.kafka.default-offset}") long offset) {
- return service.getLastMessages(offset);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/StoreService.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/StoreService.java
deleted file mode 100644
index 5fddff5a2..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/StoreService.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.kafka;
-
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.common.TopicPartition;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.kafka.core.ConsumerFactory;
-import org.springframework.stereotype.Service;
-
-import java.time.Instant;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-@Slf4j
-@Service
-public class StoreService {
-
- private static final String CONFIG_TOPIC = "config";
- private static final long CONSUMING_DURATION_IN_MS = 1000;
-
- private ConsumerFactory<String, String> consumerFactory;
- static final List<String> TOPICS_TO_SUBSCRIBE = Collections.singletonList(CONFIG_TOPIC);
-
- @Autowired
- StoreService(ConsumerFactory<String, String> consumerFactory) {
- this.consumerFactory = consumerFactory;
- }
-
- List<MessageDTO> getAllMessages() {
- List<MessageDTO> messages = new ArrayList<>();
- String clientID = Long.toString(Instant.now().getEpochSecond());
- try (Consumer<String, String> consumer = consumerFactory.createConsumer(clientID, clientID)) {
- consumer.subscribe(TOPICS_TO_SUBSCRIBE);
- ConsumerRecords<String, String> consumerRecords = consumer.poll(CONSUMING_DURATION_IN_MS);
- consumerRecords.forEach(
- consumerRecord ->
- messages.add(new MessageDTO(consumerRecord.timestamp(), consumerRecord.value())));
- log.debug(String.format("consumed %d messages", consumerRecords.count()));
- }
- return messages;
- }
-
- List<MessageDTO> getLastMessages(long offset) {
- List<MessageDTO> messages = new ArrayList<>();
- try (Consumer<String, String> consumer = createConsumer(offset)) {
- ConsumerRecords<String, String> consumerRecords = consumer.poll(CONSUMING_DURATION_IN_MS);
- consumerRecords.forEach(consumerRecord ->
- messages.add(new MessageDTO(consumerRecord.timestamp(), consumerRecord.value())));
- }
- return messages;
- }
-
- private Consumer<String, String> createConsumer(long offsetFromLastIndex) {
- String clientID = Long.toString(Instant.now().getEpochSecond());
- Consumer<String, String> consumer = consumerFactory.createConsumer(clientID, clientID);
- consumer.subscribe(TOPICS_TO_SUBSCRIBE);
- seekConsumerTo(consumer, offsetFromLastIndex);
- return consumer;
- }
-
- private void seekConsumerTo(Consumer<String, String> consumer, long offsetFromLastIndex) {
- consumer.seekToEnd(consumer.assignment());
- consumer.poll(CONSUMING_DURATION_IN_MS);
- TopicPartition topicPartition = consumer.assignment().iterator().next();
- long topicCurrentSize = consumer.position(topicPartition);
- long indexToSeek = offsetFromLastIndex > topicCurrentSize ? 0 : topicCurrentSize - offsetFromLastIndex;
- consumer.seek(topicPartition, indexToSeek);
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/listener/KafkaListenerEntry.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/listener/KafkaListenerEntry.java
deleted file mode 100644
index e3c04c9fc..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/listener/KafkaListenerEntry.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.kafka.listener;
-
-import lombok.Getter;
-import org.springframework.kafka.listener.AbstractMessageListenerContainer;
-
-@Getter
-public class KafkaListenerEntry {
-
- private String clientId;
- private AbstractMessageListenerContainer listenerContainer;
-
- public KafkaListenerEntry(String clientId, AbstractMessageListenerContainer listenerContainer) {
- this.clientId = clientId;
- this.listenerContainer = listenerContainer;
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/listener/KafkaListenerHandler.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/listener/KafkaListenerHandler.java
deleted file mode 100644
index 604315d5f..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/listener/KafkaListenerHandler.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.kafka.listener;
-
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.kafka.core.ConsumerFactory;
-
-import org.springframework.kafka.listener.ContainerProperties;
-import org.springframework.kafka.listener.KafkaMessageListenerContainer;
-import org.springframework.kafka.listener.MessageListener;
-
-
-import org.springframework.kafka.support.TopicPartitionInitialOffset;
-
-import java.time.Instant;
-
-public class KafkaListenerHandler {
-
- private static final int PARTITION = 0;
- private static final long NUMBER_OF_HISTORICAL_MESSAGES_TO_SHOW = -10L;
- private static final boolean RELATIVE_TO_CURRENT = false;
- private ConsumerFactory<String, String> consumerFactory;
-
-
- @Autowired
- public KafkaListenerHandler(ConsumerFactory<String, String> consumerFactory) {
- this.consumerFactory = consumerFactory;
- }
-
-
- public KafkaListenerEntry createKafkaListener(MessageListener messageListener, String topicName) {
- String clientId = Long.toString(Instant.now().getEpochSecond());
- ContainerProperties containerProperties = new ContainerProperties(topicName);
- containerProperties.setGroupId(clientId);
- KafkaMessageListenerContainer<String, String> listenerContainer = createListenerContainer(containerProperties,
- topicName);
-
- listenerContainer.setupMessageListener(messageListener);
- return new KafkaListenerEntry(clientId, listenerContainer);
- }
-
-
- KafkaMessageListenerContainer<String, String> createListenerContainer(ContainerProperties containerProperties,
- String topicName) {
- TopicPartitionInitialOffset config = new TopicPartitionInitialOffset(topicName, PARTITION,
- NUMBER_OF_HISTORICAL_MESSAGES_TO_SHOW, RELATIVE_TO_CURRENT);
- return new KafkaMessageListenerContainer<>(consumerFactory, containerProperties, config);
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/model/KafkaMessage.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/model/KafkaMessage.java
deleted file mode 100644
index 90f283acf..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/kafka/model/KafkaMessage.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.kafka.model;
-
-import lombok.Getter;
-
-@Getter
-public class KafkaMessage {
- private long timestamp;
- private String configuration;
-
- public KafkaMessage(long timestamp, String configuration) {
- this.timestamp = timestamp;
- this.configuration = configuration;
- }
-
- KafkaMessage() {
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/NetconfController.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/NetconfController.java
deleted file mode 100644
index cdb4a8f97..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/NetconfController.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * NETCONF-CONTROLLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.netconfcore;
-
-import com.tailf.jnc.JNCException;
-
-import java.io.IOException;
-
-import lombok.extern.slf4j.Slf4j;
-import org.onap.netconfsimulator.netconfcore.configuration.NetconfConfigurationService;
-import org.onap.netconfsimulator.netconfcore.model.LoadModelResponse;
-import org.onap.netconfsimulator.netconfcore.model.NetconfModelLoaderService;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
-import org.springframework.web.bind.annotation.DeleteMapping;
-import org.springframework.web.bind.annotation.GetMapping;
-import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.PostMapping;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RequestPart;
-import org.springframework.web.bind.annotation.ResponseStatus;
-import org.springframework.web.bind.annotation.RestController;
-import org.springframework.web.multipart.MultipartFile;
-
-@Slf4j
-@RestController
-@RequestMapping("netconf")
-class NetconfController {
-
- private final NetconfConfigurationService netconfService;
- private final NetconfModelLoaderService netconfModelLoaderService;
-
- @Autowired
- NetconfController(NetconfConfigurationService netconfService,
- NetconfModelLoaderService netconfModelLoaderService) {
- this.netconfService = netconfService;
- this.netconfModelLoaderService = netconfModelLoaderService;
- }
-
- @GetMapping(value = "get", produces = "application/xml")
- ResponseEntity<String> getNetconfConfiguration() throws IOException, JNCException {
- return ResponseEntity.ok(netconfService.getCurrentConfiguration());
- }
-
- @GetMapping(value = "get/{model}/{container}", produces = "application/xml")
- ResponseEntity<String> getNetconfConfiguration(@PathVariable String model,
- @PathVariable String container)
- throws IOException {
- ResponseEntity<String> entity;
- try {
- entity = ResponseEntity.ok(netconfService.getCurrentConfiguration(model, container));
- } catch (JNCException exception) {
- log.error("Get configuration for model {} and container {} failed.", model, container,
- exception);
- entity = ResponseEntity.badRequest().body(exception.toString());
- }
- return entity;
- }
-
- @PostMapping(value = "edit-config", produces = "application/xml")
- @ResponseStatus(HttpStatus.ACCEPTED)
- ResponseEntity<String> editConfig(@RequestPart("editConfigXml") MultipartFile editConfig)
- throws IOException, JNCException {
- log.info("Loading updated configuration");
- if (editConfig == null || editConfig.isEmpty()) {
- throw new IllegalArgumentException("No XML file with proper name: editConfigXml found.");
- }
- return ResponseEntity
- .status(HttpStatus.ACCEPTED)
- .body(netconfService.editCurrentConfiguration(editConfig));
- }
-
- @PostMapping("model/{moduleName}")
- ResponseEntity<String> loadNewYangModel(@RequestBody MultipartFile yangModel,
- @RequestBody MultipartFile initialConfig, @PathVariable String moduleName)
- throws IOException {
- LoadModelResponse response = netconfModelLoaderService.loadYangModel(yangModel, initialConfig, moduleName);
- return ResponseEntity
- .status(response.getStatusCode())
- .body(response.getMessage());
- }
-
- @DeleteMapping("model/{modelName}")
- ResponseEntity<String> deleteYangModel(@PathVariable String modelName)
- throws IOException {
- LoadModelResponse response = netconfModelLoaderService.deleteYangModel(modelName);
- return ResponseEntity
- .status(response.getStatusCode())
- .body(response.getMessage());
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfBeanConfiguration.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfBeanConfiguration.java
deleted file mode 100644
index d90c60d58..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfBeanConfiguration.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.netconfcore.configuration;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-@Configuration
-class NetconfBeanConfiguration {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(NetconfBeanConfiguration.class);
-
- @Value("${netconf.port}")
- private Integer netconfPort;
-
- @Value("${netconf.address}")
- private String netconfAddress;
-
- @Value("${netconf.user}")
- private String netconfUser;
-
- @Value("${netconf.password}")
- private String netconfPassword;
-
- @Bean
- NetconfConfigurationReader configurationReader() {
- NetconfConnectionParams params = new NetconfConnectionParams(netconfAddress, netconfPort, netconfUser, netconfPassword);
- LOGGER.info("Configuration params are : {}", params);
- return new NetconfConfigurationReader(params, new NetconfSessionHelper());
- }
-
- @Bean
- NetconfConfigurationEditor configurationEditor() {
- NetconfConnectionParams params =
- new NetconfConnectionParams(netconfAddress, netconfPort, netconfUser, netconfPassword);
- return new NetconfConfigurationEditor(params, new NetconfSessionHelper());
- }
-
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationEditor.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationEditor.java
deleted file mode 100644
index 992c88d5a..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationEditor.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.netconfcore.configuration;
-
-import com.tailf.jnc.Element;
-import com.tailf.jnc.JNCException;
-import com.tailf.jnc.NetconfSession;
-import lombok.extern.slf4j.Slf4j;
-
-import java.io.IOException;
-
-@Slf4j
-public class NetconfConfigurationEditor {
-
- private NetconfConnectionParams params;
- private NetconfSessionHelper netconfSessionHelper;
-
- public NetconfConfigurationEditor(NetconfConnectionParams params, NetconfSessionHelper netconfSessionHelper) {
- this.params = params;
- this.netconfSessionHelper = netconfSessionHelper;
- }
-
- void editConfig(Element configurationXmlElement) throws JNCException, IOException {
- log.debug("New configuration passed to simulator: {}", configurationXmlElement.toXMLString());
- NetconfSession session = netconfSessionHelper.createNetconfSession(params);
- session.editConfig(configurationXmlElement);
- session.closeSession();
-
- log.info("Successfully updated configuration");
- }
-
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationReader.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationReader.java
deleted file mode 100644
index 10fe40e2f..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationReader.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.netconfcore.configuration;
-
-import com.tailf.jnc.JNCException;
-import com.tailf.jnc.NetconfSession;
-import com.tailf.jnc.NodeSet;
-import java.io.IOException;
-import java.util.Objects;
-
-class NetconfConfigurationReader {
-
- private NetconfConnectionParams params;
- private NetconfSessionHelper netconfSessionHelper;
-
- NetconfConfigurationReader(NetconfConnectionParams params, NetconfSessionHelper netconfSessionHelper) {
- this.params = params;
- this.netconfSessionHelper = netconfSessionHelper;
- }
-
- String getRunningConfig() throws IOException, JNCException {
- NetconfSession session = netconfSessionHelper.createNetconfSession(params);
- String config = session.getConfig().toXMLString();
- session.closeSession();
- return config;
- }
-
- String getRunningConfig(String modelPath) throws IOException, JNCException {
- NetconfSession session = netconfSessionHelper.createNetconfSession(params);
- NodeSet config = session.getConfig(modelPath);
- if (Objects.isNull(config) || Objects.isNull(config.first())) {
- throw new JNCException(JNCException.ELEMENT_MISSING, modelPath);
- }
- session.closeSession();
- return config.first().toXMLString();
- }
-
-
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationService.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationService.java
deleted file mode 100644
index 248aec46a..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationService.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.netconfcore.configuration;
-
-import com.tailf.jnc.Element;
-import com.tailf.jnc.JNCException;
-import com.tailf.jnc.XMLParser;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-import org.springframework.web.multipart.MultipartFile;
-import org.xml.sax.InputSource;
-
-@Service
-public class NetconfConfigurationService {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(NetconfConfigurationService.class);
- private static final String CONFIGURATION_HAS_BEEN_ACTIVATED = "New configuration has been activated";
-
- private final NetconfConfigurationReader netconfConfigurationReader;
- private NetconfConfigurationEditor configurationEditor;
- private XMLParser parser;
-
- @Autowired
- public NetconfConfigurationService(NetconfConfigurationReader netconfConfigurationReader,
- NetconfConfigurationEditor netconfConfigurationEditor) throws JNCException {
- this.netconfConfigurationReader = netconfConfigurationReader;
- this.configurationEditor = netconfConfigurationEditor;
- this.parser = new XMLParser();
- }
-
- public String getCurrentConfiguration() throws IOException, JNCException {
- return netconfConfigurationReader.getRunningConfig();
- }
-
- public String getCurrentConfiguration(String model, String container) throws IOException, JNCException {
- String path = String.format("/%s:%s", model, container);
- return netconfConfigurationReader.getRunningConfig(path);
- }
-
- public String editCurrentConfiguration(MultipartFile newConfiguration) throws IOException, JNCException {
- Element configurationElement = convertMultipartToXmlElement(newConfiguration);
- configurationEditor.editConfig(configurationElement);
-
- LOGGER.debug("Loading new configuration: \n{}", configurationElement.toXMLString());
- return CONFIGURATION_HAS_BEEN_ACTIVATED;
- }
-
- private Element convertMultipartToXmlElement(MultipartFile editConfig) throws IOException, JNCException {
- InputSource inputSourceUpdateConfig = new InputSource(new ByteArrayInputStream(editConfig.getBytes()));
- return parser.parse(inputSourceUpdateConfig);
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationTO.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationTO.java
deleted file mode 100644
index e43ff690e..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationTO.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.netconfcore.configuration;
-
-import lombok.AllArgsConstructor;
-import lombok.Getter;
-
-@Getter
-@AllArgsConstructor
-public class NetconfConfigurationTO {
-
- private String configuration;
-
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConnectionParams.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConnectionParams.java
deleted file mode 100644
index ace0ee04c..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConnectionParams.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.netconfcore.configuration;
-
-import lombok.AllArgsConstructor;
-import lombok.Getter;
-import lombok.ToString;
-
-@AllArgsConstructor
-@ToString
-@Getter
-class NetconfConnectionParams {
-
- private final String address;
- private final int port;
- private final String user;
- private final String password;
-
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfSessionHelper.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfSessionHelper.java
deleted file mode 100644
index 69fda7d63..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfSessionHelper.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.netconfcore.configuration;
-
-import com.tailf.jnc.JNCException;
-import com.tailf.jnc.NetconfSession;
-import com.tailf.jnc.SSHConnection;
-import com.tailf.jnc.SSHSession;
-import java.io.IOException;
-
-class NetconfSessionHelper {
-
- NetconfSession createNetconfSession(NetconfConnectionParams params) throws IOException, JNCException {
- SSHConnection sshConnection = new SSHConnection(params.getAddress(), params.getPort());
- sshConnection.authenticateWithPassword(params.getUser(), params.getPassword());
- return new NetconfSession(new SSHSession(sshConnection));
- }
-
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/model/NetconfModelLoaderService.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/model/NetconfModelLoaderService.java
deleted file mode 100644
index 7e0739579..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/netconfcore/model/NetconfModelLoaderService.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.netconfcore.model;
-
-import java.io.IOException;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.methods.HttpDelete;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.mime.MultipartEntityBuilder;
-import org.apache.http.util.EntityUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.http.HttpStatus;
-import org.springframework.stereotype.Service;
-import org.springframework.web.multipart.MultipartFile;
-
-@Service
-public class NetconfModelLoaderService {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(NetconfModelLoaderService.class);
-
- @Value("${netconf.address}")
- private String netconfIp;
-
- @Value("${netconf.model-loader.port}")
- private String modelLoaderPort;
-
- private final HttpClient httpClient;
-
- @Autowired
- public NetconfModelLoaderService(HttpClient httpClient) {
- this.httpClient = httpClient;
- }
-
- public LoadModelResponse deleteYangModel(String yangModelName) throws IOException {
- String uri = getDeleteAddress(yangModelName);
- HttpDelete httpDelete = new HttpDelete(uri);
- HttpResponse httpResponse = httpClient.execute(httpDelete);
- return parseResponse(httpResponse);
- }
-
- public LoadModelResponse loadYangModel(MultipartFile yangModel, MultipartFile initialConfig, String moduleName)
- throws IOException {
- HttpPost httpPost = new HttpPost(getBackendAddress());
- HttpEntity httpEntity = MultipartEntityBuilder.create()
- .addBinaryBody("yangModel", yangModel.getInputStream(), ContentType.MULTIPART_FORM_DATA,
- yangModel.getOriginalFilename())
- .addBinaryBody("initialConfig", initialConfig.getInputStream(), ContentType.MULTIPART_FORM_DATA,
- initialConfig.getOriginalFilename())
- .addTextBody("moduleName", moduleName)
- .build();
- httpPost.setEntity(httpEntity);
- HttpResponse response = httpClient.execute(httpPost);
- return parseResponse(response);
- }
-
- String getBackendAddress() {
- return String.format("http://%s:%s/model", netconfIp, modelLoaderPort);
- }
-
- String getDeleteAddress(String yangModelName) {
- return String.format("%s?yangModelName=%s", getBackendAddress(), yangModelName);
- }
-
-
- private LoadModelResponse parseResponse(HttpResponse response) throws IOException {
- int statusCode = response.getStatusLine().getStatusCode();
- String responseBody = EntityUtils.toString(response.getEntity());
-
- logResponse(statusCode, responseBody);
- return new LoadModelResponse(statusCode, responseBody);
- }
-
- private void logResponse(int statusCode, String responseBody) {
- if (statusCode >= HttpStatus.BAD_REQUEST.value()) {
- LOGGER.error(responseBody);
- } else {
- LOGGER.info(responseBody);
- }
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/websocket/EndpointConfig.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/websocket/EndpointConfig.java
deleted file mode 100644
index 4eaa85010..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/websocket/EndpointConfig.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.websocket;
-
-import java.util.Collections;
-import org.onap.netconfsimulator.websocket.message.NetconfMessageEncoder;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.web.socket.server.standard.ServerEndpointExporter;
-import org.springframework.web.socket.server.standard.ServerEndpointRegistration;
-
-@Configuration
-class EndpointConfig {
-
- @Bean
- ServerEndpointRegistration endpointRegistration() {
- ServerEndpointRegistration serverEndpointRegistration = new ServerEndpointRegistration("/netconf",
- NetconfEndpoint.class);
- serverEndpointRegistration.setEncoders(Collections.singletonList(NetconfMessageEncoder.class));
- return serverEndpointRegistration;
- }
-
- @Bean
- ServerEndpointExporter endpointExporter() {
- return new ServerEndpointExporter();
- }
-}
-
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/websocket/NetconfEndpoint.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/websocket/NetconfEndpoint.java
deleted file mode 100644
index 5870ee1e4..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/websocket/NetconfEndpoint.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.websocket;
-
-
-import java.util.Optional;
-import javax.websocket.CloseReason;
-import javax.websocket.Endpoint;
-import javax.websocket.EndpointConfig;
-import javax.websocket.RemoteEndpoint;
-import javax.websocket.Session;
-
-import org.onap.netconfsimulator.kafka.listener.KafkaListenerEntry;
-import org.onap.netconfsimulator.kafka.listener.KafkaListenerHandler;
-import org.onap.netconfsimulator.websocket.message.NetconfMessageListener;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.kafka.listener.AbstractMessageListenerContainer;
-import org.springframework.kafka.listener.MessageListener;
-import org.springframework.stereotype.Component;
-
-//instance of this class is created every each websocket request
-@Component
-class NetconfEndpoint extends Endpoint {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(NetconfEndpoint.class);
- private static final String TOPIC_NAME = "config";
-
- private KafkaListenerHandler kafkaListenerHandler;
-
- public Optional<KafkaListenerEntry> getEntry() {
- return entry;
- }
-
- public void setEntry(Optional<KafkaListenerEntry> entry) {
- this.entry = entry;
- }
-
- private Optional<KafkaListenerEntry> entry = Optional.empty();
-
-
- @Autowired
- NetconfEndpoint(KafkaListenerHandler listenerHandler) {
- this.kafkaListenerHandler = listenerHandler;
- }
-
- @Override
- public void onOpen(Session session, EndpointConfig endpointConfig) {
- RemoteEndpoint.Basic basicRemote = session.getBasicRemote();
-
- addKafkaListener(basicRemote);
- entry.ifPresent(x -> LOGGER.info("Session with client: {} established", x.getClientId()));
- }
-
- @Override
- public void onError(Session session, Throwable throwable) {
- LOGGER.error("Unexpected error occurred", throwable);
- }
-
- @Override
- public void onClose(Session session, CloseReason closeReason) {
- entry.ifPresent(x -> x.getListenerContainer().stop());
- entry.ifPresent(x -> LOGGER.info("Closing connection for client: {}", x.getClientId()));
- }
-
-
- private void addKafkaListener(RemoteEndpoint.Basic remoteEndpoint) {
- MessageListener messageListener = new NetconfMessageListener(remoteEndpoint);
-
- KafkaListenerEntry kafkaListener = kafkaListenerHandler.createKafkaListener(messageListener, TOPIC_NAME);
-
- AbstractMessageListenerContainer listenerContainer = kafkaListener.getListenerContainer();
- listenerContainer.start();
- entry = Optional.of(kafkaListener);
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/websocket/message/NetconfMessageEncoder.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/websocket/message/NetconfMessageEncoder.java
deleted file mode 100644
index 349b7e2d9..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/websocket/message/NetconfMessageEncoder.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.websocket.message;
-
-import org.onap.netconfsimulator.kafka.model.KafkaMessage;
-import org.springframework.web.socket.adapter.standard.ConvertingEncoderDecoderSupport;
-
-public class NetconfMessageEncoder extends ConvertingEncoderDecoderSupport.TextEncoder<KafkaMessage> {
-
- private static final String MESSAGE_FORMAT = "%s: %s";
-
- @Override
- public String encode(KafkaMessage netconfMessage) {
- return String.format(MESSAGE_FORMAT, netconfMessage.getTimestamp(), netconfMessage.getConfiguration());
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/websocket/message/NetconfMessageListener.java b/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/websocket/message/NetconfMessageListener.java
deleted file mode 100644
index 61610dea0..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/java/org/onap/netconfsimulator/websocket/message/NetconfMessageListener.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.websocket.message;
-
-import java.io.IOException;
-import javax.websocket.EncodeException;
-import javax.websocket.RemoteEndpoint;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.onap.netconfsimulator.kafka.model.KafkaMessage;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.kafka.listener.MessageListener;
-
-public class NetconfMessageListener implements MessageListener<String, String> {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(NetconfMessageListener.class);
- private RemoteEndpoint.Basic remoteEndpoint;
-
- public NetconfMessageListener(RemoteEndpoint.Basic remoteEndpoint) {
- this.remoteEndpoint = remoteEndpoint;
- }
-
- @Override
- public void onMessage(ConsumerRecord<String, String> message) {
- LOGGER.debug("Attempting to send message to {}", remoteEndpoint);
- try {
- remoteEndpoint
- .sendObject(new KafkaMessage(message.timestamp(), message.value()));
- } catch (IOException | EncodeException exception) {
- LOGGER.error("Error during sending message to remote", exception);
- }
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/main/resources/application.properties b/test/mocks/pnfsimulator/netconfsimulator/src/main/resources/application.properties
deleted file mode 100644
index 3947cf358..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/main/resources/application.properties
+++ /dev/null
@@ -1,8 +0,0 @@
-netconf.port=830
-netconf.address=netopeer
-netconf.user=netconf
-netconf.password=netconf
-netconf.model-loader.port=5002
-spring.kafka.bootstrap-servers=kafka1:9092
-spring.kafka.default-offset=100
-spring.kafka.consumer.auto-offset-reset=earliest
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/integration/NetconfFunctionsIT.java b/test/mocks/pnfsimulator/netconfsimulator/src/test/java/integration/NetconfFunctionsIT.java
deleted file mode 100644
index 95ef58696..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/integration/NetconfFunctionsIT.java
+++ /dev/null
@@ -1,211 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package integration;
-
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.palantir.docker.compose.connection.DockerMachine;
-import com.palantir.docker.compose.connection.waiting.HealthChecks;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.methods.CloseableHttpResponse;
-import org.bitbucket.radistao.test.annotation.BeforeAllMethods;
-import org.bitbucket.radistao.test.runner.BeforeAfterSpringTestRunner;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.rules.TestRule;
-import org.junit.runner.RunWith;
-import com.palantir.docker.compose.DockerComposeRule;
-import org.onap.netconfsimulator.kafka.model.KafkaMessage;
-import org.springframework.http.HttpStatus;
-
-import java.io.IOException;
-
-import java.time.Duration;
-import java.time.Instant;
-import java.util.List;
-import java.util.Set;
-import java.util.stream.Collectors;
-
-import static junit.framework.TestCase.fail;
-import static org.assertj.core.api.Assertions.assertThat;
-
-@RunWith(BeforeAfterSpringTestRunner.class)
-public class NetconfFunctionsIT {
-
- private static NetconfSimulatorClient client;
- private static ObjectMapper objectMapper;
-
- private static final DockerMachine dockerMachine = DockerMachine
- .localMachine()
- .build();
-
- private static DockerComposeRule docker = DockerComposeRule.builder()
- .file("docker-compose.yml")
- .machine(dockerMachine)
- .removeConflictingContainersOnStartup(true)
- .waitingForService("sftp-server", HealthChecks.toHaveAllPortsOpen())
- .waitingForService("ftpes-server", HealthChecks.toHaveAllPortsOpen())
- .waitingForService("zookeeper", HealthChecks.toHaveAllPortsOpen())
- .waitingForService("netopeer", HealthChecks.toHaveAllPortsOpen())
- .waitingForService("kafka1", HealthChecks.toHaveAllPortsOpen())
- .waitingForService("netconf-simulator", HealthChecks.toHaveAllPortsOpen())
- .build();
-
- @ClassRule
- public static TestRule exposePortMappings = docker;
-
- @BeforeClass
- public static void setUpClass() {
- objectMapper = new ObjectMapper();
- client = new NetconfSimulatorClient(String.format("http://%s:%d", docker.containers().ip(), 9000));
- }
-
- @BeforeAllMethods
- public void setupBeforeAll() throws InterruptedException {
- if (client.isServiceAvailable(Instant.now(), Duration.ofSeconds(45))) {
- Thread.sleep(60000);
- return;
- }
- fail("Application failed to start within established timeout: 45 seconds. Exiting.");
- }
-
- @Before
- public void setUp() {
- client.reinitializeClient();
- }
-
- @After
- public void tearDown() throws Exception {
- client.releaseClient();
- }
-
- @Test
- public void testShouldLoadModelEditConfigurationAndDeleteModule() throws IOException {
- // do load
- try (CloseableHttpResponse response = client
- .loadModel("newyangmodel", "newYangModel.yang", "initialConfig.xml")) {
- assertResponseStatusCode(response, HttpStatus.OK);
- String original = client.getResponseContentAsString(response);
- assertThat(original).isEqualTo("\"Successfully started\"\n");
- }
- // do edit-config
- try (CloseableHttpResponse updateResponse = client.updateConfig()) {
- String afterUpdateConfigContent = client.getResponseContentAsString(updateResponse);
- assertResponseStatusCode(updateResponse, HttpStatus.ACCEPTED);
- assertThat(afterUpdateConfigContent).isEqualTo("New configuration has been activated");
- }
- // do delete
- try (CloseableHttpResponse deleteResponse = client.deleteModel("newyangmodel")) {
- assertResponseStatusCode(deleteResponse, HttpStatus.OK);
- String original = client.getResponseContentAsString(deleteResponse);
- assertThat(original).isEqualTo("\"Successfully deleted\"\n");
- }
- }
-
- @Test
- public void testShouldGetCurrentConfigurationAndEditItSuccessfully() throws IOException {
- try (CloseableHttpResponse updateResponse = client.updateConfig();
- CloseableHttpResponse newCurrentConfigResponse = client.getCurrentConfig()) {
- String afterUpdateConfigContent = client.getResponseContentAsString(updateResponse);
-
- assertResponseStatusCode(updateResponse, HttpStatus.ACCEPTED);
- assertResponseStatusCode(newCurrentConfigResponse, HttpStatus.OK);
-
- assertThat(afterUpdateConfigContent).isEqualTo("New configuration has been activated");
- }
- }
-
- @Test
- public void testShouldPersistConfigChangesAndGetAllWhenRequested() throws IOException {
- client.updateConfig();
-
- try (CloseableHttpResponse newAllConfigChangesResponse = client.getAllConfigChanges()) {
- String newAllConfigChangesString = client.getResponseContentAsString(newAllConfigChangesResponse);
- assertResponseStatusCode(newAllConfigChangesResponse, HttpStatus.OK);
-
- List<KafkaMessage> kafkaMessages = objectMapper
- .readValue(newAllConfigChangesString, new TypeReference<List<KafkaMessage>>() {
- });
-
- assertThat(kafkaMessages.size()).isGreaterThanOrEqualTo(1);
- Set<String> configChangeContent = kafkaMessages.stream().map(KafkaMessage::getConfiguration)
- .collect(Collectors.toSet());
- assertThat(configChangeContent)
- .anyMatch(el -> el.contains("new value: /pnf-simulator:config/itemValue1 = 100"));
- assertThat(configChangeContent)
- .anyMatch(el -> el.contains("new value: /pnf-simulator:config/itemValue2 = 200"));
- }
- }
-
- @Test
- public void testShouldGetLastMessage() throws IOException {
- client.updateConfig();
-
- try (CloseableHttpResponse lastConfigChangesResponse = client.getLastConfigChanges(2)) {
- String newAllConfigChangesString = client.getResponseContentAsString(lastConfigChangesResponse);
- List<KafkaMessage> kafkaMessages = objectMapper
- .readValue(newAllConfigChangesString, new TypeReference<List<KafkaMessage>>() {
- });
-
- assertThat(kafkaMessages).hasSize(2);
- assertThat(kafkaMessages.get(0).getConfiguration())
- .contains("new value: /pnf-simulator:config/itemValue1 = 100");
- assertThat(kafkaMessages.get(1).getConfiguration())
- .contains("new value: /pnf-simulator:config/itemValue2 = 200");
- }
- }
-
- @Test
- public void testShouldLoadNewYangModelAndReconfigure() throws IOException {
- try (CloseableHttpResponse response = client
- .loadModel("newyangmodel", "newYangModel.yang", "initialConfig.xml")) {
- assertResponseStatusCode(response, HttpStatus.OK);
-
- String original = client.getResponseContentAsString(response);
-
- assertThat(original).isEqualTo("\"Successfully started\"\n");
- }
- }
-
- @Test
- public void shouldGetLoadedModelByName() throws IOException {
- testShouldLoadNewYangModelAndReconfigure();
-
- try (CloseableHttpResponse response = client.getConfigByModelAndContainerNames("newyangmodel", "config2")) {
- assertResponseStatusCode(response, HttpStatus.OK);
- String config = client.getResponseContentAsString(response);
-
- assertThat(config).isEqualTo(
- "<config2 xmlns=\"http://onap.org/newyangmodel\" xmlns:nc=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\n"
- + " <item1>100</item1>\n"
- + "</config2>\n");
- }
-
- }
-
- private void assertResponseStatusCode(HttpResponse response, HttpStatus expectedStatus) {
- assertThat(response.getStatusLine().getStatusCode()).isEqualTo(expectedStatus.value());
- }
-
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/integration/NetconfSimulatorClient.java b/test/mocks/pnfsimulator/netconfsimulator/src/test/java/integration/NetconfSimulatorClient.java
deleted file mode 100644
index 61f2ef1d8..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/integration/NetconfSimulatorClient.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package integration;
-
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.methods.CloseableHttpResponse;
-import org.apache.http.client.methods.HttpDelete;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.entity.mime.MultipartEntityBuilder;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
-import org.apache.http.util.EntityUtils;
-import org.junit.platform.commons.logging.Logger;
-import org.junit.platform.commons.logging.LoggerFactory;
-import org.springframework.util.ResourceUtils;
-
-import java.io.IOException;
-import java.time.Duration;
-import java.time.Instant;
-
-class NetconfSimulatorClient {
-
- private CloseableHttpClient netconfClient;
- private String simulatorBaseUrl;
- private static final Logger LOG = LoggerFactory.getLogger(NetconfSimulatorClient.class);
-
- NetconfSimulatorClient(String simulatorBaseUrl) {
- this.netconfClient = HttpClients.createDefault();
- this.simulatorBaseUrl = simulatorBaseUrl;
- }
-
- CloseableHttpResponse loadModel(String moduleName, String yangModelFileName, String initialiConfigFileName) throws IOException {
- String updateConfigUrl = String.format("%s/netconf/model/%s", simulatorBaseUrl, moduleName);
- HttpPost httpPost = new HttpPost(updateConfigUrl);
- HttpEntity updatedConfig = MultipartEntityBuilder
- .create()
- .addBinaryBody("yangModel", ResourceUtils.getFile(String.format("classpath:%s", yangModelFileName)))
- .addBinaryBody("initialConfig", ResourceUtils.getFile(String.format("classpath:%s",initialiConfigFileName)))
- .addTextBody("moduleName", moduleName)
- .build();
- httpPost.setEntity(updatedConfig);
- return netconfClient.execute(httpPost);
- }
-
- CloseableHttpResponse deleteModel(String moduleName) throws IOException {
- String deleteModuleUrl = String.format("%s/netconf/model/%s", simulatorBaseUrl, moduleName);
- HttpDelete httpDelete = new HttpDelete(deleteModuleUrl);
- return netconfClient.execute(httpDelete);
- }
-
- boolean isServiceAvailable(Instant startTime, Duration maxWaitingDuration) throws InterruptedException {
- boolean isServiceReady = false;
- while (Duration.between(startTime, Instant.now()).compareTo(maxWaitingDuration) < 1){
- if(checkIfSimResponds()){
- return true;
- }
- else {
- LOG.info(() -> "Simulator not ready yet, retrying in 5s...");
- Thread.sleep(5000);
- }
- }
- return isServiceReady;
- }
-
- private boolean checkIfSimResponds() throws InterruptedException {
- try(CloseableHttpResponse pingResponse = getCurrentConfig()){
- String responseString = getResponseContentAsString(pingResponse);
- if(pingResponse.getStatusLine().getStatusCode() == 200 && !responseString.trim().isEmpty()){
- return true;
- }
- }
- catch(IOException ex){
- LOG.error(ex, () -> "EXCEPTION");
- Thread.sleep(5000);
- }
- return false;
- }
-
- CloseableHttpResponse getCurrentConfig() throws IOException {
- String netconfAddress = String.format("%s/netconf/get", simulatorBaseUrl);
- HttpGet get = new HttpGet(netconfAddress);
- return netconfClient.execute(get);
- }
-
- CloseableHttpResponse getConfigByModelAndContainerNames(String model, String container) throws IOException {
- String netconfAddress = String
- .format("%s/netconf/get/%s/%s", simulatorBaseUrl, model, container);
- HttpGet get = new HttpGet(netconfAddress);
- return netconfClient.execute(get);
- }
-
- CloseableHttpResponse updateConfig() throws IOException {
- String updateConfigUrl = String.format("%s/netconf/edit-config", simulatorBaseUrl);
- HttpPost httpPost = new HttpPost(updateConfigUrl);
- HttpEntity updatedConfig = MultipartEntityBuilder
- .create()
- .addBinaryBody("editConfigXml", ResourceUtils.getFile("classpath:updatedConfig.xml"))
- .build();
- httpPost.setEntity(updatedConfig);
- return netconfClient.execute(httpPost);
- }
-
- CloseableHttpResponse getAllConfigChanges() throws IOException {
- String netconfStoreCmHistoryAddress = String.format("%s/store/cm-history", simulatorBaseUrl);
- HttpGet configurationChangesResponse = new HttpGet(netconfStoreCmHistoryAddress);
- return netconfClient.execute(configurationChangesResponse);
- }
-
- CloseableHttpResponse getLastConfigChanges(int howManyLastChanges) throws IOException {
- String netconfStoreCmHistoryAddress = String.format("%s/store/less?offset=%d", simulatorBaseUrl, howManyLastChanges);
- HttpGet configurationChangesResponse = new HttpGet(netconfStoreCmHistoryAddress);
- return netconfClient.execute(configurationChangesResponse);
- }
-
- void releaseClient() throws IOException {
- netconfClient.close();
- }
-
- void reinitializeClient(){
- netconfClient = HttpClients.createDefault();
- }
-
- String getResponseContentAsString(HttpResponse response) throws IOException {
- HttpEntity entity = response.getEntity();
- String entityStringRepr = EntityUtils.toString(entity);
- EntityUtils.consume(entity);
- return entityStringRepr;
- }
-
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/kafka/EmbeddedKafkaConfig.java b/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/kafka/EmbeddedKafkaConfig.java
deleted file mode 100644
index 5ddf2b2a6..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/kafka/EmbeddedKafkaConfig.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.kafka;
-
-
-import java.util.Map;
-import org.apache.kafka.clients.consumer.ConsumerConfig;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
-import org.springframework.kafka.core.ConsumerFactory;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
-import org.springframework.kafka.core.DefaultKafkaProducerFactory;
-import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.core.ProducerFactory;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
-
-import static org.onap.netconfsimulator.kafka.StoreServiceTest.embeddedKafka;
-
-@Configuration
-class EmbeddedKafkaConfig {
-
- @Bean
- KafkaTemplate<String, String> kafkaTemplate(){
- return new KafkaTemplate<>(producerFactory());
- }
-
- @Bean
- @Autowired
- ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory(ConsumerFactory<String, String> consumerFactory){
- ConcurrentKafkaListenerContainerFactory<String, String> containerFactory = new ConcurrentKafkaListenerContainerFactory<>();
- containerFactory.setConsumerFactory(consumerFactory);
- return containerFactory;
- }
-
- @Bean
- ConsumerFactory<String, String> consumerFactory(){
- Map<String, Object> consumerProperties =
- KafkaTestUtils.consumerProps("sender", "false", embeddedKafka.getEmbeddedKafka());
- consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
- return new DefaultKafkaConsumerFactory<>(consumerProperties);
- }
-
- private ProducerFactory<String, String> producerFactory() {
- Map<String, Object> senderProperties =
- KafkaTestUtils.senderProps(embeddedKafka.getEmbeddedKafka().getBrokersAsString());
- return new DefaultKafkaProducerFactory<>(senderProperties);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/kafka/StoreControllerTest.java b/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/kafka/StoreControllerTest.java
deleted file mode 100644
index 02eec12ac..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/kafka/StoreControllerTest.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.kafka;
-
-import java.time.Instant;
-import java.util.List;
-import org.assertj.core.api.Assertions;
-import org.assertj.core.util.Lists;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
-
-import static org.mockito.Mockito.when;
-
-@RunWith(SpringJUnit4ClassRunner.class)
-public class StoreControllerTest {
-
- private static final String MESSAGE_3 = "message 3";
- private static final String MESSAGE_2 = "message 2";
- private static final String MESSAGE_1 = "message 1";
-
- private static final List<MessageDTO> ALL_MESSAGES = Lists.newArrayList(new MessageDTO(Instant.now().getEpochSecond(), MESSAGE_1),
- new MessageDTO(Instant.now().getEpochSecond(), MESSAGE_2),
- new MessageDTO(Instant.now().getEpochSecond(), MESSAGE_3));
-
- @Mock
- private StoreService service;
-
- @InjectMocks
- private StoreController storeController;
-
-
- @Test
- public void lessShouldTakeAllMessagesTest() {
- when(service.getLastMessages(3)).thenReturn(ALL_MESSAGES);
-
- List<MessageDTO> lessResponse = storeController.less(3);
-
- assertResponseContainsExpectedMessages(lessResponse, 3, MESSAGE_1, MESSAGE_2, MESSAGE_3);
- }
-
- @Test
- public void lessShouldTakeTwoMessagesTest() {
- when(service.getLastMessages(2)).thenReturn(Lists.newArrayList(new MessageDTO(Instant.now().getEpochSecond(), MESSAGE_1)));
-
- List<MessageDTO> lessResult = storeController.less(2);
-
- assertResponseContainsExpectedMessages(lessResult, 1, MESSAGE_1);
- }
-
- @Test
- public void shouldGetAllMessages(){
- when(service.getAllMessages()).thenReturn(ALL_MESSAGES);
-
- List<MessageDTO> allMsgResult = storeController.getAllConfigurationChanges();
-
- assertResponseContainsExpectedMessages(allMsgResult, 3, MESSAGE_1, MESSAGE_2, MESSAGE_3);
- }
-
- private void assertResponseContainsExpectedMessages(List<MessageDTO> actualMessages, int expectedMessageCount, String... expectedMessages){
- Assertions.assertThat(actualMessages.stream().map(MessageDTO::getConfiguration))
- .hasSize(expectedMessageCount)
- .containsExactly(expectedMessages);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/kafka/StoreServiceTest.java b/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/kafka/StoreServiceTest.java
deleted file mode 100644
index fd36116a8..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/kafka/StoreServiceTest.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.kafka;
-
-import org.bitbucket.radistao.test.annotation.BeforeAllMethods;
-import org.bitbucket.radistao.test.runner.BeforeAfterSpringTestRunner;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.context.EmbeddedKafka;
-import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
-
-import java.util.List;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-@RunWith(BeforeAfterSpringTestRunner.class)
-@SpringBootTest(classes = {StoreService.class, EmbeddedKafkaConfig.class})
-@EmbeddedKafka
-public class StoreServiceTest {
-
- private static final String MESSAGE_1 = "message1";
- private static final String MESSAGE_2 = "message2";
- private static final String MESSAGE_3 = "message3";
-
- @ClassRule
- public static EmbeddedKafkaRule embeddedKafka = new EmbeddedKafkaRule(1, true, 1, "config");
-
- @Autowired
- StoreService service;
-
- @Autowired
- KafkaTemplate<String, String> kafkaTemplate;
-
- @BeforeAllMethods
- public void setupBeforeAll() {
- prepareProducer();
- }
-
- @Test
- public void testShouldReturnAllAvailableMessages(){
-
- List<MessageDTO> actualMessages = service.getAllMessages();
-
- assertResponseContainsExpectedMessages(actualMessages, 3, MESSAGE_1, MESSAGE_2, MESSAGE_3);
- }
-
- @Test
- public void testShouldGetLastMessagesRespectingOffset(){
-
- List<MessageDTO> wantedLastMsg = service.getLastMessages(1L);
-
- assertResponseContainsExpectedMessages(wantedLastMsg, 1, MESSAGE_3);
- }
-
- @Test
- public void testShouldGetAll3Messages() {
- List<MessageDTO> wantedLastMsgs = service.getLastMessages(3L);
-
- assertResponseContainsExpectedMessages(wantedLastMsgs, 3, MESSAGE_1, MESSAGE_2, MESSAGE_3);
- }
-
- private void prepareProducer(){
- kafkaTemplate.send("config", "message1");
- kafkaTemplate.send("config", "message2");
- kafkaTemplate.send("config", "message3");
- }
-
- private void assertResponseContainsExpectedMessages(List<MessageDTO> actualMessages, int expectedMessageCount, String... expectedMessages){
- assertThat(actualMessages.stream().map(MessageDTO::getConfiguration))
- .hasSize(expectedMessageCount)
- .containsExactly(expectedMessages);
- }
-
-}
-
-
-
-
-
-
-
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/kafka/listener/KafkaListenerHandlerTest.java b/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/kafka/listener/KafkaListenerHandlerTest.java
deleted file mode 100644
index fcb72666a..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/kafka/listener/KafkaListenerHandlerTest.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.kafka.listener;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.spy;
-import static org.mockito.MockitoAnnotations.initMocks;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mock;
-import org.springframework.kafka.core.ConsumerFactory;
-import org.springframework.kafka.listener.ContainerProperties;
-import org.springframework.kafka.listener.KafkaMessageListenerContainer;
-import org.springframework.kafka.listener.MessageListener;
-
-class KafkaListenerHandlerTest {
-
- private static final String CLIENT_ID_REGEX = "[0-9]{10,}";
- private static final String SAMPLE_TOPIC = "sampleTopic";
-
- @Mock
- private ConsumerFactory<String, String> consumerFactory;
-
- @Mock
- private KafkaMessageListenerContainer<String, String> kafkaMessageListenerContainer;
-
- @Mock
- private MessageListener messageListener;
-
- @BeforeEach
- void setUp() {
- initMocks(this);
- }
-
-
- @Test
- void shouldProperlyCreateKafkaListener() {
- KafkaListenerHandler kafkaListenerHandler = spy(new KafkaListenerHandler(consumerFactory));
- doReturn(kafkaMessageListenerContainer).when(kafkaListenerHandler)
- .createListenerContainer(any(ContainerProperties.class), eq(SAMPLE_TOPIC));
-
- KafkaListenerEntry kafkaListenerEntry = kafkaListenerHandler
- .createKafkaListener(messageListener, SAMPLE_TOPIC);
-
- assertThat(kafkaListenerEntry.getListenerContainer()).isEqualTo(kafkaMessageListenerContainer);
- assertThat(kafkaListenerEntry.getClientId()).matches(CLIENT_ID_REGEX);
- }
-
- @Test
- void shouldProperlyCreateContainer() {
- KafkaListenerHandler kafkaListenerHandler = spy(new KafkaListenerHandler(consumerFactory));
- ContainerProperties containerProperties = new ContainerProperties(SAMPLE_TOPIC);
- containerProperties.setMessageListener(mock(MessageListener.class));
-
- KafkaMessageListenerContainer<String, String> listenerContainer = kafkaListenerHandler
- .createListenerContainer(containerProperties, SAMPLE_TOPIC);
-
- ContainerProperties actualProperties = listenerContainer.getContainerProperties();
- assertThat(actualProperties.getTopics()).isEqualTo(containerProperties.getTopics());
- assertThat(actualProperties.getMessageListener()).isEqualTo(containerProperties.getMessageListener());
- }
-
-
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/NetconfControllerTest.java b/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/NetconfControllerTest.java
deleted file mode 100644
index 73fb627ea..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/NetconfControllerTest.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.netconfcore;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assertions.assertThatThrownBy;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.mockito.MockitoAnnotations.initMocks;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-
-import com.tailf.jnc.JNCException;
-import java.io.IOException;
-import java.nio.file.Files;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.onap.netconfsimulator.netconfcore.configuration.NetconfConfigurationService;
-import org.onap.netconfsimulator.netconfcore.model.LoadModelResponse;
-import org.onap.netconfsimulator.netconfcore.model.NetconfModelLoaderService;
-import org.springframework.mock.web.MockMultipartFile;
-import org.springframework.test.web.servlet.MockMvc;
-import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
-import org.springframework.test.web.servlet.setup.MockMvcBuilders;
-import org.springframework.util.ResourceUtils;
-import org.springframework.web.multipart.MultipartFile;
-
-class NetconfControllerTest {
-
- private MockMvc mockMvc;
-
- @Mock
- private NetconfConfigurationService netconfService;
-
- @Mock
- private NetconfModelLoaderService netconfModelLoaderService;
-
- @InjectMocks
- private NetconfController controller;
-
- private static final String SAMPLE_CONFIGURATION = "<config xmlns=\"http://onap.org/pnf-simulator\" xmlns:nc=\"urn:ietf:params:xml:ns:netconf:base:1.0\"><itemValue1>11</itemValue1><itemValue2>22</itemValue2></config>";
-
- @BeforeEach
- void setUp() {
- initMocks(this);
- mockMvc = MockMvcBuilders.standaloneSetup(controller).build();
- }
-
- @Test
- void testShouldDigestMultipartFile() throws Exception {
- byte[] bytes =
- Files.readAllBytes(ResourceUtils.getFile("classpath:updatedConfig.xml").toPath());
- MockMultipartFile file = new MockMultipartFile("editConfigXml", bytes);
-
- mockMvc
- .perform(MockMvcRequestBuilders.multipart("/netconf/edit-config").file(file))
- .andExpect(status().isAccepted());
-
- verify(netconfService).editCurrentConfiguration(any(MultipartFile.class));
- }
-
- @Test
- void testShouldThrowExceptionWhenEditConfigFileWithIncorrectNameProvided() throws Exception {
- MockMultipartFile file = new MockMultipartFile("wrongName", new byte[0]);
-
- mockMvc
- .perform(MockMvcRequestBuilders.multipart("/netconf/edit-config").file(file))
- .andExpect(status().isBadRequest());
-
- verify(netconfService, never()).editCurrentConfiguration(any(MultipartFile.class));
- }
-
- @Test
- void testShouldReturnCurrentConfiguration() throws Exception {
- when(netconfService.getCurrentConfiguration()).thenReturn(SAMPLE_CONFIGURATION);
-
- String contentAsString =
- mockMvc
- .perform(get("/netconf/get"))
- .andExpect(status().isOk())
- .andReturn()
- .getResponse()
- .getContentAsString();
-
- verify(netconfService).getCurrentConfiguration();
- assertThat(contentAsString).isEqualTo(SAMPLE_CONFIGURATION);
- }
-
- @Test
- void testShouldReturnConfigurationForGivenPath() throws Exception {
- when(netconfService.getCurrentConfiguration("sampleModel", "sampleContainer"))
- .thenReturn(SAMPLE_CONFIGURATION);
-
- String contentAsString =
- mockMvc
- .perform(get("/netconf/get/sampleModel/sampleContainer"))
- .andExpect(status().isOk())
- .andReturn()
- .getResponse()
- .getContentAsString();
-
- verify(netconfService).getCurrentConfiguration("sampleModel", "sampleContainer");
- assertThat(contentAsString).isEqualTo(SAMPLE_CONFIGURATION);
- }
-
- @Test
- void testShouldRaiseBadRequestWhenConfigurationIsNotPresent() throws Exception {
- when(netconfService.getCurrentConfiguration("sampleModel", "sampleContainer2"))
- .thenThrow(new JNCException(JNCException.ELEMENT_MISSING, "/sampleModel:sampleContainer2"));
-
- String contentAsString =
- mockMvc
- .perform(get("/netconf/get/sampleModel/sampleContainer2"))
- .andExpect(status().isBadRequest())
- .andReturn()
- .getResponse()
- .getContentAsString();
-
- assertThat(contentAsString).isEqualTo("Element does not exists: /sampleModel:sampleContainer2");
- }
-
- @Test
- void shouldThrowExceptionWhenNoConfigurationPresent() throws IOException, JNCException {
- when(netconfService.getCurrentConfiguration()).thenThrow(JNCException.class);
-
- assertThatThrownBy(() -> mockMvc.perform(get("/netconf/get")))
- .hasRootCauseExactlyInstanceOf(JNCException.class);
- }
-
- @Test
- void testShouldDeleteYangModel() throws Exception {
- String responseOkString = "Alles klar";
- String yangModelName = "someModel";
- LoadModelResponse loadModelResponse = new LoadModelResponse(200, responseOkString);
- String uri = String.format("/netconf/model/%s", yangModelName);
- when(netconfModelLoaderService.deleteYangModel(yangModelName)).thenReturn(loadModelResponse);
-
- String contentAsString =
- mockMvc
- .perform(delete(uri))
- .andExpect(status().isOk())
- .andReturn()
- .getResponse()
- .getContentAsString();
-
- verify(netconfModelLoaderService).deleteYangModel(yangModelName);
- assertThat(contentAsString).isEqualTo(responseOkString);
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationEditorTest.java b/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationEditorTest.java
deleted file mode 100644
index 371bdd84b..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationEditorTest.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.netconfcore.configuration;
-
-import com.tailf.jnc.Element;
-import com.tailf.jnc.JNCException;
-import com.tailf.jnc.NetconfSession;
-import com.tailf.jnc.XMLParser;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.onap.netconfsimulator.netconfcore.configuration.NetconfConfigurationEditor;
-import org.springframework.util.ResourceUtils;
-import org.xml.sax.InputSource;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.nio.file.Files;
-
-import static org.mockito.Mockito.verify;
-import static org.mockito.MockitoAnnotations.initMocks;
-
-class NetconfConfigurationEditorTest {
-
- @Mock
- private NetconfSession session;
- @Mock
- private NetconfSessionHelper netconfSessionHelper;
-
- private NetconfConfigurationEditor editor;
-
- @BeforeEach
- void setUp() throws IOException, JNCException {
- initMocks(this);
- NetconfConnectionParams params = null;
- Mockito.when(netconfSessionHelper.createNetconfSession(params)).thenReturn(session);
- editor = new NetconfConfigurationEditor(params, netconfSessionHelper);
- }
-
- @Test
- void testShouldEditConfigSuccessfully() throws IOException, JNCException {
- byte[] bytes =
- Files.readAllBytes(ResourceUtils.getFile("classpath:updatedConfig.xml").toPath());
- Element editConfigXml = new XMLParser().parse(new InputSource(new ByteArrayInputStream(bytes)));
-
- editor.editConfig(editConfigXml);
-
- verify(session).editConfig(editConfigXml);
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationReaderTest.java b/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationReaderTest.java
deleted file mode 100644
index a0a15b993..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationReaderTest.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.netconfcore.configuration;
-
-import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import com.tailf.jnc.Element;
-import com.tailf.jnc.JNCException;
-import com.tailf.jnc.NetconfSession;
-import com.tailf.jnc.NodeSet;
-import java.io.IOException;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.MockitoAnnotations;
-
-class NetconfConfigurationReaderTest {
-
- private static final String NETCONF_MODEL_PATH = "";
- private static final String EXPECTED_STRING_XML = "<?xml version=\"1.0\"?>";
- private NetconfConfigurationReader reader;
-
- @Mock
- private NetconfSession netconfSession;
-
- @Mock
- private NetconfSessionHelper netconfSessionHelper;
-
- @Mock
- private NodeSet nodeSet;
-
- @Mock
- private Element element;
-
- @BeforeEach
- void setUp() throws IOException, JNCException {
- MockitoAnnotations.initMocks(this);
- NetconfConnectionParams params = null;
- Mockito.when(netconfSessionHelper.createNetconfSession(params)).thenReturn(netconfSession);
- reader = new NetconfConfigurationReader(params, netconfSessionHelper);
- }
-
- @Test
- void properlyReadXML() throws IOException, JNCException {
- when(netconfSession.getConfig()).thenReturn(nodeSet);
- when(nodeSet.toXMLString()).thenReturn(EXPECTED_STRING_XML);
-
- String result = reader.getRunningConfig();
-
- verify(netconfSession).getConfig();
- verify(nodeSet).toXMLString();
- assertThat(result).isEqualTo(EXPECTED_STRING_XML);
- }
-
- @Test
- void shouldProperlyReadXmlByName() throws IOException, JNCException {
- when(netconfSession.getConfig("/sample:test")).thenReturn(nodeSet);
- when(nodeSet.first()).thenReturn(element);
- when(element.toXMLString()).thenReturn(EXPECTED_STRING_XML);
-
- String result = reader.getRunningConfig("/sample:test");
-
- verify(netconfSession).getConfig("/sample:test");
- verify(nodeSet, times(2)).first();
- verify(element).toXMLString();
-
- assertThat(result).isEqualTo(EXPECTED_STRING_XML);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationServiceTest.java b/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationServiceTest.java
deleted file mode 100644
index 6da65728e..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/configuration/NetconfConfigurationServiceTest.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.netconfcore.configuration;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assertions.assertThatThrownBy;
-import static org.mockito.Mockito.doNothing;
-import static org.mockito.Mockito.when;
-import static org.mockito.MockitoAnnotations.initMocks;
-
-import com.tailf.jnc.Element;
-import com.tailf.jnc.JNCException;
-import java.io.IOException;
-import java.nio.file.Files;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.ArgumentCaptor;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.springframework.mock.web.MockMultipartFile;
-import org.springframework.util.ResourceUtils;
-
-class NetconfConfigurationServiceTest {
-
- @Mock
- NetconfConfigurationReader reader;
-
- @Mock
- NetconfConfigurationEditor editor;
-
- @InjectMocks
- NetconfConfigurationService service;
-
- private static String CURRENT_CONFIG_XML_STRING =
- "<config xmlns=\"http://onap.org/pnf-simulator\" xmlns:nc=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\n"
- + " <itemValue1>100</itemValue1>\n"
- + " <itemValue2>200</itemValue2>\n"
- + "</config>\n";
-
- @BeforeEach
- void setUp() {
- initMocks(this);
- }
-
- @Test
- void testShouldReturnCorrectCurrentConfiguration() throws IOException, JNCException {
- String expectedConfiguration = CURRENT_CONFIG_XML_STRING;
- when(reader.getRunningConfig()).thenReturn(CURRENT_CONFIG_XML_STRING);
-
- String actualCurrentConfiguration = service.getCurrentConfiguration();
-
- assertThat(actualCurrentConfiguration).isEqualToIgnoringCase(expectedConfiguration);
- }
-
- @Test
- void testShouldThrowExceptionWhenCurrentConfigurationDoesNotExists() throws IOException, JNCException{
- when(reader.getRunningConfig()).thenThrow(JNCException.class);
-
- assertThatThrownBy(() -> service.getCurrentConfiguration()).isInstanceOf(JNCException.class);
- }
-
- @Test
- void testShouldEditConfigurationSuccessfully() throws IOException, JNCException{
- byte[] bytes =
- Files.readAllBytes(ResourceUtils.getFile("classpath:updatedConfig.xml").toPath());
- MockMultipartFile editConfigXmlContent = new MockMultipartFile("editConfigXml", bytes);
- ArgumentCaptor<Element> elementCaptor = ArgumentCaptor.forClass(Element.class);
- doNothing().when(editor).editConfig(elementCaptor.capture());
-
- service.editCurrentConfiguration(editConfigXmlContent);
-
- assertThat(elementCaptor.getValue().toXMLString()).isEqualTo(CURRENT_CONFIG_XML_STRING);
- }
-
- @Test
- void testShouldRaiseExceptionWhenMultipartFileIsInvalidXmlFile() throws IOException {
- byte[] bytes =
- Files.readAllBytes(ResourceUtils.getFile("classpath:invalidXmlFile.xml").toPath());
- MockMultipartFile editConfigXmlContent = new MockMultipartFile("editConfigXml", bytes);
-
- assertThatThrownBy(() -> service.editCurrentConfiguration(editConfigXmlContent)).isInstanceOf(JNCException.class);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/model/NetconfModelLoaderServiceTest.java b/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/model/NetconfModelLoaderServiceTest.java
deleted file mode 100644
index a10876b98..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/netconfcore/model/NetconfModelLoaderServiceTest.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.netconfcore.model;
-
-
-import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpResponse;
-import org.apache.http.StatusLine;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.methods.HttpDelete;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.methods.HttpRequestBase;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.springframework.web.multipart.MultipartFile;
-
-class NetconfModelLoaderServiceTest {
-
- @Mock
- private HttpClient httpClient;
-
- private NetconfModelLoaderService modelLoaderService;
-
- @BeforeEach
- void setUp() {
- MockitoAnnotations.initMocks(this);
- modelLoaderService = new NetconfModelLoaderService(httpClient);
- }
-
-
- @Test
- void shouldSendMultipartToServer() throws IOException {
- //given
- String loadModelAddress = modelLoaderService.getBackendAddress();
- makeMockClientReturnStatusOk(httpClient, HttpPost.class);
- ArgumentCaptor<HttpPost> postArgumentCaptor = ArgumentCaptor.forClass(HttpPost.class);
- MultipartFile yangMmodel = mock(MultipartFile.class);
- MultipartFile initialConfig = mock(MultipartFile.class);
- String moduleName = "moduleName";
- when(yangMmodel.getInputStream()).thenReturn(getEmptyImputStream());
- when(initialConfig.getInputStream()).thenReturn(getEmptyImputStream());
-
- //when
- LoadModelResponse response = modelLoaderService.loadYangModel(yangMmodel, initialConfig, moduleName);
-
- //then
- verify(httpClient).execute(postArgumentCaptor.capture());
- HttpPost sentPost = postArgumentCaptor.getValue();
- assertThat(response.getStatusCode()).isEqualTo(200);
- assertThat(response.getMessage()).isEqualTo("");
- assertThat(sentPost.getURI().toString()).isEqualTo(loadModelAddress);
- assertThat(sentPost.getEntity().getContentType().getElements()[0].getName()).isEqualTo("multipart/form-data");
- }
-
- @Test
- void shouldSendDeleteRequestToServer() throws IOException {
- //given
- String yangModelName = "sampleModel";
- String deleteModelAddress = modelLoaderService.getDeleteAddress(yangModelName);
- makeMockClientReturnStatusOk(httpClient, HttpDelete.class);
- ArgumentCaptor<HttpDelete> deleteArgumentCaptor = ArgumentCaptor.forClass(HttpDelete.class);
-
- //when
- LoadModelResponse response = modelLoaderService.deleteYangModel(yangModelName);
-
- //then
- verify(httpClient).execute(deleteArgumentCaptor.capture());
- HttpDelete sendDelete = deleteArgumentCaptor.getValue();
- assertThat(response.getStatusCode()).isEqualTo(200);
- assertThat(response.getMessage()).isEqualTo("");
- assertThat(sendDelete.getURI().toString()).isEqualTo(deleteModelAddress);
- }
-
- private void makeMockClientReturnStatusOk(HttpClient client,
- Class<? extends HttpRequestBase> httpMethodClass) throws IOException {
- HttpResponse httpResponse = mock(HttpResponse.class);
- StatusLine mockStatus = mock(StatusLine.class);
- HttpEntity mockEntity = mock(HttpEntity.class);
-
- when(client.execute(any(httpMethodClass))).thenReturn(httpResponse);
- when(httpResponse.getStatusLine()).thenReturn(mockStatus);
- when(mockStatus.getStatusCode()).thenReturn(200);
- when(httpResponse.getEntity()).thenReturn(mockEntity);
- when(mockEntity.getContent()).thenReturn(getEmptyImputStream());
- }
-
- private InputStream getEmptyImputStream() {
- return new ByteArrayInputStream("".getBytes());
- }
-
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/websocket/NetconfEndpointTest.java b/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/websocket/NetconfEndpointTest.java
deleted file mode 100644
index c1484d4b2..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/websocket/NetconfEndpointTest.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.websocket;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.mockito.MockitoAnnotations.initMocks;
-
-import java.util.Map;
-import java.util.Optional;
-import javax.websocket.CloseReason;
-import javax.websocket.EndpointConfig;
-import javax.websocket.RemoteEndpoint;
-import javax.websocket.Session;
-import org.apache.kafka.common.Metric;
-import org.apache.kafka.common.MetricName;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mock;
-import org.onap.netconfsimulator.kafka.listener.KafkaListenerEntry;
-import org.onap.netconfsimulator.kafka.listener.KafkaListenerHandler;
-import org.onap.netconfsimulator.websocket.message.NetconfMessageListener;
-import org.springframework.kafka.core.ConsumerFactory;
-import org.springframework.kafka.listener.AbstractMessageListenerContainer;
-
-import org.springframework.kafka.listener.ContainerProperties;
-import org.springframework.kafka.listener.GenericMessageListener;
-
-class NetconfEndpointTest {
-
-
- @Mock
- private KafkaListenerHandler kafkaListenerHandler;
-
- @Mock
- private Session session;
-
- @Mock
- private EndpointConfig endpointConfig;
-
- @Mock
- private RemoteEndpoint.Basic remoteEndpoint;
-
-
- @BeforeEach
- void setUp() {
- initMocks(this);
- }
-
-
- @Test
- void shouldCreateKafkaListenerWhenClientInitializeConnection() {
- NetconfEndpoint netconfEndpoint = new NetconfEndpoint(kafkaListenerHandler);
- AbstractMessageListenerContainer abstractMessageListenerContainer = getListenerContainer();
- when(session.getBasicRemote()).thenReturn(remoteEndpoint);
- KafkaListenerEntry kafkaListenerEntry = new KafkaListenerEntry("sampleGroupId",
- abstractMessageListenerContainer);
- when(kafkaListenerHandler.createKafkaListener(any(NetconfMessageListener.class), eq("config")))
- .thenReturn(kafkaListenerEntry);
-
- netconfEndpoint.onOpen(session, endpointConfig);
-
- assertThat(netconfEndpoint.getEntry().get().getClientId()).isEqualTo("sampleGroupId");
- assertThat(netconfEndpoint.getEntry().get().getListenerContainer()).isEqualTo(abstractMessageListenerContainer);
-
- verify(abstractMessageListenerContainer).start();
- }
-
-
- @Test
- void shouldCloseListenerWhenClientDisconnects() {
- NetconfEndpoint netconfEndpoint = new NetconfEndpoint(kafkaListenerHandler);
- AbstractMessageListenerContainer abstractMessageListenerContainer = getListenerContainer();
- netconfEndpoint.setEntry( Optional.of(new KafkaListenerEntry("sampleGroupId", abstractMessageListenerContainer)) );
-
- netconfEndpoint.onClose(session, mock(CloseReason.class));
-
- verify(abstractMessageListenerContainer).stop();
- }
-
- class TestAbstractMessageListenerContainer extends AbstractMessageListenerContainer {
-
-
- TestAbstractMessageListenerContainer(ContainerProperties containerProperties) {
- super(mock(ConsumerFactory.class),containerProperties);
- }
-
- @Override
- protected void doStart() {
-
- }
-
- @Override
- protected void doStop(Runnable callback) {
-
- }
-
- @Override
- public Map<String, Map<MetricName, ? extends Metric>> metrics() {
- return null;
- }
- }
-
- private AbstractMessageListenerContainer getListenerContainer() {
- ContainerProperties containerProperties = new ContainerProperties("config");
- containerProperties.setGroupId("sample");
- containerProperties.setMessageListener(mock(GenericMessageListener.class));
- TestAbstractMessageListenerContainer testAbstractMessageListenerContainer = new TestAbstractMessageListenerContainer(
- containerProperties);
- return spy(testAbstractMessageListenerContainer);
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/websocket/message/NetconfMessageListenerTest.java b/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/websocket/message/NetconfMessageListenerTest.java
deleted file mode 100644
index bb040d1e3..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/java/org/onap/netconfsimulator/websocket/message/NetconfMessageListenerTest.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.netconfsimulator.websocket.message;
-
-
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.verify;
-import static org.mockito.MockitoAnnotations.initMocks;
-
-import java.io.IOException;
-import javax.websocket.EncodeException;
-import javax.websocket.RemoteEndpoint;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.onap.netconfsimulator.kafka.model.KafkaMessage;
-
-
-class NetconfMessageListenerTest {
-
- private static final ConsumerRecord<String, String> KAFKA_RECORD = new ConsumerRecord<>("sampleTopic", 0, 0,
- "sampleKey", "sampleValue");
-
- @Mock
- private RemoteEndpoint.Basic remoteEndpoint;
-
- @InjectMocks
- private NetconfMessageListener netconfMessageListener;
-
-
- @BeforeEach
- void setUp() {
- initMocks(this);
- }
-
-
- @Test
- void shouldProperlyParseAndSendConsumerRecord() throws IOException, EncodeException {
- netconfMessageListener.onMessage(KAFKA_RECORD);
-
- verify(remoteEndpoint).sendObject(any(KafkaMessage.class));
- }
-
-
-
- @Test
- void shouldNotPropagateEncodeException() throws IOException, EncodeException {
- doThrow(new EncodeException("","")).when(remoteEndpoint).sendObject(any(KafkaMessage.class));
-
- netconfMessageListener.onMessage(KAFKA_RECORD);
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/initialConfig.xml b/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/initialConfig.xml
deleted file mode 100644
index f28a1a0a8..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/initialConfig.xml
+++ /dev/null
@@ -1,23 +0,0 @@
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-<config2 xmlns="http://onap.org/newyangmodel">
- <item1>100</item1>
-</config2>
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/invalidXmlFile.xml b/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/invalidXmlFile.xml
deleted file mode 100644
index 3debd8c26..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/invalidXmlFile.xml
+++ /dev/null
@@ -1,23 +0,0 @@
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-<config xmlns="http://onap.org/pnf-simulator" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
- <itemValue1>100</itemValue1>
-<config>
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/newYangModel.yang b/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/newYangModel.yang
deleted file mode 100644
index bbe66c3ae..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/newYangModel.yang
+++ /dev/null
@@ -1,8 +0,0 @@
-module newyangmodel {
- namespace "http://onap.org/newyangmodel";
- prefix config2;
- container config2 {
- config true;
- leaf item1 {type uint32;}
- }
-}
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/updatedConfig.xml b/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/updatedConfig.xml
deleted file mode 100644
index 628a710fd..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/updatedConfig.xml
+++ /dev/null
@@ -1,24 +0,0 @@
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-<config xmlns="http://onap.org/pnf-simulator" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
- <itemValue1>100</itemValue1>
- <itemValue2>200</itemValue2>
-</config>
diff --git a/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/updatedConfigForCmHistory.xml b/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/updatedConfigForCmHistory.xml
deleted file mode 100644
index 5bc0e4285..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/src/test/resources/updatedConfigForCmHistory.xml
+++ /dev/null
@@ -1,24 +0,0 @@
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-<config xmlns="http://onap.org/pnf-simulator" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
- <itemValue1>500</itemValue1>
- <itemValue2>1000</itemValue2>
-</config>
diff --git a/test/mocks/pnfsimulator/netconfsimulator/ssh/ssh_host_rsa_key b/test/mocks/pnfsimulator/netconfsimulator/ssh/ssh_host_rsa_key
deleted file mode 100644
index dbf8d7635..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/ssh/ssh_host_rsa_key
+++ /dev/null
@@ -1,49 +0,0 @@
------BEGIN OPENSSH PRIVATE KEY-----
-b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAACFwAAAAdzc2gtcn
-NhAAAAAwEAAQAAAgEA0Eve1v58bkLc4/6FvcBYWO8q6rOTJok7YyOHnc6QBJFuChSTbAaV
-nn+E8nPgyqN0/hH740+qjtWpaJdKh0+hJMw0vUeX6SQ1OpRgyXFbDSbFrjsBjB7N2eusrR
-M5FweDxVW/mfR7AHemrLulbNYHt/HmY+Hee09D/awICpuViLKLOsPp8ifmbHG1mXStLNSg
-OJgBvsrMvbsKYgJq0HdWPyfkzFf4SVdjKxeegQbkSogWGqRG7zkfu8bsYooUpIFRFMiJvW
-dr9tlegESAu1fmUx2Wz7EtN4Qq7xqHfAbT+Ruzr4rtCRRCsdEou49kpTSM75FaKlXmchRV
-tBQKndFhcPQCO+m9OkQ4+45VSeLL236MTvAFGnqfLks97TO7VLGinhtMvJA1gZI93bwVBS
-P1uaVuiUdzT8OgUq6xsLx2qg2aoau5Ivj65Hdj8tlIjeFHeGgUd1GZa434X3p8WCDFVrQK
-GpReaDTFttVFw7F2+biFmn68TO0bb4GE84r8ouaYVSGzWG1kEy8UtriXnx6GPOzncp8HKT
-YFn2etzs5EE9Ae64+1mIJTUEnWlpxwrjCqCIreibMz4/7AkSWrwc0YeAUERso+ESQkCU8R
-zpcYSRz50UTCaerS8K5cPE9N2XB3WYbsziNTpR568onQIkL+ZlTIbNNYBgdVNDLlQeabKS
-kAAAdILF3O/Sxdzv0AAAAHc3NoLXJzYQAAAgEA0Eve1v58bkLc4/6FvcBYWO8q6rOTJok7
-YyOHnc6QBJFuChSTbAaVnn+E8nPgyqN0/hH740+qjtWpaJdKh0+hJMw0vUeX6SQ1OpRgyX
-FbDSbFrjsBjB7N2eusrRM5FweDxVW/mfR7AHemrLulbNYHt/HmY+Hee09D/awICpuViLKL
-OsPp8ifmbHG1mXStLNSgOJgBvsrMvbsKYgJq0HdWPyfkzFf4SVdjKxeegQbkSogWGqRG7z
-kfu8bsYooUpIFRFMiJvWdr9tlegESAu1fmUx2Wz7EtN4Qq7xqHfAbT+Ruzr4rtCRRCsdEo
-u49kpTSM75FaKlXmchRVtBQKndFhcPQCO+m9OkQ4+45VSeLL236MTvAFGnqfLks97TO7VL
-GinhtMvJA1gZI93bwVBSP1uaVuiUdzT8OgUq6xsLx2qg2aoau5Ivj65Hdj8tlIjeFHeGgU
-d1GZa434X3p8WCDFVrQKGpReaDTFttVFw7F2+biFmn68TO0bb4GE84r8ouaYVSGzWG1kEy
-8UtriXnx6GPOzncp8HKTYFn2etzs5EE9Ae64+1mIJTUEnWlpxwrjCqCIreibMz4/7AkSWr
-wc0YeAUERso+ESQkCU8RzpcYSRz50UTCaerS8K5cPE9N2XB3WYbsziNTpR568onQIkL+Zl
-TIbNNYBgdVNDLlQeabKSkAAAADAQABAAACAQDFUyq+14T/W34wytzd/opzbddlUksTlavZ
-5j3CZH4Qpcjt6cIi8zXoWfuTR+1ramAZlOXf2IfGGmkLeU+UUf5hgsZvjZQ+vBtk7E2oaC
-eOlO1ued2kZUYzrMz/hRdvVqIhXnNNoMqpjbArMPSs3zGes53Df6UpgdTySnevvOZzAlld
-iV1mFyB2GV6lCmBH+QHzuyTkHvDIyJk0cf/Ij1T4LY3Ve3zt1chPeWeh6ep5JORzxq6gT6
-hdVjx3uUGG+i7aloPOF1yzFAcvUjX1xHagxIYrKTihwCaALsys1TcYZYLayKx3DmeEVpXU
-4SnCS7878KHPO2M9LUBngRjxmvpHtnaIyp4LugY48y+KtywjR39hOsKW3QawVp6CtTceNE
-QMlosaVIQuMJkeW14poYBclva0B/lCn3r9/3OrCI9qZPdD1RrCQqUyom56EU4kwRddSwHi
-LDj4xKoyzH022lfaTt+PwbXtVJGAOVTS86ZovJaIJwyKCE1T0p66qqIM/mo1GVMTfTKwUt
-A9v0AwTYFXmZ/9HJjhMNhCNclPilZHzuPI9VqiZ3tkKhS6kxZQA2cB1VdcFPIEeVOpZcJd
-yPzGfFuKYOh9CLQMxfMC46MeT1XoQi5bMBygy8ZajV9VM2xUMKft1IqyN65nA7d+covoxL
-PLjC6n3hUJJtSMTAohAQAAAQBf4lenniOgFycb1REtnUhyaCsopBmkuxlCOknkmFiMNW+B
-v/aeFUp03WluKGOVureJoooU3Z32Om2+YTxTOnCRL5Sn4gL7cAp7JfDSZpZPqynUAKFvKq
-QVbYyiEmFkUDFWes9Q0r0LAx2rvPrDIGhqx2ZgrzINlhrhxaQU2+fGbNRdI86PcWSYtc5u
-Oucd6nJM7eI3QL7/pVNlK3GMJ68eviKmhxP5HHnanNes2ORa15S5rRSOAZ5653pA1J1KxB
-J5vwwMvIXbEnBn8NilqWK50DHJMFXEoLaKlb0OoYMKbiLt4CjcCCIUUT1kAu/SFUpFdYI1
-DNXfrieu3ZTEwnU9AAABAQDrf9b8VnARNimzLqI15hp0Z5B8Sxf8Xw85KRRxg7cu6TJLoF
-K6VRK1jOzrKCrIzvBwuS2Er06ogEE/3N8eXC2ipzNOtDh7CQqoAq8IKNUt+2cvThNzfOFe
-BZ6lP9pQ60RGEPoeQyhlxHOUbV80K/ksiFqnAixOmOV7Uc7KZ+8clFvhOCm76vo80GaYYk
-NQtvMa1qxIsyUrOdhmIxF9dYN/sQMUr909o80kN69L7d+D1hG6WNskEJphrHSkPWTell5g
-4ekFna1+MjNQoCWhp6KPDKK9Y8AMrqWU/bFYw9CYwXo67p486qt487ZN+0cNItmSLMR3Ke
-MWmCmq37+v3viJAAABAQDibc4fU/xePaCjQa4VlAwk9ISd8Ufz/LcJWDiseKSzQnIdCaCG
-gO/SWTUuAzGEdrNnfJRcEUtrJ0UJAo2ntYZ3AlJLQvFF9KII3yuUn/RHMrkwslAIoqLhLO
-QX38nJeWR+hEWSFbpWP5N5biLRi2Qnwtv55hYgNqTLHRURzvin5/YeuwigBax1SwtN+V2D
-JSDhMiaFV85ZQggSoIjLIsunLK5XIDzTC82gJ9aaazvKBXwkf4Yfv5t1BgPATzqwjmXztQ
-T8WbTeqEO95cIu0zhfKIGo7Wvx7S9NPrNjyNO+JK9/qqdYGhErPiZPKOcHHywF77Yyv2cm
-gOrMYVCubUOhAAAAD21hcmNpbkBtYW5kaW5nbwECAw==
------END OPENSSH PRIVATE KEY-----
diff --git a/test/mocks/pnfsimulator/netconfsimulator/ssh/ssh_host_rsa_key.pub b/test/mocks/pnfsimulator/netconfsimulator/ssh/ssh_host_rsa_key.pub
deleted file mode 100644
index 8f739114b..000000000
--- a/test/mocks/pnfsimulator/netconfsimulator/ssh/ssh_host_rsa_key.pub
+++ /dev/null
@@ -1 +0,0 @@
-ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDQS97W/nxuQtzj/oW9wFhY7yrqs5MmiTtjI4edzpAEkW4KFJNsBpWef4Tyc+DKo3T+EfvjT6qO1alol0qHT6EkzDS9R5fpJDU6lGDJcVsNJsWuOwGMHs3Z66ytEzkXB4PFVb+Z9HsAd6asu6Vs1ge38eZj4d57T0P9rAgKm5WIsos6w+nyJ+ZscbWZdK0s1KA4mAG+ysy9uwpiAmrQd1Y/J+TMV/hJV2MrF56BBuRKiBYapEbvOR+7xuxiihSkgVEUyIm9Z2v22V6ARIC7V+ZTHZbPsS03hCrvGod8BtP5G7Oviu0JFEKx0Si7j2SlNIzvkVoqVeZyFFW0FAqd0WFw9AI76b06RDj7jlVJ4svbfoxO8AUaep8uSz3tM7tUsaKeG0y8kDWBkj3dvBUFI/W5pW6JR3NPw6BSrrGwvHaqDZqhq7ki+Prkd2Py2UiN4Ud4aBR3UZlrjfhfenxYIMVWtAoalF5oNMW21UXDsXb5uIWafrxM7RtvgYTzivyi5phVIbNYbWQTLxS2uJefHoY87OdynwcpNgWfZ63OzkQT0B7rj7WYglNQSdaWnHCuMKoIit6JszPj/sCRJavBzRh4BQRGyj4RJCQJTxHOlxhJHPnRRMJp6tLwrlw8T03ZcHdZhuzOI1OlHnryidAiQv5mVMhs01gGB1U0MuVB5pspKQ== marcin@mandingo
diff --git a/test/mocks/pnfsimulator/pnfsimulator/.gitignore b/test/mocks/pnfsimulator/pnfsimulator/.gitignore
deleted file mode 100644
index 3fa204a3b..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-.idea
-target
-*.iml
-logs
diff --git a/test/mocks/pnfsimulator/pnfsimulator/README.md b/test/mocks/pnfsimulator/pnfsimulator/README.md
deleted file mode 100644
index fadb2d4a3..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/README.md
+++ /dev/null
@@ -1,299 +0,0 @@
-# PNF Simulator
-Simulator that generates VES events related to PNF PNP integration.
-
-## Usage of simulator
-### Setting up
-Preferred way to start simulator is to use `docker-compose up -d` command.
-All required docker images will be downloaded from ONAP Nexus, however there is possibility to build those
-images locally. It can be achieve by invoking `mvn clean package docker:build` from top directory.
-
-### API
-Simulator provides REST endpoints which can be used to trigger sending events to VES.
-
-*Periodic event sending*
-To trigger sending use following endpoint *http://<simulator_ip>:5000/simulator/start*.
-Supported method: *POST*
-Headers:
- - Content-Type - application/json
-Parameters:
-
- simulatorParams:
- repeatCount - determines how many events will be sent
- repeatInterval - time (in seconds) between events
- vesServerUrl - valid path to VES Collector
- templateName - name of template file (check *Templates* section)
- patch - part of event which will be merged into template
-
-
-Sample Request:
-
- {
- "simulatorParams": {
- "repeatCount": 5,
- "repeatInterval": 2,
- "vesServerUrl": "http://VES-HOST:8080/eventListener/v7"
- },
- "templateName": "validExampleMeasurementEvent.json",
- "patch": {
- "event": {
- "commonEventHeader": {
- "eventId": "PATCHED_eventId",
- "sourceName": "PATCHED_sourceName",
- "version": 3.0
- }
- }
- }
- }
-
-*One-time event sending*
-Enables direct, immediate event sending without need to have template deployed on backend.
-Keywords are supported,thus once passed, will also be substituted with proper strings.
-Passed event body must be valid and complete event according to VES Collector interface.
-To trigger sending use following endpoint *http://<simulator_ip>:5000/simulator/event*.
-
-Supported method: *POST*
-Headers:
- - Content-Type - application/json
-Parameters:
-
- vesServerUrl - valid URL to VES Collector event listener
- event - body of event to be sent directly to VES Collector (it can contain keyword expressions)
-
-
-Sample Request:
-
- {
- "vesServerUrl": "http://VES-HOST:8080/eventListener/v7",
- "event": {
- "commonEventHeader": {
- "eventId": "#RandomString(20)",
- "sourceName": "PATCHED_sourceName",
- "version": 3.0
- }
- }
- }
-
-### Changing simulator configuration
-Utility of default configuration has been introduced so as to facilitate sending requests. so far only vesServerUrl states default simulator configuration.
-On simulator startup, vesServerUrl is initialized with default value, but must be replaced with correct VES server url by user.
-Once vesServerUrl is properly set on simulator, this parameter does not need to be incorporated into every trigger event request.
-If user does not provide vesServerUrl in trigger request, default value will be used.
-If use does provide vesServerUrl in trigger request, then passed value will be used instead of default one (default value will not be overwritten by provided one).
-
-It is possible to get and update configuration (current target vesServerUrl) using offered REST API - */simulator/config* endpoint is exposed for that.
-To get current configuration *GET* method must be used.
-To update vesServerUrl *PUT* method is used, example request:
-
- {
- "vesServerUrl": "http://10.154.164.117:8080/eventListener/v7"
- }
-
-Note: passed vesServerUrl must be wellformed URL.
-
-
-### Running simulator
-The recommended way is to checkout PNF Simulator project from ONAP Git repository and use *simulator*.sh script.
-If you copy *simulator.sh* script to another location, keep in mind to copy also *docker-compose.yml* and directories: *config and templates*.
-In order to run simulator, invoke ./simulator.sh -e build to build required images and then invoke ./simulator.sh -e start
-Script downloads if necessary needed Docker images and runs instances of these images.
-After simulator start it is advisable to setup default value for target vesServerUrl.
-
-Example request:
-
- PUT to http://<simulator_ip>:<simulator_port>/simulator/config
-
- {
- "vesServerUrl": "PUT HERE VALID URL TO YOUR VES EVENT LISTENER"
- }
-
-### Templates
-Template is a draft event. Merging event with patch will result in valid VES event. Template itself should be a correct VES event as well as valid json object.
-In order to apply custom template, just copy it to ./templates directory.
-*notification.json* and *registration.json* are available by default in *./templates* directory.
-
-#### Template management
-The simulator provides means for managing templates. Supported actions: adding, editing (overriding) and deleting are available via HTTP endpoint */template*
-
-```GET /template/list```
-Lists all templates known to the simulator.
-
-```GET /template/get-content/{name}```
-Gets template content based on *name* path variable.
-
-```POST /template/upload?override=true```
-Saves template content under *name* path variable. The non-mandatory parameter *override* allows overwriting an existing template.
-
-Sample payload:
-```
-{
- "name": "someTemplate",
- "template": {
- "commonEventHeader": {
- "domain": "notification",
- "eventName": "vFirewallBroadcastPackets"
- },
- "notificationFields": {
- "arrayOfNamedHashMap": [{
- "name": "A20161221.1031-1041.bin.gz",
- "hashMap": {
- "fileformatType": "org.3GPP.32.435#measCollec"
- }
- }]
- }
- }
-}
-```
-
-### Searching for key-value conditions in stored templates
-Simulator allows to search through stored templates and retrieve names of those that satisfy given criteria passed in form of key-value pairs (See examples below).
-Following data types are supported in search as values:
--integer
--string
--double
--boolean
-Searching for null values as well as incorporating regex expression with intention to find a match is not supported.
-Search expression must be valid JSON, thus no duplicate keys are allowed - user could specify the same parameter multiple times, but only last occurrence will be applied to query.
-Example search expression:
-
-{"domain": "notification", "sequence": 1, "startEpochMicrosec": 1531616794, "sampleDouble": 2.5}
-
-will find all templates that contain all of passed key-value entries. There is an AND condition beetwen given criteria - all of them must be satisfied to qualify template as matching item.
- Keys of search expressions are searched in case insensitive way as well as string values.
-Where it comes to values of numerical and boolean type exact match is expected.
-
-API usage:
-
-```POST /template/search```
-Produces query that returns templates that contain searched criteria
-
-Sample payload:
-```
-{
- "searchExpr": {
- "domain": "notification",
- "sequence": 1,
- "startEpochMicrosec": 1531616794,
- "sampleDouble": 2.5
- }
-}
-```
-Sample response:
-```
-[notification.json]
-```
-
-
-Note: Manually deployed templates, or actually existing ones, but modified inside the templates catalog '/app/templates', will be automatically synchronized with schemas stored inside the database. That means that a user can dynamically change the template content using vi editor at simulator container, as well as use any editor at any machine and then push the changes to the template folder. All the changes will be processed 'on the fly' and accessible via the rest API.
-
-### Periodic events
-Simulator has ability to send event periodically. Rest API support parameters:
-* repeatCount - count of times that event will be sent to VES
-* repeatInterval - interval (in second) between two events.
-(Checkout example to see how to use them)
-
-### Patching
-User is able to provide patch in request, which will be merged into template.
-
-Warning: Patch should be a valid json object (no json primitives nor json arrays are allowed as a full body of patch).
-
-This mechanism allows to override part of template.
-If in "patch" section there are additional parameters (absent in template), those parameters with values will be added to event.
-Patching mechanism supports also keywords that enables automatic value generation of appropriate type
-
-### Keyword support
-Simulator supports corresponding keywords:
-- \#RandomInteger(start,end) - substitutes keyword with random positive integer within given range (range borders inclusive)
-- \#RandomPrimitiveInteger(start,end) - the same as #RandomInteger(start,end), but returns long as result
-- \#RandomInteger - substitutes keyword with random positive integer
-- \#RandomString(length) - substitutes keyword with random ASCII string with specified length
-- \#RandomString - substitutes keyword with random ASCII string with length of 20 characters
-- \#Timestamp - substitutes keyword with current timestamp in epoch (calculated just before sending event)
-- \#TimestampPrimitive - the same as \#Timestamp, but returns long as result
-- \#Increment - substitutes keyword with positive integer starting from 1 - for each consecutive event, value of increment property is incremented by 1
-
-Additional hints and restrictions:
-All keywords without 'Primitive' in name return string as result. To specify keyword with 2 arguments e.g. #RandomInteger(start,end) no whitespaces between arguments are allowed.
-Maximal value of arguments for RandomInteger is limited to the java integer range. Minimal is always 0. (Negative values are prohibited and wont be treated as a correct parts of keyword).
-RandomInteger with parameters will automatically find minimal and maximal value form the given attributes so no particular order of those is expected.
-
-How does it work?
-When user do not want to fill in parameter values that are not relevant from user perspective but are mandatory by end system, then keyword feature should be used.
-In template, keyword strings are substituted in runtime with appropriate values autogenerated by simulator.
-Example can be shown below:
-
-Example template with keywords:
-
- {
- "event": {
- "commonEventHeader": {
- "eventId": "123#RandomInteger(8,8)",
- "eventType": "pnfRegistration",
- "startEpochMicrosec": "#Timestamp",
- "vesEventListenerVersion": "7.0.1",
- "lastEpochMicrosec": 1539239592379
- },
- "pnfRegistrationFields": {
- "pnfRegistrationFieldsVersion":"2.0",
- "serialNumber": "#RandomString(7)",
- "vendorName": "Nokia",
- "oamV4IpAddress": "val3",
- "oamV6IpAddress": "val4"
- }
- }
- }
-
-Corresponding result of keyword substitution (event that will be sent):
-
- {
- "event": {
- "commonEventHeader": {
- "eventId": "1238",
- "eventType": "pnfRegistration",
- "startEpochMicrosec": "154046405117",
- "vesEventListenerVersion": "7.0.1",
- "lastEpochMicrosec": 1539239592379
- },
- "pnfRegistrationFields": {
- "pnfRegistrationFieldsVersion":"2.0",
- "serialNumber": "6061ZW3",
- "vendorName": "Nokia",
- "oamV4IpAddress": "val3",
- "oamV6IpAddress": "val4"
- }
- }
- }
-
-
-### Logging
-Every start of simulator will generate new logs that can be found in docker pnf-simualtor container under path:
-/var/log/ONAP/pnfsimulator/pnfsimulator_output.log
-
-### Swagger
-Detailed view of simulator REST API is available via Swagger UI
-Swagger UI is available on *http://<simulator_ip>:5000/swagger-ui.html*
-
-### History
-User is able to view events history.
-In order to browse history, go to *http://<simulator_ip>:8081/db/pnf_simulator/eventData*
-
-### TLS Support
-Simulator is able to communicate with VES using HTTPS protocol.
-CA certificates are incorporated into simulator docker image, thus no additional actions are required from user.
-
-Certificates can be found in docker container under path: */usr/local/share/ca-certificates/*
-
-Simulator works with VES that uses both self-signed certificate (already present in keystore) and VES integrated to AAF.
-
-
-## Developers Guide
-
-### Integration tests
-Integration tests are located in folder 'integration'. Tests are using docker-compose from root folder.
-This docker-compose has pnfsimulator image set on nexus3.onap.org:10003/onap/pnf-simulator:5.0.0-SNAPSHOT.
-To test your local changes before running integration tests please build project using:
-
- 'mvn clean package docker:build'
-
-then go to 'integration' folder and run:
-
- 'mvn test'
diff --git a/test/mocks/pnfsimulator/pnfsimulator/config/config.json b/test/mocks/pnfsimulator/pnfsimulator/config/config.json
deleted file mode 100644
index 819f7d751..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/config/config.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
- "simulatorParams": {
- "vesServerUrl": "http://VES-HOST:VES-PORT/eventListener/v7",
- "repeatInterval": 10,
- "repeatCount": 2
- },
- "templateName": "notification.json",
- "patch":{}
-} \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/db/pnf_simulator.js b/test/mocks/pnfsimulator/pnfsimulator/db/pnf_simulator.js
deleted file mode 100644
index f5a03c3d0..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/db/pnf_simulator.js
+++ /dev/null
@@ -1,28 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-const res = [
- db.createUser({ user: 'pnf_simulator_user', pwd: 'zXcVbN123!', roles: ['readWrite', 'dbAdmin'] }),
- db.simulatorConfig.insert({"vesServerUrl": "http://xdcae-ves-collector.onap:8080/eventListener/v7"}),
- db.createCollection("template"),
- db.createView("flatTemplatesView", "template", [{"$project":{"keyValues":{"$objectToArray": "$$ROOT.flatContent"}}}])
-];
-
-printjson(res);
diff --git a/test/mocks/pnfsimulator/pnfsimulator/deployment/PnP_PNF_sim_heat_template.yml b/test/mocks/pnfsimulator/pnfsimulator/deployment/PnP_PNF_sim_heat_template.yml
deleted file mode 100644
index 4bf6758d9..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/deployment/PnP_PNF_sim_heat_template.yml
+++ /dev/null
@@ -1,118 +0,0 @@
-description: Heat template that deploys PnP PNF simulator
-heat_template_version: '2013-05-23'
-parameters:
- flavor_name: {description: Type of instance (flavor) to be used, label: Flavor,
- type: string}
- image_name: {description: Image to be used for compute instance, label: Image name
- or ID, type: string}
- key_name: {description: Public/Private key pair name, label: Key pair name, type: string}
- public_net_id: {description: Public network that enables remote connection to VNF,
- label: Public network name or ID, type: string}
- private_net_id: {type: string, description: Private network id, label: Private network name or ID}
- private_subnet_id: {type: string, description: Private subnet id, label: Private subnetwork name or ID}
- proxy: {type: string, description: Proxy, label: Proxy, default: ""}
-resources:
- PNF_PnP_simualtor:
- type: OS::Nova::Server
- properties:
- key_name: { get_param: key_name }
- image: { get_param: image_name }
- flavor: { get_param: flavor_name }
- networks:
- - port: { get_resource: PNF_PnP_simualtor_port0 }
- user_data_format: RAW
- user_data:
- str_replace:
- template: |
- #!/bin/bash
-
- set_versions () {
- DOCKER_COMPOSE_VERSION=1.22.0
- }
-
-
- enable_root_ssh () {
- sed -i 's/PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config
- sed -i 's/PasswordAuthentication.*/PasswordAuthentication yes/' /etc/ssh/sshd_config
- service sshd restart
- echo -e "arthur\narthur" | passwd root
- }
-
- update_os () {
- dnf -y install fedora-upgrade
- }
-
- docker_remove () {
- dnf -y remove docker \
- docker-client \
- docker-client-latest \
- docker-common \
- docker-latest \
- docker-latest-logrotate \
- docker-logrotate \
- docker-selinux \
- docker-engine-selinux \
- docker-engine
- }
-
- docker_install_and_configure () {
- dnf -y install dnf-plugins-core
- dnf config-manager \
- --add-repo \
- https://download.docker.com/linux/fedora/docker-ce.repo
- dnf -y install docker-ce
- systemctl start docker
- mkdir -p /etc/systemd/system/docker.service.d/
- cat > /etc/systemd/system/docker.service.d/override.conf<< EOF
- [Service]
- Environment="HTTP_PROXY=$proxy"
- Environment="HTTPS_PROXY=$proxy"
- EOF
- systemctl daemon-reload
- systemctl restart docker
- }
- docker_compose_install () {
- curl -L https://github.com/docker/compose/releases/download/$DOCKER_COMPOSE_VERSION/docker-compose-$(uname -s)-$(uname -m) -o /usr/local/bin/docker-compose
- chmod +x /usr/local/bin/docker-compose
- }
- pnf_sim_file_checkout () {
- cd /root; git clone https://gerrit.onap.org/r/integration
- printf "{\n "simulatorParams": {\n "vesServerUrl": "http://VES-HOST:VES-PORT/eventListener/v7",\n "testDuration": "10",\n "messageInterval": "1"\n },\n "commonEventHeaderParams": {\n "eventName": "pnfRegistration_Nokia_5gDu",\n "nfNamingCode": "gNB",\n "nfcNamingCode": "oam",\n "sourceName": "NOK6061ZW3",\n "sourceId": "val13",\n "reportingEntityName": "NOK6061ZW3"\n },\n "pnfRegistrationParams": {\n "serialNumber": "6061ZW3",\n "vendorName": "Nokia",\n "oamV4IpAddress": "val3",\n "oamV6IpAddress": "val4",\n "unitFamily": "BBU",\n "modelNumber": "val6",\n "softwareVersion": "val7",\n "unitType": "val8"\n }\n}" > integration/test/mocks/pnfsimulator/config/config.json
- }
-
- start_simulator () {
- docker login -u docker -p docker nexus3.onap.org:10003
- cd ~/integration/test/mocks/pnfsimulator
- ./simulator.sh start
- }
-
- set_versions
- enable_root_ssh
- update_os
- docker_remove
- docker_install_and_configure
- docker_compose_install
- pnf_sim_file_checkout
- start_simulator
- params:
- $proxy: { get_param: proxy }
- PNF_PnP_simualtor_port0:
- type: OS::Neutron::Port
- properties:
- network_id: { get_param: private_net_id }
- security_groups:
- - default
- fixed_ips:
- - subnet_id: { get_param: private_subnet_id }
- PNF_PnP_simualtor_public:
- type: OS::Neutron::FloatingIP
- properties:
- floating_network_id: { get_param: public_net_id }
- port_id: { get_resource: PNF_PnP_simualtor_port0 }
-outputs:
- PNF_PnP_simualtor_private_ip:
- description: IP address of PNF_PnP_simualtor in private network
- value: { get_attr: [ PNF_PnP_simualtor, first_address ] }
- PNF_PnP_simualtor_public_ip:
- description: Floating IP address of PNF_PnP_simualtor in public network
- value: { get_attr: [ PNF_PnP_simualtor_public, floating_ip_address ] } \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/docker-compose.yml b/test/mocks/pnfsimulator/pnfsimulator/docker-compose.yml
deleted file mode 100644
index e69d371ed..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/docker-compose.yml
+++ /dev/null
@@ -1,36 +0,0 @@
-version: '3'
-
-services:
-
- mongo:
- image: mongo
- restart: always
- environment:
- MONGO_INITDB_ROOT_USERNAME: root
- MONGO_INITDB_ROOT_PASSWORD: zXcVbN123!
- MONGO_INITDB_DATABASE: pnf_simulator
- volumes:
- - ./db:/docker-entrypoint-initdb.d
- ports:
- - "27017:27017"
-
- mongo-express:
- image: mongo-express
- restart: always
- ports:
- - 8081:8081
- environment:
- ME_CONFIG_MONGODB_ADMINUSERNAME: root
- ME_CONFIG_MONGODB_ADMINPASSWORD: zXcVbN123!
-
- pnf-simulator:
- image: nexus3.onap.org:10003/onap/pnfsimulator
- ports:
- - "5000:5000"
- volumes:
- - ./logs:/var/log
- - ./templates:/app/templates
- restart: on-failure
- depends_on:
- - mongo
- - mongo-express
diff --git a/test/mocks/pnfsimulator/pnfsimulator/docker/Dockerfile b/test/mocks/pnfsimulator/pnfsimulator/docker/Dockerfile
deleted file mode 100644
index 814cf64a4..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/docker/Dockerfile
+++ /dev/null
@@ -1,8 +0,0 @@
-FROM openjdk:8-jre-alpine
-ADD libs /app/libs
-ADD pnfsimulator-5.0.0-SNAPSHOT.jar /app/pnf-simulator.jar
-CMD apk update
-CMD apk add ca-certificates
-ADD certificates /usr/local/share/ca-certificates/
-RUN update-ca-certificates
-CMD java -cp /app/libs/*:/app/pnf-simulator.jar org.onap.pnfsimulator.Main
diff --git a/test/mocks/pnfsimulator/pnfsimulator/docker/certificates/AAF_RootCA.crt b/test/mocks/pnfsimulator/pnfsimulator/docker/certificates/AAF_RootCA.crt
deleted file mode 100644
index e9a50d7ea..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/docker/certificates/AAF_RootCA.crt
+++ /dev/null
@@ -1,31 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIFPjCCAyagAwIBAgIJAJ6u7cCnzrWdMA0GCSqGSIb3DQEBCwUAMCwxDjAMBgNV
-BAsMBU9TQUFGMQ0wCwYDVQQKDARPTkFQMQswCQYDVQQGEwJVUzAeFw0xODA0MDUx
-NDE1MjhaFw0zODAzMzExNDE1MjhaMCwxDjAMBgNVBAsMBU9TQUFGMQ0wCwYDVQQK
-DARPTkFQMQswCQYDVQQGEwJVUzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoC
-ggIBAMA5pkgRs7NhGG4ew5JouhyYakgYUyFaG121+/h8qbSdt0hVQv56+EA41Yq7
-XGie7RYDQK9NmAFF3gruE+6X7wvJiChp+Cyd7sFMnb65uWhxEdxWTM2BJFrgfzUn
-H8ZCxgaCo3XH4PzlKRy2LQQJEJECwl/RZmRCXijMt5e9h8XoZY/fKkKcZZUsWNCM
-pTo266wjvA9MXLmdgReRj0+vrCjrNqy+htwJDztoiHWiYPqT6o8EvGcgjNqjlZx7
-NUNf8MfLDByqKF6+wRbHv1GKjn3/Vijd45Fv8riyRYROiFanvbV6jIfBkv8PZbXg
-2VDWsYsgp8NAvMxK+iV8cO+Ck3lBI2GOPZbCEqpPVTYbLUz6sczAlCXwQoPzDIZY
-wYa3eR/gYLY1gP2iEVHORag3bLPap9ZX5E8DZkzTNTjovvLk8KaCmfcaUMJsBtDd
-ApcUitz10cnRyZc1sX3gE1f3DpzQM6t9C5sOVyRhDcSrKqqwb9m0Ss04XAS9FsqM
-P3UWYQyqDXSxlUAYaX892u8mV1hxnt2gjb22RloXMM6TovM3sSrJS0wH+l1nznd6
-aFXftS/G4ZVIVZ/LfT1is4StoyPWZCwwwly1z8qJQ/zhip5NgZTxQw4mi7ww35DY
-PdAQOCoajfSvFjqslQ/cPRi/MRCu079heVb5fQnnzVtnpFQRAgMBAAGjYzBhMB0G
-A1UdDgQWBBRTVTPyS+vQUbHBeJrBKDF77+rtSTAfBgNVHSMEGDAWgBRTVTPyS+vQ
-UbHBeJrBKDF77+rtSTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAN
-BgkqhkiG9w0BAQsFAAOCAgEAPx/IaK94n02wPxpnYTy+LVLIxwdq/kawNd6IbiMz
-L87zmNMDmHcGbfoRCj8OkhuggX9Lx1/CkhpXimuYsZOFQi5blr/u+v4mIbsgbmi9
-7j+cUHDP0zLycvSvxKHty51LwmaX9a4wkJl5zBU4O1sd/H9tWcEmwJ39ltKoBKBx
-c94Zc3iMm5ytRWGj+0rKzLDAXEWpoZ5bE5PLJauA6UDCxDLfs3FwhbS7uDggxYvf
-jySF5FCNET94oJ+m8s7VeHvoa8iPGKvXrIqdd7XDHnqJJlVKr7m9S0fMbyEB8ci2
-RtOXDt93ifY1uhoEtEykn4dqBSp8ezvNMnwoXdYPDvTd9uCAFeWFLVreBAWxd25h
-PsBTkZA5hpa/rA+mKv6Af4VBViYr8cz4dZCsFChuioVebe9ighrfjB//qKepFjPF
-CyjzKN1u0JKm/2x/ORqxkTONG8p3uDwoIOyimUcTtTMv42bfYD88RKakqSFXE9G+
-Z0LlaKABqfjK49o/tsAp+c5LoNlYllKhnetO3QAdraHwdmC36BhoghzR1jpX751A
-cZn2VH3Q4XKyp01cJNCJIrua+A+bx6zh3RyW6zIIkbRCbET+UD+4mr8WIcSE3mtR
-ZVlnhUDO4z9//WKMVzwS9Rh8/kuszrGFI1KQozXCHLrce3YP6RYZfOed79LXaRwX
-dYY=
------END CERTIFICATE-----
diff --git a/test/mocks/pnfsimulator/pnfsimulator/docker/certificates/dcaelocal.crt b/test/mocks/pnfsimulator/pnfsimulator/docker/certificates/dcaelocal.crt
deleted file mode 100644
index 1be5a6fb2..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/docker/certificates/dcaelocal.crt
+++ /dev/null
@@ -1,20 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDSDCCAjCgAwIBAgIEWAkZ5jANBgkqhkiG9w0BAQUFADBmMQswCQYDVQQGEwJV
-UzELMAkGA1UECBMCTkoxEzARBgNVBAcTCk1pZGRsZXRvd24xDTALBgNVBAoTBERD
-QUUxEjAQBgNVBAsTCU9QRU4tRENBRTESMBAGA1UEAxMJRENBRUxPQ0FMMB4XDTE2
-MTAyMDE5MjQyMloXDTE3MDExODE5MjQyMlowZjELMAkGA1UEBhMCVVMxCzAJBgNV
-BAgTAk5KMRMwEQYDVQQHEwpNaWRkbGV0b3duMQ0wCwYDVQQKEwREQ0FFMRIwEAYD
-VQQLEwlPUEVOLURDQUUxEjAQBgNVBAMTCURDQUVMT0NBTDCCASIwDQYJKoZIhvcN
-AQEBBQADggEPADCCAQoCggEBALoR0omXk1Xou3TdDydwhCF9V0ZSgQtMlMr8qYxe
-wLg/UGZbTea5HHqBmjWTVEKFGea7V7pnEiGWCPftIy/4D8ZSDaEQQ0WYCf6jNxeI
-T6PSiM+db/TwPToqPNMQwoftpsAEkJEJFB9rgUXoDD9FY5kUQW+fYOLyUeOb/rDE
-cRnSZX2qp1M/jAm1puIeNQcqiPExOMHqnN2bIgAZoHaFucdyOQUxuhIAgxoqe8Zt
-s451hm94g42F92I8KLy4EyOhXmSMjaLmgOqdjOPyBubOde4R39+6KAyQNGAnm5I4
-xq2J80R7qU0+4nnfjs2ScfAB/HUsWoVaGr9Ii4e34CW8JG8CAwEAATANBgkqhkiG
-9w0BAQUFAAOCAQEARqQmWg1gtwbgPbamWy0vdNWLQZm78y1ETR0ro9cazD25sD5/
-anDJ2RA97pGQFgncAI+Fzg4YygWBIext0OE92wQeKaHkxSujAe09F0wPcwc00Vyq
-NtBPcP17n18FkpFW1hkurlWiHOpGDRW2MmwhLj4lSFJmxMBAbzlfKrTBgj116UT+
-d4AGyuovS4LkmBWYFN//yoddGyrO26ar1M9ORdbDkjzOK4DkioTx3bwbdKPW7V4a
-+Ns1KDFZ7FpjcWPZc6uDV4lBIhNUZuQZLy5hWBUeNhh4i2lL6xMrPzRa5kV1M7og
-ipv/ReVCixJqsGhx+8I4OPUbPoUYzRF+JmYUzA==
------END CERTIFICATE----- \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/pom.xml b/test/mocks/pnfsimulator/pnfsimulator/integration/pom.xml
deleted file mode 100644
index 21c2f3c49..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/pom.xml
+++ /dev/null
@@ -1,130 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>org.onap.simulator</groupId>
- <artifactId>simulator-parent</artifactId>
- <version>5.0.0-SNAPSHOT</version>
- </parent>
-
- <artifactId>pnf-simulator-integration</artifactId>
- <repositories>
- <repository>
- <id>dl.bintray.com</id>
- <url>https://dl.bintray.com/palantir/releases</url>
- </repository>
- </repositories>
-
- <properties>
- <maven.compiler.source>1.8</maven.compiler.source>
- <maven.compiler.target>1.8</maven.compiler.target>
- </properties>
-
- <dependencies>
-
- <dependency>
- <groupId>org.assertj</groupId>
- <artifactId>assertj-core</artifactId>
- <version>3.9.1</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter</artifactId>
- <version>2.0.2.RELEASE</version>
- </dependency>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-web</artifactId>
- <version>2.0.2.RELEASE</version>
- </dependency>
-
- <dependency>
- <groupId>io.rest-assured</groupId>
- <artifactId>rest-assured</artifactId>
- <version>3.2.0</version>
- </dependency>
- <dependency>
- <groupId>org.mongodb</groupId>
- <artifactId>mongo-java-driver</artifactId>
- <version>3.9.1</version>
- </dependency>
-
- <dependency>
- <groupId>org.springframework</groupId>
- <artifactId>spring-test</artifactId>
- <version>5.0.4.RELEASE</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-test</artifactId>
- <version>2.0.1.RELEASE</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- <version>2.8.2</version>
- </dependency>
- <dependency>
- <groupId>com.palantir.docker.compose</groupId>
- <artifactId>docker-compose-rule-junit4</artifactId>
- <version>0.34.0</version>
- </dependency>
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <executions>
- <execution>
- <id>onap-license</id>
- <phase>none</phase>
- </execution>
- <execution>
- <id>onap-java-style</id>
- <phase>none</phase>
- </execution>
- </executions>
- <artifactId>maven-checkstyle-plugin</artifactId>
- <version>2.17</version>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <version>2.12.1</version>
- <configuration>
- <includes>
- <!--Execute only suites from the test folder-->
- <include>**/*Suite.java</include>
- </includes>
- </configuration>
- </plugin>
- </plugins>
- </build>
-</project>
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/main/java/org/onap/pnfsimulator/integration/VesSimulatorController.java b/test/mocks/pnfsimulator/pnfsimulator/integration/src/main/java/org/onap/pnfsimulator/integration/VesSimulatorController.java
deleted file mode 100644
index 70e0c609c..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/main/java/org/onap/pnfsimulator/integration/VesSimulatorController.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.integration;
-
-import com.google.gson.Gson;
-import com.google.gson.JsonObject;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.web.bind.annotation.PostMapping;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RestController;
-
-@RequestMapping("ves-simulator")
-@RestController
-public class VesSimulatorController {
-
- private final VesSimulatorService vesSimulatorService;
- private final Gson gson;
-
- @Autowired
- public VesSimulatorController(VesSimulatorService vesSimulatorService, Gson gson) {
- this.vesSimulatorService = vesSimulatorService;
- this.gson = gson;
- }
-
- @PostMapping("eventListener/v5")
- String sendEventToDmaapV5(@RequestBody String body) {
- System.out.println("Received event" + body);
- JsonObject jsonObject = gson.fromJson(body, JsonObject.class);
- vesSimulatorService.sendEventToDmaapV5(jsonObject);
- return "MessageAccepted";
- }
-
- @PostMapping("eventListener/v7")
- String sendEventToDmaapV7(@RequestBody String body) {
- System.out.println("Received event" + body);
- JsonObject jsonObject = gson.fromJson(body, JsonObject.class);
- vesSimulatorService.sendEventToDmaapV7(jsonObject);
- return "MessageAccepted";
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/main/java/org/onap/pnfsimulator/integration/VesSimulatorService.java b/test/mocks/pnfsimulator/pnfsimulator/integration/src/main/java/org/onap/pnfsimulator/integration/VesSimulatorService.java
deleted file mode 100644
index 65e5d3e13..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/main/java/org/onap/pnfsimulator/integration/VesSimulatorService.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.integration;
-
-import com.google.gson.JsonObject;
-import org.springframework.stereotype.Service;
-
-@Service
-public class VesSimulatorService {
-
- void sendEventToDmaapV5(JsonObject jsonObject) {
- //JUST FOR TESTING PURPOSE
- }
-
- void sendEventToDmaapV7(JsonObject jsonObject) {
- //JUST FOR TESTING PURPOSE
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/main/resources/application.properties b/test/mocks/pnfsimulator/pnfsimulator/integration/src/main/resources/application.properties
deleted file mode 100644
index 205ed95fb..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/main/resources/application.properties
+++ /dev/null
@@ -1 +0,0 @@
-server.port=8000 \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/main/resources/keystore b/test/mocks/pnfsimulator/pnfsimulator/integration/src/main/resources/keystore
deleted file mode 100644
index 26a16f756..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/main/resources/keystore
+++ /dev/null
Binary files differ
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/BasicAvailabilityTest.java b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/BasicAvailabilityTest.java
deleted file mode 100644
index 9f11a00a0..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/BasicAvailabilityTest.java
+++ /dev/null
@@ -1,235 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.integration;
-
-import static io.restassured.RestAssured.given;
-import static io.restassured.RestAssured.when;
-import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
-import static org.hamcrest.Matchers.equalTo;
-
-import com.google.gson.JsonObject;
-import java.io.IOException;
-import java.net.Inet4Address;
-import java.net.NetworkInterface;
-import java.net.SocketException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Collections;
-import java.util.UUID;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Mockito;
-import org.mockito.internal.util.Timer;
-import org.mockito.internal.verification.VerificationOverTimeImpl;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
-import org.springframework.test.context.junit4.SpringRunner;
-
-@RunWith(SpringRunner.class)
-@SpringBootTest(classes = {Main.class, TestConfiguration.class}, webEnvironment = WebEnvironment.DEFINED_PORT)
-public class BasicAvailabilityTest {
-
- @Autowired
- VesSimulatorController vesSimulatorController;
-
- @Autowired
- VesSimulatorService vesSimulatorService;
-
- private final String ACTION_START = "start";
-
- private String currenVesSimulatorIp;
-
- @Before
- public void setUp() throws Exception {
- currenVesSimulatorIp = getCurrentIpAddress();
- }
-
- @After
- public void tearDown() {
- Mockito.reset(vesSimulatorService);
- }
-
- @Test
- public void simulatorShouldFailWhenTriggeredNonexistentTemplate(){
- //given
- String startUrl = prepareRequestUrl(ACTION_START);
- String body = "{\n"
- + "\"templateName\": \"any_nonexistent_template.json\",\n"
- + "\"patch\":{},\n"
- + "\"simulatorParams\": {\n"
- + "\"vesServerUrl\": \"https://" + currenVesSimulatorIp + ":9443/ves-simulator/eventListener/v5\",\n"
- + "\"repeatInterval\": 1,\n"
- + "\"repeatCount\": 1\n"
- + "}\n"
- + "}";
-
- //when
- given()
- .contentType("application/json")
- .body(body)
- .when()
- .post(startUrl)
- .then()
- .statusCode(400)
- .body("message", equalTo("Cannot start simulator - template any_nonexistent_template.json not found."));
- }
-
- @Test
- public void whenTriggeredSimulatorShouldSendSingleEventToVes() {
- //given
- String startUrl = prepareRequestUrl(ACTION_START);
- String body = "{\n"
- + "\"templateName\": \"notification.json\",\n"
- + "\"patch\":{},\n"
- + "\"simulatorParams\": {\n"
- + "\"vesServerUrl\": \"https://" + currenVesSimulatorIp + ":9443/ves-simulator/eventListener/v5\",\n"
- + "\"repeatInterval\": 1,\n"
- + "\"repeatCount\": 1\n"
- + "}\n"
- + "}";
- ArgumentCaptor<JsonObject> parameterCaptor = ArgumentCaptor.forClass(JsonObject.class);
-
- //when
- given()
- .contentType("application/json")
- .body(body)
- .when()
- .post(startUrl)
- .then()
- .statusCode(200)
- .body("message", equalTo("Request started"));
-
- Mockito.verify(vesSimulatorService,
- Mockito.timeout(3000))
- .sendEventToDmaapV5(parameterCaptor.capture());
-
- assertThat(parameterCaptor.getValue()
- .getAsJsonObject("event")
- .getAsJsonObject("commonEventHeader")
- .get("domain").getAsString()).isEqualTo("notification");
- }
-
- @Test
- public void simulatorShouldCorrectlyRespondOnCancellAllEvent() {
- //given
- String ACTION_CANCEL_ALL = "cancel";
- String cancelAllUrl = prepareRequestUrl(ACTION_CANCEL_ALL);
-
- //when
- when()
- .post(cancelAllUrl)
- .then()
- .statusCode(200)
- .body("message", equalTo("Event(s) was cancelled"));
-
- }
-
- @Test
- public void simulatorBeAbleToUseNewlyAddedTemplate() throws IOException {
- //given
- String templateBody = "{\"fake\":\"template\"}\n";
- String fileName = UUID.randomUUID() + ".json";
- String requestBody = "{\n"
- + "\"templateName\": \"" + fileName + "\",\n"
- + "\"patch\":{},\n"
- + "\"simulatorParams\": {\n"
- + "\"vesServerUrl\": \"https://" + currenVesSimulatorIp + ":9443/ves-simulator/eventListener/v5\",\n"
- + "\"repeatInterval\": 1,\n"
- + "\"repeatCount\": 1\n"
- + "}\n"
- + "}";
- ArgumentCaptor<JsonObject> parameterCaptor = ArgumentCaptor.forClass(JsonObject.class);
-
- //when
- Path newFile = Files.createFile(Paths.get("..", "templates", fileName));
- Files.write(newFile, templateBody.getBytes());
-
- given()
- .contentType("application/json")
- .body(requestBody)
- .when()
- .post(prepareRequestUrl(ACTION_START));
-
- Files.delete(newFile);
-
- //then
- Mockito.verify(vesSimulatorService, Mockito.timeout(3000))
- .sendEventToDmaapV5(parameterCaptor.capture());
- assertThat(parameterCaptor.getValue()
- .get("fake").getAsString()).isEqualTo("template");
-
- }
-
- @Test
- public void whenTriggeredSimulatorShouldSendGivenAmountOfEventsToVes() {
- //given
- String startUrl = prepareRequestUrl(ACTION_START);
- String body = "{\n"
- + "\"templateName\": \"notification.json\",\n"
- + "\"patch\":{},\n"
- + "\"simulatorParams\": {\n"
- + "\"vesServerUrl\": \"https://" + currenVesSimulatorIp + ":9443/ves-simulator/eventListener/v5\",\n"
- + "\"repeatInterval\": 1,\n"
- + "\"repeatCount\": 4\n"
- + "}\n"
- + "}";
- ArgumentCaptor<JsonObject> parameterCaptor = ArgumentCaptor.forClass(JsonObject.class);
-
- //when
- given()
- .contentType("application/json")
- .body(body)
- .when()
- .post(startUrl)
- .then()
- .statusCode(200)
- .body("message", equalTo("Request started"));
-
- VerificationOverTimeImpl verificator = new VerificationOverTimeImpl(100, Mockito.times(4), false, new Timer(6000));
- Mockito.verify(vesSimulatorService, verificator).sendEventToDmaapV5(parameterCaptor.capture());
-
- for (JsonObject value : parameterCaptor.getAllValues()) {
- assertThat(value
- .getAsJsonObject("event")
- .getAsJsonObject("commonEventHeader")
- .get("domain").getAsString()).isEqualTo("notification");
- }
- }
-
- private String prepareRequestUrl(String action) {
- return "http://0.0.0.0:5000/simulator/" + action;
- }
-
- private String getCurrentIpAddress() throws SocketException {
- return Collections.list(NetworkInterface.getNetworkInterfaces()).stream()
- .flatMap(i -> Collections.list(i.getInetAddresses()).stream())
- .filter(ip -> ip instanceof Inet4Address)
- .map(e -> (Inet4Address) e)
- .findFirst()
- .orElseThrow(RuntimeException::new)
- .getHostAddress();
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/OptionalTemplatesTest.java b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/OptionalTemplatesTest.java
deleted file mode 100644
index a5ffe4d47..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/OptionalTemplatesTest.java
+++ /dev/null
@@ -1,204 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.integration;
-
-import static io.restassured.RestAssured.given;
-import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
-import static org.hamcrest.Matchers.equalTo;
-
-import com.google.gson.JsonObject;
-import com.mongodb.MongoClient;
-import com.mongodb.MongoClientOptions;
-import com.mongodb.MongoCredential;
-import com.mongodb.ServerAddress;
-import com.mongodb.client.FindIterable;
-import com.mongodb.client.MongoCollection;
-import com.mongodb.client.MongoCursor;
-import com.mongodb.client.MongoDatabase;
-import java.time.Instant;
-import java.net.Inet4Address;
-import java.net.NetworkInterface;
-import java.net.SocketException;
-import java.net.UnknownHostException;
-import java.util.Collections;
-import org.assertj.core.api.Assertions;
-import org.bson.Document;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Mockito;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
-import org.springframework.test.context.junit4.SpringRunner;
-
-@RunWith(SpringRunner.class)
-@SpringBootTest(classes = {Main.class, TestConfiguration.class}, webEnvironment = WebEnvironment.DEFINED_PORT)
-public class OptionalTemplatesTest {
-
- private static final String PNF_SIMULATOR_DB = "pnf_simulator";
- private static final String COMMON_EVENT_HEADER = "commonEventHeader";
- private static final String PNF_SIMULATOR_DB_PSWD = "zXcVbN123!";
- private static final String PNF_SIMULATOR_DB_USER = "pnf_simulator_user";
- private static final String PATCHED = "patched";
- private static final String SINGLE_EVENT_URL = "http://0.0.0.0:5000/simulator/event";
-
- @Autowired
- VesSimulatorController vesSimulatorController;
-
- @Autowired
- private VesSimulatorService vesSimulatorService;
-
- private String currentVesSimulatorIp;
-
- @Before
- public void setUp() throws Exception {
- currentVesSimulatorIp = getCurrentIpAddress();
- }
-
- @After
- public void tearDown() {
- Mockito.reset(vesSimulatorService);
- }
-
- @Test
- public void whenTriggeredSimulatorWithoutTemplateShouldSendSingleEventToVes() {
- //given
- long currentTimestamp = Instant.now().getEpochSecond();
-
- String body = "{\n"
- + "\"vesServerUrl\": \"https://" + currentVesSimulatorIp + ":9443/ves-simulator/eventListener/v5\",\n"
- + "\"event\": { \n"
- + "\"commonEventHeader\": {\n"
- + "\"eventId1\": \"#RandomString(20)\",\n"
- + "\"eventId2\": \"#RandomInteger(10,10)\",\n"
- + "\"eventId3\": \"#Increment\",\n"
- + "\"eventId4\": \"#RandomPrimitiveInteger(10,10)\",\n"
- + "\"eventId5\": \"#TimestampPrimitive\",\n"
- + "\"sourceName\": \"Single_sourceName\",\n"
- + "\"version\": 3"
- + "}\n"
- + "}\n"
- + "}";
- ArgumentCaptor<JsonObject> parameterCaptor = ArgumentCaptor.forClass(JsonObject.class);
-
- //when
- given()
- .contentType("application/json")
- .body(body)
- .when()
- .post(SINGLE_EVENT_URL)
- .then()
- .statusCode(202)
- .body("message", equalTo("One-time direct event sent successfully"));
-
- //then
- long afterExecution = Instant.now().getEpochSecond();
- Mockito.verify(vesSimulatorService,
- Mockito.timeout(3000))
- .sendEventToDmaapV5(parameterCaptor.capture());
-
- JsonObject value = parameterCaptor.getValue();
- assertThat(value
- .getAsJsonObject(COMMON_EVENT_HEADER)
- .get("sourceName").getAsString()).isEqualTo("Single_sourceName");
- assertThat(value
- .getAsJsonObject(COMMON_EVENT_HEADER)
- .get("eventId1").getAsString().length()).isEqualTo(20);
- assertThat(value
- .getAsJsonObject(COMMON_EVENT_HEADER)
- .get("eventId2").getAsString()).isEqualTo("10");
- assertThat(value
- .getAsJsonObject(COMMON_EVENT_HEADER)
- .get("eventId3").getAsString()).isEqualTo("1");
- assertThat(value
- .getAsJsonObject(COMMON_EVENT_HEADER)
- .get("eventId4").getAsInt()).isEqualTo(10);
- assertThat(value
- .getAsJsonObject(COMMON_EVENT_HEADER)
- .get("eventId5").getAsLong()).isBetween(currentTimestamp, afterExecution);
- }
-
- @Test
- public void whenTriggeredSimulatorWithoutTemplateEventShouldBeVisibleInDB() throws UnknownHostException {
- //given
- String body = "{\n"
- + "\"vesServerUrl\": \"https://" + currentVesSimulatorIp + ":9443/ves-simulator/eventListener/v5\",\n"
- + "\"event\": { \n"
- + "\"commonEventHeader\": {\n"
- + "\"sourceName\": \"HistoricalEvent\",\n"
- + "\"version\": 3"
- + "}\n"
- + "}\n"
- + "}";
- ArgumentCaptor<JsonObject> parameterCaptor = ArgumentCaptor.forClass(JsonObject.class);
-
- //when
- given()
- .contentType("application/json")
- .body(body)
- .when()
- .post(SINGLE_EVENT_URL)
- .then()
- .statusCode(202)
- .body("message", equalTo("One-time direct event sent successfully"));
-
- //then
- Mockito.verify(vesSimulatorService,
- Mockito.timeout(3000))
- .sendEventToDmaapV5(parameterCaptor.capture());
-
- Document sourceNameInMongoDB = findSourceNameInMongoDB();
- Assertions.assertThat(sourceNameInMongoDB.get(PATCHED))
- .isEqualTo("{\"commonEventHeader\":{\"sourceName\":\"HistoricalEvent\",\"version\":3}}");
- }
-
- private Document findSourceNameInMongoDB() throws UnknownHostException {
- MongoCredential credential = MongoCredential
- .createCredential(PNF_SIMULATOR_DB_USER, PNF_SIMULATOR_DB, PNF_SIMULATOR_DB_PSWD.toCharArray());
- MongoClient mongoClient = new MongoClient(new ServerAddress(Inet4Address.getLocalHost(), 27017),
- credential, MongoClientOptions.builder().build());
- MongoDatabase pnfSimulatorDb = mongoClient.getDatabase(PNF_SIMULATOR_DB);
- MongoCollection<Document> table = pnfSimulatorDb.getCollection("eventData");
- Document searchQuery = new Document();
- searchQuery.put(PATCHED, new Document("$regex", ".*" + "HistoricalEvent" + ".*"));
- FindIterable<Document> findOfPatched = table.find(searchQuery);
- Document dbObject = null;
- MongoCursor<Document> cursor = findOfPatched.iterator();
- if (cursor.hasNext()) {
- dbObject = cursor.next();
- }
- return dbObject;
- }
-
- private String getCurrentIpAddress() throws SocketException {
- return Collections.list(NetworkInterface.getNetworkInterfaces()).stream()
- .flatMap(i -> Collections.list(i.getInetAddresses()).stream())
- .filter(ip -> ip instanceof Inet4Address)
- .map(e -> (Inet4Address) e)
- .findFirst()
- .orElseThrow(RuntimeException::new)
- .getHostAddress();
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/SearchInTemplatesTest.java b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/SearchInTemplatesTest.java
deleted file mode 100644
index 9d4ff3b8e..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/SearchInTemplatesTest.java
+++ /dev/null
@@ -1,269 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.integration;
-
-import static io.restassured.RestAssured.given;
-import static java.nio.file.Files.readAllBytes;
-
-import io.restassured.http.Header;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import org.hamcrest.Matchers;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.springframework.util.ResourceUtils;
-
-public class SearchInTemplatesTest {
-
- private static final String UPLOAD = "upload";
- private static final String SEARCH = "search";
- private static final String APPLICATION_JSON = "application/json";
- private static final String CONTENT_TYPE = "Content-Type";
-
- @BeforeClass
- public static void setUp() throws IOException {
- for (File file : readFileFromTemplatesFolder()) {
- byte[] body = readAllBytes(file.toPath());
-
- given()
- .body(body)
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(UPLOAD) + "?override=true")
- .then()
- .statusCode(201);
- }
- }
-
- @Test
- public void shouldFindNothingWhenNonexistentValueIsProvided(){
- given()
- .body("{\"searchExpr\": { \"child3\": \"nonexistentValue\" }}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.empty());
- }
-
- @Test
- public void shouldFindNothingWhenNonexistentKeyIsProvided(){
- given()
- .body("{\"searchExpr\": { \"nonexistentKey\": \"Any value 1\" }}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.empty());
- }
-
- @Test
- public void shouldFindNothingWhenPartOfKeyIsProvided(){
- given()
- .body("{\"searchExpr\": { \"child\": \"Any value 1\" }}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.empty());
- }
-
- @Test
- public void shouldFindNothingWhenPartOfValueIsProvided(){
- given()
- .body("{\"searchExpr\": { \"child5\": \"Any\" }}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.empty());
- }
-
- @Test
- public void shouldBeAbleToSearchForString(){
- given()
- .body("{\"searchExpr\": { \"child1\": \"Any value 1\" }}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.hasItems("template_with_array.json", "complicated_template.json", "simple_template.json"));
-
- given()
- .body("{\"searchExpr\": { \"child2\": \"any value 4\" }}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.hasItems("template_with_array.json"));
- }
-
- @Test
- public void shouldBeAbleToSearchForManyStrings(){
- given()
- .body("{\"searchExpr\": { \"child1\": \"Any value 1\", \"child2\": \"any value 2\"}}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.hasItems("simple_template.json", "complicated_template.json"));
- }
-
- @Test
- public void shouldBeAbleToSearchForStarSign(){
- given()
- .body("{\"searchExpr\": { \"child2\": \"*\" }}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.hasItems("complicated_template.json"));
- }
-
- @Test
- public void shouldBeAbleToSearchForQuestionMark(){
- given()
- .body("{\"searchExpr\": { \"child1\": \"?\" }}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.hasItems("complicated_template.json"));
- }
-
- @Test
- public void shouldBeAbleToSearchForBrackets(){
- given()
- .body("{\"searchExpr\": { \"parent2\": \"[]\" }}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.hasItems("template_with_array.json"));
- }
-
- @Test
- public void shouldInformThatSearchForNullsIsProhibited(){
- given()
- .body("{\"searchExpr\": { \"child3\": null }}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(400);
- }
-
- @Test
- public void shouldBeAbleToSearchForURI(){
- given()
- .body("{\"searchExpr\": { \"child3\": \"https://url.com?param1=test&param2=*\" }}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.hasItems("complicated_template.json"));
- }
-
- @Test
- public void shouldBeAbleToSearchForFloats(){
- given()
- .body("{\"searchExpr\": { \"child2\": 4.44 }}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.hasItems("template_with_array.json"));
-
- given()
- .body("{\"searchExpr\": { \"child5\": 4.4 }}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.hasItems("complicated_template.json", "template_with_floats.json"));
- }
-
- @Test
- public void shouldBeAbleToSearchForIntegers(){
- given()
- .body("{\"searchExpr\": { \"child2\": 1 }}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.hasItems("template_with_array.json", "template_with_ints.json"));
-
- given()
- .body("{\"searchExpr\": { \"child2\": 4 }}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.hasItems("template_with_array.json"));
- }
-
- @Test
- public void shouldBeAbleToSearchForBooleans(){
- given()
- .body("{\"searchExpr\": { \"child4\": true}}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.hasItems("template_with_booleans.json"));
-
- given()
- .body("{\"searchExpr\": { \"parent2\": false}}")
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(SEARCH))
- .then()
- .statusCode(200)
- .body("", Matchers.hasItems("template_with_booleans.json"));
- }
-
-
- private static String prepareRequestUrl(String action) {
- return "http://0.0.0.0:5000/template/" + action;
- }
-
- private static File[] readFileFromTemplatesFolder() throws FileNotFoundException {
- return ResourceUtils.getFile("classpath:templates/search").listFiles();
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/TemplatesManagementTest.java b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/TemplatesManagementTest.java
deleted file mode 100644
index 7e74dd493..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/TemplatesManagementTest.java
+++ /dev/null
@@ -1,175 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.integration;
-
-import static io.restassured.RestAssured.given;
-
-import io.restassured.http.Header;
-import io.restassured.path.json.JsonPath;
-import io.restassured.path.json.config.JsonPathConfig;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Map;
-import org.hamcrest.Matchers;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
-import org.springframework.test.context.junit4.SpringRunner;
-import org.springframework.util.ResourceUtils;
-
-@RunWith(SpringRunner.class)
-@SpringBootTest(classes = {Main.class, TestConfiguration.class}, webEnvironment = WebEnvironment.DEFINED_PORT)
-public class TemplatesManagementTest {
-
- private static final String LIST_URL = "list";
- private static final String GET_URL = "get/";
- private static final String UPLOAD = "upload";
- private static final String NOTIFICATION_JSON = "notification.json";
- private static final String REGISTRATION_JSON = "registration.json";
- private static final String UPLOAD_TEMPLATE_JSON = "upload_template.json";
- private static final String OVERWRITE_TEMPLATE_JSON = "overwrite_template.json";
- private static final String OVERWRITTEN_TEMPLATE_JSON = "overwritten_template.json";
- private static final String APPLICATION_JSON = "application/json";
- private static final String CONTENT_TYPE = "Content-Type";
- private static final String FORCE_FLAG = "?override=true";
- private static final String CONTENT = "content";
- private static final String TEMPLATE = "template";
- private static final String ID = "id";
-
- @Test
- public void whenCallingGetShouldReceiveNotificationTemplate() throws IOException {
- given()
- .when()
- .get(prepareRequestUrl(GET_URL) + NOTIFICATION_JSON)
- .then()
- .statusCode(200)
- .body(ID, Matchers.equalTo(NOTIFICATION_JSON))
- .body(CONTENT, Matchers.equalTo(readTemplateFromResources(NOTIFICATION_JSON).getMap(CONTENT)));
- }
-
- @Test
- public void whenCallingGetShouldReceiveRegistrationTemplate() throws IOException {
- given()
- .when()
- .get(prepareRequestUrl(GET_URL) + REGISTRATION_JSON)
- .then()
- .statusCode(200)
- .body(ID, Matchers.equalTo(REGISTRATION_JSON))
- .body(CONTENT, Matchers.equalTo(readTemplateFromResources(REGISTRATION_JSON).getMap(CONTENT)));
- }
-
- @Test
- public void whenCallingListShouldReceiveAllPredefinedTemplates() throws IOException {
- Map<Object, Object> registration = readTemplateFromResources(REGISTRATION_JSON).getMap(CONTENT);
- Map<Object, Object> notification = readTemplateFromResources(NOTIFICATION_JSON).getMap(CONTENT);
-
- given()
- .when()
- .get(prepareRequestUrl(LIST_URL))
- .then()
- .statusCode(200)
- .body(CONTENT, Matchers.<Map>hasItems(
- registration,
- notification
- ));
- }
-
- @Test
- public void whenCallingUploadAndGetShouldReceiveNewTemplate() throws IOException {
- byte[] body = Files.readAllBytes(readFileFromTemplatesFolder(UPLOAD_TEMPLATE_JSON));
-
- given()
- .body(body)
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(UPLOAD))
- .then()
- .statusCode(201);
-
- given()
- .when()
- .get(prepareRequestUrl(GET_URL) + UPLOAD_TEMPLATE_JSON)
- .then()
- .statusCode(200)
- .body(ID, Matchers.equalTo(UPLOAD_TEMPLATE_JSON))
- .body(CONTENT, Matchers.equalTo(readTemplateFromResources(UPLOAD_TEMPLATE_JSON).getMap(TEMPLATE)));
- }
-
- @Test
- public void whenCallingOverrideAndGetShouldReceiveNewTemplate() throws IOException, JSONException {
- byte[] body = Files.readAllBytes(readFileFromTemplatesFolder(OVERWRITE_TEMPLATE_JSON));
-
- given()
- .body(body)
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(UPLOAD))
- .then()
- .statusCode(201);
-
- JSONObject overwrittenBody = new JSONObject(new String(body));
- JSONObject overwrittenTemplate = new JSONObject("{\"field1\": \"overwritten_field1\"}");
- overwrittenBody.put(TEMPLATE, overwrittenTemplate);
-
- given()
- .body(overwrittenBody.toString().getBytes())
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(UPLOAD))
- .then()
- .statusCode(409);
-
- given()
- .body(overwrittenBody.toString().getBytes())
- .header(new Header(CONTENT_TYPE, APPLICATION_JSON))
- .when()
- .post(prepareRequestUrl(UPLOAD + FORCE_FLAG))
- .then()
- .statusCode(201);
-
- given()
- .when()
- .get(prepareRequestUrl(GET_URL) + OVERWRITE_TEMPLATE_JSON)
- .then()
- .statusCode(200)
- .body(ID, Matchers.equalTo(OVERWRITE_TEMPLATE_JSON))
- .body(CONTENT, Matchers.equalTo(readTemplateFromResources(OVERWRITTEN_TEMPLATE_JSON).getMap(CONTENT)));
- }
-
- private String prepareRequestUrl(String action) {
- return "http://0.0.0.0:5000/template/" + action;
- }
-
- private JsonPath readTemplateFromResources(String templateName) throws IOException {
- byte[] content = Files.readAllBytes(readFileFromTemplatesFolder(templateName));
- return new JsonPath(new String(content)).using(new JsonPathConfig("UTF-8"));
- }
-
- private Path readFileFromTemplatesFolder(String templateName) throws FileNotFoundException {
- return ResourceUtils.getFile("classpath:templates/"+templateName).toPath();
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/TestConfiguration.java b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/TestConfiguration.java
deleted file mode 100644
index 19ae050c5..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/TestConfiguration.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.integration;
-
-import org.mockito.Mockito;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.context.annotation.Primary;
-
-@Configuration
-public class TestConfiguration {
-
- @Bean
- @Primary
- VesSimulatorService provideVesSimulatorService() {
- return Mockito.mock(VesSimulatorService.class);
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/suites/DockerBasedTestsSuite.java b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/suites/DockerBasedTestsSuite.java
deleted file mode 100644
index cc2ac588f..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/java/org/onap/pnfsimulator/integration/suites/DockerBasedTestsSuite.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.integration.suites;
-
-import com.palantir.docker.compose.DockerComposeRule;
-import com.palantir.docker.compose.connection.waiting.HealthChecks;
-import org.junit.ClassRule;
-import org.junit.runner.RunWith;
-import org.junit.runners.Suite;
-import org.junit.runners.Suite.SuiteClasses;
-import org.onap.pnfsimulator.integration.BasicAvailabilityTest;
-import org.onap.pnfsimulator.integration.OptionalTemplatesTest;
-import org.onap.pnfsimulator.integration.SearchInTemplatesTest;
-import org.onap.pnfsimulator.integration.TemplatesManagementTest;
-
-@RunWith(Suite.class)
-@SuiteClasses({BasicAvailabilityTest.class, TemplatesManagementTest.class, OptionalTemplatesTest.class,
- SearchInTemplatesTest.class})
-public class DockerBasedTestsSuite {
-
- @ClassRule
- public static DockerComposeRule docker = DockerComposeRule.builder()
- .file("../docker-compose.yml")
- .waitingForService("pnf-simulator", HealthChecks.toHaveAllPortsOpen())
- .waitingForService("mongo", HealthChecks.toHaveAllPortsOpen())
- .build();
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/application.properties b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/application.properties
deleted file mode 100644
index c3e147200..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/application.properties
+++ /dev/null
@@ -1,6 +0,0 @@
-server.port=9443
-security.require-ssl=true
-server.ssl.key-store=src/main/resources/keystore
-server.ssl.key-store-password=collector
-server.ssl.keyStoreType=JKS
-server.ssl.keyAlias=tomcat \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/notification.json b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/notification.json
deleted file mode 100644
index 7b3e668aa..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/notification.json
+++ /dev/null
@@ -1,45 +0,0 @@
-{
- "id": "notification.json",
- "content": {
- "event": {
- "commonEventHeader": {
- "domain": "notification",
- "eventName": "vFirewallBroadcastPackets",
- "eventId": "4cfc-91cf-31a46",
- "priority": "Normal",
- "reportingEntityName": "myVNF",
- "sequence": 1,
- "sourceName": "ClosedLoopVNF",
- "startEpochMicrosec": 1531616794,
- "lastEpochMicrosec": 1531719042,
- "vesEventListenerVersion": "7.0.1",
- "version": "4.0.1"
- },
- "notificationFields": {
- "changeIdentifier": "PM_MEAS_FILES",
- "changeType": "FileReady",
- "arrayOfNamedHashMap": [
- {
- "name": "A20161221.1031-1041.bin.gz",
- "hashMap": {
- "fileformatType": "org.3GPP.32.435#measCollec",
- "fileFormatVersion": "V10",
- "location": "ftpes://192.169.0.1:22/ftp/rop/A20161224.1030-1045.bin.gz",
- "compression": "gzip"
- }
- },
- {
- "name": "A20161222.1042-1102.bin.gz",
- "hashMap": {
- "fileFormatType": "org.3GPP.32.435#measCollec",
- "fileFormatVersion": "V10",
- "location": "ftpes://192.168.0.102:22/ftp/rop/A20161224.1045-1100.bin.gz",
- "compression": "gzip"
- }
- }
- ],
- "notificationFieldsVersion": "2.0"
- }
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/overwrite_template.json b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/overwrite_template.json
deleted file mode 100644
index d6d94f7a7..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/overwrite_template.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "name": "overwrite_template.json",
- "template": {
- "field1": "field1"
- }
-} \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/overwritten_template.json b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/overwritten_template.json
deleted file mode 100644
index f7848d415..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/overwritten_template.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "id": "overwrite_template.json",
- "content": {
- "field1": "overwritten_field1"
- }
-} \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/registration.json b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/registration.json
deleted file mode 100644
index bf0ac717c..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/registration.json
+++ /dev/null
@@ -1,36 +0,0 @@
-{
- "id": "registration.json",
- "content": {
- "event": {
- "commonEventHeader": {
- "eventId": "registration_39239592",
- "eventType": "pnfRegistration",
- "reportingEntityName": "NOK6061ZW3",
- "domain": "pnfRegistration",
- "nfcNamingCode": "oam",
- "sequence": 0,
- "sourceId": "val13",
- "internalHeaderFields": {},
- "priority": "Normal",
- "sourceName": "NOK6061ZW3",
- "eventName": "pnfRegistration_Nokia_5gDu",
- "version": "4.0.1",
- "nfNamingCode": "gNB",
- "startEpochMicrosec": 1539239592379,
- "vesEventListenerVersion": "7.0.1",
- "lastEpochMicrosec": 1539239592379
- },
- "pnfRegistrationFields": {
- "pnfRegistrationFieldsVersion": "2.0",
- "serialNumber": "6061ZW3",
- "vendorName": "Nokia",
- "oamV4IpAddress": "val3",
- "oamV6IpAddress": "val4",
- "unitFamily": "BBU",
- "modelNumber": "val6",
- "softwareVersion": "val7",
- "unitType": "val8"
- }
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/complicated_template.json b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/complicated_template.json
deleted file mode 100644
index 0edbb62dc..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/complicated_template.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "name": "complicated_template.json",
- "template": {
- "parent": {
- "child1": "Any value 1",
- "child2": {
- "parent": {
- "child1": "Any value 1",
- "child2": "Any value 2",
- "child3": {
- "child4": "Any value 4"
- },
- "child4": [
- "Any value 1",
- "Any value 2, Any value 3"
- ],
- "child5": [
- "Any value 4",
- 1,
- 2,
- 4.4,
- {
- "child6": [
- 1,
- 2,
- 4.4
- ]
- }
- ]
- }
- },
- "child3": {
- "child4": "Any value 4"
- }
- },
- "parent2": "Any value 2",
- "parent3": {
- "child1": "?",
- "child2": "*",
- "child3": "https://url.com?param1=test&param2=*"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/simple_template.json b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/simple_template.json
deleted file mode 100644
index ad2a64aff..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/simple_template.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "name": "simple_template.json",
- "template": {
- "parent": {
- "child1": "Any value 1",
- "child2": "Any value 2",
- "child3": {
- "child4": "Any value 4"
- }
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/template_with_array.json b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/template_with_array.json
deleted file mode 100644
index bb3235e2c..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/template_with_array.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "name": "template_with_array.json",
- "template": {
- "parent": {
- "child1": [
- {
- "child1": "Any value 1",
- "child2": [
- 4,
- 4.44
- ]
- }
- ],
- "child2": [
- 1,
- "Any value 4",
- 3.3,
- 5
- ]
- },
- "parent2": "[]"
- }
-} \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/template_with_booleans.json b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/template_with_booleans.json
deleted file mode 100644
index 8bf54080c..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/template_with_booleans.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "name": "template_with_booleans.json",
- "template": {
- "parent": {
- "child1": true,
- "child3": {
- "child4": true
- }
- },
- "parent2": false
- }
-} \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/template_with_floats.json b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/template_with_floats.json
deleted file mode 100644
index aab3243e1..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/template_with_floats.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "name": "template_with_floats.json",
- "template": {
- "parent": {
- "child1": 6.4,
- "child2": 1.2,
- "child3": {
- "child5": 4.4,
- "child2": "1"
- }
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/template_with_ints.json b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/template_with_ints.json
deleted file mode 100644
index 015cc46f3..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/search/template_with_ints.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "name": "template_with_ints.json",
- "template": {
- "parent": {
- "child1": 6,
- "child2": 1,
- "child3": {
- "child4": 4
- }
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/upload_template.json b/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/upload_template.json
deleted file mode 100644
index 4c49f0e17..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/integration/src/test/resources/templates/upload_template.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "name": "upload_template.json",
- "template": {
- "field1": "field1"
- }
-} \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/pom.xml b/test/mocks/pnfsimulator/pnfsimulator/pom.xml
deleted file mode 100644
index 83aa3f051..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/pom.xml
+++ /dev/null
@@ -1,364 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>org.onap.simulator</groupId>
- <artifactId>simulator-parent</artifactId>
- <version>5.0.0-SNAPSHOT</version>
- </parent>
-
- <artifactId>pnfsimulator</artifactId>
- <version>5.0.0-SNAPSHOT</version>
-
- <name>pnfsimulator</name>
-
- <properties>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <maven.compiler.source>1.8</maven.compiler.source>
- <maven.compiler.target>1.8</maven.compiler.target>
- <maven.build.timestamp.format>yyyyMMdd'T'HHmmss</maven.build.timestamp.format>
-
- <simulator.main.class>org.onap.pnfsimulator.Main</simulator.main.class>
- <docker.image.tag>latest</docker.image.tag>
- <junit.jupiter.version>5.1.0</junit.jupiter.version>
- <junit.vintage.version>5.1.0</junit.vintage.version>
- <spring.boot.version>2.1.6.RELEASE</spring.boot.version>
- <docker.image.name>onap/${project.artifactId}</docker.image.name>
-
- <dependency.directory.name>libs</dependency.directory.name>
- <dependency.directory.location>${project.build.directory}/${dependency.directory.name}
- </dependency.directory.location>
-
- <onap.nexus.dockerregistry.daily>nexus3.onap.org:10003</onap.nexus.dockerregistry.daily>
- <onap.nexus.url>http://nexus3.onap.org</onap.nexus.url>
- </properties>
-
- <dependencies>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter</artifactId>
- <version>${spring.boot.version}</version>
- <exclusions>
- <exclusion>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-logging</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-web</artifactId>
- <version>${spring.boot.version}</version>
- </dependency>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-data-mongodb</artifactId>
- <version>${spring.boot.version}</version>
- </dependency>
- <dependency>
- <groupId>ch.qos.logback</groupId>
- <artifactId>logback-classic</artifactId>
- <version>1.2.3</version>
- </dependency>
- <dependency>
- <groupId>ch.qos.logback</groupId>
- <artifactId>logback-core</artifactId>
- <version>1.2.3</version>
- </dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
- <version>1.7.25</version>
- </dependency>
- <dependency>
- <groupId>commons-io</groupId>
- <artifactId>commons-io</artifactId>
- <version>2.6</version>
- </dependency>
- <dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- <version>20180130</version>
- </dependency>
- <dependency>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- <version>2.8.2</version>
- </dependency>
- <dependency>
- <groupId>org.apache.httpcomponents</groupId>
- <artifactId>httpclient</artifactId>
- <version>4.5.5</version>
- </dependency>
- <dependency>
- <groupId>com.google.guava</groupId>
- <artifactId>guava</artifactId>
- <version>21.0</version>
- </dependency>
- <dependency>
- <groupId>commons-cli</groupId>
- <artifactId>commons-cli</artifactId>
- <version>1.4</version>
- </dependency>
- <dependency>
- <groupId>org.apache.commons</groupId>
- <artifactId>commons-lang3</artifactId>
- <version>3.7</version>
- </dependency>
- <dependency>
- <groupId>org.projectlombok</groupId>
- <artifactId>lombok</artifactId>
- <version>1.18.2</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>com.fasterxml.jackson.datatype</groupId>
- <artifactId>jackson-datatype-jdk8</artifactId>
- <version>2.9.7</version>
- </dependency>
- <dependency>
- <groupId>com.fasterxml.jackson.datatype</groupId>
- <artifactId>jackson-datatype-jsr310</artifactId>
- <version>2.9.7</version>
- </dependency>
- <dependency>
- <groupId>io.vavr</groupId>
- <artifactId>vavr-match</artifactId>
- <version>0.9.2</version>
- </dependency>
- <dependency>
- <groupId>io.vavr</groupId>
- <artifactId>vavr</artifactId>
- <version>0.9.2</version>
- </dependency>
-
-
- <dependency>
- <groupId>org.junit.jupiter</groupId>
- <artifactId>junit-jupiter-engine</artifactId>
- <version>${junit.jupiter.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.junit.jupiter</groupId>
- <artifactId>junit-jupiter-migrationsupport</artifactId>
- <version>${junit.jupiter.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.assertj</groupId>
- <artifactId>assertj-core</artifactId>
- <version>3.9.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.mockito</groupId>
- <artifactId>mockito-core</artifactId>
- <version>2.18.3</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.springframework</groupId>
- <artifactId>spring-test</artifactId>
- <version>5.0.4.RELEASE</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-test</artifactId>
- <version>${spring.boot.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>io.springfox</groupId>
- <artifactId>springfox-swagger2</artifactId>
- <version>2.9.2</version>
- </dependency>
- <dependency>
- <groupId>io.springfox</groupId>
- <artifactId>springfox-swagger-ui</artifactId>
- <version>2.9.2</version>
- </dependency>
- <dependency>
- <groupId>org.quartz-scheduler</groupId>
- <artifactId>quartz</artifactId>
- <version>2.2.1</version>
- </dependency>
- <dependency>
- <groupId>org.quartz-scheduler</groupId>
- <artifactId>quartz-jobs</artifactId>
- <version>2.2.1</version>
- </dependency>
- </dependencies>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-assembly-plugin</artifactId>
- <configuration>
- <descriptor>src/assembly/resources.xml</descriptor>
- <finalName>${project.artifactId}-${project.version}</finalName>
- </configuration>
- <executions>
- <execution>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <version>3.7.0</version>
- <configuration>
- <source>${maven.compiler.source}</source>
- <target>${maven.compiler.target}</target>
- <showWarnings>true</showWarnings>
- <showDeprecation>true</showDeprecation>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-jar-plugin</artifactId>
- <version>3.0.2</version>
- <configuration>
- <archive>
- <manifestEntries>
- <Main-Class>${simulator.main.class}</Main-Class>
- <Build-Time>${maven.build.timestamp}</Build-Time>
- </manifestEntries>
- </archive>
- </configuration>
- </plugin>
- <plugin>
- <groupId>pl.project13.maven</groupId>
- <artifactId>git-commit-id-plugin</artifactId>
- <version>2.2.4</version>
- <executions>
- <execution>
- <id>get-commit-info</id>
- <goals>
- <goal>revision</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <dotGitDirectory>${project.basedir}/.git</dotGitDirectory>
- <generateGitPropertiesFile>true</generateGitPropertiesFile>
- <includeOnlyProperties>git.commit.id.abbrev</includeOnlyProperties>
- </configuration>
- </plugin>
- <plugin>
- <artifactId>maven-surefire-plugin</artifactId>
- <version>2.19</version>
- <dependencies>
- <dependency>
- <groupId>org.junit.platform</groupId>
- <artifactId>junit-platform-surefire-provider</artifactId>
- <version>1.1.1</version>
- </dependency>
- </dependencies>
- <configuration>
- <detail>true</detail>
- <printSummary>true</printSummary>
- <useSystemClassLoader>false</useSystemClassLoader>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-dependency-plugin</artifactId>
- <configuration>
- <outputDirectory>${dependency.directory.location}</outputDirectory>
- <includeScope>runtime</includeScope>
- <silent>true</silent>
- </configuration>
- <executions>
- <execution>
- <id>copy-external-dependencies</id>
- <phase>package</phase>
- <goals>
- <goal>copy-dependencies</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>com.spotify</groupId>
- <artifactId>docker-maven-plugin</artifactId>
- <version>1.0.0</version>
- <configuration>
- <registryUrl>${onap.nexus.dockerregistry.daily}</registryUrl>
- <imageName>${onap.nexus.dockerregistry.daily}/${docker.image.name}</imageName>
- <dockerDirectory>${project.basedir}/docker</dockerDirectory>
- <forceTags>true</forceTags>
- <imageTags>
- <imageTag>latest</imageTag>
- </imageTags>
-
- <resources>
- <resource>
- <targetPath>${dependency.directory.name}</targetPath>
- <directory>${dependency.directory.location}</directory>
- </resource>
- <resource>
- <targetPath>/</targetPath>
- <directory>${project.build.directory}</directory>
- <include>${project.build.finalName}.jar</include>
- </resource>
- </resources>
- <forceTags>true</forceTags>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.jacoco</groupId>
- <artifactId>jacoco-maven-plugin</artifactId>
- <version>0.8.1</version>
- <configuration>
- <excludes>
- <exclude>org/onap/pnfsimulator/Main.class</exclude>
- </excludes>
- </configuration>
- <executions>
- <execution>
- <id>default-prepare-agent</id>
- <goals>
- <goal>prepare-agent</goal>
- </goals>
- </execution>
- <execution>
- <id>report</id>
- <phase>prepare-package</phase>
- <goals>
- <goal>report</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-</project>
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/assembly/resources.xml b/test/mocks/pnfsimulator/pnfsimulator/src/assembly/resources.xml
deleted file mode 100644
index 35dd3b2e2..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/assembly/resources.xml
+++ /dev/null
@@ -1,57 +0,0 @@
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-<assembly>
- <id>resources</id>
- <formats>
- <format>zip</format>
- </formats>
-
- <fileSets>
- <fileSet>
- <includes>
- <include>simulator.sh</include>
- </includes>
- <lineEnding>unix</lineEnding>
- <fileMode>0755</fileMode>
- </fileSet>
- <fileSet>
- <includes>
- <include>docker-compose.yml</include>
- </includes>
- <lineEnding>unix</lineEnding>
- <fileMode>0644</fileMode>
- </fileSet>
- <fileSet>
- <directory>config</directory>
- <outputDirectory>config</outputDirectory>
- <includes>
- <include>**/*</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>deployment</directory>
- <outputDirectory>deployment</outputDirectory>
- <includes>
- <include>**/*</include>
- </includes>
- </fileSet>
- </fileSets>
-</assembly>
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/Main.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/Main.java
deleted file mode 100644
index e0eace2d0..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/Main.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator;
-
-import javax.annotation.PostConstruct;
-
-import org.onap.pnfsimulator.filesystem.WatcherService;
-import org.onap.pnfsimulator.template.FsToDbTemplateSynchronizer;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.SpringApplication;
-import org.springframework.boot.autoconfigure.SpringBootApplication;
-import org.springframework.scheduling.annotation.EnableAsync;
-
-@SpringBootApplication
-@EnableAsync
-public class Main {
-
- private final WatcherService watcherService;
- private final FsToDbTemplateSynchronizer fsToDbTemplateSynchronizer;
-
- @Autowired
- public Main(WatcherService watcherService,
- FsToDbTemplateSynchronizer fsToDbTemplateSynchronizer) {
- this.watcherService = watcherService;
- this.fsToDbTemplateSynchronizer = fsToDbTemplateSynchronizer;
- }
-
- public static void main(String[] args) {
- SpringApplication.run(Main.class, args);
- }
-
- @PostConstruct
- public void createWatchers() {
- fsToDbTemplateSynchronizer.synchronize();
- watcherService.createWatcher();
- }
-}
-
-
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/SwaggerConfig.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/SwaggerConfig.java
deleted file mode 100644
index 90a5ecb03..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/SwaggerConfig.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator;
-
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import springfox.documentation.builders.PathSelectors;
-import springfox.documentation.builders.RequestHandlerSelectors;
-import springfox.documentation.spi.DocumentationType;
-import springfox.documentation.spring.web.plugins.Docket;
-import springfox.documentation.swagger2.annotations.EnableSwagger2;
-
-@Configuration
-@EnableSwagger2
-public class SwaggerConfig {
-
- @Bean
- public Docket api() {
- return new Docket(DocumentationType.SWAGGER_2)
- .select()
- .apis(RequestHandlerSelectors.basePackage("org.onap.pnfsimulator"))
- .paths(PathSelectors.any())
- .build();
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/db/Row.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/db/Row.java
deleted file mode 100644
index f9a167b93..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/db/Row.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.db;
-
-import org.springframework.data.annotation.Id;
-import org.springframework.data.mongodb.core.mapping.Field;
-
-public abstract class Row {
- @Id
- @Field("_id")
- protected String id;
-
- public String getId() {
- return id;
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/event/EventData.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/event/EventData.java
deleted file mode 100644
index 23b1c21a7..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/event/EventData.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.event;
-
-import com.fasterxml.jackson.annotation.JsonInclude;
-import lombok.Builder;
-import lombok.Getter;
-import lombok.Setter;
-import org.springframework.data.annotation.Id;
-import org.springframework.data.mongodb.core.mapping.Field;
-
-@Builder
-@Getter
-@Setter
-public class EventData {
- @Id
- private String id;
-
- @Field("template")
- @JsonInclude
- private String template;
-
- @Field("patched")
- @JsonInclude
- private String patched;
-
- @Field("input")
- @JsonInclude
- private String input;
-
- @Field("keywords")
- @JsonInclude
- private String keywords;
-
- @Field("incrementValue")
- @JsonInclude
- private int incrementValue;
-
- protected EventData(String id, String template, String patched, String input, String keywords, int incrementValue) {
- this.id = id;
- this.template = template;
- this.patched = patched;
- this.input = input;
- this.keywords = keywords;
- this.incrementValue = incrementValue;
- }
-
- @Override
- public String toString() {
- return "EventData{"
- + "id='" + id + '\''
- + ", template='" + template + '\''
- + ", patched='" + patched + '\''
- + ", input='" + input + '\''
- + ", keywords='" + keywords + '\''
- + '}';
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/event/EventDataRepository.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/event/EventDataRepository.java
deleted file mode 100644
index d1a66ab0a..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/event/EventDataRepository.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.event;
-
-import org.springframework.data.mongodb.repository.MongoRepository;
-
-public interface EventDataRepository extends MongoRepository<EventData, String> {
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/event/EventDataService.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/event/EventDataService.java
deleted file mode 100644
index 3568f0178..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/event/EventDataService.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.event;
-
-import com.google.gson.JsonObject;
-import java.util.List;
-import java.util.Optional;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-@Service
-public class EventDataService {
- private final EventDataRepository repository;
-
- @Autowired
- public EventDataService(EventDataRepository repository) {
- this.repository = repository;
- }
-
- private EventData persistEventData(String templateString, String patchedString, String inputString, String keywordsString) {
- EventData eventData = EventData.builder()
- .template(templateString)
- .patched(patchedString)
- .input(inputString)
- .keywords(keywordsString)
- .build();
- return repository.save(eventData);
- }
-
- public EventData persistEventData(JsonObject templateJson, JsonObject patchedJson, JsonObject inputJson,
- JsonObject keywordsJson) {
- return persistEventData(templateJson.toString(),
- patchedJson.toString(),
- inputJson.toString(),
- keywordsJson.toString());
- }
-
- public List<EventData> getAllEvents() {
- return repository.findAll();
- }
-
- public Optional<EventData> getById(String id) {
- return repository.findById(id);
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/filesystem/WatcherEventProcessor.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/filesystem/WatcherEventProcessor.java
deleted file mode 100644
index 56a569671..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/filesystem/WatcherEventProcessor.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.filesystem;
-
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.StandardWatchEventKinds;
-import java.nio.file.WatchEvent;
-import java.nio.file.WatchEvent.Kind;
-import java.time.Instant;
-import java.util.Arrays;
-import java.util.Optional;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
-import lombok.extern.slf4j.Slf4j;
-import org.bson.json.JsonParseException;
-import org.onap.pnfsimulator.db.Storage;
-import org.onap.pnfsimulator.template.Template;
-import org.bson.Document;
-
-@Slf4j
-public enum WatcherEventProcessor {
- CREATED(StandardWatchEventKinds.ENTRY_CREATE) {
- @Override
- public void processEvent(Path path, Storage<Template> storage) throws IOException {
- String content = getContent(path);
- String fileName = path.getFileName().toString();
- Document documentsContent = Document.parse(content);
- storage.persist(new Template(fileName, documentsContent, Instant.now().getNano()));
- log.info("DB record created for template: " + fileName);
- }
- },
- MODIFIED(StandardWatchEventKinds.ENTRY_MODIFY) {
- @Override
- public void processEvent(Path path, Storage<Template> storage) throws IOException {
- String fileName = path.getFileName().toString();
- String content = getContent(path);
- Document documentsContent = Document.parse(content);
- Template template = storage.get(fileName).orElse(new Template(fileName, documentsContent, Instant.now().getNano()));
- template.setContent(documentsContent);
- storage.persist(template);
- log.info("DB record modified for template: " + fileName);
- }
- },
- DELETED(StandardWatchEventKinds.ENTRY_DELETE) {
- @Override
- public void processEvent(Path path, Storage<Template> storage) {
- String fileName = path.getFileName().toString();
- storage.delete(fileName);
- log.info("DB record deleted for template: " + fileName);
- }
- };
-
- private final Kind<Path> pathKind;
-
- String getContent(Path path) throws IOException {
- try (Stream<String> lines = Files.lines(path, StandardCharsets.UTF_8)) {
- return lines.collect(Collectors.joining(System.lineSeparator()));
- } catch (IOException e) {
- log.error("Could not get content due to: " + e.getMessage() + " " + e.getCause(), e);
- throw e;
- }
- }
-
- WatcherEventProcessor(Kind<Path> pathKind) {
- this.pathKind = pathKind;
- }
-
- public abstract void processEvent(Path templateName, Storage<Template> storage) throws IOException;
-
- static void process(WatchEvent<?> event, Storage<Template> storage, Path templatesDir) {
- Optional<WatcherEventProcessor> watcherEventProcessor = getWatcherEventProcessor(event);
- watcherEventProcessor.ifPresent(processor -> {
- try {
- final Path templatePath = templatesDir.resolve((Path) event.context());
- processor.processEvent(templatePath, storage);
- } catch (IOException e) {
- log.error("Error during processing DB record for template.", e);
- } catch (JsonParseException e) {
- log.error("Invalid JSON format provided for template.", e);
- }
- });
- }
-
- private static Optional<WatcherEventProcessor> getWatcherEventProcessor(WatchEvent<?> event) {
- return Arrays.stream(values()).filter(value -> value.pathKind.equals(event.kind())).findFirst();
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/filesystem/WatcherService.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/filesystem/WatcherService.java
deleted file mode 100644
index 26b684df9..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/filesystem/WatcherService.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.filesystem;
-
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.ApplicationContext;
-import org.springframework.core.task.TaskExecutor;
-import org.springframework.stereotype.Service;
-
-@Service
-public class WatcherService {
-
- private TaskExecutor taskExecutor;
- private ApplicationContext applicationContext;
-
- @Autowired
- public WatcherService(ApplicationContext applicationContext, TaskExecutor taskExecutor) {
- this.taskExecutor = taskExecutor;
- this.applicationContext = applicationContext;
- }
-
- public void createWatcher() {
- taskExecutor.execute(applicationContext.getBean(WatcherThread.class));
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/filesystem/WatcherThread.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/filesystem/WatcherThread.java
deleted file mode 100644
index a202b1f9e..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/filesystem/WatcherThread.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.filesystem;
-
-import java.io.IOException;
-import java.nio.file.FileSystems;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.StandardWatchEventKinds;
-import java.nio.file.WatchEvent;
-import java.nio.file.WatchKey;
-import java.nio.file.WatchService;
-import lombok.extern.slf4j.Slf4j;
-import org.onap.pnfsimulator.db.Storage;
-import org.onap.pnfsimulator.template.Template;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.stereotype.Component;
-
-@Slf4j
-@Component
-public class WatcherThread implements Runnable {
-
- private final WatchService watchService;
- private final Storage<Template> storage;
- private final Path templatesDir;
-
- WatcherThread(String templatesDir, WatchService watchService, Storage<Template> storage) throws IOException {
- this.watchService = watchService;
- this.storage = storage;
- this.templatesDir = Paths.get(templatesDir);
- registerDirectory(this.templatesDir);
- }
-
- @Autowired
- public WatcherThread(@Value("${templates.dir}") String templatesDir, Storage<Template> storage) throws IOException {
- this(templatesDir, FileSystems.getDefault().newWatchService(), storage);
- }
-
- private void registerDirectory(Path path) throws IOException {
- path.register(watchService, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_DELETE,
- StandardWatchEventKinds.ENTRY_MODIFY);
- }
-
- @Override
- public void run() {
- while (true) {
- WatchKey key;
- try {
- key = watchService.take();
- for (WatchEvent<?> event : key.pollEvents()) {
- WatcherEventProcessor.process(event, storage, templatesDir);
- }
- key.reset();
- } catch (InterruptedException e) {
- log.error("Watch service interrupted.", e.getMessage());
- Thread.currentThread().interrupt();
- return;
- }
-
- }
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/logging/MDCVariables.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/logging/MDCVariables.java
deleted file mode 100644
index 5678f4fa3..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/logging/MDCVariables.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.logging;
-
-public final class MDCVariables {
-
- public static final String X_ONAP_REQUEST_ID = "X-ONAP-RequestID";
- public static final String X_INVOCATION_ID = "X-InvocationID";
- public static final String REQUEST_ID = "RequestID";
- public static final String INVOCATION_ID = "InvocationID";
- public static final String INSTANCE_UUID = "InstanceUUID";
- public static final String RESPONSE_CODE = "ResponseCode";
- public static final String SERVICE_NAME = "ServiceName";
-
- private MDCVariables() {
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/SimulatorController.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/SimulatorController.java
deleted file mode 100644
index 88648f91d..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/SimulatorController.java
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest;
-
-import com.google.common.collect.ImmutableMap;
-import com.google.gson.JsonSyntaxException;
-import org.json.JSONException;
-import org.onap.pnfsimulator.event.EventData;
-import org.onap.pnfsimulator.event.EventDataService;
-import org.onap.pnfsimulator.rest.model.FullEvent;
-import org.onap.pnfsimulator.rest.model.SimulatorRequest;
-import org.onap.pnfsimulator.rest.util.DateUtil;
-import org.onap.pnfsimulator.rest.util.ResponseBuilder;
-import org.onap.pnfsimulator.simulator.SimulatorService;
-import org.onap.pnfsimulator.simulatorconfig.SimulatorConfig;
-import org.quartz.SchedulerException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.slf4j.MDC;
-import org.slf4j.Marker;
-import org.slf4j.MarkerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.http.HttpHeaders;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
-import org.springframework.web.bind.annotation.GetMapping;
-import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.PostMapping;
-import org.springframework.web.bind.annotation.PutMapping;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RestController;
-
-import javax.validation.Valid;
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-
-import static org.onap.pnfsimulator.logging.MDCVariables.INSTANCE_UUID;
-import static org.onap.pnfsimulator.logging.MDCVariables.INVOCATION_ID;
-import static org.onap.pnfsimulator.logging.MDCVariables.REQUEST_ID;
-import static org.onap.pnfsimulator.logging.MDCVariables.RESPONSE_CODE;
-import static org.onap.pnfsimulator.logging.MDCVariables.SERVICE_NAME;
-import static org.onap.pnfsimulator.logging.MDCVariables.X_INVOCATION_ID;
-import static org.onap.pnfsimulator.logging.MDCVariables.X_ONAP_REQUEST_ID;
-import static org.onap.pnfsimulator.rest.util.ResponseBuilder.MESSAGE;
-import static org.onap.pnfsimulator.rest.util.ResponseBuilder.TIMESTAMP;
-import static org.springframework.http.HttpStatus.ACCEPTED;
-import static org.springframework.http.HttpStatus.BAD_REQUEST;
-import static org.springframework.http.HttpStatus.INTERNAL_SERVER_ERROR;
-import static org.springframework.http.HttpStatus.NOT_FOUND;
-import static org.springframework.http.HttpStatus.OK;
-
-@RestController
-@RequestMapping("/simulator")
-public class SimulatorController {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(SimulatorController.class);
- private static final Marker ENTRY = MarkerFactory.getMarker("ENTRY");
- private static final String INCORRECT_TEMPLATE_MESSAGE = "Cannot start simulator, template %s is not in valid format: %s";
- private static final String NOT_EXISTING_TEMPLATE = "Cannot start simulator - template %s not found.";
- private final DateFormat responseDateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss,SSS");
- private final SimulatorService simulatorService;
- private EventDataService eventDataService;
-
- @Autowired
- public SimulatorController(SimulatorService simulatorService,
- EventDataService eventDataService) {
- this.simulatorService = simulatorService;
- this.eventDataService = eventDataService;
- }
-
- @PostMapping("test")
- @Deprecated
- public ResponseEntity test(@Valid @RequestBody SimulatorRequest simulatorRequest) {
- MDC.put("test", "test");
- LOGGER.info(ENTRY, simulatorRequest.toString());
- return buildResponse(OK, ImmutableMap.of(MESSAGE, "message1234"));
- }
-
- @PostMapping(value = "start")
- public ResponseEntity start(@RequestHeader HttpHeaders headers,
- @Valid @RequestBody SimulatorRequest triggerEventRequest) {
- logContextHeaders(headers, "/simulator/start");
- LOGGER.info(ENTRY, "Simulator started");
-
- try {
- return processRequest(triggerEventRequest);
-
- } catch (JSONException | JsonSyntaxException e) {
- MDC.put(RESPONSE_CODE, BAD_REQUEST.toString());
- LOGGER.warn("Cannot trigger event, invalid json format: {}", e.getMessage());
- LOGGER.debug("Received json has invalid format", e);
- return buildResponse(BAD_REQUEST, ImmutableMap.of(MESSAGE, String
- .format(INCORRECT_TEMPLATE_MESSAGE, triggerEventRequest.getTemplateName(),
- e.getMessage())));
- } catch (IOException e) {
- MDC.put(RESPONSE_CODE, BAD_REQUEST.toString());
- LOGGER.warn("Json validation failed: {}", e.getMessage());
- return buildResponse(BAD_REQUEST,
- ImmutableMap.of(MESSAGE, String.format(NOT_EXISTING_TEMPLATE, triggerEventRequest.getTemplateName())));
- } catch (Exception e) {
- MDC.put(RESPONSE_CODE, INTERNAL_SERVER_ERROR.toString());
- LOGGER.error("Cannot trigger event - unexpected exception", e);
- return buildResponse(INTERNAL_SERVER_ERROR,
- ImmutableMap.of(MESSAGE, "Unexpected exception: " + e.getMessage()));
- } finally {
- MDC.clear();
- }
- }
-
- @GetMapping("all-events")
- @Deprecated
- public ResponseEntity allEvents() {
- List<EventData> eventDataList = eventDataService.getAllEvents();
- StringBuilder sb = new StringBuilder();
- eventDataList.forEach(e -> sb.append(e).append(System.lineSeparator()));
-
- return ResponseBuilder
- .status(OK).put(MESSAGE, sb.toString())
- .build();
- }
-
- @GetMapping("config")
- public ResponseEntity getConfig() {
- SimulatorConfig configToGet = simulatorService.getConfiguration();
- return buildResponse(OK, ImmutableMap.of("simulatorConfig", configToGet));
- }
-
- @PutMapping("config")
- public ResponseEntity updateConfig(@Valid @RequestBody SimulatorConfig newConfig) {
- SimulatorConfig updatedConfig = simulatorService.updateConfiguration(newConfig);
- return buildResponse(OK, ImmutableMap.of("simulatorConfig", updatedConfig));
- }
-
- @PostMapping("cancel/{jobName}")
- public ResponseEntity cancelEvent(@PathVariable String jobName) throws SchedulerException {
- LOGGER.info(ENTRY, "Cancel called on {}.", jobName);
- boolean isCancelled = simulatorService.cancelEvent(jobName);
- return createCancelEventResponse(isCancelled);
- }
-
- @PostMapping("cancel")
- public ResponseEntity cancelAllEvent() throws SchedulerException {
- LOGGER.info(ENTRY, "Cancel called on all jobs");
- boolean isCancelled = simulatorService.cancelAllEvents();
- return createCancelEventResponse(isCancelled);
- }
-
- @PostMapping("event")
- public ResponseEntity sendEventDirectly(@RequestHeader HttpHeaders headers, @Valid @RequestBody FullEvent event) throws MalformedURLException {
- logContextHeaders(headers, "/simulator/event");
- LOGGER.info(ENTRY, "Trying to send one-time event directly to VES Collector");
- simulatorService.triggerOneTimeEvent(event);
- return buildResponse(ACCEPTED, ImmutableMap.of(MESSAGE, "One-time direct event sent successfully"));
- }
-
- private ResponseEntity processRequest(SimulatorRequest triggerEventRequest)
- throws IOException, SchedulerException {
-
- String jobName = simulatorService.triggerEvent(triggerEventRequest);
- MDC.put(RESPONSE_CODE, OK.toString());
- return buildResponse(OK, ImmutableMap.of(MESSAGE, "Request started", "jobName", jobName));
- }
-
- private ResponseEntity buildResponse(HttpStatus endStatus, Map<String, Object> parameters) {
- ResponseBuilder builder = ResponseBuilder
- .status(endStatus)
- .put(TIMESTAMP, DateUtil.getTimestamp(responseDateFormat));
- parameters.forEach(builder::put);
- return builder.build();
- }
-
- private void logContextHeaders(HttpHeaders headers, String serviceName) {
- MDC.put(REQUEST_ID, headers.getFirst(X_ONAP_REQUEST_ID));
- MDC.put(INVOCATION_ID, headers.getFirst(X_INVOCATION_ID));
- MDC.put(INSTANCE_UUID, UUID.randomUUID().toString());
- MDC.put(SERVICE_NAME, serviceName);
- }
-
- private ResponseEntity createCancelEventResponse(boolean isCancelled) {
- if (isCancelled) {
- return buildResponse(OK, ImmutableMap.of(MESSAGE, "Event(s) was cancelled"));
- } else {
- return buildResponse(NOT_FOUND, ImmutableMap.of(MESSAGE, "Simulator was not able to cancel event(s)"));
- }
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/TemplateController.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/TemplateController.java
deleted file mode 100644
index 444e23bae..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/TemplateController.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest;
-
-import java.time.Instant;
-import java.util.List;
-import java.util.Optional;
-import javax.validation.Valid;
-
-import org.onap.pnfsimulator.db.Storage;
-import org.onap.pnfsimulator.rest.model.TemplateRequest;
-import org.onap.pnfsimulator.rest.model.SearchExp;
-import org.onap.pnfsimulator.template.Template;
-import org.onap.pnfsimulator.template.search.IllegalJsonValueException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.http.HttpHeaders;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.MediaType;
-import org.springframework.http.ResponseEntity;
-import org.springframework.web.bind.annotation.GetMapping;
-import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.PostMapping;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RequestParam;
-import org.springframework.web.bind.annotation.RestController;
-import org.springframework.web.server.ResponseStatusException;
-
-
-@RestController
-@RequestMapping("/template")
-public class TemplateController {
- static final String TEMPLATE_NOT_FOUND_MSG = "A template with given name does not exist";
- static final String CANNOT_OVERRIDE_TEMPLATE_MSG = "Cannot overwrite existing template. Use override=true to override";
- private final Storage<Template> service;
- private static final Logger LOG = LoggerFactory.getLogger(TemplateController.class);
-
- @Autowired
- public TemplateController(Storage<Template> service) {
- this.service = service;
- }
-
- @GetMapping("list")
- public ResponseEntity<?> list() {
- return new ResponseEntity<>(service.getAll(), HttpStatus.OK);
- }
-
- @GetMapping("get/{templateName}")
- public ResponseEntity<?> get(@PathVariable String templateName) {
- Optional<Template> template = service.get(templateName);
- if (!template.isPresent()) {
- HttpHeaders headers = new HttpHeaders();
- headers.setContentType(MediaType.TEXT_PLAIN);
- return new ResponseEntity<>(TEMPLATE_NOT_FOUND_MSG, headers, HttpStatus.NOT_FOUND);
- }
- return new ResponseEntity<>(template, HttpStatus.OK);
- }
-
- @PostMapping("upload")
- public ResponseEntity<?> upload(
- @RequestBody @Valid TemplateRequest templateRequest,
- @RequestParam(required = false) boolean override) {
- String msg = "";
- HttpStatus status = HttpStatus.CREATED;
- Template template = new Template(templateRequest.getName(), templateRequest.getTemplate(), Instant.now().getNano());
- if (!service.tryPersistOrOverwrite(template, override)) {
- status = HttpStatus.CONFLICT;
- msg = CANNOT_OVERRIDE_TEMPLATE_MSG;
- }
- return new ResponseEntity<>(msg, status);
- }
-
- @PostMapping("search")
- public ResponseEntity<?> searchByCriteria(@RequestBody SearchExp queryJson) {
- try {
- List<String> templateNames = service.getIdsByContentCriteria(queryJson.getSearchExpr());
- return new ResponseEntity<>(templateNames, HttpStatus.OK);
- } catch (IllegalJsonValueException ex) {
- throw new ResponseStatusException(HttpStatus.BAD_REQUEST, String.format("Try again with correct parameters. Cause: %s", ex.getMessage()), ex);
- }
-
- }
-
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/FullEvent.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/FullEvent.java
deleted file mode 100644
index 77d9b3da1..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/FullEvent.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * FULL-EVENT
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest.model;
-
-import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
-import com.google.gson.JsonObject;
-import lombok.AllArgsConstructor;
-import lombok.EqualsAndHashCode;
-import lombok.Getter;
-import lombok.NoArgsConstructor;
-import lombok.ToString;
-import org.onap.pnfsimulator.rest.util.JsonObjectDeserializer;
-import org.springframework.lang.Nullable;
-
-import javax.validation.constraints.NotNull;
-
-@AllArgsConstructor
-@NoArgsConstructor
-@Getter
-@ToString
-@EqualsAndHashCode
-public class FullEvent {
-
- @Nullable
- private String vesServerUrl;
-
- @NotNull
- @JsonDeserialize(using = JsonObjectDeserializer.class)
- private JsonObject event;
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/SimulatorParams.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/SimulatorParams.java
deleted file mode 100644
index 787583e7d..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/SimulatorParams.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest.model;
-
-import lombok.AllArgsConstructor;
-import lombok.EqualsAndHashCode;
-import lombok.Getter;
-import lombok.NoArgsConstructor;
-
-import javax.validation.constraints.NotNull;
-import org.springframework.lang.Nullable;
-
-@Getter
-@EqualsAndHashCode
-@AllArgsConstructor
-@NoArgsConstructor
-public class SimulatorParams {
-
- @NotNull
- private String vesServerUrl;
-
- @Nullable
- private Integer repeatInterval;
-
- @Nullable
- private Integer repeatCount;
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/SimulatorRequest.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/SimulatorRequest.java
deleted file mode 100644
index 2b0665813..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/model/SimulatorRequest.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest.model;
-
-import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
-import com.google.gson.JsonObject;
-import javax.validation.constraints.NotNull;
-import lombok.AllArgsConstructor;
-import lombok.EqualsAndHashCode;
-import lombok.Getter;
-import lombok.NoArgsConstructor;
-import lombok.ToString;
-import org.onap.pnfsimulator.rest.util.JsonObjectDeserializer;
-import org.springframework.lang.Nullable;
-
-@Getter
-@ToString
-@EqualsAndHashCode
-@AllArgsConstructor
-@NoArgsConstructor
-public class SimulatorRequest {
-
- @NotNull
- private SimulatorParams simulatorParams;
-
- @NotNull
- private String templateName;
-
- @Nullable
- @JsonDeserialize(using = JsonObjectDeserializer.class)
- private JsonObject patch;
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/util/DateUtil.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/util/DateUtil.java
deleted file mode 100644
index 9a5c9ca9e..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/util/DateUtil.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest.util;
-
-import java.text.DateFormat;
-import java.util.Date;
-
-public final class DateUtil {
-
- private DateUtil() {
- }
-
- public static String getTimestamp(DateFormat dateFormat) {
-
- return dateFormat.format(new Date());
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/util/JsonObjectDeserializer.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/util/JsonObjectDeserializer.java
deleted file mode 100644
index f89c4a7b8..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/util/JsonObjectDeserializer.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest.util;
-
-import com.fasterxml.jackson.core.JsonParser;
-import com.fasterxml.jackson.core.ObjectCodec;
-import com.fasterxml.jackson.databind.DeserializationContext;
-import com.fasterxml.jackson.databind.JsonDeserializer;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.google.gson.Gson;
-import com.google.gson.JsonObject;
-
-import java.io.IOException;
-
-public class JsonObjectDeserializer extends JsonDeserializer<JsonObject> {
- private Gson gson = new Gson();
-
- @Override
- public JsonObject deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException {
- ObjectCodec oc = jsonParser.getCodec();
- JsonNode node = oc.readTree(jsonParser);
- return gson.fromJson(node.toString(), JsonObject.class);
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/util/ResponseBuilder.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/util/ResponseBuilder.java
deleted file mode 100644
index 5fca25ad0..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/util/ResponseBuilder.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest.util;
-
-import java.util.LinkedHashMap;
-import java.util.Map;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
-
-public class ResponseBuilder {
-
- public static final String TIMESTAMP = "timestamp";
- public static final String MESSAGE = "message";
- public static final String SIMULATOR_STATUS = "simulatorStatus";
- public static final String REMAINING_TIME = "remainingTime";
-
- private HttpStatus httpStatus;
- private Map<String, Object> body = new LinkedHashMap<>();
-
- private ResponseBuilder(HttpStatus httpStatus) {
- this.httpStatus = httpStatus;
- }
-
- public static ResponseBuilder status(HttpStatus httpStatus) {
-
- return new ResponseBuilder(httpStatus);
- }
-
- public ResponseBuilder put(String key, Object value) {
-
- body.put(key, value);
- return this;
- }
-
- public ResponseEntity build() {
-
- if (body.isEmpty()) {
- return ResponseEntity.status(httpStatus).build();
- }
-
- return ResponseEntity.status(httpStatus).body(body);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/DBTemplateReader.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/DBTemplateReader.java
deleted file mode 100644
index 6c1125434..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/DBTemplateReader.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import com.google.gson.Gson;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import java.io.IOException;
-import org.onap.pnfsimulator.template.Template;
-import org.onap.pnfsimulator.template.TemplateService;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Component;
-
-@Component
-public class DBTemplateReader implements TemplateReader {
- private final TemplateService service;
- private final Gson gson;
-
- @Autowired
- public DBTemplateReader(TemplateService service, Gson gson) {
- this.service = service;
- this.gson = gson;
- }
-
- @Override
- public JsonObject readTemplate(String templateName) throws IOException {
- Template template = service.get(templateName).orElseThrow(() -> new IOException("Template does not exist"));
- JsonElement jsonElement = gson.toJsonTree(template.getContent());
- return jsonElement.getAsJsonObject();
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/EventNotFoundException.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/EventNotFoundException.java
deleted file mode 100644
index 4f43d8c49..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/EventNotFoundException.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-public class EventNotFoundException extends RuntimeException {
- private static final String NOT_FOUND = "Not found an event with id: ";
- public EventNotFoundException(String eventId) {
- super(NOT_FOUND + eventId);
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/FilesystemTemplateReader.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/FilesystemTemplateReader.java
deleted file mode 100644
index a405a2e1e..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/FilesystemTemplateReader.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import com.google.gson.Gson;
-import com.google.gson.JsonObject;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
-class FilesystemTemplateReader implements TemplateReader {
-
- private final Path templatesDir;
- private final Gson gson;
-
- @Autowired
- FilesystemTemplateReader(@Value("${templates.dir}") String templatesDir, Gson gson) {
- this.templatesDir = Paths.get(templatesDir);
- this.gson = gson;
- }
-
- public JsonObject readTemplate(String templateFileName) throws IOException {
- Path absTemplateFilePath = templatesDir.resolve(templateFileName);
- try (Stream<String> lines = Files.lines(absTemplateFilePath)) {
- String content = lines.collect(Collectors.joining("\n"));
- return gson.fromJson(content, JsonObject.class);
- }
- }
-}
-
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/IncrementProviderImpl.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/IncrementProviderImpl.java
deleted file mode 100644
index 16c0a0ee7..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/IncrementProviderImpl.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import org.onap.pnfsimulator.event.EventData;
-import org.onap.pnfsimulator.event.EventDataRepository;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-@Service
-public class IncrementProviderImpl implements IncrementProvider {
- private final EventDataRepository repository;
-
- @Autowired
- public IncrementProviderImpl(EventDataRepository repository) {
- this.repository = repository;
- }
-
- @Override
- public int getAndIncrement(String id) {
- EventData eventData = repository.findById(id)
- .orElseThrow(() -> new EventNotFoundException(id));
- int value = eventData.getIncrementValue() + 1;
- eventData.setIncrementValue(value);
- repository.save(eventData);
- return value;
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/JsonTokenProcessor.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/JsonTokenProcessor.java
deleted file mode 100644
index da0026a19..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/JsonTokenProcessor.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.pnfsimulator.simulator;
-
-import com.google.gson.stream.JsonReader;
-import com.google.gson.stream.JsonToken;
-import com.google.gson.stream.JsonWriter;
-import java.io.IOException;
-import java.math.BigDecimal;
-import java.util.Arrays;
-import java.util.stream.Collectors;
-
-public enum JsonTokenProcessor {
- STRING(JsonToken.STRING) {
- @Override
- void process(JsonReader reader, JsonWriter writer, int incrementValue, KeywordsExtractor keywordsExtractor)
- throws IOException {
- String originalString = reader.nextString();
- if (keywordsExtractor.isPrimitive(originalString)) {
- writer.value(keywordsExtractor.substitutePrimitiveKeyword(originalString));
- } else {
- String possibleSubstitution = Arrays.stream(originalString.split(" "))
- .map(singleWord -> keywordsExtractor.substituteStringKeyword(singleWord, incrementValue)).collect(
- Collectors.joining(" "));
- writer.value(originalString.equals(possibleSubstitution) ? originalString : possibleSubstitution);
- }
- }
- },
- BEGIN_ARRAY(JsonToken.BEGIN_ARRAY) {
- @Override
- void process(JsonReader reader, JsonWriter writer, int incrementValue, KeywordsExtractor keywordsExtractor)
- throws IOException {
- reader.beginArray();
- writer.beginArray();
- }
- },
- END_ARRAY(JsonToken.END_ARRAY) {
- @Override
- void process(JsonReader reader, JsonWriter writer, int incrementValue, KeywordsExtractor keywordsExtractor)
- throws IOException {
- reader.endArray();
- writer.endArray();
- }
- },
- BEGIN_OBJECT(JsonToken.BEGIN_OBJECT) {
- @Override
- void process(JsonReader reader, JsonWriter writer, int incrementValue, KeywordsExtractor keywordsExtractor)
- throws IOException {
- reader.beginObject();
- writer.beginObject();
- }
- },
- END_OBJECT(JsonToken.END_OBJECT) {
- @Override
- void process(JsonReader reader, JsonWriter writer, int incrementValue, KeywordsExtractor keywordsExtractor)
- throws IOException {
- reader.endObject();
- writer.endObject();
- }
- },
- NAME(JsonToken.NAME) {
- @Override
- void process(JsonReader reader, JsonWriter writer, int incrementValue, KeywordsExtractor keywordsExtractor)
- throws IOException {
- writer.name(reader.nextName());
- }
- },
- NUMBER(JsonToken.NUMBER) {
- @Override
- void process(JsonReader reader, JsonWriter writer, int incrementValue, KeywordsExtractor keywordsExtractor)
- throws IOException {
- writer.value(new BigDecimal(reader.nextString()));
- }
- },
- BOOLEAN(JsonToken.BOOLEAN) {
- @Override
- void process(JsonReader reader, JsonWriter writer, int incrementValue, KeywordsExtractor keywordsExtractor)
- throws IOException {
- writer.value(reader.nextBoolean());
- }
- },
- NULL(JsonToken.NULL) {
- @Override
- void process(JsonReader reader, JsonWriter writer, int incrementValue, KeywordsExtractor keywordsExtractor)
- throws IOException {
- reader.nextNull();
- writer.nullValue();
- }
- },
- END_DOCUMENT(JsonToken.END_DOCUMENT) {
- @Override
- void process(JsonReader reader, JsonWriter writer, int incrementValue, KeywordsExtractor keywordsExtractor)
- throws IOException {
- // do nothing
- }
- };
-
- private JsonToken jsonToken;
-
- JsonTokenProcessor(JsonToken jsonToken) {
- this.jsonToken = jsonToken;
- }
-
- boolean isProcessorFor(JsonToken jsonToken) {
- return this.jsonToken == jsonToken;
- }
-
- abstract void process(JsonReader reader, JsonWriter writer, int incrementValue, KeywordsExtractor keywordsExtractor) throws IOException;
-
- private static final String INVALID_JSON_BODY_UNSUPPORTED_JSON_TOKEN = "Invalid json body. Unsupported JsonToken.";
-
- static JsonTokenProcessor getProcessorFor(JsonToken jsonToken) throws IOException {
- return Arrays.stream(JsonTokenProcessor.values()).filter(processor -> processor.isProcessorFor(jsonToken)).findFirst()
- .orElseThrow(() -> new IOException(INVALID_JSON_BODY_UNSUPPORTED_JSON_TOKEN));
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/KeywordsExtractor.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/KeywordsExtractor.java
deleted file mode 100644
index 23c383f37..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/KeywordsExtractor.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.pnfsimulator.simulator;
-
-import static io.vavr.API.$;
-import static io.vavr.API.Case;
-import static io.vavr.API.Match;
-import static org.onap.pnfsimulator.simulator.KeywordsValueProvider.getEpochSecond;
-import static org.onap.pnfsimulator.simulator.KeywordsValueProvider.getRandomLimitedInteger;
-import static org.onap.pnfsimulator.simulator.KeywordsValueProvider.getRandomInteger;
-import static org.onap.pnfsimulator.simulator.KeywordsValueProvider.getRandomLimitedString;
-import static org.onap.pnfsimulator.simulator.KeywordsValueProvider.getRandomString;
-import static org.onap.pnfsimulator.simulator.KeywordsValueProvider.getRandomPrimitiveInteger;
-import static org.onap.pnfsimulator.simulator.KeywordsValueProvider.getTimestampPrimitive;
-import static org.onap.pnfsimulator.simulator.keywords.NonParameterKeywordPatterns.$nonParameterKeyword;
-import static org.onap.pnfsimulator.simulator.keywords.SingleParameterKeywordPatterns.$singleParameterKeyword;
-import static org.onap.pnfsimulator.simulator.keywords.TwoParameterKeywordPatterns.$twoParameterKeyword;
-import io.vavr.API.Match.Pattern1;
-import org.onap.pnfsimulator.simulator.keywords.Keyword;
-import org.onap.pnfsimulator.simulator.keywords.NonParameterKeyword;
-import org.onap.pnfsimulator.simulator.keywords.SingleParameterKeyword;
-import org.onap.pnfsimulator.simulator.keywords.TwoParameterKeyword;
-import org.springframework.stereotype.Component;
-
-@Component
-public class KeywordsExtractor {
-
- String substituteStringKeyword(String text, int increment) {
- return Match(text).of(
- Case(isRandomStringParamKeyword(),
- spk -> spk.substituteKeyword(getRandomString().apply(spk.getAdditionalParameter()))),
- Case(isRandomStringNonParamKeyword(),
- npk -> npk.substituteKeyword(getRandomLimitedString().apply())),
- Case(isRandomIntegerParamKeyword(),
- tpk -> tpk.substituteKeyword(getRandomInteger().apply(tpk.getAdditionalParameter1(), tpk.getAdditionalParameter2()))),
- Case(isRandomIntegerNonParamKeyword(),
- npk -> npk.substituteKeyword(getRandomLimitedInteger().apply())),
- Case(isIncrementKeyword(),
- ik -> ik.substituteKeyword(String.valueOf(increment))),
- Case(isTimestampNonParamKeyword(),
- npk -> npk.substituteKeyword(getEpochSecond().apply())),
- Case(
- $(),
- () -> text
- ));
- }
-
- Long substitutePrimitiveKeyword(String text) {
- return Match(text).of(
- Case(isRandomPrimitiveIntegerParamKeyword(),
- tpk ->
- getRandomPrimitiveInteger().apply(tpk.getAdditionalParameter1(), tpk.getAdditionalParameter2())),
- Case(isTimestampPrimitiveNonParamKeyword(),
- tpk ->
- getTimestampPrimitive().apply()),
- Case(
- $(),
- () -> 0L
- ));
- }
-
- boolean isPrimitive(String text) {
- return Match(text).of(
- Case(isRandomPrimitiveIntegerParamKeyword(), () -> true),
- Case(isTimestampPrimitiveNonParamKeyword(), () -> true),
- Case($(), () -> false));
- }
-
- private Pattern1<String, SingleParameterKeyword> isRandomStringParamKeyword() {
- return $singleParameterKeyword($(spk -> Keyword.IS_MATCHING_KEYWORD_NAME.apply(spk, "RandomString")));
- }
-
- private Pattern1<String, NonParameterKeyword> isRandomStringNonParamKeyword() {
- return $nonParameterKeyword($(npk -> Keyword.IS_MATCHING_KEYWORD_NAME.apply(npk, "RandomString")));
- }
-
- private Pattern1<String, NonParameterKeyword> isIncrementKeyword() {
- return $nonParameterKeyword($(npk -> Keyword.IS_MATCHING_KEYWORD_NAME.apply(npk, "Increment")));
- }
-
- private Pattern1<String, TwoParameterKeyword> isRandomIntegerParamKeyword() {
- return $twoParameterKeyword($(tpk -> Keyword.IS_MATCHING_KEYWORD_NAME.apply(tpk, "RandomInteger")));
- }
-
- private Pattern1<String, TwoParameterKeyword> isRandomPrimitiveIntegerParamKeyword() {
- return $twoParameterKeyword($(tpk -> Keyword.IS_MATCHING_KEYWORD_NAME.apply(tpk, "RandomPrimitiveInteger")));
- }
-
- private Pattern1<String, NonParameterKeyword> isTimestampPrimitiveNonParamKeyword() {
- return $nonParameterKeyword($(npk -> Keyword.IS_MATCHING_KEYWORD_NAME.apply(npk, "TimestampPrimitive")));
- }
-
- private Pattern1<String, NonParameterKeyword> isRandomIntegerNonParamKeyword() {
- return $nonParameterKeyword($(npk -> Keyword.IS_MATCHING_KEYWORD_NAME.apply(npk, "RandomInteger")));
- }
-
- private Pattern1<String, NonParameterKeyword> isTimestampNonParamKeyword() {
- return $nonParameterKeyword($(npk -> Keyword.IS_MATCHING_KEYWORD_NAME.apply(npk, "Timestamp")));
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/KeywordsHandler.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/KeywordsHandler.java
deleted file mode 100644
index 51e0c1f16..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/KeywordsHandler.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.pnfsimulator.simulator;
-
-import com.google.gson.Gson;
-import com.google.gson.JsonElement;
-import com.google.gson.stream.JsonReader;
-import com.google.gson.stream.JsonToken;
-import com.google.gson.stream.JsonWriter;
-import java.io.IOException;
-import java.io.StringReader;
-import java.io.StringWriter;
-import org.springframework.stereotype.Component;
-
-@Component
-public class KeywordsHandler {
-
- private KeywordsExtractor keywordsExtractor;
- private IncrementProvider incrementProvider;
-
- public KeywordsHandler(KeywordsExtractor keywordsExtractor, IncrementProvider incrementProvider) {
- this.keywordsExtractor = keywordsExtractor;
- this.incrementProvider = incrementProvider;
- }
-
- public JsonElement substituteKeywords(JsonElement jsonBody, String jobId) {
- int counter = incrementProvider.getAndIncrement(jobId);
- try (
- JsonReader reader = new JsonReader(new StringReader(jsonBody.toString()));
- StringWriter stringWriter = new StringWriter();
- JsonWriter jsonWriter = new JsonWriter(stringWriter);
- ) {
- modify(reader, jsonWriter, counter);
- return new Gson().fromJson(stringWriter.getBuffer().toString(), JsonElement.class);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- }
-
- private void modify(JsonReader reader, JsonWriter writer, int incrementValue) throws IOException {
- JsonTokenProcessor jsonTokenProcessor;
- do {
- JsonToken token = reader.peek();
- jsonTokenProcessor = JsonTokenProcessor.getProcessorFor(token);
- jsonTokenProcessor.process(reader, writer, incrementValue, keywordsExtractor);
- } while (isJsonProcessingFinished(jsonTokenProcessor));
- }
-
- private boolean isJsonProcessingFinished(JsonTokenProcessor jsonTokenProcessor) {
- return !jsonTokenProcessor.isProcessorFor(JsonToken.END_DOCUMENT);
- }
-
-}
-
-
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/KeywordsValueProvider.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/KeywordsValueProvider.java
deleted file mode 100644
index 3bcfa5bca..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/KeywordsValueProvider.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.pnfsimulator.simulator;
-
-import io.vavr.Function0;
-import io.vavr.Function1;
-import io.vavr.Function2;
-
-import java.time.Instant;
-import java.util.Random;
-
-import org.apache.commons.lang3.RandomStringUtils;
-
-class KeywordsValueProvider {
-
- private KeywordsValueProvider() {
- }
-
- static final int DEFAULT_STRING_LENGTH = 20;
- public static final int RANDOM_INTEGER_MAX_LIMITATION = 9;
- public static final int RANDOM_INTEGER_MIN_LIMITATION = 0;
-
- private static Function2<Integer, Integer, Integer> bigger = (left, right) -> left >= right ? left : right;
- private static Function2<Integer, Integer, Integer> smaller = (left, right) -> left < right ? left : right;
- private static Function2<Integer, Integer, Integer> randomPrimitiveIntegerFromSortedRange = (min, max) -> new Random().nextInt(max - min + 1) + min;
- private static Function2<Integer, Integer, String> randomIntegerFromSortedRange = (min, max) -> Integer.toString(new Random().nextInt(max - min + 1) + min);
-
- private static Function1<Integer, String> randomString = RandomStringUtils::randomAscii;
- private static Function2<Integer, Integer, String> randomInteger = (left, right) -> randomIntegerFromSortedRange.apply(smaller.apply(left, right), bigger.apply(left, right));
- private static Function0<String> randomLimitedInteger = () -> randomInteger.apply(RANDOM_INTEGER_MIN_LIMITATION, RANDOM_INTEGER_MAX_LIMITATION);
- private static Function0<String> randomLimitedString = () -> RandomStringUtils.randomAscii(DEFAULT_STRING_LENGTH);
- private static Function0<String> epochSecond = () -> Long.toString(Instant.now().getEpochSecond());
- private static Function2<Integer, Integer, Long> randomPrimitiveInteger = (left, right) -> randomPrimitiveIntegerFromSortedRange.apply(smaller.apply(left, right), bigger.apply(left, right)).longValue();
- private static Function0<Long> timestampPrimitive = () -> Instant.now().getEpochSecond();
-
- public static Function1<Integer, String> getRandomString() {
- return randomString;
- }
-
- public static Function2<Integer, Integer, String> getRandomInteger() {
- return randomInteger;
- }
-
- public static Function0<String> getRandomLimitedInteger() {
- return randomLimitedInteger;
- }
-
- public static Function0<String> getRandomLimitedString() {
- return randomLimitedString;
- }
-
- public static Function0<String> getEpochSecond() {
- return epochSecond;
- }
-
- public static Function2<Integer, Integer, Long> getRandomPrimitiveInteger() {
- return randomPrimitiveInteger;
- }
-
- public static Function0<Long> getTimestampPrimitive() {
- return timestampPrimitive;
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/SimulatorService.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/SimulatorService.java
deleted file mode 100644
index 704905584..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/SimulatorService.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import com.google.common.base.Strings;
-import com.google.gson.JsonObject;
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.util.Optional;
-import org.onap.pnfsimulator.event.EventData;
-import org.onap.pnfsimulator.event.EventDataService;
-import org.onap.pnfsimulator.rest.model.FullEvent;
-import org.onap.pnfsimulator.rest.model.SimulatorParams;
-import org.onap.pnfsimulator.rest.model.SimulatorRequest;
-import org.onap.pnfsimulator.simulator.client.HttpClientAdapter;
-import org.onap.pnfsimulator.simulator.client.HttpClientAdapterImpl;
-import org.onap.pnfsimulator.simulator.scheduler.EventScheduler;
-import org.onap.pnfsimulator.simulatorconfig.SimulatorConfig;
-import org.onap.pnfsimulator.simulatorconfig.SimulatorConfigService;
-import org.quartz.SchedulerException;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-@Service
-public class SimulatorService {
-
- private final TemplatePatcher templatePatcher;
- private final TemplateReader templateReader;
- private final EventDataService eventDataService;
- private final EventScheduler eventScheduler;
- private SimulatorConfigService simulatorConfigService;
- private static final JsonObject EMPTY_JSON_OBJECT = new JsonObject();
-
- @Autowired
- public SimulatorService(TemplatePatcher templatePatcher, TemplateReader templateReader,
- EventScheduler eventScheduler, EventDataService eventDataService,
- SimulatorConfigService simulatorConfigService) {
- this.templatePatcher = templatePatcher;
- this.templateReader = templateReader;
- this.eventDataService = eventDataService;
- this.eventScheduler = eventScheduler;
- this.simulatorConfigService = simulatorConfigService;
- }
-
- public String triggerEvent(SimulatorRequest simulatorRequest) throws IOException, SchedulerException {
- String templateName = simulatorRequest.getTemplateName();
- SimulatorParams simulatorParams = simulatorRequest.getSimulatorParams();
- JsonObject template = templateReader.readTemplate(templateName);
- JsonObject input = Optional.ofNullable(simulatorRequest.getPatch()).orElse(new JsonObject());
- JsonObject patchedJson = templatePatcher
- .mergeTemplateWithPatch(template, input);
- JsonObject keywords = new JsonObject();
-
- EventData eventData = eventDataService.persistEventData(template, patchedJson, input, keywords);
-
- String targetVesUrl = getDefaultUrlIfNotProvided(simulatorParams.getVesServerUrl());
- return eventScheduler
- .scheduleEvent(targetVesUrl, Optional.ofNullable(simulatorParams.getRepeatInterval()).orElse(1),
- Optional.ofNullable(simulatorParams.getRepeatCount()).orElse(1), simulatorRequest.getTemplateName(),
- eventData.getId(),
- patchedJson);
- }
-
- public void triggerOneTimeEvent(FullEvent event) throws MalformedURLException {
- KeywordsHandler keywordsHandler = new KeywordsHandler(new KeywordsExtractor(), id -> 1);
- JsonObject withKeywordsSubstituted = keywordsHandler.substituteKeywords(event.getEvent(), "").getAsJsonObject();
-
- HttpClientAdapter client = createHttpClientAdapter(event.getVesServerUrl());
- eventDataService.persistEventData(EMPTY_JSON_OBJECT, withKeywordsSubstituted, event.getEvent(), EMPTY_JSON_OBJECT);
-
- client.send(withKeywordsSubstituted.toString());
- }
-
- public SimulatorConfig getConfiguration() {
- return simulatorConfigService.getConfiguration();
- }
-
- public SimulatorConfig updateConfiguration(SimulatorConfig newConfig) {
- return simulatorConfigService.updateConfiguration(newConfig);
- }
-
- public boolean cancelAllEvents() throws SchedulerException {
- return eventScheduler.cancelAllEvents();
- }
-
- public boolean cancelEvent(String jobName) throws SchedulerException {
- return eventScheduler.cancelEvent(jobName);
- }
-
- HttpClientAdapter createHttpClientAdapter(String vesServerUrl) throws MalformedURLException {
- String targetVesUrl = getDefaultUrlIfNotProvided(vesServerUrl);
- return new HttpClientAdapterImpl(targetVesUrl);
- }
-
- private String getDefaultUrlIfNotProvided(String vesUrlSimulatorParam) {
- return Strings.isNullOrEmpty(vesUrlSimulatorParam)
- ? simulatorConfigService.getConfiguration().getVesServerUrl().toString() : vesUrlSimulatorParam;
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/TemplatePatcher.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/TemplatePatcher.java
deleted file mode 100644
index 1114d3c1b..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/TemplatePatcher.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import org.springframework.stereotype.Component;
-
-import java.util.Map;
-
-@Component
-class TemplatePatcher {
-
- JsonObject mergeTemplateWithPatch(JsonObject templateJson, JsonObject patchJson) {
- JsonObject template = templateJson.deepCopy();
- patchTemplateNode(template, patchJson);
- return template;
- }
-
- private void patchTemplateNode(JsonObject templateJson, JsonObject patchJson) {
- for (Map.Entry<String, JsonElement> stringJsonElementEntry : patchJson.entrySet()) {
- String patchKey = stringJsonElementEntry.getKey();
- JsonElement patchValue = stringJsonElementEntry.getValue();
- JsonElement templateElement = templateJson.get(patchKey);
-
- if (!patchValue.isJsonObject() || templateElement == null || !templateElement.isJsonObject()) {
- templateJson.remove(patchKey);
- templateJson.add(patchKey, patchValue);
- } else {
- patchTemplateNode(templateElement.getAsJsonObject(), patchValue.getAsJsonObject());
- }
-
- }
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/TemplateReader.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/TemplateReader.java
deleted file mode 100644
index bf06381d3..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/TemplateReader.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import com.google.gson.JsonObject;
-import java.io.IOException;
-
-public interface TemplateReader {
- JsonObject readTemplate(String templateName) throws IOException;
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImpl.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImpl.java
deleted file mode 100644
index 6b0761975..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImpl.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator.client;
-
-import static org.onap.pnfsimulator.logging.MDCVariables.REQUEST_ID;
-import static org.onap.pnfsimulator.logging.MDCVariables.X_INVOCATION_ID;
-import static org.onap.pnfsimulator.logging.MDCVariables.X_ONAP_REQUEST_ID;
-
-import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-import java.net.MalformedURLException;
-import java.util.UUID;
-
-import org.apache.http.HttpResponse;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.config.RequestConfig;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.entity.StringEntity;
-import org.apache.http.util.EntityUtils;
-import org.onap.pnfsimulator.simulator.client.utils.ssl.SslSupportLevel;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.slf4j.MDC;
-import org.slf4j.Marker;
-import org.slf4j.MarkerFactory;
-
-public class HttpClientAdapterImpl implements HttpClientAdapter {
-
- public static final int CONNECTION_TIMEOUT = 1000;
- private static final Logger LOGGER = LoggerFactory.getLogger(HttpClientAdapterImpl.class);
- private static final String CONTENT_TYPE = "Content-Type";
- private static final String APPLICATION_JSON = "application/json";
- private static final RequestConfig CONFIG = RequestConfig.custom()
- .setConnectTimeout(CONNECTION_TIMEOUT)
- .setConnectionRequestTimeout(CONNECTION_TIMEOUT)
- .setSocketTimeout(CONNECTION_TIMEOUT)
- .build();
- private static final Marker INVOKE = MarkerFactory.getMarker("INVOKE");
- private HttpClient client;
- private final String targetUrl;
-
- public HttpClientAdapterImpl(String targetUrl) throws MalformedURLException {
- this.client = SslSupportLevel.getSupportLevelBasedOnProtocol(targetUrl).getClient(CONFIG);
- this.targetUrl = targetUrl;
- }
-
- HttpClientAdapterImpl(HttpClient client, String targetUrl) {
- this.client = client;
- this.targetUrl = targetUrl;
- }
-
- @Override
- public void send(String content) {
- try {
- HttpPost request = createRequest(content);
- HttpResponse response = client.execute(request);
-
- //response has to be fully consumed otherwise apache won't release connection
- EntityUtils.consumeQuietly(response.getEntity());
- LOGGER.info(INVOKE, "Message sent, ves response code: {}", response.getStatusLine());
- } catch (IOException e) {
- LOGGER.warn("Error sending message to ves: " + e.getMessage(), e.getCause());
- }
- }
-
- private HttpPost createRequest(String content) throws UnsupportedEncodingException {
- HttpPost request = new HttpPost(this.targetUrl);
- StringEntity stringEntity = new StringEntity(content);
- request.addHeader(CONTENT_TYPE, APPLICATION_JSON);
- request.addHeader(X_ONAP_REQUEST_ID, MDC.get(REQUEST_ID));
- request.addHeader(X_INVOCATION_ID, UUID.randomUUID().toString());
- request.setEntity(stringEntity);
- return request;
- }
-
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/client/utils/ssl/SslSupportLevel.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/client/utils/ssl/SslSupportLevel.java
deleted file mode 100644
index 29416341d..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/client/utils/ssl/SslSupportLevel.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator.client.utils.ssl;
-
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.config.RequestConfig;
-import org.apache.http.conn.ssl.NoopHostnameVerifier;
-import org.apache.http.conn.ssl.TrustAllStrategy;
-import org.apache.http.conn.ssl.TrustStrategy;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.apache.http.impl.client.HttpClients;
-import org.apache.http.ssl.SSLContextBuilder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.net.ssl.SSLContext;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.security.KeyManagementException;
-import java.security.KeyStoreException;
-import java.security.NoSuchAlgorithmException;
-
-public enum SslSupportLevel {
-
- NONE {
- public HttpClient getClient(RequestConfig requestConfig) {
- LOGGER.info("<!-----IN SslSupportLevel.NONE, Creating BasicHttpClient for http protocol----!>");
- return HttpClientBuilder
- .create()
- .setDefaultRequestConfig(requestConfig)
- .build();
- }
- },
- ALWAYS_TRUST {
- public HttpClient getClient(RequestConfig requestConfig) {
- LoggerFactory.getLogger(SslSupportLevel.class).info("<!-----IN SslSupportLevel.ALWAYS_TRUST, Creating client with SSL support for https protocol----!>");
- HttpClient client;
- try {
- SSLContext alwaysTrustSslContext = SSLContextBuilder.create().loadTrustMaterial(TRUST_STRATEGY_ALWAYS).build();
- client = HttpClients.custom()
- .setSSLContext(alwaysTrustSslContext)
- .setSSLHostnameVerifier(new NoopHostnameVerifier())
- .setDefaultRequestConfig(requestConfig)
- .build();
-
- } catch (NoSuchAlgorithmException | KeyStoreException | KeyManagementException e) {
- LOGGER.error("Could not initialize client due to SSL exception: {}. Default client without SSL support will be used instead.\nCause: {}", e.getMessage(), e.getCause());
- client = NONE.getClient(requestConfig);
- }
- return client;
- }
- };
-
- private static final Logger LOGGER = LoggerFactory.getLogger(SslSupportLevel.class);
- private static final TrustStrategy TRUST_STRATEGY_ALWAYS = new TrustAllStrategy();
-
- public static SslSupportLevel getSupportLevelBasedOnProtocol(String url) throws MalformedURLException {
- return "https".equals(new URL(url).getProtocol()) ? SslSupportLevel.ALWAYS_TRUST : SslSupportLevel.NONE;
- }
-
- public abstract HttpClient getClient(RequestConfig requestConfig);
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/keywords/Keyword.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/keywords/Keyword.java
deleted file mode 100644
index edafe8f04..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/keywords/Keyword.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.pnfsimulator.simulator.keywords;
-
-import io.vavr.Function1;
-import io.vavr.Function2;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.regex.Matcher;
-import java.util.stream.Collectors;
-import lombok.Getter;
-
-@Getter
-public class Keyword {
-
- protected static final String LETTERS_REGEX = "([a-zA-Z]+)";
- protected static final String NONLETTERS_REGEX = "([^a-zA-Z]+)";
-
- protected static final Function1<String, String> OPTIONAL =
- (regex) -> regex + "?";
-
- private final String name;
- private final List<String> meaningfulParts;
-
- public static final Function2<Keyword, String, Boolean> IS_MATCHING_KEYWORD_NAME = (keyword, key) ->
- keyword != null && keyword.getName() != null && keyword.getName().equals(key);
-
- /**
- * Returns list of independent parts inside the keyword. Current implementation assumes that customer can join keywords with integer values, so
- * keyword is decomposed to parts then some parts of the keyword is skipped because of replacement process.
- *
- * @param matcher - Matcher to check find independent groups inside the keyword
- * @param skipGroups Informs this method about which groups should be consider as part of the replacement process
- * @return list of independent parts inside the keywords
- */
- static List<String> extractPartsFrom(Matcher matcher, List skipGroups) {
- List<String> parts = new ArrayList<String>();
- for (int i = 1; i <= matcher.groupCount(); i++) {
- if (matcher.group(i) != null && !skipGroups.contains(i)) {
- parts.add(matcher.group(i));
- }
- }
- return parts;
- }
-
- Keyword(String name, List<String> meaningfulParts) {
- this.name = name;
- this.meaningfulParts = meaningfulParts;
- }
-
- public String substituteKeyword(String substitution) {
- return meaningfulParts.stream()
- .map(part -> part.equals(name) ? substitution : part)
- .collect(Collectors.joining());
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/keywords/NonParameterKeyword.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/keywords/NonParameterKeyword.java
deleted file mode 100644
index 5e44550bc..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/keywords/NonParameterKeyword.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.pnfsimulator.simulator.keywords;
-
-import io.vavr.Tuple;
-import io.vavr.Tuple1;
-import io.vavr.match.annotation.Patterns;
-import io.vavr.match.annotation.Unapply;
-import java.util.Collections;
-import java.util.List;
-import java.util.regex.Pattern;
-import lombok.Getter;
-import lombok.Setter;
-import lombok.val;
-
-@Patterns
-@Getter
-@Setter
-public class NonParameterKeyword extends Keyword {
-
- public static final int KEYWORD_NAME_GROUP = 2;
-
- private static final String KEYWORD_REGEX = new StringBuilder()
- .append(OPTIONAL.apply(NONLETTERS_REGEX))
- .append("#")
- .append(LETTERS_REGEX)
- .append("(?!\\()")
- .append(OPTIONAL.apply(NONLETTERS_REGEX))
- .toString();
-
- private NonParameterKeyword(String name, List<String> meaningfulParts) {
- super(name, meaningfulParts);
- }
-
- @Unapply
- static Tuple1<NonParameterKeyword> nonParameterKeyword(String keyword) {
- val matcher = Pattern.compile(KEYWORD_REGEX).matcher(keyword);
- NonParameterKeyword npk = null;
- if (matcher.find()) {
- npk = new NonParameterKeyword(
- matcher.group(KEYWORD_NAME_GROUP),
- extractPartsFrom(matcher, Collections.emptyList())
- );
- }
- return Tuple.of(npk);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/keywords/SingleParameterKeyword.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/keywords/SingleParameterKeyword.java
deleted file mode 100644
index b1c38c883..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/keywords/SingleParameterKeyword.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.pnfsimulator.simulator.keywords;
-
-import io.vavr.Tuple;
-import io.vavr.Tuple1;
-import io.vavr.match.annotation.Patterns;
-import io.vavr.match.annotation.Unapply;
-import java.util.Collections;
-import java.util.List;
-import java.util.regex.Pattern;
-import lombok.Getter;
-import lombok.Setter;
-import lombok.val;
-
-@Patterns
-@Getter
-@Setter
-public class SingleParameterKeyword extends Keyword {
-
- public static final int KEYWORD_NAME_GROUP = 2;
- public static final int ADDITIONAL_PARAMETER_GROUP = 3;
-
- private static final String KEYWORD_REGEX = new StringBuilder()
- .append(OPTIONAL.apply(NONLETTERS_REGEX))
- .append("#")
- .append(LETTERS_REGEX)
- .append("\\((\\d+)\\)")
- .append(OPTIONAL.apply(NONLETTERS_REGEX))
- .toString();
- public static final int SKIPPED_GROUP_NUMBER = 3;
-
- private Integer additionalParameter;
-
- private SingleParameterKeyword(String name, List<String> meaningfulParts,
- Integer additionalParameter) {
- super(name, meaningfulParts);
- this.additionalParameter = additionalParameter;
- }
-
- @Unapply
- static Tuple1<SingleParameterKeyword> singleParameterKeyword(String keyword) {
- val matcher = Pattern.compile(KEYWORD_REGEX).matcher(keyword);
- SingleParameterKeyword spk = null;
- if (matcher.find()) {
- spk = new SingleParameterKeyword(
- matcher.group(KEYWORD_NAME_GROUP),
- extractPartsFrom(matcher, Collections.singletonList(SKIPPED_GROUP_NUMBER)),
- Integer.parseInt(matcher.group(ADDITIONAL_PARAMETER_GROUP))
- );
- }
- return Tuple.of(spk);
- }
-}
-
-
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/keywords/TwoParameterKeyword.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/keywords/TwoParameterKeyword.java
deleted file mode 100644
index 6fecfa63b..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/keywords/TwoParameterKeyword.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.pnfsimulator.simulator.keywords;
-
-import io.vavr.Tuple;
-import io.vavr.Tuple1;
-import io.vavr.match.annotation.Patterns;
-import io.vavr.match.annotation.Unapply;
-import java.util.Arrays;
-import java.util.List;
-import java.util.regex.Pattern;
-import lombok.Getter;
-import lombok.Setter;
-import lombok.val;
-
-@Patterns
-@Getter
-@Setter
-public class TwoParameterKeyword extends Keyword {
-
- public static final int ADDITIONAL_PARAMETER_1_GROUP = 3;
- public static final int ADDITIONAL_PARAMETER_2_GROUP = 4;
- public static final int KEYWORD_NAME_GROUP = 2;
- protected static final List<Integer> ADDITIONAL_PARAMETERS_GROUPS = Arrays.asList(ADDITIONAL_PARAMETER_1_GROUP, ADDITIONAL_PARAMETER_2_GROUP);
-
- private static final String NON_LIMITED_NUMBER_REGEX = "\\((\\d+)";
- private static final String COLON_REGEX = "\\s?,\\s?";
- private static final String OPTIONAL_NUMBER_PARAM_REGEX = "(\\d+)\\)";
-
- private static final String KEYWORD_REGEX = OPTIONAL.apply(NONLETTERS_REGEX)
- + "#"
- + LETTERS_REGEX
- + NON_LIMITED_NUMBER_REGEX
- + COLON_REGEX
- + OPTIONAL_NUMBER_PARAM_REGEX
- + OPTIONAL.apply(NONLETTERS_REGEX);
-
- private Integer additionalParameter1;
- private Integer additionalParameter2;
-
- private TwoParameterKeyword(String name, List<String> meaningfulParts, Integer additionalParameter1,
- Integer additionalParameter2) {
- super(name, meaningfulParts);
- this.additionalParameter1 = additionalParameter1;
- this.additionalParameter2 = additionalParameter2;
- }
-
- @Unapply
- static Tuple1<TwoParameterKeyword> twoParameterKeyword(String keyword) {
- val matcher = Pattern.compile(KEYWORD_REGEX).matcher(keyword);
- TwoParameterKeyword tpk = null;
- if (matcher.find()) {
- tpk = new TwoParameterKeyword(
- matcher.group(KEYWORD_NAME_GROUP),
- extractPartsFrom(matcher, ADDITIONAL_PARAMETERS_GROUPS),
- Integer.parseInt(matcher.group(ADDITIONAL_PARAMETER_1_GROUP)),
- Integer.parseInt(matcher.group(ADDITIONAL_PARAMETER_2_GROUP))
- );
- }
- return Tuple.of(tpk);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/scheduler/EventJob.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/scheduler/EventJob.java
deleted file mode 100644
index 52d076fad..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/scheduler/EventJob.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator.scheduler;
-
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import org.onap.pnfsimulator.simulator.KeywordsHandler;
-import org.onap.pnfsimulator.simulator.client.HttpClientAdapter;
-import org.onap.pnfsimulator.simulator.client.HttpClientAdapterImpl;
-import org.quartz.Job;
-import org.quartz.JobDataMap;
-import org.quartz.JobExecutionContext;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.net.MalformedURLException;
-import java.util.Optional;
-
-public class EventJob implements Job {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(EventJob.class);
-
- static final String TEMPLATE_NAME = "TEMPLATE_NAME";
- static final String VES_URL = "VES_URL";
- static final String BODY = "BODY";
- static final String CLIENT_ADAPTER = "CLIENT_ADAPTER";
- static final String KEYWORDS_HANDLER = "KEYWORDS_HANDLER";
- static final String EVENT_ID = "EVENT_ID";
-
- @Override
- public void execute(JobExecutionContext jobExecutionContext) {
- JobDataMap jobDataMap = jobExecutionContext.getJobDetail().getJobDataMap();
- String templateName = jobDataMap.getString(TEMPLATE_NAME);
- String vesUrl = jobDataMap.getString(VES_URL);
- JsonObject body = (JsonObject) jobDataMap.get(BODY);
- String id = jobDataMap.getString(EVENT_ID);
- Optional<HttpClientAdapter> httpClientAdapter = getHttpClientAdapter(jobDataMap, vesUrl);
-
- if (httpClientAdapter.isPresent()) {
- KeywordsHandler keywordsHandler = (KeywordsHandler) jobDataMap.get(KEYWORDS_HANDLER);
- JsonElement processedBody = keywordsHandler.substituteKeywords(body, id);
- String processedBodyString = processedBody.toString();
- String jobKey = jobExecutionContext.getJobDetail().getKey().toString();
-
- logEventDetails(templateName, vesUrl, body.toString(), jobKey);
- httpClientAdapter.get().send(processedBodyString);
- } else {
- LOGGER.error("Could not send event as client is not available");
- }
- }
- private Optional<HttpClientAdapter> getHttpClientAdapter(JobDataMap jobDataMap, String vesUrl) {
- HttpClientAdapter adapter = null;
- try {
- adapter = (HttpClientAdapter) jobDataMap
- .getOrDefault(CLIENT_ADAPTER, new HttpClientAdapterImpl(vesUrl));
- } catch (MalformedURLException e) {
- LOGGER.error("Invalid format of vesServerUr: {}", vesUrl);
- }
- return Optional.ofNullable(adapter);
- }
-
- private void logEventDetails(String templateName, String vesUrl, String body, String jobKey) {
- LOGGER.info(String.format("Job %s:Sending event to %s from template %s",
- jobKey, vesUrl, templateName));
- if (LOGGER.isDebugEnabled()) {
- LOGGER.debug(String.format("Job %s: Request body %s", jobKey, body));
- }
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/scheduler/EventScheduler.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/scheduler/EventScheduler.java
deleted file mode 100644
index 1b1746fa5..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/scheduler/EventScheduler.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.pnfsimulator.simulator.scheduler;
-
-
-import static org.onap.pnfsimulator.simulator.scheduler.EventJob.BODY;
-import static org.onap.pnfsimulator.simulator.scheduler.EventJob.CLIENT_ADAPTER;
-import static org.onap.pnfsimulator.simulator.scheduler.EventJob.EVENT_ID;
-import static org.onap.pnfsimulator.simulator.scheduler.EventJob.KEYWORDS_HANDLER;
-import static org.onap.pnfsimulator.simulator.scheduler.EventJob.TEMPLATE_NAME;
-import static org.onap.pnfsimulator.simulator.scheduler.EventJob.VES_URL;
-import static org.quartz.SimpleScheduleBuilder.simpleSchedule;
-
-import com.google.gson.JsonObject;
-
-import java.net.MalformedURLException;
-import java.util.List;
-import java.util.Optional;
-import java.util.stream.Collectors;
-import org.onap.pnfsimulator.simulator.KeywordsHandler;
-import org.onap.pnfsimulator.simulator.client.HttpClientAdapterImpl;
-import org.quartz.JobBuilder;
-import org.quartz.JobDataMap;
-import org.quartz.JobDetail;
-import org.quartz.JobExecutionContext;
-import org.quartz.JobKey;
-import org.quartz.Scheduler;
-import org.quartz.SchedulerException;
-import org.quartz.SimpleTrigger;
-import org.quartz.TriggerBuilder;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Component;
-
-@Component
-public class EventScheduler {
-
-
- private final Scheduler scheduler;
- private final KeywordsHandler keywordsHandler;
-
- @Autowired
- public EventScheduler(Scheduler scheduler, KeywordsHandler keywordsHandler) {
- this.scheduler = scheduler;
- this.keywordsHandler = keywordsHandler;
- }
-
- public String scheduleEvent(String vesUrl, Integer repeatInterval, Integer repeatCount,
- String templateName, String eventId, JsonObject body)
- throws SchedulerException, MalformedURLException {
-
- JobDetail jobDetail = createJobDetail(vesUrl, templateName, eventId, body);
- SimpleTrigger trigger = createTrigger(repeatInterval, repeatCount);
-
- scheduler.scheduleJob(jobDetail, trigger);
- return jobDetail.getKey().getName();
- }
-
- public boolean cancelAllEvents() throws SchedulerException {
- List<JobKey> jobKeys = getActiveJobsKeys();
- return scheduler.deleteJobs(jobKeys);
- }
-
- public boolean cancelEvent(String jobName) throws SchedulerException {
- Optional<JobKey> activeJobKey = getActiveJobsKeys().stream().filter(e -> e.getName().equals(jobName)).findFirst();
- return activeJobKey.isPresent() && scheduler.deleteJob(activeJobKey.get());
- }
-
- private SimpleTrigger createTrigger(int interval, int repeatCount) {
- return TriggerBuilder.newTrigger()
- .withSchedule(simpleSchedule()
- .withIntervalInSeconds(interval)
- .withRepeatCount(repeatCount - 1))
- .build();
- }
-
- private JobDetail createJobDetail(String vesUrl, String templateName, String eventId, JsonObject body) throws MalformedURLException {
- JobDataMap jobDataMap = new JobDataMap();
- jobDataMap.put(TEMPLATE_NAME, templateName);
- jobDataMap.put(VES_URL, vesUrl);
- jobDataMap.put(EVENT_ID, eventId);
- jobDataMap.put(KEYWORDS_HANDLER, keywordsHandler);
- jobDataMap.put(BODY, body);
- jobDataMap.put(CLIENT_ADAPTER, new HttpClientAdapterImpl(vesUrl));
-
- return JobBuilder
- .newJob(EventJob.class)
- .withDescription(templateName)
- .usingJobData(jobDataMap)
- .build();
- }
-
- private List<JobKey> getActiveJobsKeys() throws SchedulerException {
- return scheduler.getCurrentlyExecutingJobs()
- .stream()
- .map(JobExecutionContext::getJobDetail)
- .map(JobDetail::getKey)
- .collect(Collectors.toList());
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/scheduler/QuartzConfiguration.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/scheduler/QuartzConfiguration.java
deleted file mode 100644
index 2beb9dc5e..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/scheduler/QuartzConfiguration.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.pnfsimulator.simulator.scheduler;
-
-import org.quartz.Scheduler;
-import org.quartz.SchedulerException;
-import org.quartz.impl.StdSchedulerFactory;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-@Configuration
-class QuartzConfiguration {
-
- @Bean
- Scheduler provideScheduler() throws SchedulerException {
- StdSchedulerFactory stdSchedulerFactory = new StdSchedulerFactory();
- Scheduler scheduler = stdSchedulerFactory.getScheduler();
- scheduler.start();
- return scheduler;
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulatorconfig/SimulatorConfig.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulatorconfig/SimulatorConfig.java
deleted file mode 100644
index 0baa47796..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulatorconfig/SimulatorConfig.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulatorconfig;
-
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import lombok.AllArgsConstructor;
-import lombok.Getter;
-import lombok.NoArgsConstructor;
-import lombok.Setter;
-import lombok.ToString;
-import org.springframework.data.annotation.Id;
-import org.springframework.data.mongodb.core.mapping.Field;
-
-import javax.validation.constraints.NotNull;
-import java.net.URL;
-
-@Getter
-@Setter
-@AllArgsConstructor
-@NoArgsConstructor
-@ToString
-public class SimulatorConfig {
-
- @JsonIgnore
- @Id
- private String id;
-
- @NotNull
- @Field("vesServerUrl")
- private URL vesServerUrl;
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulatorconfig/SimulatorConfigRepository.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulatorconfig/SimulatorConfigRepository.java
deleted file mode 100644
index 5e63ee493..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulatorconfig/SimulatorConfigRepository.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulatorconfig;
-
-import org.springframework.data.mongodb.repository.MongoRepository;
-
-public interface SimulatorConfigRepository extends MongoRepository<SimulatorConfig, String> {
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulatorconfig/SimulatorConfigService.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulatorconfig/SimulatorConfigService.java
deleted file mode 100644
index 206335117..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulatorconfig/SimulatorConfigService.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulatorconfig;
-
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-import java.util.List;
-
-@Service
-public class SimulatorConfigService {
-
- private final SimulatorConfigRepository repository;
-
- @Autowired
- public SimulatorConfigService(SimulatorConfigRepository repository) {
- this.repository = repository;
- }
-
-
- public SimulatorConfig getConfiguration() {
- List<SimulatorConfig> configs = repository.findAll();
- if (configs.isEmpty()) {
- throw new IllegalStateException("No configuration found in db");
- }
- return configs.get(0);
- }
-
- public SimulatorConfig updateConfiguration(SimulatorConfig configuration) {
- SimulatorConfig currentConfig = getConfiguration();
- currentConfig.setVesServerUrl(configuration.getVesServerUrl());
- return repository.save(currentConfig);
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/FsToDbTemplateSynchronizer.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/FsToDbTemplateSynchronizer.java
deleted file mode 100644
index 881585bd6..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/FsToDbTemplateSynchronizer.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.template;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.stream.Stream;
-
-import org.bson.json.JsonParseException;
-import org.onap.pnfsimulator.db.Storage;
-import org.onap.pnfsimulator.filesystem.WatcherEventProcessor;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.stereotype.Service;
-
-@Service
-public class FsToDbTemplateSynchronizer {
-
- private static final String CANNOT_SYNC = "Cannot synchronize templates. Check whether the proper folder exists.";
- private static final Logger LOGGER = LoggerFactory.getLogger(FsToDbTemplateSynchronizer.class);
-
- private final String templatesDir;
- private final Storage<Template> storage;
-
- @Autowired
- public FsToDbTemplateSynchronizer(@Value("${templates.dir}") String templatesDir,
- Storage<Template> storage) {
- this.templatesDir = templatesDir;
- this.storage = storage;
- }
-
- public void synchronize() {
- try {
- processTemplatesFolder();
- } catch (IOException e) {
- LOGGER.error(CANNOT_SYNC, e);
- }
- }
-
- private void processTemplatesFolder() throws IOException {
- try (Stream<Path> walk = Files.walk(Paths.get(templatesDir))) {
- walk.filter(Files::isRegularFile).forEach(path -> {
- try {
- WatcherEventProcessor.MODIFIED.processEvent(path, storage);
- } catch (IOException | JsonParseException e) {
- LOGGER
- .error("Cannot synchronize template: " + path.getFileName().toString(), e);
- }
- });
- }
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/Template.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/Template.java
deleted file mode 100644
index c84b8d0b7..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/Template.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.template;
-
-import java.util.Objects;
-
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import lombok.NoArgsConstructor;
-import lombok.ToString;
-import org.onap.pnfsimulator.db.Row;
-import org.bson.Document;
-import org.onap.pnfsimulator.template.search.JsonUtils;
-import org.springframework.data.mongodb.core.mapping.Field;
-
-@NoArgsConstructor
-@ToString
-public class Template extends Row {
-
- @Field("content")
- private Document content;
-
- @Field("flatContent")
- private Document flatContent;
-
- @Field("lmod")
- private long lmod;
-
- public Template(String name, Document content, long lmod) {
- this.id = name;
- this.content = content;
- this.lmod = lmod;
- this.flatContent = new JsonUtils().flatten(content);
- }
-
- public Template(String name, String template, long lmod) {
- this.id = name;
- this.content = Document.parse(template);
- this.lmod = lmod;
- this.flatContent = new JsonUtils().flatten(this.content);
- }
-
- public void setContent(Document content) {
- this.content = content;
- this.flatContent = new JsonUtils().flatten(content);
- }
-
- public Document getContent() {
- return new Document(this.content);
- }
-
- @JsonIgnore
- public Document getFlatContent() {
- return new Document(this.flatContent);
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) {
- return true;
- }
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
- Template template = (Template) o;
- return Objects.equals(content, template.content)
- && Objects.equals(id, template.id)
- && Objects.equals(lmod, template.lmod);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(content, id);
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/TemplateRepository.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/TemplateRepository.java
deleted file mode 100644
index 78c9c77e0..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/TemplateRepository.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.template;
-
-import org.springframework.data.mongodb.repository.MongoRepository;
-
-public interface TemplateRepository extends MongoRepository<Template, String> {
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/TemplateService.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/TemplateService.java
deleted file mode 100644
index 3e245e123..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/TemplateService.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.template;
-
-import java.util.List;
-import java.util.Optional;
-
-import com.google.gson.JsonObject;
-import org.onap.pnfsimulator.db.Storage;
-import org.onap.pnfsimulator.template.search.TemplateSearchHelper;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.annotation.Primary;
-import org.springframework.stereotype.Service;
-
-@Primary
-@Service
-public class TemplateService implements Storage<Template> {
-
- private final TemplateRepository templateRepository;
- private TemplateSearchHelper searchHelper;
-
-
- @Autowired
- public TemplateService(TemplateRepository templateRepository, TemplateSearchHelper searchHelper) {
- this.templateRepository = templateRepository;
- this.searchHelper = searchHelper;
- }
-
- @Override
- public List<Template> getAll() {
- return templateRepository.findAll();
- }
-
- @Override
- public Optional<Template> get(String name) {
- return templateRepository.findById(name);
- }
-
- @Override
- public void persist(Template template) {
- templateRepository.save(template);
- }
-
- @Override
- public boolean tryPersistOrOverwrite(Template template, boolean overwrite) {
- if (templateRepository.existsById(template.getId()) && !overwrite) {
- return false;
- }
- templateRepository.save(template);
- return true;
- }
-
- @Override
- public void delete(String templateName) {
- templateRepository.deleteById(templateName);
- }
-
- @Override
- public List<String> getIdsByContentCriteria(JsonObject stringQueryJson) {
- return searchHelper.getIdsOfDocumentMatchingCriteria(stringQueryJson);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/IllegalJsonValueException.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/IllegalJsonValueException.java
deleted file mode 100644
index 6890382ea..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/IllegalJsonValueException.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.template.search;
-
-public class IllegalJsonValueException extends IllegalArgumentException {
-
- IllegalJsonValueException(String s) {
- super(s);
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/JsonUtils.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/JsonUtils.java
deleted file mode 100644
index b595b4f36..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/JsonUtils.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.template.search;
-
-import com.google.common.base.Strings;
-import com.google.gson.Gson;
-import com.google.gson.JsonArray;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonPrimitive;
-import org.bson.Document;
-
-/**
- * This util flattens nested json and produces json with keys transformed to form of json path
- * where default separator between parent object key and object key is ':'
- * For easing searching of boolean values, they are converted to its string representation
- */
-public class JsonUtils {
-
- private static final String DEFAULT_PARENT_KEY_TO_OBJECT_KEY_SEPARATOR = ":";
- private static final String SEED_PREFIX = "";
- private static final Gson GSON = new Gson();
-
- public JsonObject flatten(JsonObject original) {
- return flattenWithPrefixedKeys(DEFAULT_PARENT_KEY_TO_OBJECT_KEY_SEPARATOR, original.deepCopy(), SEED_PREFIX, new JsonObject());
- }
-
- public JsonObject flatten(String parentKeyToKeySeparator, JsonObject original) {
- return flattenWithPrefixedKeys(parentKeyToKeySeparator, original.deepCopy(), SEED_PREFIX, new JsonObject());
- }
-
- public Document flatten(Document original) {
- return flatten(DEFAULT_PARENT_KEY_TO_OBJECT_KEY_SEPARATOR, original);
- }
-
- public Document flatten(String parentKeyToKeySeparator, Document original) {
- JsonObject originalJsonObject = GSON.fromJson(original.toJson(), JsonObject.class);
- JsonObject flattenedJson = flatten(parentKeyToKeySeparator, originalJsonObject);
- return Document.parse(flattenedJson.toString());
- }
-
- private JsonObject flattenWithPrefixedKeys(String parentKeyToKeySeparator, JsonElement topLevelElem, String prefix, JsonObject acc) {
- if (topLevelElem.isJsonPrimitive()) {
- handleJsonPrimitive(topLevelElem, prefix, acc);
- } else if (topLevelElem.isJsonArray()) {
- handleJsonArray(parentKeyToKeySeparator, topLevelElem, prefix, acc);
- } else if (topLevelElem.isJsonObject()) {
- handleJsonObject(parentKeyToKeySeparator, topLevelElem, prefix, acc);
- } else {
- acc.add(prefix, topLevelElem.getAsJsonNull());
- }
- return acc.deepCopy();
- }
-
- private void handleJsonObject(String parentKeyToKeySeparator, JsonElement topLevelElem, String prefix, JsonObject acc) {
- boolean isEmpty = true;
- JsonObject thisToplevelObj = topLevelElem.getAsJsonObject();
- for (String key : thisToplevelObj.keySet()) {
- isEmpty = false;
- String keyPrefix = String.format("%s%s%s", prefix, parentKeyToKeySeparator, key);
- flattenWithPrefixedKeys(parentKeyToKeySeparator, thisToplevelObj.get(key), keyPrefix, acc);
- }
- if (isEmpty && !Strings.isNullOrEmpty(prefix)) {
- acc.add(prefix, new JsonObject());
- }
- }
-
- private void handleJsonArray(String parentKeyToKeySeparator, JsonElement topLevelElem, String prefix, JsonObject acc) {
- JsonArray asJsonArray = topLevelElem.getAsJsonArray();
- if (asJsonArray.size() == 0) {
- acc.add(prefix, new JsonArray());
- }
- for (int i = 0; i < asJsonArray.size(); i++) {
- flattenWithPrefixedKeys(parentKeyToKeySeparator, asJsonArray.get(i), String.format("%s[%s]", prefix, i), acc);
- }
- }
-
- private void handleJsonPrimitive(JsonElement topLevelElem, String prefix, JsonObject acc) {
- JsonPrimitive jsonPrimitive = topLevelElem.getAsJsonPrimitive();
- if (jsonPrimitive.isBoolean()) {
- acc.add(prefix, new JsonPrimitive(jsonPrimitive.getAsString()));
- } else {
- acc.add(prefix, topLevelElem.getAsJsonPrimitive());
- }
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/TemplateSearchHelper.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/TemplateSearchHelper.java
deleted file mode 100644
index 3f22b1adf..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/TemplateSearchHelper.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.template.search;
-
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import org.onap.pnfsimulator.template.search.handler.PrimitiveValueCriteriaBuilder;
-import org.onap.pnfsimulator.template.search.viewmodel.FlatTemplateContent;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.data.mongodb.core.MongoTemplate;
-import org.springframework.data.mongodb.core.query.Criteria;
-import org.springframework.data.mongodb.core.query.Query;
-import org.springframework.stereotype.Component;
-
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Pattern;
-import java.util.stream.Collectors;
-
-@Component
-public class TemplateSearchHelper {
- private static final String PARENT_TO_CHILD_KEY_SEPARATOR = ":"; //compliant with flat json stored in db
- private static final String FLATTENED_JSON_KEY_REGEX = PARENT_TO_CHILD_KEY_SEPARATOR + "%s(?:(\\[[\\d]+\\]))?$";
- private static final String FLATTENED_TEMPLATES_VIEW = "flatTemplatesView";
-
- private MongoTemplate mongoTemplate;
- private PrimitiveValueCriteriaBuilder criteriaBuilder;
-
- @Autowired
- public TemplateSearchHelper(MongoTemplate mongoTemplate) {
- this.mongoTemplate = mongoTemplate;
- this.criteriaBuilder = new PrimitiveValueCriteriaBuilder();
- }
-
- public List<String> getIdsOfDocumentMatchingCriteria(JsonObject jsonCriteria) {
- if (isNullValuePresentInCriteria(jsonCriteria)) {
- throw new IllegalJsonValueException("Null values in search criteria are not supported.");
- }
- Criteria mongoDialectCriteria = composeCriteria(jsonCriteria);
- Query query = new Query(mongoDialectCriteria);
- List<FlatTemplateContent> flatTemplateContents = mongoTemplate.find(query, FlatTemplateContent.class, FLATTENED_TEMPLATES_VIEW);
- return flatTemplateContents
- .stream()
- .map(FlatTemplateContent::getId)
- .collect(Collectors.toList());
- }
-
-
- private Criteria composeCriteria(JsonObject criteria) {
- Criteria[] criteriaArr = criteria.entrySet()
- .stream()
- .map(this::mapEntryCriteriaWithRegex)
- .toArray(Criteria[]::new);
- return criteriaArr.length > 0 ? new Criteria().andOperator(criteriaArr) : new Criteria();
- }
-
- private Criteria mapEntryCriteriaWithRegex(Map.Entry<String, JsonElement> entry) {
- Pattern primitiveOrArrayElemKeyRegex = getCaseInsensitive(String.format(FLATTENED_JSON_KEY_REGEX, entry.getKey()));
- Criteria criteriaForJsonKey = Criteria.where("k").regex(primitiveOrArrayElemKeyRegex);
- Criteria criteriaWithValue = criteriaBuilder.applyValueCriteriaBasedOnPrimitiveType(criteriaForJsonKey.and("v"), entry.getValue().getAsJsonPrimitive());
- return Criteria.where("keyValues").elemMatch(criteriaWithValue);
-
- }
-
- private boolean isNullValuePresentInCriteria(JsonObject jsonObject) {
- return jsonObject.entrySet()
- .stream()
- .map(Map.Entry::getValue)
- .anyMatch(JsonElement::isJsonNull);
- }
-
- static Pattern getCaseInsensitive(String base) {
- return Pattern.compile(base, Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE);
- }
-}
-
-
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/handler/PrimitiveValueCriteriaBuilder.java b/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/handler/PrimitiveValueCriteriaBuilder.java
deleted file mode 100644
index 79d64b7dd..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/java/org/onap/pnfsimulator/template/search/handler/PrimitiveValueCriteriaBuilder.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.template.search.handler;
-
-import com.google.common.collect.Lists;
-import com.google.gson.JsonPrimitive;
-import org.springframework.data.mongodb.core.query.Criteria;
-
-import java.util.List;
-import java.util.regex.Pattern;
-
-/**
- * This class is a helper class for constructing apropriate criteria for query send to mongodb based on type of value.
- * Query is build to search mongodb for templates that contains key-value pairs that satisfy given criteria.
- * Value is oftype JsonPrimitive, based on its primitive java type following criteria are build to get proper document:
- * -for string - there is a regex expression that ignores every meta character inside passed argument and searches for exact literal match ignoring case;
- * -for number - all numbers are treated as double (mongodb number type equivalent)
- * -for boolean - exact match, used string representation of boolean in search
- **/
-
-public class PrimitiveValueCriteriaBuilder {
-
- private final List<ValueTypeHandler> typeHandlers;
-
- public PrimitiveValueCriteriaBuilder() {
- typeHandlers = Lists.newArrayList(new StringValueHandler(), new NumberValueHandler(), new BoolValueHandler());
- }
-
- public Criteria applyValueCriteriaBasedOnPrimitiveType(Criteria baseCriteria, JsonPrimitive jsonPrimitive) {
- ValueTypeHandler typeHandler = typeHandlers.stream()
- .filter(el -> el.isProperTypeHandler(jsonPrimitive))
- .findFirst()
- .orElseThrow(() ->
- new IllegalArgumentException(String.format(
- "Expected json primitive, but given value: %s is of type: %s and could not be decoded",
- jsonPrimitive, jsonPrimitive.getClass().toString())));
- return typeHandler.chainCriteriaForValue(baseCriteria, jsonPrimitive);
- }
-
- private interface ValueTypeHandler {
- boolean isProperTypeHandler(JsonPrimitive value);
-
- Criteria chainCriteriaForValue(Criteria criteria, JsonPrimitive value);
- }
-
- private class BoolValueHandler implements ValueTypeHandler {
- public boolean isProperTypeHandler(JsonPrimitive value) {
- return value.isBoolean();
- }
-
- public Criteria chainCriteriaForValue(Criteria criteria, JsonPrimitive value) {
- return criteria.is(value.getAsString());
- }
-
- }
-
- private class NumberValueHandler implements ValueTypeHandler {
- public boolean isProperTypeHandler(JsonPrimitive value) {
- return value.isNumber();
- }
-
- public Criteria chainCriteriaForValue(Criteria baseCriteria, JsonPrimitive value) {
- return baseCriteria.is(value.getAsDouble());
- }
- }
-
- private class StringValueHandler implements ValueTypeHandler {
- public boolean isProperTypeHandler(JsonPrimitive value) {
- return value.isString();
- }
-
- public Criteria chainCriteriaForValue(Criteria baseCriteria, JsonPrimitive value) {
- return baseCriteria.regex(makeRegexCaseInsensitive(value.getAsString()));
- }
-
- private Pattern makeRegexCaseInsensitive(String base) {
- String metaCharEscaped = convertToIgnoreMetaChars(base);
- return Pattern.compile("^" + metaCharEscaped + "$", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE);
- }
-
- private String convertToIgnoreMetaChars(String valueWithMetaChars) {
- return Pattern.quote(valueWithMetaChars);
- }
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/resources/application.properties b/test/mocks/pnfsimulator/pnfsimulator/src/main/resources/application.properties
deleted file mode 100644
index ae21b674c..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/resources/application.properties
+++ /dev/null
@@ -1,7 +0,0 @@
-server.port=5000
-templates.dir=/app/templates
-spring.data.mongodb.host=mongo
-spring.data.mongodb.port=27017
-spring.data.mongodb.username=pnf_simulator_user
-spring.data.mongodb.password=zXcVbN123!
-spring.data.mongodb.database=pnf_simulator
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/main/resources/logback.xml b/test/mocks/pnfsimulator/pnfsimulator/src/main/resources/logback.xml
deleted file mode 100644
index 8569b562f..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/main/resources/logback.xml
+++ /dev/null
@@ -1,70 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-<Configuration complete="true" compact="true">
-
- <Property name="outputFilename" value="pnfsimulator_output"/>
- <Property name="log-path" value="/var/log/ONAP/pnfsimulator"/>
- <Property name="archive" value="/var/log/ONAP/pnfsimulator/archive"/>
- <property name="maxFileSize" value="50MB"/>
- <property name="maxHistory" value="30"/>
- <property name="totalSizeCap" value="10GB"/>
-
- <appender name="Console" target="SYSTEM_OUT" class="ch.qos.logback.core.ConsoleAppender">
- <encoder>
- <Pattern>%nopexception%logger
- |%date{yyyy-MM-dd'T'HH:mm:ss.SSSXXX,UTC}
- |%level
- |%replace(%replace(%message){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%mdc){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%rootException){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%marker){'\t','\\\\t'}){'\n','\\\\n'}
- |%thread
- |%n</Pattern>
- </encoder>
- </appender>
-
- <appender name="ROLLING-FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <encoder>
- <pattern>%nopexception%logger
- |%date{yyyy-MM-dd'T'HH:mm:ss.SSSXXX,UTC}
- |%level
- |%replace(%replace(%message){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%mdc){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%rootException){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%marker){'\t','\\\\t'}){'\n','\\\\n'}
- |%thread
- |%n</pattern>
- </encoder>
- <File>${log-path}/${outputFilename}.log</File>
- <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
- <FileNamePattern>${archive}/${outputFilename}.%d{yyyy-MM-dd}.%i.log.zip</FileNamePattern>
- <MaxFileSize>${maxFileSize}</MaxFileSize>
- <MaxHistory>${maxHistory}</MaxHistory>
- <TotalSizeCap>${totalSizeCap}</TotalSizeCap>
- </rollingPolicy>
- </appender>
-
- <root level="info">
- <appender-ref ref="Console" />
- <appender-ref ref="ROLLING-FILE" />
- </root>
-</Configuration>
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/event/EventDataServiceTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/event/EventDataServiceTest.java
deleted file mode 100644
index 5ed51cc34..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/event/EventDataServiceTest.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.event;
-
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.mockito.MockitoAnnotations.initMocks;
-
-import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Optional;
-
-import org.hamcrest.collection.IsIterableContainingInOrder;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.BeforeEach;
-import org.mockito.ArgumentCaptor;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-
-public class EventDataServiceTest {
-
- @Mock
- private EventDataRepository repositoryMock;
-
- @InjectMocks
- private EventDataService service;
-
- private static EventData sampleEventData(String id, String template,
- String patched, String input, String keywords) {
- return EventData.builder()
- .id(id)
- .template(template)
- .patched(patched)
- .input(input)
- .keywords(keywords)
- .build();
- }
-
- @BeforeEach
- void resetMocks() {
- initMocks(this);
- }
-
- @Test
- void persistEventDataJsonObjectTest() {
- JsonParser parser = new JsonParser();
- JsonObject template = parser.parse("{ \"bla1\": \"bla2\"}").getAsJsonObject();
- JsonObject patched = parser.parse("{ \"bla3\": \"bla4\"}").getAsJsonObject();
- JsonObject input = parser.parse("{ \"bla5\": \"bla6\"}").getAsJsonObject();
- JsonObject keywords = parser.parse("{ \"bla7\": \"bla8\"}").getAsJsonObject();
- ArgumentCaptor<EventData> argumentCaptor = ArgumentCaptor.forClass(EventData.class);
-
- service.persistEventData(template, patched, input, keywords);
-
- verify(repositoryMock).save(argumentCaptor.capture());
- EventData captured = argumentCaptor.getValue();
-
- assertEquals(captured.getTemplate(), template.toString());
- assertEquals(captured.getPatched(), patched.toString());
- assertEquals(captured.getInput(), input.toString());
- assertEquals(captured.getKeywords(), keywords.toString());
- }
-
- @Test
- void getAllEventsTest() {
-
- List<EventData> eventDataList = new ArrayList<>();
- EventData ed1 = sampleEventData("id1", "t1", "p1", "i1", "k1");
- EventData ed2 = sampleEventData("id2", "t2", "p2", "i2", "k2");
- eventDataList.add(ed1);
- eventDataList.add(ed2);
-
- when(repositoryMock.findAll()).thenReturn(eventDataList);
- List<EventData> actualList = service.getAllEvents();
-
- assertEquals(eventDataList.size(), actualList.size());
- assertThat(actualList, IsIterableContainingInOrder.contains(ed1, ed2));
- }
-
- @Test
- void findByIdPresentTest() {
- String id = "some_object";
- EventData eventData = sampleEventData(id, "template", "patched", "input", "keywords");
- Optional<EventData> optional = Optional.of(eventData);
-
- when(repositoryMock.findById(id)).thenReturn(optional);
-
- Optional<EventData> actualOptional = service.getById(id);
- assertTrue(actualOptional.isPresent());
- EventData actualObject = actualOptional.get();
- assertEquals(eventData.getId(), actualObject.getId());
- assertEquals(eventData.getTemplate(), actualObject.getTemplate());
- assertEquals(eventData.getPatched(), actualObject.getPatched());
- assertEquals(eventData.getInput(), actualObject.getInput());
- assertEquals(eventData.getKeywords(), actualObject.getKeywords());
-
- }
-
- @Test
- void findByIdNotPresentTest() {
- String id = "some_object";
- Optional<EventData> optional = Optional.empty();
-
- when(repositoryMock.findById(id)).thenReturn(optional);
-
- Optional<EventData> actualOptional = service.getById(id);
- assertTrue(!actualOptional.isPresent());
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/filesystem/InMemoryTemplateStorage.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/filesystem/InMemoryTemplateStorage.java
deleted file mode 100644
index 98c4bc51c..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/filesystem/InMemoryTemplateStorage.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.filesystem;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Optional;
-
-import com.google.gson.JsonObject;
-import org.onap.pnfsimulator.db.Storage;
-import org.onap.pnfsimulator.template.Template;
-
-public class InMemoryTemplateStorage implements Storage<Template> {
-
- private List<Template> storage = new ArrayList<>();
-
- @Override
- public List<Template> getAll() {
- return new ArrayList<>(storage);
- }
-
- @Override
- public Optional<Template> get(String name) {
- return storage.stream().filter(template -> template.getId().equals(name)).findFirst();
- }
-
- @Override
- public void persist(Template template) {
- if (!storage.contains(template)){
- storage.add(template);
- }
- }
-
- @Override
- public boolean tryPersistOrOverwrite(Template template, boolean overwrite) {
- if (!storage.contains(template) || overwrite){
- storage.add(template);
- return true;
- }
- return false;
- }
-
- @Override
- public void delete(String templateName) {
- get(templateName).ifPresent(template -> storage.remove(template));
- }
-
- @Override
- public List<String> getIdsByContentCriteria(JsonObject queryJson) {
- throw new RuntimeException("Method is not implemented.");
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/filesystem/WatcherEventProcessorTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/filesystem/WatcherEventProcessorTest.java
deleted file mode 100644
index 42ed4d397..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/filesystem/WatcherEventProcessorTest.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.filesystem;
-
-import static junit.framework.TestCase.fail;
-import static org.mockito.MockitoAnnotations.initMocks;
-
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.StandardWatchEventKinds;
-import java.nio.file.WatchEvent;
-import java.time.Instant;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Optional;
-import org.bson.Document;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.onap.pnfsimulator.db.Storage;
-import org.onap.pnfsimulator.template.Template;
-
-class WatcherEventProcessorTest {
-
- @Mock
- private WatchEvent watchEvent;
- @Mock
- private Path templatesDir;
-
- private Storage<Template> storage;
- private static Path jsonFilePath;
-
- @BeforeAll
- static void init() {
- jsonFilePath = Paths.get("src/test/resources/org/onap/pnfsimulator/simulator/filesystem/test1.json");
- }
-
- @BeforeEach
- void resetMocks() {
- initMocks(this);
- storage = new InMemoryTemplateStorage();
- initStubs();
- }
-
- @Test
- void shouldProcessCreatedEventTest() {
- // when
- Mockito.when(watchEvent.kind()).thenReturn(StandardWatchEventKinds.ENTRY_CREATE);
- WatcherEventProcessor.process(watchEvent, storage, templatesDir);
- // then
- verifyPersistedValue();
- }
-
- @Test
- void shouldProcessModifiedEventTest() {
- //given
- storage.persist(new Template("test1.json", new Document(Collections.emptyMap()), Instant.now().getNano()));
- // when
- Mockito.when(watchEvent.kind()).thenReturn(StandardWatchEventKinds.ENTRY_MODIFY);
- WatcherEventProcessor.process(watchEvent, storage, templatesDir);
- // then
- verifyPersistedValue();
- }
-
- private void verifyPersistedValue() {
- Assertions.assertEquals(storage.getAll().size(), 1);
- Optional<Template> templateFromStorage = storage.get("test1.json");
- if (templateFromStorage.isPresent()) {
- Template retrievedTemplate = templateFromStorage.get();
- Document templateContent = retrievedTemplate.getContent();
- Document flatContent = retrievedTemplate.getFlatContent();
- Assertions.assertEquals(templateContent.getString("field1"), "value1");
- Assertions.assertEquals(templateContent.getInteger("field2", 0), 2);
- Assertions.assertEquals(flatContent.getInteger(":nested:key1[0]", 0), 1);
- Assertions.assertEquals(flatContent.getInteger(":nested:key1[1]", 0), 2);
- Assertions.assertEquals(flatContent.getInteger(":nested:key1[2]", 0), 3);
- Assertions.assertEquals(flatContent.getString(":nested:key2"), "sampleValue2");
- } else {
- fail();
- }
- }
-
- @Test
- void shouldProcessDeletedEventTest() {
- //given
- HashMap<String, Object> legacyObject = new HashMap<>();
- legacyObject.put("field1", "value1");
- legacyObject.put("field2", 2);
-
- storage.persist(new Template("test1.json", new Document(legacyObject), Instant.now().getNano()));
- // when
- Mockito.when(watchEvent.kind()).thenReturn(StandardWatchEventKinds.ENTRY_DELETE);
- WatcherEventProcessor.process(watchEvent, storage, templatesDir);
- // then
- Assertions.assertEquals(storage.getAll().size(), 0);
- }
-
- private void initStubs() {
- Mockito.when(templatesDir.resolve(jsonFilePath)).thenReturn(jsonFilePath);
- Mockito.when(watchEvent.context()).thenReturn(jsonFilePath);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/SimulatorControllerTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/SimulatorControllerTest.java
deleted file mode 100644
index 36e8e6609..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/SimulatorControllerTest.java
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest;
-
-import com.google.common.collect.ImmutableMap;
-import com.google.gson.Gson;
-import com.google.gson.JsonObject;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.MockitoAnnotations;
-import org.onap.pnfsimulator.rest.model.FullEvent;
-import org.onap.pnfsimulator.rest.model.SimulatorParams;
-import org.onap.pnfsimulator.rest.model.SimulatorRequest;
-import org.onap.pnfsimulator.rest.util.JsonObjectDeserializer;
-import org.onap.pnfsimulator.simulator.SimulatorService;
-import org.onap.pnfsimulator.simulatorconfig.SimulatorConfig;
-import org.quartz.SchedulerException;
-import org.springframework.http.MediaType;
-import org.springframework.test.web.servlet.MockMvc;
-import org.springframework.test.web.servlet.MvcResult;
-import org.springframework.test.web.servlet.result.MockMvcResultHandlers;
-import org.springframework.test.web.servlet.setup.MockMvcBuilders;
-
-import java.io.IOException;
-import java.net.URL;
-
-import static org.assertj.core.api.Java6Assertions.assertThat;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-
-class SimulatorControllerTest {
-
- private static final String START_ENDPOINT = "/simulator/start";
- private static final String CONFIG_ENDPOINT = "/simulator/config";
- private static final String EVENT_ENDPOINT = "/simulator/event";
- private static final String JSON_MSG_EXPRESSION = "$.message";
-
- private static final String NEW_URL = "http://0.0.0.0:8090/eventListener/v7";
- private static final String UPDATE_SIM_CONFIG_VALID_JSON = "{\"vesServerUrl\": \"" + NEW_URL + "\"}";
- private static final String SAMPLE_ID = "sampleId";
- private static final Gson GSON_OBJ = new Gson();
- private static String simulatorRequestBody;
- private MockMvc mockMvc;
- @InjectMocks
- private SimulatorController controller;
- @Mock
- private SimulatorService simulatorService;
-
- @BeforeAll
- static void beforeAll() {
- SimulatorParams simulatorParams = new SimulatorParams("http://0.0.0.0:8080", 1, 1);
- SimulatorRequest simulatorRequest = new SimulatorRequest(simulatorParams,
- "testTemplate.json", new JsonObject());
-
- simulatorRequestBody = GSON_OBJ.toJson(simulatorRequest);
- }
-
- @BeforeEach
- void setup() throws IOException, SchedulerException {
- MockitoAnnotations.initMocks(this);
- when(simulatorService.triggerEvent(any())).thenReturn("jobName");
- mockMvc = MockMvcBuilders
- .standaloneSetup(controller)
- .build();
- }
-
- @Test
- void shouldStartSimulatorProperly() throws Exception {
- startSimulator();
- SimulatorRequest simulatorRequest = new Gson().fromJson(simulatorRequestBody, SimulatorRequest.class);
-
- verify(simulatorService).triggerEvent(eq(simulatorRequest));
- }
-
- @Test
- void testShouldGetConfigurationWhenRequested() throws Exception {
- String newUrl = "http://localhost:8090/eventListener/v7";
- SimulatorConfig expectedConfig = new SimulatorConfig(SAMPLE_ID, new URL(newUrl));
- when(simulatorService.getConfiguration()).thenReturn(expectedConfig);
-
- MvcResult getResult = mockMvc
- .perform(get(CONFIG_ENDPOINT)
- .contentType(MediaType.APPLICATION_JSON)
- .content(UPDATE_SIM_CONFIG_VALID_JSON))
- .andExpect(status().isOk())
- .andReturn();
-
- String expectedVesUrlJsonPart = createStringReprOfJson("vesServerUrl", newUrl);
- assertThat(getResult.getResponse().getContentAsString()).contains(expectedVesUrlJsonPart);
- }
-
- @Test
- void testShouldSuccessfullyUpdateConfigurationWithNewVesUrl() throws Exception {
- String oldUrl = "http://localhost:8090/eventListener/v7";
- SimulatorConfig expectedConfigBeforeUpdate = new SimulatorConfig(SAMPLE_ID, new URL(oldUrl));
- SimulatorConfig expectedConfigAfterUpdate = new SimulatorConfig(SAMPLE_ID, new URL(NEW_URL));
-
- when(simulatorService.getConfiguration()).thenReturn(expectedConfigBeforeUpdate);
- when(simulatorService.updateConfiguration(any(SimulatorConfig.class))).thenReturn(expectedConfigAfterUpdate);
-
- MvcResult postResult = mockMvc
- .perform(put(CONFIG_ENDPOINT)
- .contentType(MediaType.APPLICATION_JSON)
- .content(UPDATE_SIM_CONFIG_VALID_JSON))
- .andExpect(status().isOk())
- .andReturn();
-
- String expectedVesUrlJsonPart = createStringReprOfJson("vesServerUrl", expectedConfigAfterUpdate.getVesServerUrl().toString());
- assertThat(postResult.getResponse().getContentAsString()).contains(expectedVesUrlJsonPart);
- }
-
- @Test
- void testShouldRaiseExceptionWhenUpdateConfigWithIncorrectPayloadWasSent() throws Exception {
- mockMvc
- .perform(put(CONFIG_ENDPOINT)
- .contentType(MediaType.APPLICATION_JSON)
- .content("{\"vesUrl\": \"" + NEW_URL + "\"}"))
- .andExpect(status().isBadRequest());
- }
-
- @Test
- void testShouldRaiseExceptionWhenUrlInInvalidFormatIsSent() throws Exception {
- mockMvc
- .perform(put(CONFIG_ENDPOINT)
- .contentType(MediaType.APPLICATION_JSON)
- .content("{\"vesUrl\": \"http://0.0.0.0:VES-PORT/eventListener/v7\"}"))
- .andExpect(status().isBadRequest());
- }
-
- @Test
- void testShouldSendEventDirectly() throws Exception {
- String contentAsString = mockMvc
- .perform(post(EVENT_ENDPOINT)
- .contentType(MediaType.APPLICATION_JSON_UTF8_VALUE)
- .content("{\"vesServerUrl\":\"http://0.0.0.0:8080/simulator/v7\",\n" +
- " \"event\":{ \n" +
- " \"commonEventHeader\":{ \n" +
- " \"domain\":\"notification\",\n" +
- " \"eventName\":\"vFirewallBroadcastPackets\"\n" +
- " },\n" +
- " \"notificationFields\":{ \n" +
- " \"arrayOfNamedHashMap\":[ \n" +
- " { \n" +
- " \"name\":\"A20161221.1031-1041.bin.gz\",\n" +
- " \"hashMap\":{ \n" +
- " \"fileformatType\":\"org.3GPP.32.435#measCollec\"}}]}}}"))
- .andExpect(status().isAccepted()).andReturn().getResponse().getContentAsString();
- assertThat(contentAsString).contains("One-time direct event sent successfully");
- }
-
- @Test
- void testShouldReplaceKeywordsAndSendEventDirectly() throws Exception {
- String contentAsString = mockMvc
- .perform(post(EVENT_ENDPOINT)
- .contentType(MediaType.APPLICATION_JSON_UTF8_VALUE)
- .content("{\"vesServerUrl\": \"http://localhost:9999/eventListener\",\n" +
- " \"event\": {\n" +
- " \"commonEventHeader\": {\n" +
- " \"eventId\": \"#RandomString(20)\",\n" +
- " \"sourceName\": \"PATCHED_sourceName\",\n" +
- " \"version\": 3.0\n}}}"))
- .andExpect(status().isAccepted()).andReturn().getResponse().getContentAsString();
- assertThat(contentAsString).contains("One-time direct event sent successfully");
-
- verify(simulatorService, Mockito.times(1)).triggerOneTimeEvent(any(FullEvent.class));
- }
-
-
- private void startSimulator() throws Exception {
- mockMvc
- .perform(post(START_ENDPOINT)
- .content(simulatorRequestBody)
- .contentType(MediaType.APPLICATION_JSON).characterEncoding("utf-8"))
- .andExpect(status().isOk())
- .andExpect(jsonPath(JSON_MSG_EXPRESSION).value("Request started"));
-
- }
-
- private String createStringReprOfJson(String key, String value) {
- return GSON_OBJ.toJson(ImmutableMap.of(key, value));
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/TemplateControllerTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/TemplateControllerTest.java
deleted file mode 100644
index f34d73cd9..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/TemplateControllerTest.java
+++ /dev/null
@@ -1,256 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest;
-
-import static org.assertj.core.api.Java6Assertions.assertThat;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.onap.pnfsimulator.rest.TemplateController.CANNOT_OVERRIDE_TEMPLATE_MSG;
-import static org.onap.pnfsimulator.rest.TemplateController.TEMPLATE_NOT_FOUND_MSG;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.google.gson.JsonObject;
-import com.google.gson.reflect.TypeToken;
-import java.lang.reflect.Type;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Optional;
-
-import org.assertj.core.util.Lists;
-import org.bson.Document;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import static org.mockito.Mockito.times;
-import org.mockito.MockitoAnnotations;
-import org.onap.pnfsimulator.db.Storage;
-import org.onap.pnfsimulator.rest.model.SearchExp;
-import org.onap.pnfsimulator.template.Template;
-import org.onap.pnfsimulator.template.search.IllegalJsonValueException;
-import org.springframework.http.MediaType;
-import org.springframework.test.web.servlet.MockMvc;
-import org.springframework.test.web.servlet.MvcResult;
-import org.springframework.test.web.servlet.setup.MockMvcBuilders;
-
-class TemplateControllerTest {
-
- private static final String LIST_URL = "/template/list";
- private static final String GET_FORMAT_STR = "/template/get/%s";
- private static final String SEARCH_ENDPOINT = "/template/search";
- private static final String UPLOAD_URL_NOFORCE = "/template/upload";
- private static final String UPLOAD_URL_FORCE = "/template/upload?override=true";
- private static final String SAMPLE_TEMPLATE_JSON = "{\"event\": {\n"
- + " \"commonEventHeader\": {\n"
- + " \"domain\": \"measurementsForVfScaling\",\n"
- + " \"eventName\": \"vFirewallBroadcastPackets\",\n"
- + " }"
- + "}}";
-
- public static final String TEMPLATE_REQUEST = "{\n"
- + " \"name\": \"someTemplate\",\n"
- + " \"template\": {\n"
- + " \"commonEventHeader\": {\n"
- + " \"domain\": \"notification\",\n"
- + " \"eventName\": \"vFirewallBroadcastPackets\"\n"
- + " },\n"
- + " \"notificationFields\": {\n"
- + " \"arrayOfNamedHashMap\": [{\n"
- + " \"name\": \"A20161221.1031-1041.bin.gz\",\n"
- + "\n"
- + " \"hashMap\": {\n"
- + " \"fileformatType\": \"org.3GPP.32.435#measCollec\"\n"
- + " }\n"
- + " }]\n"
- + " }\n"
- + " }\n"
- + "}";
- private static final Document SAMPLE_TEMPLATE_BSON = Document.parse(SAMPLE_TEMPLATE_JSON);
- private static final List<String> SAMPLE_TEMPLATE_NAME_LIST = Lists.newArrayList("notification.json", "registration.json");
- private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
- private static final Gson GSON_OBJ = new GsonBuilder().create();
- private MockMvc mockMvc;
-
- @Mock
- private Storage<Template> templateService;
- @InjectMocks
- private TemplateController controller;
-
- @BeforeEach
- void setup() {
- MockitoAnnotations.initMocks(this);
- mockMvc = MockMvcBuilders
- .standaloneSetup(controller)
- .build();
- }
-
- @Test
- void shouldGetAllTemplates() throws Exception {
- List<Template> templateList = createTemplatesList();
- when(templateService.getAll()).thenReturn(templateList);
-
- MvcResult getResult = mockMvc
- .perform(get(LIST_URL)
- .accept(MediaType.APPLICATION_JSON))
- .andExpect(status().isOk())
- .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
- .andReturn();
-
- Type listType = new TypeToken<ArrayList<Template>>() {}.getType();
- List<Template> resultList = GSON_OBJ.fromJson(getResult.getResponse().getContentAsString(), listType);
- assertThat(resultList).containsExactlyInAnyOrderElementsOf(templateList);
- }
-
- @Test
- void shouldListEmptyCollectionWhenNoTemplatesAvailable() throws Exception {
- List<Template> templateList = Collections.emptyList();
- when(templateService.getAll()).thenReturn(templateList);
-
- MvcResult getResult = mockMvc
- .perform(get(LIST_URL))
- .andExpect(status().isOk())
- .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
- .andReturn();
-
- String templatesAsString = GSON_OBJ.toJson(templateList);
- assertThat(getResult.getResponse().getContentAsString()).containsSequence(templatesAsString);
- }
-
- @Test
- void shouldSuccessfullyGetExisitngTemplateByName() throws Exception {
- String sampleTemplateName = "someTemplate";
- String requestUrl = String.format(GET_FORMAT_STR, sampleTemplateName);
- Template sampleTemplate = new Template(sampleTemplateName, SAMPLE_TEMPLATE_BSON, 0L);
-
- when(templateService.get(sampleTemplateName)).thenReturn(Optional.of(sampleTemplate));
-
- MvcResult getResult = mockMvc
- .perform(get(requestUrl))
- .andExpect(status().isOk())
- .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
- .andReturn();
-
- Template result = new Gson().fromJson(getResult.getResponse().getContentAsString(), Template.class);
- assertThat(result).isEqualTo(sampleTemplate);
- }
-
- @Test
- void shouldReturnNotFoundWhenGetNonExisitngTemplateByName() throws Exception {
- String sampleTemplateName = "doesNotExist";
- String requestUrl = String.format(GET_FORMAT_STR, sampleTemplateName);
-
- when(templateService.get(sampleTemplateName)).thenReturn(Optional.empty());
-
- MvcResult getResult = mockMvc
- .perform(get(requestUrl))
- .andExpect(status().isNotFound())
- .andExpect(content().contentType(MediaType.TEXT_PLAIN_VALUE))
- .andReturn();
-
- assertThat(getResult.getResponse().getContentLength()).isEqualTo(TEMPLATE_NOT_FOUND_MSG.length());
- }
-
-
- @Test
- void shouldReturnNamesOfTemplatesThatSatisfyGivenCriteria() throws Exception {
- when(templateService.getIdsByContentCriteria(any(JsonObject.class))).thenReturn(SAMPLE_TEMPLATE_NAME_LIST);
- SearchExp expr = new SearchExp(new JsonObject());
-
- String responseContent = mockMvc
- .perform(post(SEARCH_ENDPOINT).content(GSON_OBJ.toJson(expr)).contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
- .andExpect(status().isOk())
- .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
- .andReturn().getResponse().getContentAsString();
-
- List<String> actualTemplates = OBJECT_MAPPER.readValue(responseContent, new TypeReference<List<String>>() {});
- verify(templateService, times(1)).getIdsByContentCriteria(any(JsonObject.class));
- assertThat(actualTemplates).isEqualTo(SAMPLE_TEMPLATE_NAME_LIST);
- }
-
- @Test
- void shouldRaiseBadRequestWhenNullValueProvidedInSearchJsonAsJsonValue() throws Exception {
- when(templateService.getIdsByContentCriteria(any(JsonObject.class))).thenThrow(IllegalJsonValueException.class);
- SearchExp expr = new SearchExp(new JsonObject());
-
- mockMvc.perform(post(SEARCH_ENDPOINT)
- .content(GSON_OBJ.toJson(expr))
- .contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
- .andExpect(status().isBadRequest());
- }
-
-
- @Test
- void testTryUploadNewTemplate() throws Exception {
- when(templateService.tryPersistOrOverwrite(any(Template.class), eq(false))).thenReturn(true);
-
- MvcResult postResult = mockMvc
- .perform(post(UPLOAD_URL_NOFORCE)
- .contentType(MediaType.APPLICATION_JSON_UTF8_VALUE)
- .content(TEMPLATE_REQUEST))
- .andExpect(status().isCreated())
- .andReturn();
- }
-
- @Test
- void testTryUploadNewTemplateWithForce() throws Exception {
- when(templateService.tryPersistOrOverwrite(any(Template.class), eq(true))).thenReturn(true);
-
- MvcResult postResult = mockMvc
- .perform(post(UPLOAD_URL_FORCE)
- .contentType(MediaType.APPLICATION_JSON_UTF8_VALUE)
- .content(TEMPLATE_REQUEST))
- .andExpect(status().isCreated())
- .andReturn();
- }
-
- @Test
- void testOverrideExistingTemplateWithoutForceShouldFail() throws Exception {
- when(templateService.tryPersistOrOverwrite(any(Template.class), eq(true))).thenReturn(false);
-
- MvcResult postResult = mockMvc
- .perform(post(UPLOAD_URL_FORCE)
- .contentType(MediaType.APPLICATION_JSON_UTF8_VALUE)
- .content(TEMPLATE_REQUEST))
- .andExpect(status().isConflict())
- .andReturn();
-
- assertThat(postResult.getResponse().getContentAsString()).isEqualTo(CANNOT_OVERRIDE_TEMPLATE_MSG);
- }
-
- private List<Template> createTemplatesList() {
- return Arrays.asList(
- new Template("1", SAMPLE_TEMPLATE_BSON, 0L),
- new Template("2", SAMPLE_TEMPLATE_BSON, 0L),
- new Template("3", SAMPLE_TEMPLATE_BSON, 0L));
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/util/DateUtilTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/util/DateUtilTest.java
deleted file mode 100644
index 1591a59be..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/util/DateUtilTest.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest.util;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import org.junit.jupiter.api.Test;
-
-class DateUtilTest {
-
- @Test
- void getFormattedDate() {
- Calendar currentCalendar = Calendar.getInstance();
- String expectedResult = String.valueOf(currentCalendar.get(Calendar.YEAR));
-
- assertEquals(expectedResult, DateUtil.getTimestamp(new SimpleDateFormat("yyyy")));
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/util/ResponseBuilderTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/util/ResponseBuilderTest.java
deleted file mode 100644
index 0d62ee925..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/util/ResponseBuilderTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest.util;
-
-import static org.junit.jupiter.api.Assertions.assertAll;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNull;
-
-import java.util.Map;
-import org.junit.jupiter.api.Test;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
-
-class ResponseBuilderTest {
-
-
- private static final HttpStatus SAMPLE_STATUS = HttpStatus.OK;
-
- @Test
- void response_should_have_empty_body_when_built_immediately() {
- ResponseEntity responseEntity = ResponseBuilder.status(SAMPLE_STATUS).build();
-
- assertAll(
- () -> assertEquals(responseEntity.getStatusCode(), SAMPLE_STATUS),
- () -> assertNull(responseEntity.getBody())
- );
- }
-
- @Test
- void builder_should_set_response_status_and_body() {
- String key = "key";
- String value = "value";
- ResponseEntity response = ResponseBuilder
- .status(SAMPLE_STATUS)
- .put(key, value)
- .build();
-
- Map<String, Object> body = (Map<String, Object>) response.getBody();
-
- assertAll(
- () -> assertEquals(SAMPLE_STATUS, response.getStatusCode()),
- () -> assertEquals(value, body.get(key))
- );
- }
-
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/IncrementProviderImplTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/IncrementProviderImplTest.java
deleted file mode 100644
index 53f02da0e..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/IncrementProviderImplTest.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import java.util.Optional;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Mock;
-import org.onap.pnfsimulator.event.EventData;
-import org.onap.pnfsimulator.event.EventDataRepository;
-
-public class IncrementProviderImplTest {
- private IncrementProvider incrementProvider;
-
- @Mock
- private EventDataRepository eventDataRepositoryMock;
-
- @BeforeEach
- void setUp() {
- eventDataRepositoryMock = mock(EventDataRepository.class);
- incrementProvider = new IncrementProviderImpl(eventDataRepositoryMock);
- }
-
- @Test
- public void getAndIncrementTest() {
- ArgumentCaptor<EventData> eventDataArgumentCaptor = ArgumentCaptor.forClass(EventData.class);
- String eventId = "1";
- int initialIncrementValue = 0;
- int expectedValue = initialIncrementValue + 1;
- EventData eventData = EventData.builder().id(eventId).incrementValue(initialIncrementValue).build();
- Optional<EventData> optional = Optional.of(eventData);
-
- when(eventDataRepositoryMock.findById(eventId)).thenReturn(optional);
-
- int value = incrementProvider.getAndIncrement(eventId);
-
- verify(eventDataRepositoryMock).save(eventDataArgumentCaptor.capture());
-
- assertThat(value).isEqualTo(expectedValue);
- assertThat(eventDataArgumentCaptor.getValue().getIncrementValue()).isEqualTo(expectedValue);
-
- }
-
- @Test
- public void shouldThrowOnNonExistingEvent() {
- Optional<EventData> emptyOptional = Optional.empty();
- String nonExistingEventId = "THIS_DOES_NOT_EXIST";
- when(eventDataRepositoryMock.findById(nonExistingEventId)).thenReturn(emptyOptional);
-
- assertThrows(EventNotFoundException.class,
- () -> incrementProvider.getAndIncrement(nonExistingEventId));
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorInvalidRandomIntegerTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorInvalidRandomIntegerTest.java
deleted file mode 100644
index 8198e95a9..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorInvalidRandomIntegerTest.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import java.util.Arrays;
-import java.util.Collection;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-@RunWith(Parameterized.class)
-public class KeywordsExtractorInvalidRandomIntegerTest {
-
- private final String keyword;
- private KeywordsExtractor keywordsExtractor;
-
- private static final Collection INVALID_INTEGER_KEYWORDS = Arrays.asList(new Object[][]{
- {"#RandoInteger"},
- {"#Randominteger(23,11)"},
- {"#randomInteger(11,34)"},
- {"#Random_Integer(11,13)"},
- {"#RandomInteger(11)"},
- {"RandomInteger(11)"},
- {"RandomInteger"}
- });
-
- public KeywordsExtractorInvalidRandomIntegerTest(String keyword) {
- this.keyword = keyword;
- }
-
- @Before
- public void setUp() {
- this.keywordsExtractor = new KeywordsExtractor();
- }
-
- @Parameterized.Parameters
- public static Collection data() {
- return INVALID_INTEGER_KEYWORDS;
- }
-
- @Test
- public void checkValidRandomStringKeyword() {
- assertEquals(keywordsExtractor.substituteStringKeyword(this.keyword, 1), this.keyword);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorInvalidRandomStringTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorInvalidRandomStringTest.java
deleted file mode 100644
index 6834c0dc6..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorInvalidRandomStringTest.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import java.util.Arrays;
-import java.util.Collection;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-@RunWith(Parameterized.class)
-public class KeywordsExtractorInvalidRandomStringTest {
-
- private final String keyword;
- private KeywordsExtractor keywordsExtractor;
-
- private static final Collection INVALID_STRING_KEYWORDS = Arrays.asList(new Object[][]{
- {"#RandoString"},
- {"#Randomstring(23)"},
- {"#randomString(11)"},
- {"#Random_String(11)"},
- {"#RandomString(11,10)"},
- {"RandomString(11)"},
- {"RandomString"}
- });
-
- public KeywordsExtractorInvalidRandomStringTest(String keyword) {
- this.keyword = keyword;
- }
-
- @Before
- public void setUp() {
- this.keywordsExtractor = new KeywordsExtractor();
- }
-
- @Parameterized.Parameters
- public static Collection data() {
- return INVALID_STRING_KEYWORDS;
- }
-
- @Test
- public void checkValidRandomStringKeyword() {
- assertEquals(keywordsExtractor.substituteStringKeyword(this.keyword, 1).length(), this.keyword.length());
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorInvalidTimestampTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorInvalidTimestampTest.java
deleted file mode 100644
index eda40707b..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorInvalidTimestampTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import java.util.Arrays;
-import java.util.Collection;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-@RunWith(Parameterized.class)
-public class KeywordsExtractorInvalidTimestampTest {
-
- private final String keyword;
- private KeywordsExtractor keywordsExtractor;
-
- private static final Collection INVALID_TIMESTAMP_KEYWORDS = Arrays.asList(new Object[][]{
- {"#Timesamp"},
- {"#Timestamp(10)"},
- {"#timestamp"},
- {"#Timestamp(11,13)"},
- {"Timestamp"}
- });
-
- public KeywordsExtractorInvalidTimestampTest(String keyword) {
- this.keyword = keyword;
- }
-
- @Before
- public void setUp() {
- this.keywordsExtractor = new KeywordsExtractor();
- }
-
- @Parameterized.Parameters
- public static Collection data() {
- return INVALID_TIMESTAMP_KEYWORDS;
- }
-
- @Test
- public void checkValidRandomStringKeyword() {
- assertEquals(keywordsExtractor.substituteStringKeyword(this.keyword, 1), this.keyword);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidRandomIntegerTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidRandomIntegerTest.java
deleted file mode 100644
index be79488b5..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidRandomIntegerTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import java.util.Arrays;
-import java.util.Collection;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-@RunWith(Parameterized.class)
-public class KeywordsExtractorValidRandomIntegerTest {
-
- private final String keyword;
- private final String shouldParseTo;
- private KeywordsExtractor keywordsExtractor;
-
- private static final Collection VALID_INTEGER_KEYWORDS = Arrays.asList(new Object[][]{
- {"#RandomInteger(23,23)", "23"},
- {"#RandomInteger(6, 6)12", "612"},
- {"1#RandomInteger(11,11)", "111"},
- {"1#RandomInteger(11,11)2", "1112"}
- });
-
- public KeywordsExtractorValidRandomIntegerTest(String keyword, String shouldParseTo) {
- this.keyword = keyword;
- this.shouldParseTo = shouldParseTo;
- }
-
- @Before
- public void setUp() {
- this.keywordsExtractor = new KeywordsExtractor();
- }
-
- @Parameterized.Parameters
- public static Collection data() {
- return VALID_INTEGER_KEYWORDS;
- }
-
- @Test
- public void checkValidRandomStringKeyword() {
- assertEquals(keywordsExtractor.substituteStringKeyword(this.keyword, 1), this.shouldParseTo);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidRandomPrimitiveIntegerTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidRandomPrimitiveIntegerTest.java
deleted file mode 100644
index fd72a5145..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidRandomPrimitiveIntegerTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import java.util.Arrays;
-import java.util.Collection;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-@RunWith(Parameterized.class)
-public class KeywordsExtractorValidRandomPrimitiveIntegerTest {
-
- private final String keyword;
- private final Integer shouldParseTo;
- private KeywordsExtractor keywordsExtractor;
-
- private static final Collection VALID_INTEGER_KEYWORDS = Arrays.asList(new Object[][]{
- {"#RandomPrimitiveInteger(23,23)", 23},
- {"#RandomPrimitiveInteger(6, 6)12", 6},
- {"1#RandomPrimitiveInteger(11,11)", 11},
- {"1#RandomPrimitiveInteger(11,11)2", 11}
- });
-
- public KeywordsExtractorValidRandomPrimitiveIntegerTest(String keyword, Integer shouldParseTo) {
- this.keyword = keyword;
- this.shouldParseTo = shouldParseTo;
- }
-
- @Before
- public void setUp() {
- this.keywordsExtractor = new KeywordsExtractor();
- }
-
- @Parameterized.Parameters
- public static Collection data() {
- return VALID_INTEGER_KEYWORDS;
- }
-
- @Test
- public void checkValidRandomStringKeyword() {
- assertEquals(keywordsExtractor.substitutePrimitiveKeyword(this.keyword), this.shouldParseTo);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidRandomStringTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidRandomStringTest.java
deleted file mode 100644
index f0fdc0ff3..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidRandomStringTest.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.onap.pnfsimulator.simulator.KeywordsValueProvider.DEFAULT_STRING_LENGTH;
-
-import java.util.Arrays;
-import java.util.Collection;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-@RunWith(Parameterized.class)
-public class KeywordsExtractorValidRandomStringTest {
-
- private final String keyword;
- private final int length;
- private KeywordsExtractor keywordsExtractor;
-
- private static final Collection VALID_STRING_KEYWORDS = Arrays.asList(new Object[][]{
- {"#RandomString", DEFAULT_STRING_LENGTH},
- {"1#RandomString2", 1 + DEFAULT_STRING_LENGTH + 1},
- {"#RandomString(23)", 23},
- {"#RandomString(11)12", 11 + 2},
- {"1#RandomString(11)", 1 + 11},
- {"1#RandomString(11)2", 1 + 11 + 1}
- });
-
- public KeywordsExtractorValidRandomStringTest(String keyword, int length) {
- this.keyword = keyword;
- this.length = length;
- }
-
- @Before
- public void setUp() {
- this.keywordsExtractor = new KeywordsExtractor();
- }
-
- @Parameterized.Parameters
- public static Collection data() {
- return VALID_STRING_KEYWORDS;
- }
-
- @Test
- public void checkValidRandomStringKeyword() {
- assertEquals(keywordsExtractor.substituteStringKeyword(this.keyword, 1).length(), this.length);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidTimestampPrimitiveTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidTimestampPrimitiveTest.java
deleted file mode 100644
index 7743e5558..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidTimestampPrimitiveTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-import java.time.Instant;
-import java.util.Arrays;
-import java.util.Collection;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-@RunWith(Parameterized.class)
-public class KeywordsExtractorValidTimestampPrimitiveTest {
- private final String keyword;
- private KeywordsExtractor keywordsExtractor;
-
- private static final Collection VALID_TIMESTAMP_KEYWORDS = Arrays.asList(new Object[][]{
- {"#TimestampPrimitive"}
- });
-
- public KeywordsExtractorValidTimestampPrimitiveTest(String keyword) {
- this.keyword = keyword;
- }
-
- @Before
- public void setUp() {
- this.keywordsExtractor = new KeywordsExtractor();
- }
-
- @Parameterized.Parameters
- public static Collection data() {
- return VALID_TIMESTAMP_KEYWORDS;
- }
-
- @Test
- public void checkValidRandomStringKeyword() {
- long currentTimestamp = Instant.now().getEpochSecond();
- Long timestamp = keywordsExtractor.substitutePrimitiveKeyword(this.keyword);
- long afterExecution = Instant.now().getEpochSecond();
-
- assertThat(timestamp).isBetween(currentTimestamp, afterExecution);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidTimestampTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidTimestampTest.java
deleted file mode 100644
index f5c12c311..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsExtractorValidTimestampTest.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import java.util.Arrays;
-import java.util.Collection;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-@RunWith(Parameterized.class)
-public class KeywordsExtractorValidTimestampTest {
-
- private final String keyword;
- private final int length;
- private KeywordsExtractor keywordsExtractor;
-
- private static final Collection VALID_TIMESTAMP_KEYWORDS = Arrays.asList(new Object[][]{
- {"#Timestamp", 10},
- {"#Timestamp12", 10 + 2},
- {"1#Timestamp", 1 + 10},
- {"1#Timestamp2", 1 + 10 +1}
- });
-
- public KeywordsExtractorValidTimestampTest(String keyword, Integer length) {
- this.keyword = keyword;
- this.length = length;
- }
-
- @Before
- public void setUp() {
- this.keywordsExtractor = new KeywordsExtractor();
- }
-
- @Parameterized.Parameters
- public static Collection data() {
- return VALID_TIMESTAMP_KEYWORDS;
- }
-
- @Test
- public void checkValidRandomStringKeyword() {
- String substitution = keywordsExtractor.substituteStringKeyword(this.keyword, 1);
- assertEquals(substitution.length(), this.length);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsHandlerTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsHandlerTest.java
deleted file mode 100644
index e67d4a33b..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsHandlerTest.java
+++ /dev/null
@@ -1,304 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
-import static org.onap.pnfsimulator.simulator.KeywordsValueProvider.DEFAULT_STRING_LENGTH;
-
-import com.google.gson.Gson;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import java.util.Arrays;
-import java.util.LinkedList;
-import java.util.Queue;
-import org.junit.jupiter.api.Test;
-
-class KeywordsHandlerTest {
-
- private static final String TEMPLATE_JSON = "{\n" +
- " \"event\": {\n" +
- " \"commonEventHeader\": {\n" +
- " \"domain\": \"#RandomString\"\n" +
- " },\n" +
- " \"measurementsForVfScalingFields\": {\n" +
- " \"measurementsForVfSclaingFieldsVersion\": 2.0,\n" +
- " \"additionalMeasurements\": {\n" +
- " \"name\": \"licenseUsage\",\n" +
- " \"extraFields\": {\n" +
- " \"name\": \"#RandomString(4)\",\n" +
- " \"value\": \"1\"\n" +
- " }\n" +
- " }\n" +
- " }\n" +
- " }\n" +
- "}";
-
- private static final String TEMPLATE_JSON_WITH_MANY_KEYWORDS_INSIDE_SINGLE_VALUE = "{\n" +
- " \"event\": {\n" +
- " \"commonEventHeader\": {\n" +
- " \"domain1\": \"#RandomString(1) #RandomString(2) #RandomString(3)\",\n" +
- " \"domain2\": \"1 #RandomString(1) 2\"\n" +
- " },\n" +
- " \"measurementsForVfScalingFields\": {\n" +
- " \"measurementsForVfSclaingFieldsVersion\": 2.0,\n" +
- " \"additionalMeasurements\": {\n" +
- " \"name\": \"licenseUsage\",\n" +
- " \"extraFields\": {\n" +
- " \"value\": \"1\"\n" +
- " }\n" +
- " }\n" +
- " }\n" +
- " }\n" +
- "}";
-
- private static final String TEMPLATE_JSON_WITH_ARRAY = "{\n"
- + " \"event\": {\n"
- + " \"commonEventHeader\": {\n"
- + " \"domain\": \"#RandomString(1)\",\n"
- + " \"version\": 2.0\n"
- + " },\n"
- + " \"measurementsForVfScalingFields\": {\n"
- + " \"additionalMeasurements\": [\n"
- + " {\n"
- + " \"name\": \"licenseUsage\",\n"
- + " \"arrayOfFields\": [\n"
- + " {\n"
- + " \"name\": \"G711AudioPort\",\n"
- + " \"value\": \"1\"\n"
- + " },\n"
- + " {\n"
- + " \"name\": [\"1\",\"2\"],\n"
- + " \"value\": \"#RandomString(2)\"\n"
- + " },\n"
- + " {\n"
- + " \"name\": \"G722AudioPort\",\n"
- + " \"value\": \"1\"\n"
- + " }\n"
- + " ]\n"
- + " }\n"
- + " ]\n"
- + " }\n"
- + " }\n"
- + "}";
-
- private static final String TEMPLATE_ONE_INCREMENT_JSON = "{\n" +
- " \"event\": {\n" +
- " \"commonEventHeader\": {\n" +
- " \"domain\": \"#RandomString\"\n" +
- " },\n" +
- " \"measurementsForVfScalingFields\": {\n" +
- " \"measurementsForVfSclaingFieldsVersion\": 2.0,\n" +
- " \"additionalMeasurements\": {\n" +
- " \"name\": \"licenseUsage\",\n" +
- " \"extraFields\": {\n" +
- " \"name\": \"#RandomString(4)\",\n" +
- " \"value\": \"#Increment\"\n" +
- " }\n" +
- " }\n" +
- " }\n" +
- " }\n" +
- "}";
-
- private static final String TEMPLATE_WITH_SIMPLE_VALUE= "\"#RandomString(4)\"";
-
- private static final String TEMPLATE_WITH_ARRAY_OF_PRIMITIVES = "[ 1, \"#RandomString(5)\", 3]";
-
- private static final String TEMPLATE_TWO_INCREMENT_JSON = "{\n" +
- " \"event\": {\n" +
- " \"commonEventHeader\": {\n" +
- " \"domain\": \"#RandomString\"\n" +
- " },\n" +
- " \"measurementsForVfScalingFields\": {\n" +
- " \"measurementsForVfSclaingFieldsVersion\": 2.0,\n" +
- " \"additionalMeasurements\": {\n" +
- " \"name\": \"licenseUsage\",\n" +
- " \"extraFields\": {\n" +
- " \"name\": \"#RandomString(4)\",\n" +
- " \"value\": \"#Increment\",\n" +
- " \"otherValue\": \"#Increment\"\n" +
- " }\n" +
- " }\n" +
- " }\n" +
- " }\n" +
- "}";
-
- private Gson gson = new Gson();
-
- @Test
- void shouldReplaceRandomStringKeyword() {
- // given
- JsonObject templateJson = gson.fromJson(TEMPLATE_JSON, JsonObject.class);
- KeywordsHandler keywordsHandler = new KeywordsHandler(new KeywordsExtractor(), (id) -> 1);
-
- // when
- JsonObject resultJson = keywordsHandler.substituteKeywords(templateJson, "").getAsJsonObject();
-
- // then
- String extraFields = resultJson
- .get("event").getAsJsonObject()
- .get("measurementsForVfScalingFields").getAsJsonObject()
- .get("additionalMeasurements").getAsJsonObject()
- .get("extraFields").getAsJsonObject()
- .get("name").getAsString();
- String newDomain = resultJson
- .get("event").getAsJsonObject()
- .get("commonEventHeader").getAsJsonObject()
- .get("domain").getAsString();
-
- assertThat(extraFields.length()).isEqualTo(4);
- assertThat(newDomain.length()).isEqualTo(DEFAULT_STRING_LENGTH);
- }
-
- @Test
- void shouldReplaceRandomStringKeywordsInsideSingleValue() {
- // given
- JsonObject templateJson = gson.fromJson(TEMPLATE_JSON_WITH_MANY_KEYWORDS_INSIDE_SINGLE_VALUE, JsonObject.class);
- KeywordsHandler keywordsHandler = new KeywordsHandler(new KeywordsExtractor(), (id) -> 1);
-
- // when
- JsonObject resultJson = keywordsHandler.substituteKeywords(templateJson, "").getAsJsonObject();
-
- // then
- String newDomain1 = resultJson
- .get("event").getAsJsonObject()
- .get("commonEventHeader").getAsJsonObject()
- .get("domain1").getAsString();
- String newDomain2 = resultJson
- .get("event").getAsJsonObject()
- .get("commonEventHeader").getAsJsonObject()
- .get("domain2").getAsString();
-
- assertThat(newDomain1.length()).isEqualTo(1+1+2+1+3);
- assertThat(newDomain2.length()).isEqualTo(1+1+1+1+1);
- }
-
- @Test
- void shouldReplaceRandomStringKeywordInTeplateAsArrayWithPrimitves() {
- // given
- JsonElement templateJson = gson.fromJson(TEMPLATE_WITH_ARRAY_OF_PRIMITIVES, JsonElement.class);
- KeywordsHandler keywordsHandler = new KeywordsHandler(new KeywordsExtractor(), (id) -> 1);
-
- // when
- JsonElement resultJson = keywordsHandler.substituteKeywords(templateJson, "");
- assertThat(resultJson.getAsJsonArray().get(1).getAsString().length()).isEqualTo(5);
- }
-
- @Test
- void shouldReplaceRandomStringKeywordInTeplateAsSimpleValue() {
- // given
- JsonElement templateJson = gson.fromJson(TEMPLATE_WITH_SIMPLE_VALUE, JsonElement.class);
- KeywordsHandler keywordsHandler = new KeywordsHandler(new KeywordsExtractor(), (id) -> 1);
-
- // when
- JsonElement resultJson = keywordsHandler.substituteKeywords(templateJson, "");
-
- // then
- assertThat(resultJson.getAsString().length()).isEqualTo(4);
- }
-
- @Test
- void shouldReplaceRandomStringKeywordInTeplateWithJsonArray() {
- // given
- JsonElement templateJson = gson.fromJson(TEMPLATE_JSON_WITH_ARRAY, JsonElement.class);
- KeywordsHandler keywordsHandler = new KeywordsHandler(new KeywordsExtractor(), (id) -> 1);
-
- // when
- JsonObject resultJson = keywordsHandler.substituteKeywords(templateJson, "").getAsJsonObject();
-
- // then
- String actualValue = resultJson
- .get("event").getAsJsonObject()
- .get("measurementsForVfScalingFields").getAsJsonObject()
- .get("additionalMeasurements").getAsJsonArray()
- .get(0).getAsJsonObject()
- .get("arrayOfFields").getAsJsonArray()
- .get(1).getAsJsonObject()
- .get("value").getAsString();
- String otherActualValue = resultJson
- .get("event").getAsJsonObject()
- .get("commonEventHeader").getAsJsonObject()
- .get("domain").getAsString();
-
- assertThat(otherActualValue.length()).isEqualTo(1);
- assertThat(actualValue.length()).isEqualTo(2);
- }
-
- @Test
- void shouldReplaceOneIncrementKeyword() {
- // given
- final Integer newIncrementedValue = 2;
- JsonObject templateJson = gson.fromJson(TEMPLATE_ONE_INCREMENT_JSON, JsonObject.class);
- KeywordsHandler keywordsHandler = new KeywordsHandler(new KeywordsExtractor(), (id) -> newIncrementedValue);
-
- // when
- JsonObject resultJson = keywordsHandler.substituteKeywords(templateJson, "some random id").getAsJsonObject();
-
- // then
- String actualValue = resultJson
- .get("event").getAsJsonObject()
- .get("measurementsForVfScalingFields").getAsJsonObject()
- .get("additionalMeasurements").getAsJsonObject()
- .get("extraFields").getAsJsonObject()
- .get("value").getAsString();
-
- assertThat(actualValue).isEqualTo(newIncrementedValue.toString());
- }
-
- @Test
- void shouldReplaceTwoIncrementKeyword() {
- // given
- final Integer firstIncrementValue = 2;
- final Integer secondIncrementValue = 3;
- JsonObject templateJson = gson.fromJson(TEMPLATE_TWO_INCREMENT_JSON, JsonObject.class);
- KeywordsHandler keywordsHandler = new KeywordsHandler(new KeywordsExtractor(), new IncrementProvider() {
- Queue<Integer> sequenceOfValues = new LinkedList<>(
- Arrays.asList(firstIncrementValue, secondIncrementValue));
-
- @Override
- public int getAndIncrement(String id) {
- return sequenceOfValues.poll();
- }
- });
-
- // when
- JsonObject resultJson = keywordsHandler.substituteKeywords(templateJson, "some random id").getAsJsonObject();
- resultJson = keywordsHandler.substituteKeywords(templateJson, "some random id").getAsJsonObject();
-
- // then
- String actualValue = resultJson
- .get("event").getAsJsonObject()
- .get("measurementsForVfScalingFields").getAsJsonObject()
- .get("additionalMeasurements").getAsJsonObject()
- .get("extraFields").getAsJsonObject()
- .get("value").getAsString();
-
- String actualOtherValue = resultJson
- .get("event").getAsJsonObject()
- .get("measurementsForVfScalingFields").getAsJsonObject()
- .get("additionalMeasurements").getAsJsonObject()
- .get("extraFields").getAsJsonObject()
- .get("otherValue").getAsString();
-
- assertThat(actualValue).isEqualTo(secondIncrementValue.toString());
- assertThat(actualOtherValue).isEqualTo(secondIncrementValue.toString());
-
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsValueProviderTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsValueProviderTest.java
deleted file mode 100644
index 73e4c31df..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/KeywordsValueProviderTest.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.onap.pnfsimulator.simulator.KeywordsValueProvider.DEFAULT_STRING_LENGTH;
-
-import java.util.Random;
-import org.junit.jupiter.api.RepeatedTest;
-import org.junit.jupiter.api.Test;
-
-class KeywordsValueProviderTest {
-
- @RepeatedTest(10)
- void randomLimitedStringTest() {
- String supplierResult = KeywordsValueProvider.getRandomLimitedString().apply();
- assertEquals(supplierResult.length(), DEFAULT_STRING_LENGTH);
- }
-
- @RepeatedTest(10)
- void randomStringTest() {
- int length = new Random().nextInt(15) + 1;
- String supplierResult = KeywordsValueProvider.getRandomString().apply(length);
- assertEquals(supplierResult.length(), length);
- }
-
- @RepeatedTest(10)
- void randomIntegerTest(){
- int min = new Random().nextInt(10) + 1;
- int max = new Random().nextInt(1000) + 20;
- String supplierResult = KeywordsValueProvider.getRandomInteger().apply(min, max);
- assertTrue(Integer.parseInt(supplierResult)>=min);
- assertTrue(Integer.parseInt(supplierResult)<=max);
- }
-
- @Test
- void randomIntegerContainsMaximalAndMinimalValuesTest(){
- int anyNumber = new Random().nextInt(10) + 1;
- String supplierResult = KeywordsValueProvider.getRandomInteger().apply(anyNumber, anyNumber);
- assertEquals(Integer.parseInt(supplierResult), anyNumber);
- }
-
- @Test
- void randomIntegerFromNegativeRangeTest(){
- String supplierResult = KeywordsValueProvider.getRandomInteger().apply(-20, -20);
- assertEquals(Integer.parseInt(supplierResult), -20);
- }
-
- @RepeatedTest(10)
- void randomIntegerFromParametersWithDifferentOrdersTest(){
- String supplierResult = KeywordsValueProvider.getRandomInteger().apply(-20, -10);
- assertTrue(Integer.parseInt(supplierResult)>=-20);
- assertTrue(Integer.parseInt(supplierResult)<=-10);
- }
-
- @RepeatedTest(10)
- void epochSecondGeneratedInCorrectFormatTest(){
- String supplierResult = KeywordsValueProvider.getEpochSecond().apply();
- assertEquals(supplierResult.length(), 10);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/SimulatorServiceTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/SimulatorServiceTest.java
deleted file mode 100644
index 32dd532aa..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/SimulatorServiceTest.java
+++ /dev/null
@@ -1,226 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import com.google.gson.Gson;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonSyntaxException;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.ArgumentCaptor;
-import org.onap.pnfsimulator.event.EventData;
-import org.onap.pnfsimulator.event.EventDataService;
-import org.onap.pnfsimulator.rest.model.FullEvent;
-import org.onap.pnfsimulator.rest.model.SimulatorParams;
-import org.onap.pnfsimulator.rest.model.SimulatorRequest;
-import org.onap.pnfsimulator.simulator.client.HttpClientAdapter;
-import org.onap.pnfsimulator.simulator.scheduler.EventScheduler;
-import org.onap.pnfsimulator.simulatorconfig.SimulatorConfig;
-import org.onap.pnfsimulator.simulatorconfig.SimulatorConfigService;
-import org.quartz.SchedulerException;
-
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URL;
-
-import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.doNothing;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.mockito.internal.verification.VerificationModeFactory.times;
-
-class SimulatorServiceTest {
-
- private static final String VES_URL = "http://0.0.0.0:8080";
- private static final Gson GSON = new Gson();
- private static final JsonObject VALID_PATCH = GSON.fromJson("{\"event\": {\n" +
- " \"commonEventHeader\": {\n" +
- " \"sourceName\": \"SomeCustomSource\"}}}\n", JsonObject.class);
- private static JsonObject VALID_FULL_EVENT = GSON.fromJson("{\"event\": {\n" +
- " \"commonEventHeader\": {\n" +
- " \"domain\": \"notification\",\n" +
- " \"eventName\": \"vFirewallBroadcastPackets\"\n" +
- " },\n" +
- " \"notificationFields\": {\n" +
- " \"arrayOfNamedHashMap\": [{\n" +
- " \"name\": \"A20161221.1031-1041.bin.gz\",\n" +
- " \"hashMap\": {\n" +
- " \"fileformatType\": \"org.3GPP.32.435#measCollec\"}}]}}}", JsonObject.class);
- private static JsonObject FULL_EVENT_WITH_KEYWORDS = GSON.fromJson("{\"event\":{ \n" +
- " \"commonEventHeader\":{ \n" +
- " \"domain\":\"notification\",\n" +
- " \"eventName\":\"#RandomString(20)\",\n" +
- " \"eventOrderNo\":\"#Increment\"}}}", JsonObject.class);
- private static final String SOME_CUSTOM_SOURCE = "SomeCustomSource";
- private static final String CLOSED_LOOP_VNF ="ClosedLoopVNF";
- private static final String SAMPLE_ID = "sampleId";
- private static final EventData SAMPLE_EVENT = EventData.builder().id("1").build();
- private final ArgumentCaptor<JsonObject> bodyCaptor = ArgumentCaptor.forClass(JsonObject.class);
- private final ArgumentCaptor<Integer> intervalCaptor = ArgumentCaptor.forClass(Integer.class);
- private final ArgumentCaptor<Integer> repeatCountCaptor = ArgumentCaptor
- .forClass(Integer.class);
- private final ArgumentCaptor<String> templateNameCaptor = ArgumentCaptor.forClass(String.class);
- private final ArgumentCaptor<String> eventIdCaptor = ArgumentCaptor.forClass(String.class);
- private final ArgumentCaptor<String> vesUrlCaptor = ArgumentCaptor.forClass(String.class);
- private final ArgumentCaptor<String> eventContentCaptor = ArgumentCaptor.forClass(String.class);
- private SimulatorService simulatorService;
- private EventDataService eventDataService;
- private EventScheduler eventScheduler;
- private SimulatorConfigService simulatorConfigService;
- private static TemplatePatcher templatePatcher = new TemplatePatcher();
- private static TemplateReader templateReader = new FilesystemTemplateReader(
- "src/test/resources/org/onap/pnfsimulator/simulator/", GSON);
-
- @BeforeEach
- void setUp() {
- eventDataService = mock(EventDataService.class);
- eventScheduler = mock(EventScheduler.class);
- simulatorConfigService = mock(SimulatorConfigService.class);
-
- simulatorService = new SimulatorService(templatePatcher, templateReader,
- eventScheduler, eventDataService, simulatorConfigService);
- }
-
- @Test
- void shouldTriggerEventWithGivenParams() throws IOException, SchedulerException {
- String templateName = "validExampleMeasurementEvent.json";
- SimulatorParams simulatorParams = new SimulatorParams(VES_URL, 1, 1);
- SimulatorRequest simulatorRequest = new SimulatorRequest(simulatorParams,
- templateName, VALID_PATCH);
-
- doReturn(SAMPLE_EVENT).when(eventDataService).persistEventData(any(JsonObject.class), any(JsonObject.class), any(JsonObject.class), any(JsonObject.class));
-
- simulatorService.triggerEvent(simulatorRequest);
-
- assertEventHasExpectedStructure(VES_URL, templateName, SOME_CUSTOM_SOURCE);
- }
-
- @Test
- void shouldTriggerEventWithDefaultVesUrlWhenNotProvidedInRequest() throws IOException, SchedulerException {
- String templateName = "validExampleMeasurementEvent.json";
- SimulatorRequest simulatorRequest = new SimulatorRequest(
- new SimulatorParams("", 1, 1),
- templateName, VALID_PATCH);
-
- URL inDbVesUrl = new URL("http://0.0.0.0:8080/eventListener/v6");
- doReturn(SAMPLE_EVENT).when(eventDataService).persistEventData(any(JsonObject.class), any(JsonObject.class), any(JsonObject.class), any(JsonObject.class));
- when(simulatorConfigService.getConfiguration()).thenReturn(new SimulatorConfig(SAMPLE_ID, inDbVesUrl));
-
- simulatorService.triggerEvent(simulatorRequest);
-
- assertEventHasExpectedStructure(inDbVesUrl.toString(), templateName, SOME_CUSTOM_SOURCE);
- }
-
- @Test
- void shouldThrowJsonSyntaxWhenInvalidJson() {
- //given
- JsonObject patch = GSON.fromJson("{\n" +
- " \"event\": {\n" +
- " \"commonEventHeader\": {\n" +
- " \"sourceName\": \"" + SOME_CUSTOM_SOURCE + "\"\n" +
- " }\n" +
- " }\n" +
- "}\n", JsonObject.class);
- EventData eventData = EventData.builder().id("1").build();
-
- SimulatorParams simulatorParams = new SimulatorParams(VES_URL, 1, 1);
- SimulatorRequest simulatorRequest = new SimulatorRequest(simulatorParams,
- "invalidJsonStructureEvent.json", patch);
- doReturn(eventData).when(eventDataService).persistEventData(any(JsonObject.class), any(JsonObject.class), any(JsonObject.class), any(JsonObject.class));
-
- //when
- assertThrows(JsonSyntaxException.class,
- () -> simulatorService.triggerEvent(simulatorRequest));
- }
-
- @Test
- void shouldHandleNonExistingPatchSection() throws IOException, SchedulerException {
- String templateName = "validExampleMeasurementEvent.json";
- JsonObject nullPatch = null;
- SimulatorRequest simulatorRequest = new SimulatorRequest(
- new SimulatorParams("", 1, 1),
- templateName, nullPatch);
-
- URL inDbVesUrl = new URL("http://0.0.0.0:8080/eventListener/v6");
- doReturn(SAMPLE_EVENT).when(eventDataService).persistEventData(any(JsonObject.class), any(JsonObject.class), any(JsonObject.class), any(JsonObject.class));
- doReturn(new SimulatorConfig(SAMPLE_ID, inDbVesUrl)).when(simulatorConfigService).getConfiguration();
-
- simulatorService.triggerEvent(simulatorRequest);
-
- assertEventHasExpectedStructure(inDbVesUrl.toString(), templateName, CLOSED_LOOP_VNF);
- }
-
- @Test
- void shouldSuccessfullySendOneTimeEventWithVesUrlWhenPassed() throws MalformedURLException {
- SimulatorService spiedTestedService = spy(new SimulatorService(templatePatcher,templateReader, eventScheduler, eventDataService, simulatorConfigService));
-
- HttpClientAdapter adapterMock = mock(HttpClientAdapter.class);
- doNothing().when(adapterMock).send(eventContentCaptor.capture());
- doReturn(adapterMock).when(spiedTestedService).createHttpClientAdapter(any(String.class));
- FullEvent event = new FullEvent(VES_URL, VALID_FULL_EVENT);
-
- spiedTestedService.triggerOneTimeEvent(event);
-
- assertThat(eventContentCaptor.getValue()).isEqualTo(VALID_FULL_EVENT.toString());
- verify(eventDataService, times(1)).persistEventData(any(JsonObject.class), any(JsonObject.class), any(JsonObject.class), any(JsonObject.class));
- verify(adapterMock, times(1)).send(VALID_FULL_EVENT.toString());
- }
-
- @Test
- void shouldSubstituteKeywordsAndSuccessfullySendOneTimeEvent() throws MalformedURLException {
- SimulatorService spiedTestedService = spy(new SimulatorService(templatePatcher,templateReader, eventScheduler, eventDataService, simulatorConfigService));
-
- HttpClientAdapter adapterMock = mock(HttpClientAdapter.class);
- doNothing().when(adapterMock).send(eventContentCaptor.capture());
- doReturn(adapterMock).when(spiedTestedService).createHttpClientAdapter(any(String.class));
- FullEvent event = new FullEvent(VES_URL, FULL_EVENT_WITH_KEYWORDS);
-
- spiedTestedService.triggerOneTimeEvent(event);
-
- JsonObject sentContent = GSON.fromJson(eventContentCaptor.getValue(), JsonElement.class).getAsJsonObject();
- assertThat(sentContent.getAsJsonObject("event").getAsJsonObject("commonEventHeader").get("eventOrderNo").getAsString()).isEqualTo("1");
- assertThat(sentContent.getAsJsonObject("event").getAsJsonObject("commonEventHeader").get("eventName").getAsString()).hasSize(20);
- }
-
-
- private void assertEventHasExpectedStructure(String expectedVesUrl, String templateName, String sourceNameString) throws SchedulerException, MalformedURLException {
- verify(eventScheduler, times(1)).scheduleEvent(vesUrlCaptor.capture(), intervalCaptor.capture(),
- repeatCountCaptor.capture(), templateNameCaptor.capture(), eventIdCaptor.capture(), bodyCaptor.capture());
- assertThat(vesUrlCaptor.getValue()).isEqualTo(expectedVesUrl);
- assertThat(intervalCaptor.getValue()).isEqualTo(1);
- assertThat(repeatCountCaptor.getValue()).isEqualTo(1);
- assertThat(templateNameCaptor.getValue()).isEqualTo(templateName);
- String actualSourceName = GSON.fromJson(bodyCaptor.getValue(), JsonObject.class)
- .get("event").getAsJsonObject()
- .get("commonEventHeader").getAsJsonObject()
- .get("sourceName").getAsString();
- assertThat(actualSourceName).isEqualTo(sourceNameString);
- verify(eventDataService)
- .persistEventData(any(JsonObject.class), any(JsonObject.class), any(JsonObject.class),
- any(JsonObject.class));
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/TemplatePatcherTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/TemplatePatcherTest.java
deleted file mode 100644
index 52e0d6ae6..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/TemplatePatcherTest.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import com.google.gson.Gson;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import org.assertj.core.api.AssertionsForInterfaceTypes;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
-
-class TemplatePatcherTest {
-
- private static final String TEMPLATE_JSON = "{\n" +
- " \"event\": {\n" +
- " \"commonEventHeader\": {\n" +
- " \"domain\": \"measurementsForVfScaling\"\n" +
- " },\n" +
- " \"measurementsForVfScalingFields\": {\n" +
- " \"measurementsForVfSclaingFieldsVersion\": 2.0,\n" +
- " \"additionalMeasurements\": {\n" +
- " \"name\": \"licenseUsage\",\n" +
- " \"extraFields\": {\n" +
- " \"name\": \"G711AudioPort\",\n" +
- " \"value\": \"1\"\n" +
- " }\n" +
- " }\n" +
- " }\n" +
- " }\n" +
- "}";
-
- private TemplatePatcher templatePatcher;
- private Gson gson = new Gson();
- private JsonObject templateJson;
-
- @BeforeEach
- void setUp() {
- templatePatcher = new TemplatePatcher();
- templateJson = gson.fromJson(TEMPLATE_JSON, JsonObject.class);
- }
-
- @Test
- void shouldReplaceJsonElementsInTemplate() {
- //given
- String patchJsonString = "{\n"
- + " \"event\": {\n"
- + " \"commonEventHeader\": {\n"
- + " \"domain\": \"newDomain\"\n"
- + " }\n"
- + " }\n"
- + "}";
- JsonObject patchJson = gson.fromJson(patchJsonString, JsonObject.class);
-
- //when
- JsonObject requestJson = templatePatcher.mergeTemplateWithPatch(templateJson, patchJson);
-
- //then
- String newDomain = requestJson
- .get("event").getAsJsonObject()
- .get("commonEventHeader").getAsJsonObject()
- .get("domain").getAsString();
- assertThat(newDomain).isEqualTo("newDomain");
- }
-
- @Test
- void shouldAddWholeJsonObjectToTemplateWhenItFinished() {
- //given
- String patchJsonString =
- "{\n"
- + " \"event\": {\n"
- + " \"commonEventHeader\": {\n"
- + " \"domain\": {\n"
- + " \"extraFields\": {\n"
- + " \"name\": \"G711AudioPort\",\n"
- + " \"value\": \"1\"\n"
- + " }\n"
- + " }\n"
- + " }\n"
- + " }\n"
- + "}";
- JsonObject patchJson = gson.fromJson(patchJsonString, JsonObject.class);
-
- //when
- JsonObject requestJson = templatePatcher.mergeTemplateWithPatch(templateJson, patchJson);
-
- //then
- JsonElement newDomain = requestJson
- .get("event").getAsJsonObject()
- .get("commonEventHeader").getAsJsonObject()
- .get("domain");
- assertThat(newDomain.isJsonObject()).isTrue();
- JsonObject newDomainJO = newDomain.getAsJsonObject();
- AssertionsForInterfaceTypes.assertThat(newDomainJO.keySet()).containsExactly("extraFields");
- JsonObject newDomainExtraFields = newDomainJO.get("extraFields").getAsJsonObject();
- AssertionsForInterfaceTypes.assertThat(newDomainExtraFields.keySet()).containsExactly("name", "value");
- }
-
- @Test
- void shouldReplaceJsonObjectWithJsonElementFromPatch() {
- //given
- String patchJsonString = "{ \"event\": \"test\" }";
- JsonObject patchJson = gson.fromJson(patchJsonString, JsonObject.class);
-
- //when
- JsonObject requestJson = templatePatcher.mergeTemplateWithPatch(templateJson, patchJson);
-
- //then
- assertThat(requestJson.get("event").isJsonObject()).isFalse();
- assertThat(requestJson.get("event").getAsString()).isEqualTo("test");
- }
-
- @Test
- void shouldAddNewKeyIfPatchHasItAndTempleteDoesnt() {
- //given
- String patchJsonString = "{ \"newTestKey\": { \"newTestKeyChild\":\"newTestValue\" }}";
- JsonObject patchJson = gson.fromJson(patchJsonString, JsonObject.class);
-
- //when
- JsonObject requestJson = templatePatcher.mergeTemplateWithPatch(templateJson, patchJson);
-
- //then
- assertThat(requestJson.get("event").isJsonObject()).isTrue();
- assertThat(requestJson.get("newTestKey").isJsonObject()).isTrue();
- JsonObject newTestKey = requestJson.get("newTestKey").getAsJsonObject();
- AssertionsForInterfaceTypes.assertThat(newTestKey.keySet()).containsExactly("newTestKeyChild");
- assertThat(newTestKey.get("newTestKeyChild").getAsString()).isEqualTo("newTestValue");
-
- }
-
-
- @Test
- void shouldNotChangeInputTemplateParam() {
- //given
- String patchJsonString = "{ \"newTestKey\": { \"newTestKeyChild\":\"newTestValue\" }}";
- JsonObject patchJson = gson.fromJson(patchJsonString, JsonObject.class);
-
- //when
- templatePatcher.mergeTemplateWithPatch(templateJson, patchJson);
-
- //then
- assertThat(templateJson).isEqualTo(gson.fromJson(TEMPLATE_JSON, JsonObject.class));
-
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/TemplateReaderTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/TemplateReaderTest.java
deleted file mode 100644
index f029fce75..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/TemplateReaderTest.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import com.google.gson.Gson;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonSyntaxException;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-import org.springframework.test.context.TestPropertySource;
-
-import java.io.IOException;
-
-import static org.assertj.core.api.Java6Assertions.assertThat;
-
-@TestPropertySource
-class TemplateReaderTest {
-
- private FilesystemTemplateReader templateReader = new FilesystemTemplateReader("src/test/resources/org/onap/pnfsimulator/simulator/", new Gson());
-
- @Test
- void testShouldReadJsonFromFile() throws IOException {
- JsonObject readJson = templateReader.readTemplate("validExampleMeasurementEvent.json");
- assertThat(readJson.keySet()).containsOnly("event");
- assertThat(readJson.get("event").getAsJsonObject().keySet()).containsExactlyInAnyOrder("commonEventHeader", "measurementsForVfScalingFields");
- }
-
- @Test
- void testShouldRaiseExceptionWhenInvalidJsonIsRead() {
- Assertions.assertThrows(JsonSyntaxException.class, () -> templateReader.readTemplate("invalidJsonStructureEvent.json"));
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImplTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImplTest.java
deleted file mode 100644
index 41bd7b1e6..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImplTest.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator.client;
-
-import org.apache.http.HttpResponse;
-import org.apache.http.client.HttpClient;
-import org.apache.http.conn.socket.PlainConnectionSocketFactory;
-import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import java.io.IOException;
-import java.net.MalformedURLException;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-
-class HttpClientAdapterImplTest {
-
- private static final String HTTPS_URL = "https://0.0.0.0:8443/";
- private static final String HTTP_URL = "http://0.0.0.0:8000/";
-
- private HttpClient httpClient;
- private HttpResponse httpResponse;
-
- @BeforeEach
- void setup() {
- httpClient = mock(HttpClient.class);
- httpResponse = mock(HttpResponse.class);
- }
-
- @Test
- void sendShouldSuccessfullySendRequestGivenValidUrl() throws IOException {
- assertAdapterSentRequest("http://valid-url:8080");
- }
-
- @Test
- void sendShouldSuccessfullySendRequestGivenValidUrlUsingHTTPS() throws IOException {
- assertAdapterSentRequest("https://valid-url:8443");
- }
-
- @Test
- void shouldThrowExceptionWhenMalformedVesUrlPassed(){
- assertThrows(MalformedURLException.class, () -> new HttpClientAdapterImpl("http://blablabla:VES-PORT"));
- }
- @Test
- void shouldCreateAdapterWithClientNotSupportingSSLConnection() throws MalformedURLException {
- HttpClientAdapter adapterWithHttps = new HttpClientAdapterImpl(HTTPS_URL);
- try {
- adapterWithHttps.send("sample");
- } catch (Exception actualException) {
- assertThat(actualException).hasStackTraceContaining(SSLConnectionSocketFactory.class.toString());
- }
- }
-
- @Test
- void shouldCreateAdapterWithClientSupportingPlainConnectionOnly() throws MalformedURLException {
- HttpClientAdapter adapterWithHttps = new HttpClientAdapterImpl(HTTP_URL);
- try {
- adapterWithHttps.send("sample");
- } catch (Exception actualException) {
- assertThat(actualException).hasStackTraceContaining(PlainConnectionSocketFactory.class.toString());
- }
- }
-
- private void assertAdapterSentRequest(String targetUrl) throws IOException {
- HttpClientAdapter adapter = new HttpClientAdapterImpl(httpClient, targetUrl);
- doReturn(httpResponse).when(httpClient).execute(any());
-
- adapter.send("test-msg");
-
- verify(httpClient).execute(any());
- verify(httpResponse).getStatusLine();
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/client/utils/ssl/SslSupportLevelTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/client/utils/ssl/SslSupportLevelTest.java
deleted file mode 100644
index ff41c441d..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/client/utils/ssl/SslSupportLevelTest.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator.client.utils.ssl;
-
-import org.junit.jupiter.api.Test;
-
-import java.net.MalformedURLException;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-
-class SslSupportLevelTest {
-
- private static final String HTTPS_URL = "https://127.0.0.1:8443/";
- private static final String HTTP_URL = "http://127.0.0.1:8080/";
-
- @Test
- void testShouldReturnAlwaysTrustSupportLevelForHttpsUrl() throws MalformedURLException {
- SslSupportLevel actualSupportLevel = SslSupportLevel.getSupportLevelBasedOnProtocol(HTTPS_URL);
- assertEquals(actualSupportLevel, SslSupportLevel.ALWAYS_TRUST);
- }
-
- @Test
- void testShouldReturnNoneSupportLevelForHttpUrl() throws MalformedURLException {
- SslSupportLevel actualSupportLevel = SslSupportLevel.getSupportLevelBasedOnProtocol(HTTP_URL);
- assertEquals(actualSupportLevel, SslSupportLevel.NONE);
- }
-
- @Test
- void testShouldRaiseExceptionWhenInvalidUrlPassed(){
- assertThrows(MalformedURLException.class, () -> SslSupportLevel.getSupportLevelBasedOnProtocol("http://bla:VES-PORT/"));
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/scheduler/EventJobTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/scheduler/EventJobTest.java
deleted file mode 100644
index 25ed84c43..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/scheduler/EventJobTest.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator.scheduler;
-
-import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.onap.pnfsimulator.simulator.scheduler.EventJob.BODY;
-import static org.onap.pnfsimulator.simulator.scheduler.EventJob.CLIENT_ADAPTER;
-import static org.onap.pnfsimulator.simulator.scheduler.EventJob.EVENT_ID;
-import static org.onap.pnfsimulator.simulator.scheduler.EventJob.KEYWORDS_HANDLER;
-import static org.onap.pnfsimulator.simulator.scheduler.EventJob.TEMPLATE_NAME;
-import static org.onap.pnfsimulator.simulator.scheduler.EventJob.VES_URL;
-
-import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
-import org.junit.jupiter.api.Test;
-import org.mockito.ArgumentCaptor;
-import org.onap.pnfsimulator.simulator.KeywordsExtractor;
-import org.onap.pnfsimulator.simulator.KeywordsHandler;
-import org.onap.pnfsimulator.simulator.client.HttpClientAdapter;
-import org.quartz.JobDataMap;
-import org.quartz.JobDetail;
-import org.quartz.JobExecutionContext;
-import org.quartz.JobKey;
-
-class EventJobTest {
-
- @Test
- void shouldSendEventWhenExecuteCalled() {
- //given
- EventJob eventJob = new EventJob();
- String templateName = "template name";
- String vesUrl = "http://someurl:80/";
- String eventId = "1";
- JsonParser parser = new JsonParser();
- JsonObject body = parser.parse("{\"a\": \"A\"}").getAsJsonObject();
- HttpClientAdapter clientAdapter = mock(HttpClientAdapter.class);
- JobExecutionContext jobExecutionContext =
- createMockJobExecutionContext(templateName, eventId, vesUrl, body, clientAdapter);
-
- ArgumentCaptor<String> vesUrlCaptor = ArgumentCaptor.forClass(String.class);
- ArgumentCaptor<String> bodyCaptor = ArgumentCaptor.forClass(String.class);
-
- //when
- eventJob.execute(jobExecutionContext);
-
- //then
- verify(clientAdapter).send(bodyCaptor.capture());
- assertThat(bodyCaptor.getValue()).isEqualTo(body.toString());
- }
-
- private JobExecutionContext createMockJobExecutionContext(String templateName, String eventId, String vesURL,
- JsonObject body, HttpClientAdapter clientAdapter) {
-
- JobDataMap jobDataMap = new JobDataMap();
- jobDataMap.put(TEMPLATE_NAME, templateName);
- jobDataMap.put(KEYWORDS_HANDLER, new KeywordsHandler(new KeywordsExtractor(), (id) -> 1));
- jobDataMap.put(EVENT_ID, eventId);
- jobDataMap.put(VES_URL, vesURL);
- jobDataMap.put(BODY, body);
- jobDataMap.put(CLIENT_ADAPTER, clientAdapter);
-
- JobExecutionContext jobExecutionContext = mock(JobExecutionContext.class);
- JobDetail jobDetail = mock(JobDetail.class);
- when(jobExecutionContext.getJobDetail()).thenReturn(jobDetail);
- when(jobDetail.getJobDataMap()).thenReturn(jobDataMap);
- when(jobDetail.getKey()).thenReturn(new JobKey("jobId", "group"));
- return jobExecutionContext;
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/scheduler/EventSchedulerTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/scheduler/EventSchedulerTest.java
deleted file mode 100644
index 9d0f7d84f..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/scheduler/EventSchedulerTest.java
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator.scheduler;
-
-import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import com.google.gson.JsonObject;
-
-import java.net.MalformedURLException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.ArgumentCaptor;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.quartz.JobDataMap;
-import org.quartz.JobDetail;
-import org.quartz.JobExecutionContext;
-import org.quartz.JobKey;
-import org.quartz.Scheduler;
-import org.quartz.SchedulerException;
-import org.quartz.SimpleTrigger;
-
-class EventSchedulerTest {
-
- @InjectMocks
- EventScheduler eventScheduler;
-
- @Mock
- Scheduler quartzScheduler;
-
- @BeforeEach
- void setUp() {
- MockitoAnnotations.initMocks(this);
- }
-
- @Test
- void shouldTriggerEventWithGivenConfiguration() throws SchedulerException, MalformedURLException {
- //given
- ArgumentCaptor<JobDetail> jobDetailCaptor = ArgumentCaptor.forClass(JobDetail.class);
- ArgumentCaptor<SimpleTrigger> triggerCaptor = ArgumentCaptor.forClass(SimpleTrigger.class);
-
- String vesUrl = "http://some:80/";
- int repeatInterval = 1;
- int repeatCount = 4;
- String testName = "testName";
- String eventId = "1";
- JsonObject body = new JsonObject();
-
- //when
- eventScheduler.scheduleEvent(vesUrl, repeatInterval, repeatCount, testName, eventId, body);
-
- //then
- verify(quartzScheduler).scheduleJob(jobDetailCaptor.capture(), triggerCaptor.capture());
- JobDataMap actualJobDataMap = jobDetailCaptor.getValue().getJobDataMap();
- assertThat(actualJobDataMap.get(EventJob.BODY)).isEqualTo(body);
- assertThat(actualJobDataMap.get(EventJob.TEMPLATE_NAME)).isEqualTo(testName);
- assertThat(actualJobDataMap.get(EventJob.VES_URL)).isEqualTo(vesUrl);
-
- SimpleTrigger actualTrigger = triggerCaptor.getValue();
- // repeat count adds 1 to given value
- assertThat(actualTrigger.getRepeatCount()).isEqualTo(repeatCount - 1);
-
- //getRepeatInterval returns interval in ms
- assertThat(actualTrigger.getRepeatInterval()).isEqualTo(repeatInterval * 1000);
- }
-
- @Test
- void shouldCancelAllEvents() throws SchedulerException {
- //given
- List<JobKey> jobsKeys = Arrays.asList(new JobKey("jobName1"), new JobKey("jobName2"),
- new JobKey("jobName3"), new JobKey("jobName4"));
- List<JobExecutionContext> jobExecutionContexts = createExecutionContextWithKeys(jobsKeys);
- when(quartzScheduler.getCurrentlyExecutingJobs()).thenReturn(jobExecutionContexts);
- when(quartzScheduler.deleteJobs(jobsKeys)).thenReturn(true);
-
- //when
- boolean isCancelled = eventScheduler.cancelAllEvents();
-
- //then
- assertThat(isCancelled).isTrue();
- }
-
- @Test
- void shouldCancelSingleEvent() throws SchedulerException {
- //given
- JobKey jobToRemove = new JobKey("jobName3");
- List<JobKey> jobsKeys = Arrays.asList(new JobKey("jobName1"), new JobKey("jobName2"),
- jobToRemove, new JobKey("jobName4"));
- List<JobExecutionContext> jobExecutionContexts = createExecutionContextWithKeys(jobsKeys);
-
- when(quartzScheduler.getCurrentlyExecutingJobs()).thenReturn(jobExecutionContexts);
- when(quartzScheduler.deleteJob(jobToRemove)).thenReturn(true);
-
- //when
- boolean isCancelled = eventScheduler.cancelEvent("jobName3");
-
- //then
- assertThat(isCancelled).isTrue();
- }
-
- private List<JobExecutionContext> createExecutionContextWithKeys(List<JobKey> jobsKeys) {
- List<JobExecutionContext> contexts = new ArrayList<>();
- for (JobKey key : jobsKeys) {
- contexts.add(createExecutionContextFromKey(key));
- }
- return contexts;
- }
-
- private JobExecutionContext createExecutionContextFromKey(JobKey key) {
- JobExecutionContext context = mock(JobExecutionContext.class);
- JobDetail jobDetail = mock(JobDetail.class);
- when(context.getJobDetail()).thenReturn(jobDetail);
- when(jobDetail.getKey()).thenReturn(key);
- return context;
- }
-
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulatorconfig/SimulatorConfigServiceTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulatorconfig/SimulatorConfigServiceTest.java
deleted file mode 100644
index 4ed097289..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulatorconfig/SimulatorConfigServiceTest.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulatorconfig;
-
-import org.assertj.core.util.Lists;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.List;
-
-import static org.assertj.core.api.Java6Assertions.assertThat;
-import static org.assertj.core.api.Java6Assertions.assertThatThrownBy;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.when;
-import static org.mockito.MockitoAnnotations.initMocks;
-
-class SimulatorConfigServiceTest {
-
- private static final String SAMPLE_ID = "sampleId";
- private static final String SAMPLE_NEW_VES_URL = "http://localhost:8090/eventListener/v7";
- @Mock
- private SimulatorConfigRepository repository;
-
- @InjectMocks
- private SimulatorConfigService service;
-
- @BeforeEach
- void resetMocks() {
- initMocks(this);
- }
-
- @Test
- void testShouldReturnConfiguration() throws MalformedURLException {
- List<SimulatorConfig> expectedConfig = getExpectedConfig();
- when(repository.findAll()).thenReturn(expectedConfig);
-
- SimulatorConfig configs = service.getConfiguration();
-
- assertThat(configs).isNotNull();
- }
-
- @Test
- void testShouldRaiseExceptionWhenNoConfigurationPresent() {
- when(repository.findAll()).thenReturn(Lists.emptyList());
-
- assertThatThrownBy(() -> service.getConfiguration())
- .isInstanceOf(IllegalStateException.class)
- .hasMessageContaining("No configuration found in db");
- }
-
- @Test
- void testShouldUpdateConfigurationWithVesUrl() throws MalformedURLException {
- URL updatedUrl = new URL("http://localhost:8090/listener/v8");
- SimulatorConfig configWithUpdates = new SimulatorConfig("sampleId", updatedUrl);
- List<SimulatorConfig> expectedConfig = getExpectedConfig();
-
- when(repository.findAll()).thenReturn(expectedConfig);
- when(repository.save(any(SimulatorConfig.class))).thenReturn(configWithUpdates);
-
- SimulatorConfig updatedConfig = service.updateConfiguration(configWithUpdates);
-
- assertThat(updatedConfig).isEqualToComparingFieldByField(configWithUpdates);
- }
-
- @Test
- void testShouldRaiseExceptionWhenNoConfigInDbPresentOnUpdate() throws MalformedURLException {
- when(repository.findAll()).thenReturn(Lists.emptyList());
-
- SimulatorConfig configWithUpdates = new SimulatorConfig(SAMPLE_ID, new URL(SAMPLE_NEW_VES_URL));
-
- assertThatThrownBy(() -> service.updateConfiguration(configWithUpdates))
- .isInstanceOf(IllegalStateException.class)
- .hasMessageContaining("No configuration found in db");
- }
-
- private List<SimulatorConfig> getExpectedConfig() throws MalformedURLException {
- URL sampleVesUrl = new URL("http://localhost:8080/eventListener/v7");
- SimulatorConfig config = new SimulatorConfig(SAMPLE_ID, sampleVesUrl);
- return Lists.newArrayList(config);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/template/TemplateServiceTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/template/TemplateServiceTest.java
deleted file mode 100644
index 074696094..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/template/TemplateServiceTest.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.template;
-
-import com.google.gson.Gson;
-import com.google.gson.JsonObject;
-import org.assertj.core.util.Lists;
-import org.bson.Document;
-import org.junit.Assert;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;;
-import org.onap.pnfsimulator.template.search.viewmodel.FlatTemplateContent;
-import org.onap.pnfsimulator.template.search.TemplateSearchHelper;
-import org.springframework.data.mongodb.core.MongoTemplate;
-import org.springframework.data.mongodb.core.query.Query;
-
-import java.time.Instant;
-import java.util.Collections;
-import java.util.List;
-import java.util.Optional;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anyObject;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.mockito.MockitoAnnotations.initMocks;
-
-class TemplateServiceTest {
- private static final Gson GSON = new Gson();
- private static final Template SAMPLE_TEMPLATE = new Template("sample name", new Document(), Instant.now().getNano());
- private static final List<Template> SAMPLE_TEMPLATE_LIST = Collections.singletonList(SAMPLE_TEMPLATE);
-
- @Mock
- private TemplateRepository templateRepositoryMock;
-
- @Mock
- private MongoTemplate mongoTemplate;
-
- @InjectMocks
- private TemplateService service;
-
- @BeforeEach
- void setUp() {
- initMocks(this);
- TemplateSearchHelper searchHelper = new TemplateSearchHelper(mongoTemplate);
- service = new TemplateService(templateRepositoryMock, searchHelper);
- }
-
- @Test
- void testShouldReturnAllTemplates() {
- when(templateRepositoryMock.findAll()).thenReturn(SAMPLE_TEMPLATE_LIST);
-
- List<Template> actual = service.getAll();
- assertThat(actual).containsExactly(SAMPLE_TEMPLATE_LIST.get(0));
- }
-
-
- @Test
- void testShouldGetTemplateBySpecifiedName() {
- when(templateRepositoryMock.findById("sample name")).thenReturn(Optional.of(SAMPLE_TEMPLATE));
-
- Optional<Template> actualTemplate = service.get("sample name");
- assertThat(actualTemplate).isPresent();
- assertThat(actualTemplate.get()).isEqualTo(SAMPLE_TEMPLATE);
- }
-
- @Test
- void testShouldSaveTemplate() {
- service.persist(SAMPLE_TEMPLATE);
-
- verify(templateRepositoryMock, times(1)).save(SAMPLE_TEMPLATE);
- }
-
- @Test
- void testShouldDeleteTemplateByName() {
- service.delete("sample name");
-
- verify(templateRepositoryMock, times(1)).deleteById("sample name");
- }
-
-
- @Test
- void testShouldReturnTemplatesAccordingToGivenSearchCriteria() {
- doReturn(Lists.emptyList()).when(mongoTemplate).find(any(Query.class), anyObject(), any(String.class));
-
- List<String> idsByContentCriteria = service.getIdsByContentCriteria(GSON.fromJson("{\"domain\": \"notification.json\"}", JsonObject.class));
-
- assertThat(idsByContentCriteria).isEmpty();
- }
-
- @Test
- void shouldReturnNamesForGivenComposedSearchCriteria(){
- JsonObject composedCriteriaObject = GSON.fromJson("{\"eventName\": \"pnfRegistration_Nokia_5gDu\", \"sequence\": 1}", JsonObject.class);
- List<FlatTemplateContent> arr = Lists.newArrayList(new FlatTemplateContent("sampleId", null));
-
- doReturn(arr).when(mongoTemplate).find(any(Query.class), anyObject(), any(String.class));
-
- List<String> idsByContentCriteria = service.getIdsByContentCriteria(composedCriteriaObject);
- assertThat(idsByContentCriteria).containsOnly("sampleId");
- }
-
- @Test
- void shouldReturnFalseWhenOverwritingWithoutForce() {
- String id = "someTemplate";
- Template template = new Template(id, new Document(), Instant.now().getNano());
- when(templateRepositoryMock.existsById(id)).thenReturn(true);
- boolean actual = service.tryPersistOrOverwrite(template, false);
- Assert.assertFalse(actual);
- }
-
- @Test
- void shouldReturnTrueWhenOverwritingWithForce() {
- String id = "someTemplate";
- Template template = new Template(id, new Document(), Instant.now().getNano());
- when(templateRepositoryMock.existsById(id)).thenReturn(true);
- boolean actual = service.tryPersistOrOverwrite(template, true);
- Assert.assertTrue(actual);
- }
-
- @Test
- void shouldReturnTrueWhenSavingNonExistingTemplate() {
- String id = "someTemplate";
- Template template = new Template(id, new Document(), Instant.now().getNano());
- when(templateRepositoryMock.existsById(id)).thenReturn(false);
- boolean actual = service.tryPersistOrOverwrite(template, false);
- Assert.assertTrue(actual);
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/template/search/JsonUtilsTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/template/search/JsonUtilsTest.java
deleted file mode 100644
index fa0bed182..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/template/search/JsonUtilsTest.java
+++ /dev/null
@@ -1,166 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.template.search;
-
-import com.google.gson.Gson;
-import com.google.gson.JsonObject;
-import org.bson.Document;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import static org.assertj.core.api.Java6Assertions.assertThat;
-
-class JsonUtilsTest {
-
- private static final Gson GSON_HELPER = new Gson();
- private JsonUtils utils;
-
- @BeforeEach
- void setUp() {
- utils = new JsonUtils();
- }
-
- private static final String NOTIFICATION_JSON = "{\n\"event\": {\n" +
- " \"commonEventHeader\": {\n" +
- " \"domain\": \"notification\",\n" +
- " \"eventName\": \"vFirewallBroadcastPackets\"\n" +
- " },\n" +
- " \"notificationFields\": {\n" +
- " \"changeIdentifier\": \"PM_MEAS_FILES\",\n" +
- " \"arrayOfNamedHashMap\": [{\n" +
- " \"name\": \"A20161221.1031-1041.bin.gz\",\n" +
- " \"hashMap\": {\n" +
- " \"fileformatType\": \"org.3GPP.32.435#measCollec\",\n" +
- " \"fileFormatVersion\": \"V10\"\n"+
- " }\n" +
- " }, {\n" +
- " \"name\": \"A20161222.1042-1102.bin.gz\",\n" +
- " \"hashMap\": {\n" +
- " \"fileFormatType\": \"org.3GPP.32.435#measCollec\",\n" +
- " \"fileFormatVersion\": \"1.0.0\"\n" +
- " }\n" +
- " }],\n" +
- " \"notificationFieldsVersion\": \"2.0\"\n}\n\n}}";
- private static final String EXPECTED_FLATTENED_NOTIFICATION = "{" +
- " \":event:commonEventHeader:domain\" : \"notification\"," +
- " \":event:commonEventHeader:eventName\" : \"vFirewallBroadcastPackets\"," +
- " \":event:notificationFields:changeIdentifier\" : \"PM_MEAS_FILES\"," +
- " \":event:notificationFields:arrayOfNamedHashMap[0]:name\" : \"A20161221.1031-1041.bin.gz\"," +
- " \":event:notificationFields:arrayOfNamedHashMap[0]:hashMap:fileformatType\" : \"org.3GPP.32.435#measCollec\"," +
- " \":event:notificationFields:arrayOfNamedHashMap[0]:hashMap:fileFormatVersion\" : \"V10\"," +
- " \":event:notificationFields:arrayOfNamedHashMap[1]:name\" : \"A20161222.1042-1102.bin.gz\"," +
- " \":event:notificationFields:arrayOfNamedHashMap[1]:hashMap:fileFormatType\" : \"org.3GPP.32.435#measCollec\"," +
- " \":event:notificationFields:arrayOfNamedHashMap[1]:hashMap:fileFormatVersion\" : \"1.0.0\"," +
- " \":event:notificationFields:notificationFieldsVersion\" : \"2.0\" }";
-
- @Test
- void shouldFlattenNestedJsonAndSeparateKeysWithDoubleHash(){
- JsonObject templateJson = GSON_HELPER.fromJson(NOTIFICATION_JSON, JsonObject.class);
-
- JsonObject result = utils.flatten(templateJson);
-
- assertThat(result).isEqualTo(GSON_HELPER.fromJson(EXPECTED_FLATTENED_NOTIFICATION, JsonObject.class));
- }
-
- @Test
- void shouldWorkOnEmptyJsonObject(){
- JsonObject result = utils.flatten(new JsonObject());
-
- assertThat(result.toString()).isEqualTo("{}");
- }
-
- @Test
- void shouldFlattenObjectWithArrayValue(){
- String expectedFlattenedObjectWithArray = "{" +
- " \":sample[0]\": 1," +
- " \":sample[1]\": 2," +
- " \":sample[2]\": 3}";
- JsonObject jsonWithPrimitivesArray = GSON_HELPER.fromJson("{\"sample\": [1, 2, 3]}", JsonObject.class);
-
- JsonObject result = utils.flatten(jsonWithPrimitivesArray);
-
- assertThat(result).isEqualTo(GSON_HELPER.fromJson(expectedFlattenedObjectWithArray, JsonObject.class));
- }
-
- @Test
- void shouldFlattenObjectWithEmptyArrayValue(){
- String expectedFlattenedObjectWithEmptyArray = "{\":sample\": []}";
- JsonObject jsonWithEmptyArrayValue = GSON_HELPER.fromJson("{\"sample\": []}", JsonObject.class);
-
- JsonObject result = utils.flatten(jsonWithEmptyArrayValue);
-
- assertThat(result).isEqualTo(GSON_HELPER.fromJson(expectedFlattenedObjectWithEmptyArray, JsonObject.class));
- }
-
- @Test
- void shouldFlattenNestedObjectWithEmptyObjectValue(){
- String expectedFlattenedNestedObjectWithEmptyObject = "{\":sample:key\": {}}";
- JsonObject nestedJsonWithEmptyObject = GSON_HELPER.fromJson("{\"sample\": {\"key\":{}}}", JsonObject.class);
-
- JsonObject result = utils.flatten(nestedJsonWithEmptyObject);
-
- assertThat(result).isEqualTo(GSON_HELPER.fromJson(expectedFlattenedNestedObjectWithEmptyObject, JsonObject.class));
- }
-
- @Test
- void shouldFlattenObjectWithDifferentDataTypes(){
- String jsonWithDifferentDataTypes = "{ \"topLevelKey\": {\"sampleInt\": 1, \"sampleBool\": false, \"sampleDouble\": 10.0, \"sampleString\": \"str\"}}";
- String expectedResult = "{\":topLevelKey:sampleInt\": 1," +
- " \":topLevelKey:sampleBool\": \"false\"," +
- " \":topLevelKey:sampleDouble\": 10.0," +
- " \":topLevelKey:sampleString\": \"str\"}";
- JsonObject templateJson = GSON_HELPER.fromJson(jsonWithDifferentDataTypes, JsonObject.class);
-
- JsonObject result = utils.flatten(templateJson);
-
- assertThat(result).isEqualTo(GSON_HELPER.fromJson(expectedResult, JsonObject.class));
- }
-
- @Test
- void shouldHandleNullValues(){
- String jsonWithNullValue = "{ \"topLevelKey\": {\"sampleNull\": null, \"sampleString\": \"str\"}}";
- String expectedResult = "{\":topLevelKey:sampleNull\": null," +
- " \":topLevelKey:sampleString\": \"str\"}";
- JsonObject templateJson = GSON_HELPER.fromJson(jsonWithNullValue, JsonObject.class);
-
- JsonObject result = utils.flatten(templateJson);
-
- assertThat(result).isEqualTo(GSON_HELPER.fromJson(expectedResult, JsonObject.class));
- }
-
- @Test
- void shouldFlattenBsonDocument(){
- Document documentInput = Document.parse(NOTIFICATION_JSON);
-
- Document result = utils.flatten(documentInput);
-
- assertThat(result.toJson()).isEqualTo(EXPECTED_FLATTENED_NOTIFICATION);
- }
-
- @Test
- void shouldNotChangeEmptyBsonDocument(){
- Document input = Document.parse("{}");
-
- Document result = utils.flatten(input);
-
- assertThat(result.toJson()).isEqualTo("{ }");
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/template/search/TemplateSearchHelperTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/template/search/TemplateSearchHelperTest.java
deleted file mode 100644
index aeef8706a..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/template/search/TemplateSearchHelperTest.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.template.search;
-
-import com.google.gson.Gson;
-import com.google.gson.JsonObject;
-import com.mongodb.BasicDBList;
-import org.assertj.core.util.Lists;
-import org.bson.Document;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.ArgumentCaptor;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.onap.pnfsimulator.template.search.viewmodel.FlatTemplateContent;
-import org.springframework.data.mongodb.core.MongoTemplate;
-import org.springframework.data.mongodb.core.query.BasicQuery;
-import org.springframework.data.mongodb.core.query.Query;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.regex.Pattern;
-import java.util.stream.Collectors;
-
-import static org.assertj.core.api.Java6Assertions.assertThat;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anyObject;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.mockito.MockitoAnnotations.initMocks;
-
-
-class TemplateSearchHelperTest {
-
- private static final Gson GSON = new Gson();
- private static final String FLATTENED_TEMPLATES_VIEW = "flatTemplatesView";
-
- @Mock
- private MongoTemplate mongoTemplate;
-
- @InjectMocks
- private TemplateSearchHelper helper;
-
- private static final ArgumentCaptor<Query> QUERY_CAPTOR = ArgumentCaptor.forClass(Query.class);
- private static final ArgumentCaptor<String> COLLECTION_NAME_CAPTOR = ArgumentCaptor.forClass(String.class);
- private static final ArgumentCaptor<Class<FlatTemplateContent>> CLASS_TYPE_CAPTOR = ArgumentCaptor.forClass((Class) FlatTemplateContent.class);
-
-
- @BeforeEach
- void setUp() {
- initMocks(this);
- }
-
- @Test
- void shouldReturnNamesForGivenComposedSearchCriteria(){
- String expectedComposedQueryString = "{\"$and\":[{\"keyValues\":{\"$elemMatch\":{\"k\":{\"$regex\":\":eventName(?:(\\\\[[\\\\d]+\\\\]))?$\",\"$options\":\"iu\"},\"v\":{\"$regex\":\"^\\\\QpnfRegistration_Nokia_5gDu\\\\E$\",\"$options\":\"iu\"}}}},{\"keyValues\":{\"$elemMatch\":{\"k\":{\"$regex\":\":sequence(?:(\\\\[[\\\\d]+\\\\]))?$\",\"$options\":\"iu\"},\"v\":1.0}}}]}";
- Query expectedQuery = new BasicQuery(expectedComposedQueryString);
-
- String composedCriteriaInputJson = "{\"eventName\": \"pnfRegistration_Nokia_5gDu\", \"sequence\": 1}";
- JsonObject composedCriteriaObject = GSON.fromJson(composedCriteriaInputJson, JsonObject.class);
-
- when(mongoTemplate.find(any(Query.class), anyObject(), any(String.class))).thenReturn(Lists.newArrayList(new FlatTemplateContent("sampleId1", null), new FlatTemplateContent("sampleId2", null)));
-
- List<String> idsOfDocumentMatchingCriteria = helper.getIdsOfDocumentMatchingCriteria(composedCriteriaObject);
-
- assertThat(idsOfDocumentMatchingCriteria).containsOnly("sampleId1", "sampleId2");
- verify(mongoTemplate, times(1)).find(QUERY_CAPTOR.capture(), CLASS_TYPE_CAPTOR.capture(), COLLECTION_NAME_CAPTOR.capture());
- assertThat(QUERY_CAPTOR.getValue().toString()).isEqualTo(expectedQuery.toString());
- assertThat(COLLECTION_NAME_CAPTOR.getValue()).isEqualTo(FLATTENED_TEMPLATES_VIEW);
- assertThat(CLASS_TYPE_CAPTOR.getValue()).isEqualTo(FlatTemplateContent.class);
- }
-
- @Test
- void shouldReturnTemplatesAccordingToGivenSearchCriteria() {
- Query expectedQueryStructure = new BasicQuery("{\"$and\":[{\"keyValues\": { \"$elemMatch\" : { \"k\" : { \"$regex\" : \":domain(?:(\\\\[[\\\\d]+\\\\]))?$\", \"$options\" : \"iu\" }, \"v\" : { \"$regex\" : \"^\\\\Qnotification\\\\E$\", \"$options\" : \"iu\" }}}}]}");
-
- helper.getIdsOfDocumentMatchingCriteria(GSON.fromJson("{\"domain\": \"notification\"}", JsonObject.class));
-
-
- verify(mongoTemplate, times(1)).find(QUERY_CAPTOR.capture(), CLASS_TYPE_CAPTOR.capture(), COLLECTION_NAME_CAPTOR.capture());
-
- assertThat(QUERY_CAPTOR.getValue().toString()).isEqualTo(expectedQueryStructure.toString());
- assertThat(COLLECTION_NAME_CAPTOR.getValue()).isEqualTo(FLATTENED_TEMPLATES_VIEW);
- assertThat(CLASS_TYPE_CAPTOR.getValue()).isEqualTo(FlatTemplateContent.class);
- }
-
- @Test
- void shouldGetQueryForEmptyJson(){
- JsonObject jsonObject = GSON.fromJson("{}", JsonObject.class);
-
- String expectedComposedQueryString = "{}";
- Query expectedQuery = new BasicQuery(expectedComposedQueryString);
-
- helper.getIdsOfDocumentMatchingCriteria(jsonObject);
-
- verify(mongoTemplate, times(1)).find(QUERY_CAPTOR.capture(), CLASS_TYPE_CAPTOR.capture(), COLLECTION_NAME_CAPTOR.capture());
- Query queryBasedOnCriteria = QUERY_CAPTOR.getValue();
-
- assertThat(QUERY_CAPTOR.getValue().toString()).isEqualTo(expectedQuery.toString());
- assertThat(COLLECTION_NAME_CAPTOR.getValue()).isEqualTo(FLATTENED_TEMPLATES_VIEW);
- assertThat(CLASS_TYPE_CAPTOR.getValue()).isEqualTo(FlatTemplateContent.class);
- }
-
-
- @Test
- void shouldGetQueryWithAllTypeValues(){
- JsonObject jsonObject = GSON.fromJson("{\"stringKey\": \"stringValue\", \"numberKey\": 16.00, \"boolKey\": false}", JsonObject.class);
-
- helper.getIdsOfDocumentMatchingCriteria(jsonObject);
-
- verify(mongoTemplate, times(1)).find(QUERY_CAPTOR.capture(), CLASS_TYPE_CAPTOR.capture(), COLLECTION_NAME_CAPTOR.capture());
- Query queryBasedOnCriteria = QUERY_CAPTOR.getValue();
-
- assertThat(queryBasedOnCriteria.getQueryObject().get("$and")).isInstanceOf(List.class);
- List<Document> conditionDocuments = new ArrayList<>((List<Document>) queryBasedOnCriteria.getQueryObject().get("$and"));
- List<Document> conditions = conditionDocuments.stream().map(el -> (Document) el.get("keyValues")).map(el -> (Document) el.get("$elemMatch")).collect(Collectors.toList());
-
- assertThat(conditionDocuments).hasSize(3);
- assertJsonPreparedKeyHasCorrectStructure(conditions.get(0), "stringKey");
- assertThat(conditions.get(0).get("v").toString()).isEqualTo(TemplateSearchHelper.getCaseInsensitive("^\\QstringValue\\E$").toString());
-
- assertJsonPreparedKeyHasCorrectStructure(conditions.get(1), "numberKey");
- assertThat(conditions.get(1).get("v")).isEqualTo(16.0);
-
- assertJsonPreparedKeyHasCorrectStructure(conditions.get(2), "boolKey");
- assertThat(conditions.get(2).get("v")).isEqualTo("false");
- }
-
- @Test
- void shouldThrowExceptionWhenNullIsPresentAsCriteriaValue(){
- JsonObject jsonObject = GSON.fromJson("{\"stringKey\": \"stringValue\", \"nullKey\": null}", JsonObject.class);
-
- assertThrows(IllegalJsonValueException.class, () -> helper.getIdsOfDocumentMatchingCriteria(jsonObject));
- }
-
- private void assertJsonPreparedKeyHasCorrectStructure(Document actual, String expectedPattern){
- assertThat(actual.get("k").toString()).isEqualTo(Pattern.compile(String.format(":%s(?:(\\[[\\d]+\\]))?$", expectedPattern)).toString());
-
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/template/search/handler/PrimitiveValueCriteriaBuilderTest.java b/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/template/search/handler/PrimitiveValueCriteriaBuilderTest.java
deleted file mode 100644
index 31bcf1cb2..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/java/org/onap/pnfsimulator/template/search/handler/PrimitiveValueCriteriaBuilderTest.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Simulator
- * ================================================================================
- * Copyright (C) 2019 Nokia. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.template.search.handler;
-
-import com.google.gson.JsonPrimitive;
-import org.junit.jupiter.api.Test;
-import org.springframework.data.mongodb.core.query.Criteria;
-
-import static org.assertj.core.api.Java6Assertions.assertThat;
-
-class PrimitiveValueCriteriaBuilderTest {
-
- private PrimitiveValueCriteriaBuilder builder = new PrimitiveValueCriteriaBuilder();
-
- @Test
- void testShouldAddRegexLikeCriteriaForStringType(){
- Criteria criteria = builder.applyValueCriteriaBasedOnPrimitiveType(Criteria.where("k").is("10").and("v"), new JsonPrimitive("sample"));
-
- assertThat(criteria.getCriteriaObject().toJson()).isEqualTo("{ \"k\" : \"10\", \"v\" : { \"$regex\" : \"^\\\\Qsample\\\\E$\", \"$options\" : \"iu\" } }");
- }
-
- @Test
- void testShouldAddRegexLikeAndEscapeStringWithMetaChars(){
- Criteria criteria = builder.applyValueCriteriaBasedOnPrimitiveType(Criteria.where("k").is("10").and("v"), new JsonPrimitive("[1,2,3,4,5]"));
-
- assertThat(criteria.getCriteriaObject().toJson()).isEqualTo("{ \"k\" : \"10\", \"v\" : { \"$regex\" : \"^\\\\Q[1,2,3,4,5]\\\\E$\", \"$options\" : \"iu\" } }");
- }
-
- @Test
- void testShouldAddRegexLikeCriteriaForIntType(){
- Criteria criteria = builder.applyValueCriteriaBasedOnPrimitiveType(Criteria.where("k").is("10").and("v"), new JsonPrimitive(1));
-
- assertThat(criteria.getCriteriaObject().toJson()).isEqualTo("{ \"k\" : \"10\", \"v\" : 1.0 }");
- }
-
- @Test
- void testShouldAddRegexLikeCriteriaForLongType(){
- Criteria criteria = builder.applyValueCriteriaBasedOnPrimitiveType(Criteria.where("k").is("10").and("v"), new JsonPrimitive(Long.MAX_VALUE));
-
- assertThat(criteria.getCriteriaObject().toJson()).isEqualTo("{ \"k\" : \"10\", \"v\" : 9.223372036854776E18 }");
- }
-
- @Test
- void testShouldAddRegexLikeCriteriaForDoubleType(){
- Criteria criteria = builder.applyValueCriteriaBasedOnPrimitiveType(Criteria.where("k").is("10").and("v"), new JsonPrimitive(2.5));
-
- assertThat(criteria.getCriteriaObject().toJson()).isEqualTo("{ \"k\" : \"10\", \"v\" : 2.5 }");
- }
-
- @Test
- void testShouldAddRegexLikeCriteriaForBooleanType(){
- Criteria criteria = builder.applyValueCriteriaBasedOnPrimitiveType(Criteria.where("k").is("10").and("v"), new JsonPrimitive(true));
-
- assertThat(criteria.getCriteriaObject().toJson()).isEqualTo("{ \"k\" : \"10\", \"v\" : \"true\" }");
- }
-
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/resources/application.properties b/test/mocks/pnfsimulator/pnfsimulator/src/test/resources/application.properties
deleted file mode 100644
index fe10b41ce..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/resources/application.properties
+++ /dev/null
@@ -1 +0,0 @@
-templates.dir=src/test/resources/org/onap/pnfsimulator/simulator \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/resources/logback-test.xml b/test/mocks/pnfsimulator/pnfsimulator/src/test/resources/logback-test.xml
deleted file mode 100644
index ad4f0c85e..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/resources/logback-test.xml
+++ /dev/null
@@ -1,69 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-<Configuration complete="true" compact="true">
-
- <Property name="outputFilename" value="pnfsimulator_output"/>
- <Property name="log-path" value="${java.io.tmpdir}"/>
- <property name="maxFileSize" value="50MB"/>
- <property name="maxHistory" value="30"/>
- <property name="totalSizeCap" value="10GB"/>
-
- <appender name="Console" target="SYSTEM_OUT" class="ch.qos.logback.core.ConsoleAppender">
- <encoder>
- <Pattern>%nopexception%logger
- |%date{yyyy-MM-dd'T'HH:mm:ss.SSSXXX,UTC}
- |%level
- |%replace(%replace(%message){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%mdc){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%rootException){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%marker){'\t','\\\\t'}){'\n','\\\\n'}
- |%thread
- |%n</Pattern>
- </encoder>
- </appender>
-
- <appender name="ROLLING-FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <encoder>
- <pattern>%nopexception%logger
- |%date{yyyy-MM-dd'T'HH:mm:ss.SSSXXX,UTC}
- |%level
- |%replace(%replace(%message){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%mdc){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%rootException){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%marker){'\t','\\\\t'}){'\n','\\\\n'}
- |%thread
- |%n</pattern>
- </encoder>
- <File>${log-path}/${outputFilename}.log</File>
- <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
- <FileNamePattern>${log-path}/${outputFilename}.%d{yyyy-MM-dd}.%i.log.zip</FileNamePattern>
- <MaxFileSize>${maxFileSize}</MaxFileSize>
- <MaxHistory>${maxHistory}</MaxHistory>
- <TotalSizeCap>${totalSizeCap}</TotalSizeCap>
- </rollingPolicy>
- </appender>
-
- <root level="info">
- <appender-ref ref="Console" />
- <appender-ref ref="ROLLING-FILE" />
- </root>
-</Configuration>
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/filesystem/test1.json b/test/mocks/pnfsimulator/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/filesystem/test1.json
deleted file mode 100644
index 6ef87c52c..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/filesystem/test1.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "field1": "value1",
- "field2": 2,
- "nested": {
- "key1": [1, 2, 3],
- "key2": "sampleValue2"
- }
-} \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/invalidJsonStructureEvent.json b/test/mocks/pnfsimulator/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/invalidJsonStructureEvent.json
deleted file mode 100644
index 89d5e89d3..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/invalidJsonStructureEvent.json
+++ /dev/null
@@ -1 +0,0 @@
-{"sampleKey1": [{"sampleKey2": "1"}, {"sampleKey2": "2"}] \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/validExampleMeasurementEvent.json b/test/mocks/pnfsimulator/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/validExampleMeasurementEvent.json
deleted file mode 100644
index 989d6ead5..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/validExampleMeasurementEvent.json
+++ /dev/null
@@ -1,89 +0,0 @@
-{
- "event": {
- "commonEventHeader": {
- "domain": "measurementsForVfScaling",
- "eventName": "vFirewallBroadcastPackets",
- "eventId": "4cfc-91cf-31a46",
- "nfType": "mrfx",
- "priority": "Normal",
- "reportingEntityName": "myVNF",
- "sequence": 1,
- "sourceName": "ClosedLoopVNF",
- "startEpochMicrosec": 1531616794,
- "lastEpochMicrosec": 1531719042,
- "version": 2.0
- },
- "measurementsForVfScalingFields": {
- "measurementsForVfSclaingFieldsVersion": 2.0,
- "measurementsForVfScalingVersion": 2.0,
- "measurementInterval": 180,
- "concurrentSessions": 2,
- "cpuUsageArray": [
- {
- "cpuIdentifier": "INTEL_CORE_I7_1",
- "percentUsage": 50
- },
- {
- "cpuIdentifier": "INTEL_CORE_I7_2",
- "percentUsage": 70
- }
- ],
- "memoryUsageArray": [
- {
- "vmIdentifier": "vmIdentifier",
- "memoryFree": 50,
- "memoryUsed": 10
- }
- ],
- "vNicUsageArray": [
- {
- "receivedTotalPacketsDelta": 30
- }
- ],
- "numberOfMediaPortsInUse": 100,
- "additionalMeasurements": [
- {
- "name": "licenseUsage",
- "arrayOfFields": [
- {
- "name": "G711AudioPort",
- "value": "1"
- },
- {
- "name": "G729AudioPort",
- "value": "1"
- },
- {
- "name": "G722AudioPort",
- "value": "1"
- },
- {
- "name": "AMRAudioPort",
- "value": "4"
- },
- {
- "name": "AMRWBAudioPort",
- "value": "5"
- },
- {
- "name": "OpusAudioPort",
- "value": "6"
- },
- {
- "name": "H263VideoPort",
- "value": "7"
- },
- {
- "name": "H264NonHCVideoPort",
- "value": "8"
- },
- {
- "name": "H264HCVideoPort",
- "value": "9"
- }
- ]
- }
- ]
- }
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/templates/measurement.json b/test/mocks/pnfsimulator/pnfsimulator/templates/measurement.json
deleted file mode 100644
index 182003ded..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/templates/measurement.json
+++ /dev/null
@@ -1,45 +0,0 @@
-{
- "event": {
- "commonEventHeader": {
- "domain": "measurementsForVfScaling",
- "eventName": "vFirewallBroadcastPackets",
- "eventId": "eventID123121312323",
- "nfType": "mrfx",
- "priority": "Normal",
- "reportingEntityName": "vnf",
- "sequence": 1,
- "sourceName": "sample-vnf-#RandomInteger(1,10)",
- "startEpochMicrosec": "#TimestampPrimitive",
- "lastEpochMicrosec": "#TimestampPrimitive",
- "version": 1.0
- },
- "measurementsForVfScalingFields": {
- "measurementsForVfScalingFieldsVersion": 2.0,
- "measurementsForVfScalingVersion": 2.0,
- "measurementInterval": 180,
- "concurrentSessions": 2,
- "requestRate": "#RandomPrimitiveInteger(50,100)",
- "meanRequestLatency": "#RandomPrimitiveInteger(1,1000)",
- "cpuUsageArray": [
- {
- "cpuIdentifier": "INTEL_CORE_I7_1",
- "percentUsage": "#RandomPrimitiveInteger(1,100)"
- },
- {
- "cpuIdentifier": "INTEL_CORE_I7_2",
- "percentUsage": "#RandomPrimitiveInteger(1,100)"
- }
- ],
- "memoryUsageArray": [
- {
- "vmIdentifier": "vmIdentifier",
- "memoryFree": 50,
- "memoryUsed": 10
- }
- ],
- "numberOfMediaPortsInUse": 100,
- "additionalMeasurements": [
- ]
- }
- }
-}
diff --git a/test/mocks/pnfsimulator/pnfsimulator/templates/notification.json b/test/mocks/pnfsimulator/pnfsimulator/templates/notification.json
deleted file mode 100644
index 5657a5052..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/templates/notification.json
+++ /dev/null
@@ -1,40 +0,0 @@
-{
- "event": {
- "commonEventHeader": {
- "domain": "notification",
- "eventName": "vFirewallBroadcastPackets",
- "eventId": "4cfc-91cf-31a46",
- "priority": "Normal",
- "reportingEntityName": "myVNF",
- "sequence": 1,
- "sourceName": "ClosedLoopVNF",
- "startEpochMicrosec": 1531616794,
- "lastEpochMicrosec": 1531719042,
- "vesEventListenerVersion": "7.0.1",
- "version": "4.0.1"
- },
- "notificationFields": {
- "changeIdentifier": "PM_MEAS_FILES",
- "changeType": "FileReady",
- "arrayOfNamedHashMap": [{
- "name": "A20161221.1031-1041.bin.gz",
- "hashMap": {
- "fileformatType": "org.3GPP.32.435#measCollec",
- "fileFormatVersion": "V10",
- "location": "ftpes://192.169.0.1:22/ftp/rop/A20161224.1030-1045.bin.gz",
- "compression": "gzip"
- }
- }, {
- "name": "A20161222.1042-1102.bin.gz",
- "hashMap": {
- "fileFormatType": "org.3GPP.32.435#measCollec",
- "fileFormatVersion": "V10",
- "location": "ftpes://192.168.0.102:22/ftp/rop/A20161224.1045-1100.bin.gz",
- "compression": "gzip"
- }
- }],
- "notificationFieldsVersion": "2.0"
- }
-
- }
-} \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pnfsimulator/templates/registration.json b/test/mocks/pnfsimulator/pnfsimulator/templates/registration.json
deleted file mode 100644
index 5a3261fa5..000000000
--- a/test/mocks/pnfsimulator/pnfsimulator/templates/registration.json
+++ /dev/null
@@ -1,33 +0,0 @@
-{
- "event": {
- "commonEventHeader": {
- "eventId": "registration_39239592",
- "eventType": "pnfRegistration",
- "reportingEntityName": "NOK6061ZW3",
- "domain": "pnfRegistration",
- "nfcNamingCode": "oam",
- "sequence": 0,
- "sourceId": "val13",
- "internalHeaderFields": {},
- "priority": "Normal",
- "sourceName": "NOK6061ZW3",
- "eventName": "pnfRegistration_Nokia_5gDu",
- "version": "4.0.1",
- "nfNamingCode": "gNB",
- "startEpochMicrosec": 1539239592379,
- "vesEventListenerVersion": "7.0.1",
- "lastEpochMicrosec": 1539239592379
- },
- "pnfRegistrationFields": {
- "pnfRegistrationFieldsVersion":"2.0",
- "serialNumber": "6061ZW3",
- "vendorName": "Nokia",
- "oamV4IpAddress": "val3",
- "oamV6IpAddress": "val4",
- "unitFamily": "BBU",
- "modelNumber": "val6",
- "softwareVersion": "val7",
- "unitType": "val8"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/pom.xml b/test/mocks/pnfsimulator/pom.xml
deleted file mode 100644
index 2f683a9ae..000000000
--- a/test/mocks/pnfsimulator/pom.xml
+++ /dev/null
@@ -1,77 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ============LICENSE_START=======================================================
- Simulator
- ================================================================================
- Copyright (C) 2019 Nokia. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <packaging>pom</packaging>
-
- <parent>
- <groupId>org.onap.oparent</groupId>
- <artifactId>oparent</artifactId>
- <version>2.0.0</version>
- </parent>
-
- <groupId>org.onap.simulator</groupId>
- <artifactId>simulator-parent</artifactId>
- <version>5.0.0-SNAPSHOT</version>
- <modules>
- <module>pnfsimulator</module>
- <module>netconfsimulator</module>
- <module>deployment</module>
- </modules>
- <properties>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <maven.compiler.source>1.8</maven.compiler.source>
- <maven.compiler.target>1.8</maven.compiler.target>
- </properties>
-
- <profiles>
- <profile>
- <id>tests</id>
- <activation>
- <activeByDefault>false</activeByDefault>
- </activation>
- <modules>
- <module>pnfsimulator/integration</module>
- </modules>
- </profile>
- </profiles>
-
- <build>
- <plugins>
- <plugin>
- <artifactId>maven-checkstyle-plugin</artifactId>
- <version>2.17</version>
- <configuration>
- <suppressionsLocation>checkstyle-suppressions.xml</suppressionsLocation>
- <suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
- </configuration>
- </plugin>
- <plugin>
- <groupId>com.spotify</groupId>
- <artifactId>docker-maven-plugin</artifactId>
- <version>1.1.1</version>
- </plugin>
- </plugins>
- </build>
-</project>
diff --git a/test/mocks/pnfsimulator/simulator-cli/.gitignore b/test/mocks/pnfsimulator/simulator-cli/.gitignore
deleted file mode 100644
index 96a29d6e2..000000000
--- a/test/mocks/pnfsimulator/simulator-cli/.gitignore
+++ /dev/null
@@ -1,7 +0,0 @@
-**/*.iml
-**/.idea
-**/target
-**/__pycache__
-build/**
-dist/**
-pnf_simulator_cli.egg-info/** \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/simulator-cli/README.md b/test/mocks/pnfsimulator/simulator-cli/README.md
deleted file mode 100644
index 5dc6f76b9..000000000
--- a/test/mocks/pnfsimulator/simulator-cli/README.md
+++ /dev/null
@@ -1,304 +0,0 @@
-## PNF/NETCONF SIMULATOR CLI
-
-### Overview
-Anytime you want to see a basic usage of a tool, you can run fully descriptive help using command:
-```
-./{tool_name}.py -h # --help argument is also acceptable
-```
-
-#### PNF Simulator CLI
-PNF Simulator CLI provides command line interface to remotely interact with running PNF Simulator.
-
-Using the PNF Simulator CLI user is able to trigger events, retrieve simulator's configuration and change default VES url stored
-inside simulator.
-
-#### Netconf Simulator CLI
-Dedicated tool to help with management of the Netconf Server is also available.
-
-Using the Netconf Simulator CLI user is able to retrieve simulator's cm history stored inside simulator as well as open the live session to actively listen for new configuration changes.
-
-### Requirements and installation
-Requirements
-* Python > 3.5
-
-Installation:
-* Go to directory containing setup.py and invoke `python setup.py install`
-* Go to cli directory
-* Add executable privilege to pnf_simulator.py and netconf_simulator.py (for example `chmod +x <path_to_pnf_simulator.py>`)
-
-### Pnf simulator
-#### Usage
-* [send](#send-action)
-* [configure](#configure-action)
-* [get-config](#get-config-action)
-* [template](#template-action)
-* [filter](#filter-templates-action)
-
-
-#### Help
-Invoke `pnf_simulator.py [send|configure|get-config] -h` to display help.
-
-##### Send Action
-Send action allows user to trigger sending events from Simulator to VES Collector.
-
-*sending repeating events backed by template persisted in db
-`usage: pnf_simulator.py send template [-h] --address ADDRESS --name NAME
- [--patch PATCH] [--repeats REPEATS]
- [--interval INTERVAL]
- [--ves-server-url VES_SERVER_URL] [--verbose]
-`
-
-Parameters
-` --address ADDRESS` `IP address of simulator`
-` --name NAME` `Name of template file which should be used as a base for event.
- Cannot be used simultaneously with parameter: event.`
-` --patch PATCH` `Json which should be merged into template to override parameters.
- Acceptable format: valid json wrapped using single quotes (example:'{"abc":1}').
- Cannot be used simultaneously with parameter: event.`
-` --repeats REPEATS` `Number of events to be send`
-` --interval INTERVAL` `Interval between two consecutive events (in seconds)`
-` --ves-server-url VES_SERVER_URL` `Well-formed URL which will override current VES endpoint stored in simulator's DB`
-` --verbose` `Displays additional logs`
-
-
-*sending event only once by passing path to file with complete event
-`usage: pnf_simulator.py send event [-h] --address ADDRESS --filepath FILEPATH
- [--ves-server-url VES_SERVER_URL] [--verbose]
-`
-Parameters
-` --address ADDRESS` `IP address of simulator`
-` --filepath FILEPATH` `Path to file with full, legitimate event that is to be send directly to VES only once.
- This event is not associated with template and will not be persisted in db.
- Cannot be used simultaneously with parameters: template and patch.`
-` --ves-server-url VES_SERVER_URL` `Well-formed URL which will override current VES endpoint stored in simulator's DB`
-` --verbose` `Displays additional logs`
-
-example content of file with complete event:
-```
-{
- "commonEventHeader": {
- "eventId": "#Timestamp",
- "sourceName": "#Increment",
- "version": 3.0
- }
-}
-```
-
-##### Configure Action
-Configure action allows user to change Simulator's configuration (VES Server URL)
-`usage: pnf_simulator.py configure [-h] --address ADDRESS --ves-server-url
- VES_SERVER_URL [--verbose]
-`
-
-Parameters
-
-` --address ADDRESS` `IP address of simulator`
-` --ves-server-url VES_SERVER_URL` `Well-formed URL which should be set as a default VES Server URL in simulator`
-` --verbose` `Displays additional logs`
-
-##### Get Config Action
-Get Config action allows user to retrieve actual Simulator's configuration
-`usage: pnf_simulator.py get-config [-h] --address ADDRESS [--verbose] `
-
-Parameters
-
-`--address ADDRESS` `IP address of simulator`
-`--verbose` `Displays additional logs`
-
-##### Template Action
-Template action allows user to:
-* retrieve a single template by name
-* list all available templates.
-* upload template to PNF Simulator (can overwrite existing template)
-
-`usage: pnf_simulator.py template [-h]
- (--list | --get-content NAME | --upload FILENAME)
- [--override] --address ADDRESS [--verbose]`
-
-Parameters
-
-`--get-content NAME` `Gets the template by name`
-`--list` `List all templates`
-`--upload FILENAME [--override]` `Uploads the template given as FILENAME file. Optionally overrides any exisitng templates with matching filename`
-`--address ADDRESS` `IP address of simulator`
-`--verbose` `Displays additional logs`
-
-#### Filter Templates Action
-Filter template action allows to search through templates in order to find names of those that satisfy given criteria.
-Criteria are passed in JSON format, as key-values pairs. Relation between pairs with criteria is AND (all conditions must be satisfied by template to have it returned).
-No searching for null values is supported.
-Search expression must be valid JSON, thus no duplicate keys are allowed - user could specify the same parameter multiple times, but only last occurrence will be applied to query.
-
-
-`usage: pnf_simulator.py filter [-h]
- --criteria CRITERIA --address ADDRESS [--verbose]`
-
-Parameters
-`--criteria CRITERIA` `Json with criteria as key-value pairs, where values can be one of following data types: string, integer, double, boolean.
- Acceptable format: valid json wrapped using single quotes (example:'{"searchedInt":1}').
- Cannot be used simultaneously with parameter: event.`
-`--address ADDRESS` `IP address of simulator`
-`--verbose` `Displays additional logs`
-
-
-### Netconf simulator
-#### Usage
-* [load-model](#load-model-action)
-* [delete-model](#delete-model-action)
-* [get-config](#get-config-action)
-* [edit-config](#edit-config-action)
-* [tailf](#tailf-action)
-* [less](#less-action)
-* [cm-history](#cm-history-action)
-
-#### Help
-Invoke `netconf_simulator.py [tailf|less|cm-history] -h` to display help.
-
-
-#### Load-model action
-Loads to netconf server new YANG model that corresponds with schema passed as yang-model parameter,
-assigns name specified in module-name and initializes model with startup configuration passed in config file.
-`usage: netconf_simulator.py load-module [-h] --address ADDRESS ---module-name MODULE_NAME --yang-model YANG_MODEL_FILEPATH --config <XML_CONFIG_FILEPATH> [--verbose]`
-
-example YANG schema (file content for YANG_MODEL)
-```
-Response status: 200
-module pnf-simulator {
- namespace "http://nokia.com/pnf-simulator";
- prefix config;
- container config {
- config true;
- leaf itemValue1 {type uint32;}
- leaf itemValue2 {type uint32;}
- leaf itemValue3 {type uint32;}
- leaf-list allow-user {
- type string;
- ordered-by user;
- description "A sample list of user names.";
- }
- }
-}
-```
-
-example startup configuration (file content of XML_CONFIG)
-```
-<config xmlns="http://nokia.com/pnf-simulator">
- <itemValue1>100</itemValue1>
- <itemValue2>200</itemValue2>
- <itemValue3>300</itemValue3>
-</config>
-```
-
-
-example output (without verbose flag):
-```
-Response status: 200
-Successfully started
-```
-
-#### Delete-model action
-Deletes a YANG model loaded in the netconf server.
-
-`usage: netconf_simulator.py delete-model [-h] --address ADDRESS --model-name
- MODEL_NAME [--verbose]`
-
-Example output (without verbose flag):
-```
-Response status: 200
-Successfully deleted
-```
-
-#### Get-config Action
-Returns active running configurations.
-By default it returns all running configurations. To retrieve one specific configuration (represented by _/'module_name':'container'_ ) user needs to pass module-name and container.
-Example:
-`
-netconf_simulator.py get-config --address localhost --module-name pnf-simulator --container config
-`
-
-
-`usage: netconf_simulator.py get-config [-h] --address ADDRESS [--verbose] [--module-name MODULE-NAME] [--container CONTAINER]`
-
-example output (without verbose flag):
-```
-Response status: 200
-<config xmlns="http://nokia.com/pnf-simulator" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
- <itemValue1>2781</itemValue1>
- <itemValue2>3782</itemValue2>
- <itemValue3>3333</itemValue3>
-</config>
-```
-
-#### Edit-config Action
-Modifies existing configuration (e.g. change parameter values, modify or remove parameter from model).
-To edit configuration, netconf compliant XML file should be prepared and used as one of edit-config parameters.
-`usage: netconf_simulator.py edit-config [-h] --address ADDRESS --config <XML_CONFIG_FILEPATH> [--verbose]`
-
-example - parameter values modification
-file content:
-```
-<config xmlns="http://nokia.com/pnf-simulator">
- <itemValue1>1</itemValue1>
- <itemValue2>2</itemValue2>
- <itemValue3>3</itemValue3>
-</config>
-```
-
-example output (without verbose flag):
-```
-Response status: 202
-<config xmlns="http://nokia.com/pnf-simulator" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0">
- <itemValue1>1</itemValue1>
- <itemValue2>2</itemValue2>
- <itemValue3>3</itemValue3>
-</config>
-```
-
-##### Less Action
-Less action allows user to watch historical configuration changes.
-Size of the configuration changes list is limited to the 100 last cm events by default, but can be incresed/decresead using a 'limit' attribute.
-`usage: netconf_simulator.py less [-h] --address ADDRESS [--limit LIMIT] [--verbose]`
-
-Output from the command can be easily piped into other tools like native less, more, etc. e.g.:
-`netconf_simulator.py less --address 127.0.0.1 | less`
-
-Last known configuration is last printed to the output, so order of the printed configuration events complies with time when the configuration was stored inside the simulator.
-
-Parameters:
-
-`--address ADDRESS` - `IP address of simulator`
-
-`--limit LIMIT` - ` Number of configurations to print at output`
-
-`--verbose` - ` Displays additional logs`
-
-Single message is represented as a pair of timestamp in epoch format and suitable configuration entry.
-
-##### Tailf Action
-Tailf action allows user to actively listen for new uploaded configuration changes.
-Size of the historical configuration changes list is limited to the 10 last cm events.
-`usage: netconf_simulator.py tailf [-h] --address ADDRESS [--verbose]`
-
-The listener can be easily terminated at anytime using `CTRL+C` shortcut.
-
-Parameters:
-
-`--address ADDRESS` - `IP address of simulator`
-
-`--verbose` - ` Displays additional logs`
-
-Single message is represented as a pair of timestamp in epoch format and suitable configuration entry.
-
-##### Cm-history Action
-Cm-history action allows user to view list of all uploaded configuration changes.
-`usage: netconf_simulator.py cm-history [-h] --address ADDRESS [--verbose]`
-
-Last known configuration is last printed to the output, so order of the printed configuration events complies with time when the configuration was stored inside the simulator.
-
-Parameters:
-
-`--address ADDRESS` - `IP address of simulator`
-
-`--verbose` - ` Displays additional logs`
-
-Single message is represented as a pair of timestamp in epoch format and suitable configuration entry.
diff --git a/test/mocks/pnfsimulator/simulator-cli/cli/client/tailf_client.py b/test/mocks/pnfsimulator/simulator-cli/cli/client/tailf_client.py
deleted file mode 100644
index d1cb60d97..000000000
--- a/test/mocks/pnfsimulator/simulator-cli/cli/client/tailf_client.py
+++ /dev/null
@@ -1,59 +0,0 @@
-###
-# ============LICENSE_START=======================================================
-# Simulator
-# ================================================================================
-# Copyright (C) 2019 Nokia. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-###
-
-import logging
-
-import websockets
-import asyncio
-import signal
-import sys
-
-
-class TailfClient(object):
-
- def __init__(self, url: str, verbose: bool = False) -> None:
- self._url = url
- self._is_running = False
- self._connection = None
- self.logger = logging.getLogger()
- self.logger.setLevel(logging.DEBUG if verbose else logging.INFO)
- signal.signal(signal.SIGINT, self._handle_keyboard_interrupt)
-
- def tailf_messages(self):
- self._is_running = True
- self.logger.debug("Attempting to connect to websocket server on %s", self._url)
- asyncio.get_event_loop().run_until_complete(
- self._tailf_messages()
- )
-
- async def _tailf_messages(self):
- try:
- async with websockets.connect(self._url) as connection:
- self.logger.debug("Connection with %s established", self._url)
- self._connection = connection
- while self._is_running:
- print(await self._connection.recv(), "\n")
- except ConnectionRefusedError:
- self.logger.error("Cannot establish connection with %s", self._url)
-
- def _handle_keyboard_interrupt(self, sig, frame):
- self.logger.warning("CTR-C pressed, interrupting.")
- self._is_running = False
- sys.exit(0)
diff --git a/test/mocks/pnfsimulator/simulator-cli/cli/data/logging.ini b/test/mocks/pnfsimulator/simulator-cli/cli/data/logging.ini
deleted file mode 100644
index 8b2b40285..000000000
--- a/test/mocks/pnfsimulator/simulator-cli/cli/data/logging.ini
+++ /dev/null
@@ -1,20 +0,0 @@
-[loggers]
-keys=root
-
-[handlers]
-keys=consoleHandler
-
-[formatters]
-keys=simpleFormatter
-
-[logger_root]
-level=DEBUG
-handlers=consoleHandler
-
-[handler_consoleHandler]
-class=StreamHandler
-formatter=simpleFormatter
-args=(sys.stdout,)
-
-[formatter_simpleFormatter]
-format=%(message)s
diff --git a/test/mocks/pnfsimulator/simulator-cli/cli/netconf_simulator.py b/test/mocks/pnfsimulator/simulator-cli/cli/netconf_simulator.py
deleted file mode 100755
index a3b3bf1de..000000000
--- a/test/mocks/pnfsimulator/simulator-cli/cli/netconf_simulator.py
+++ /dev/null
@@ -1,278 +0,0 @@
-#!/usr/bin/env python3
-###
-# ============LICENSE_START=======================================================
-# Simulator
-# ================================================================================
-# Copyright (C) 2019 Nokia. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-###
-
-import argparse
-import logging
-import logging.config
-import requests
-import os
-import sys
-from requests import Response
-
-from cli.client.tailf_client import TailfClient
-
-TAILF_FUNC_ENDPOINT = "ws://{}:9000/netconf"
-LESS_FUNC_ENDPOINT = "/store/less"
-CM_HISTORY_ENDPOINT = "/store/cm-history"
-GET_CONFIG_ENDPOINT = "/netconf/get"
-MODEL_ENDPOINT = "/netconf/model/{}"
-EDIT_CONFIG_ENDPOINT = "/netconf/edit-config"
-logging.basicConfig()
-
-DEFAULT_EXTERNAL_SIM_PORT = 8080
-DEFAULT_INTERNAL_SIM_PORT = 9000
-
-
-class NetconfSimulatorClient(object):
- def __init__(self, ip: str, protocol: str = 'http', port: int = DEFAULT_EXTERNAL_SIM_PORT, verbose: bool = False) -> None:
- self._ip = ip
- self._protocol = protocol
- self._port = port
- self._configure_logger(verbose)
- self._verbose=verbose
-
- def tailf_like_func(self) -> None:
- url = TAILF_FUNC_ENDPOINT.format(self._ip)
- client = TailfClient(url, self._verbose)
- client.tailf_messages()
-
- def get_cm_history(self) -> None:
- self.logger.info("Attempting to retrieve all netconf configuration changes")
- simulator_address = "{}://{}:{}{}".format(self._protocol, self._ip, self._port, CM_HISTORY_ENDPOINT)
- self.logger.debug("Simulator address: %s", simulator_address)
- try:
- response = requests.get(simulator_address)
- self._log_json_response(response)
- except requests.ConnectionError:
- self.logger.error("Failed to establish connection with {}".format(simulator_address))
-
- def less_like_func(self, limit: int) -> None:
- self.logger.info("Attempting to run less on CM change")
- simulator_address = "{}://{}:{}{}".format(self._protocol, self._ip, self._port, LESS_FUNC_ENDPOINT)
- parameters = {"offset": limit} if limit else None
- self.logger.debug("Simulator address: %s", simulator_address)
- try:
- response = requests.get(url = simulator_address, params = parameters)
- self._log_json_response(response)
- except requests.ConnectionError:
- self.logger.error("Failed to establish connection with {}".format(simulator_address))
-
- def get_config(self, module_name: str=None, container:str=None)-> None:
- self.logger.info("Attempting to run get-config")
- simulator_address = self._create_get_endpoint(module_name, container)
- self.logger.debug("Simulator address: %s", simulator_address)
- try:
- response = requests.get(simulator_address)
- self._log_string_response(response)
- except requests.ConnectionError:
- self.logger.error("Failed to establish connection with {}".format(simulator_address))
-
- def load_yang_model(self, module_name: str, yang_model_path: str, config_path: str) -> None:
- self.logger.info(
- "Attempting to load new yang model with its initial configuration")
- simulator_address = "{}://{}:{}{}".format(self._protocol, self._ip, self._port, MODEL_ENDPOINT.format(module_name))
- files = {"yangModel": open(yang_model_path, "rb"),
- "initialConfig": open(config_path, "rb")}
- self.logger.debug("Simulator address: %s", simulator_address)
-
- try:
- response = requests.post(simulator_address, files=files)
- self._log_string_response(response)
- except requests.ConnectionError:
- self.logger.error("Failed to establish connection with {}".format(simulator_address))
-
- def delete_yang_model(self, model_name: str) -> None:
- self.logger.info(
- "Attempting to delete a yang model")
- simulator_address = "{}://{}:{}{}".format(self._protocol, self._ip, self._port, MODEL_ENDPOINT.format(model_name))
- self.logger.debug("Simulator address: %s", simulator_address)
-
- try:
- response = requests.delete(simulator_address)
- self._log_string_response(response)
- except requests.ConnectionError:
- self.logger.error("Failed to establish connection with {}".format(simulator_address))
-
- def edit_config(self, new_config_path: str):
- self.logger.info("Attempting to apply new configuration")
- simulator_address = "{}://{}:{}{}".format(self._protocol, self._ip, self._port, EDIT_CONFIG_ENDPOINT)
- files = {"editConfigXml": open(new_config_path,"rb")}
- self.logger.debug("Simulator address: %s", simulator_address)
-
- try:
- response = requests.post(simulator_address, files=files)
- self._log_string_response(response)
- except requests.ConnectionError:
- self.logger.error("Failed to establish connection with {}".format(simulator_address))
-
- def _log_json_response(self, response: Response) ->None:
- self.logger.info("Response status: %d", response.status_code)
- self.logger.info(" ----- HEAD -----")
- for message in response.json():
- self.logger.info("{}: {}".format(str(message['timestamp']), message['configuration']))
- self.logger.info(" ----- END ------")
- self.logger.debug(response.headers)
-
- def _configure_logger(self, verbose):
- logging_conf = os.path.join(sys.prefix, 'logging.ini')
- if os.path.exists(logging_conf):
- logging.config.fileConfig(logging_conf)
- else:
- print("Couldn't find logging.ini, using default logger config")
- self.logger = logging.getLogger()
- self.logger.setLevel(logging.DEBUG if verbose else logging.INFO)
-
- def _log_string_response(self, response: Response)->None:
- self.logger.info("Response status: %d", response.status_code)
- self.logger.info(response.text)
- self.logger.debug(response.headers)
-
- def _create_get_endpoint(self, module_name: str, container: str):
- endpoint = "{}://{}:{}{}".format(self._protocol, self._ip, self._port,
- GET_CONFIG_ENDPOINT)
- if module_name and container:
- endpoint = endpoint + "/{}/{}".format(module_name, container)
- elif (not module_name and container) or (module_name and not container):
- raise AttributeError(
- "Both module_name and container must be present or absent")
- return endpoint
-
-def create_argument_parser():
- parser = argparse.ArgumentParser(description="Netconf Simulator Command Line Interface. ")
- subparsers = parser.add_subparsers(title="Available actions")
- tailf_parser = subparsers.add_parser("tailf",
- description="Method which allows user to view N last lines of configuration changes")
-
- __configure_tailf_like_parser(tailf_parser)
- less_parser = subparsers.add_parser("less", description="Method which allows user to traverse configuration changes")
- __configure_less_like_parser(less_parser)
- cm_history_parser = subparsers.add_parser("cm-history",
- description="Method which allows user to view all configuration changes")
- __configure_cm_history_parser(cm_history_parser)
-
- load_model_parser = subparsers.add_parser("load-model")
- __configure_load_model_parser(load_model_parser)
-
- delete_model_parser = subparsers.add_parser("delete-model")
- __configure_delete_model_parser(delete_model_parser)
-
- get_config_parser = subparsers.add_parser("get-config")
- __configure_get_config_parser(get_config_parser)
- edit_config_parser = subparsers.add_parser("edit-config")
- __configure_edit_config_parser(edit_config_parser)
- return parser
-
-
-def run_tailf(args):
- client = NetconfSimulatorClient(args.address, verbose=args.verbose)
- client.tailf_like_func()
-
-
-def run_get_cm_history(args):
- client = NetconfSimulatorClient(args.address, verbose=args.verbose, port=DEFAULT_INTERNAL_SIM_PORT)
- client.get_cm_history()
-
-
-def run_less(args):
- client = NetconfSimulatorClient(args.address, verbose=args.verbose, port=DEFAULT_INTERNAL_SIM_PORT)
- client.less_like_func(args.limit)
-
-
-def run_load_model(args):
- client = NetconfSimulatorClient(args.address, verbose=args.verbose,
- port=DEFAULT_INTERNAL_SIM_PORT)
- client.load_yang_model(args.module_name, args.yang_model, args.config)
-
-
-def run_delete_model(args):
- client = NetconfSimulatorClient(args.address, verbose=args.verbose,
- port=DEFAULT_INTERNAL_SIM_PORT)
- client.delete_yang_model(args.model_name)
-
-
-def run_get_config(args):
- client = NetconfSimulatorClient(args.address, verbose=args.verbose, port=DEFAULT_INTERNAL_SIM_PORT)
- client.get_config(args.module_name, args.container)
-
-
-def run_edit_config(args):
- client = NetconfSimulatorClient(args.address, verbose=args.verbose, port=DEFAULT_INTERNAL_SIM_PORT)
- client.edit_config(args.config)
-
-
-def __configure_tailf_like_parser(tailf_func_parser):
- tailf_func_parser.add_argument("--address", required=True, help="IP address of simulator")
- tailf_func_parser.add_argument("--verbose", action='store_true',
- help="Displays additional logs")
- tailf_func_parser.set_defaults(func=run_tailf)
-
-
-def __configure_less_like_parser(less_func_parser):
- less_func_parser.add_argument("--address", required=True, help="IP address of simulator")
- less_func_parser.add_argument("--limit", help="Limit of configurations to retrieve")
- less_func_parser.add_argument("--verbose", action='store_true', help="Displays additional logs")
- less_func_parser.set_defaults(func=run_less)
-
-
-def __configure_cm_history_parser(cm_history_parser):
- cm_history_parser.add_argument("--address", required=True, help="IP address of simulator")
- cm_history_parser.add_argument("--verbose", action='store_true', help="Displays additional logs")
- cm_history_parser.set_defaults(func=run_get_cm_history)
-
-
-def __configure_load_model_parser(load_model_parser):
- load_model_parser.add_argument("--address", required=True, help="IP address of simulator")
- load_model_parser.add_argument("--module-name", required=True, help="Module name corresponding to yang-model")
- load_model_parser.add_argument("--verbose", action='store_true', help="Displays additional logs")
- load_model_parser.add_argument("--yang-model", required=True, help="Path to file with yang model")
- load_model_parser.add_argument("--config", required=True, help="Path to file with initial xml config")
- load_model_parser.set_defaults(func=run_load_model)
-
-
-def __configure_delete_model_parser(delete_model_parser):
- delete_model_parser.add_argument("--address", required=True, help="IP address of simulator")
- delete_model_parser.add_argument("--model-name", required=True, help="YANG model name to delete")
- delete_model_parser.add_argument("--verbose", action='store_true', help="Displays additional logs")
- delete_model_parser.set_defaults(func=run_delete_model)
-
-
-def __configure_get_config_parser(get_config_parser):
- get_config_parser.add_argument("--address", required=True, help="IP address of simulator")
- get_config_parser.add_argument("--verbose", action='store_true',help="Displays additional logs")
- get_config_parser.add_argument("--module-name", help="Module name corresponding to yang-model", default=None)
- get_config_parser.add_argument("--container", help="Container name corresponding to module name", default=None)
- get_config_parser.set_defaults(func=run_get_config)
-
-
-def __configure_edit_config_parser(edit_config_parser):
- edit_config_parser.add_argument("--address", required=True, help="IP address of simulator")
- edit_config_parser.add_argument("--verbose", action='store_true', help="Displays additional logs")
- edit_config_parser.add_argument("--config", required=True, help="Path to file with xml config to apply")
- edit_config_parser.set_defaults(func=run_edit_config)
-
-
-if __name__ == "__main__":
- argument_parser = create_argument_parser()
- result = argument_parser.parse_args()
- if hasattr(result, 'func'):
- result.func(result)
- else:
- argument_parser.parse_args(['-h'])
diff --git a/test/mocks/pnfsimulator/simulator-cli/cli/pnf_simulator.py b/test/mocks/pnfsimulator/simulator-cli/cli/pnf_simulator.py
deleted file mode 100755
index 9176fd8e0..000000000
--- a/test/mocks/pnfsimulator/simulator-cli/cli/pnf_simulator.py
+++ /dev/null
@@ -1,374 +0,0 @@
-#!/usr/bin/env python3
-###
-# ============LICENSE_START=======================================================
-# Simulator
-# ================================================================================
-# Copyright (C) 2019 Nokia. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-###
-import argparse
-import http.client
-import json
-import logging
-import ntpath
-from typing import Dict
-
-SEND_PERIODIC_EVENT_ENDPOINT = "/simulator/start"
-SEND_ONE_TIME_EVENT_ENDPOINT = "/simulator/event"
-CONFIG_ENDPOINT = "/simulator/config"
-LIST_TEMPLATES_ENDPOINT = "/template/list"
-GET_TEMPLATE_BY_NAME_ENDPOINT = "/template/get"
-UPLOAD_TEMPLATE_NOFORCE = "/template/upload"
-UPLOAD_TEMPLATE_FORCE = "/template/upload?override=true"
-FILTER_TEMPLATES_ENDPOINT = "/template/search"
-
-logging.basicConfig()
-
-
-class Messages(object):
- OVERRIDE_VALID_ONLY_WITH_UPLOAD = "--override is valid only with --upload parameter"
-
-
-class SimulatorParams(object):
- def __init__(self, repeats: int = 1, interval: int = 1, ves_server_url: str = None) -> None:
- self.repeats_count = repeats
- self.repeats_interval = interval
- self.ves_server_url = ves_server_url
-
- def to_json(self) -> Dict:
- to_return = {"repeatCount": self.repeats_count,
- "repeatInterval": self.repeats_interval}
- if self.ves_server_url:
- to_return["vesServerUrl"] = self.ves_server_url
- return to_return
-
- def __repr__(self) -> str:
- return str(self.to_json())
-
-
-class PersistedEventRequest(object):
- def __init__(self, simulator_params: SimulatorParams, template: str, patch: Dict = None) -> None:
- self.params = simulator_params
- self.template = template
- self.patch = patch or {}
-
- def to_json(self) -> Dict:
- return {"simulatorParams": self.params, "templateName": self.template,
- "patch": self.patch}
-
- def __repr__(self) -> str:
- return str(self.to_json())
-
-
-class FullEventRequest(object):
- def __init__(self, event_body: Dict, ves_server_url: str = None) -> None:
- self.event_body = event_body
- self.ves_server_url = ves_server_url or ""
-
- def to_json(self) -> Dict:
- return {"vesServerUrl": self.ves_server_url, "event": self.event_body}
-
- def __repr__(self) -> str:
- return str(self.to_json())
-
-
-class TemplateUploadRequest(object):
- def __init__(self, template_name: str, template_body: Dict) -> None:
- self.template_name = template_name
- self.template_body = template_body
-
- def to_json(self) -> Dict:
- return {"name": self.template_name, "template": self.template_body}
-
- def __repr__(self) -> str:
- return str(self.to_json())
-
-
-class SimulatorClient(object):
- def __init__(self, ip: str, port: int = 5000, verbose: bool = False) -> None:
- self._ip = ip
- self._port = port
- self.logger = logging.getLogger()
- self.logger.setLevel(logging.DEBUG if verbose else logging.INFO)
-
- def send_event(self, request: PersistedEventRequest) -> None:
- connection = http.client.HTTPConnection(self._ip, self._port)
- self.logger.info("Attempting to send event")
- self.logger.debug("Simulator address: ip %s, port %s, endpoint %s", self._ip, self._port, SEND_PERIODIC_EVENT_ENDPOINT)
- self.logger.debug("REQUEST %s", request)
-
- connection.request("POST", SEND_PERIODIC_EVENT_ENDPOINT, body=json.dumps(request, cls=RequestSerializer),
- headers={"Content-Type": "application/json"})
-
- response = connection.getresponse()
-
- self._log_response(response)
- connection.close()
-
- def send_one_time_event(self, request: FullEventRequest) -> None:
- connection = http.client.HTTPConnection(self._ip, self._port)
- self.logger.info("Attempting to send one time event")
- self.logger.debug("Simulator address: ip %s, port %s, endpoint %s", self._ip, self._port, SEND_ONE_TIME_EVENT_ENDPOINT)
- self.logger.debug("REQUEST %s", request.to_json())
-
- connection.request("POST", SEND_ONE_TIME_EVENT_ENDPOINT, body=json.dumps(request.to_json()),
- headers={"Content-Type": "application/json"})
-
- response = connection.getresponse()
-
- self._log_response(response)
- connection.close()
-
- def get_configuration(self) -> None:
- connection = http.client.HTTPConnection(self._ip, self._port)
- self.logger.info("Attempting to retrieve Simulator configuration")
- self.logger.debug("Simulator address: ip %s, port %s, endpoint %s", self._ip, self._port, CONFIG_ENDPOINT)
- connection.request("GET", CONFIG_ENDPOINT)
- response = connection.getresponse()
-
- self._log_response(response)
- connection.close()
-
- def edit_configuration(self, ves_server_url: str) -> None:
- connection = http.client.HTTPConnection(self._ip, self._port)
- self.logger.info("Attempting to update Simulator configuration")
- self.logger.debug("Simulator address: ip %s, port %s, endpoint %s", self._ip, self._port, CONFIG_ENDPOINT)
- request = {"vesServerUrl": ves_server_url}
- self.logger.debug("REQUEST %s", request)
- connection.request("PUT", CONFIG_ENDPOINT, body=json.dumps(request),
- headers={"Content-Type": "application/json"})
-
- response = connection.getresponse()
-
- self._log_response(response)
- connection.close()
-
- def _log_response(self, response: http.client.HTTPResponse):
- self.logger.info("Response status: %s ", response.status)
- self.logger.info(response.read().decode())
- self.logger.debug(response.headers)
-
- def list_templates(self):
- connection = http.client.HTTPConnection(self._ip, self._port)
- self.logger.info("Attempting to retrieve all templates")
- self.logger.debug("Simulator address: ip %s, port %s, endpoint %s", self._ip, self._port, LIST_TEMPLATES_ENDPOINT)
- connection.request("GET", LIST_TEMPLATES_ENDPOINT)
- response = connection.getresponse()
-
- self._log_response(response)
- connection.close()
-
- def get_template_by_name(self, name):
- connection = http.client.HTTPConnection(self._ip, self._port)
- endpoint = GET_TEMPLATE_BY_NAME_ENDPOINT + "/" + name
- self.logger.info("Attempting to retrieve template by name: '%s'", name)
- self.logger.debug("Simulator address: ip %s, port %s, endpoint %s", self._ip, self._port, endpoint)
- connection.request("GET", endpoint)
- response = connection.getresponse()
-
- self._log_response(response)
- connection.close()
-
- def upload_template(self, template_request, force):
- connection = http.client.HTTPConnection(self._ip, self._port)
- endpoint = UPLOAD_TEMPLATE_FORCE if force else UPLOAD_TEMPLATE_NOFORCE
- self.logger.info("Attempting to upload template: '%s'", template_request)
- self.logger.debug("Simulator address: ip %s, port %s, endpoint %s", self._ip, self._port, endpoint)
- connection.request("POST", endpoint,
- body=json.dumps(template_request.to_json()),
- headers={"Content-Type": "application/json"})
- response = connection.getresponse()
-
- self._log_response(response)
- connection.close()
-
- def search_for_templates(self, filter_criteria: str):
- connection = http.client.HTTPConnection(self._ip, self._port)
- self.logger.debug("Simulator address: ip %s, port %s, endpoint %s", self._ip, self._port, FILTER_TEMPLATES_ENDPOINT)
- filter_request = {"searchExpr": json.loads(filter_criteria)}
- self.logger.debug("Filter criteria: %s", str(filter_criteria))
- connection.request("POST", FILTER_TEMPLATES_ENDPOINT,
- body=json.dumps(filter_request),
- headers={"Content-Type": "application/json"})
- response = connection.getresponse()
-
- self._log_response(response)
- connection.close()
-
-
-class RequestSerializer(json.JSONEncoder):
- def default(self, o):
- return o.to_json() if (isinstance(o, SimulatorParams) or isinstance(o, PersistedEventRequest)) else o
-
-
-
-def create_argument_parser():
- parser = argparse.ArgumentParser(description="PNF Simulator Command Line Interface. ")
- subparsers = parser.add_subparsers(title="Available actions")
- send_parser = subparsers.add_parser("send",
- description="Method which allows user to trigger simulator to start sending "
- "events. Available options: [template, event]")
-
- send_subparsers = send_parser.add_subparsers()
- one_time_send_event_parser = send_subparsers.add_parser("event", description="Option for direct, one-time event sending to VES. This option does not require having corresponging template.")
- __configure_one_time_send_parser(one_time_send_event_parser)
- persisted_send_event_parser = send_subparsers.add_parser("template")
- __configure_persisted_send_parser(persisted_send_event_parser)
-
- configure_parser = subparsers.add_parser("configure", description="Method which allows user to set new default "
- "value for VES Endpoint")
- __configure_config_parser(configure_parser)
-
- get_config_parser = subparsers.add_parser("get-config",
- description="Method which allows user to view simulator configuration")
- __configure_get_config_parser(get_config_parser)
-
- template_config_parser = subparsers.add_parser("template", description="Template management operations")
- __configure_template_parser(template_config_parser)
-
- template_filter_parser = subparsers.add_parser("filter", description="Method for searching through templates to find those satisfying given criteria")
- __configure_template_filter_parser(template_filter_parser)
-
- return parser
-
-
-def _perform_send_action(args):
- if (not args.interval and args.repeats) or (args.interval and not args.repeats):
- raise Exception("Either both repeats and interval must be present or missing")
-
- client = SimulatorClient(args.address, verbose=args.verbose)
- client.send_event(_create_scheduled_event_request(args))
-
-
-def _perform_one_time_send_action(args):
- client = SimulatorClient(args.address, verbose=args.verbose)
- client.send_one_time_event(_create_one_time_event_request(args.filepath, args.ves_server_url))
-
-
-def get_configuration(args):
- client = SimulatorClient(args.address, verbose=args.verbose)
- client.get_configuration()
-
-
-def edit_configuration(args):
- client = SimulatorClient(args.address, verbose=args.verbose)
- client.edit_configuration(args.ves_server_url)
-
-
-def perform_template_action(args):
- client = SimulatorClient(args.address, verbose=args.verbose)
- if args.list:
- client.list_templates()
- elif args.get_content:
- client.get_template_by_name(args.get_content)
- elif args.upload:
- client.upload_template(_create_upload_template_request(args.upload), args.override)
- elif args.force:
- raise Exception(Messages.OVERRIDE_VALID_ONLY_WITH_UPLOAD)
-
-
-def list_all_templates(args):
- client = SimulatorClient(args.address, verbose=args.verbose)
- client.list_templates()
-
-
-def filter_templates(args):
- client = SimulatorClient(args.address, verbose=args.verbose)
- client.search_for_templates(args.criteria)
-
-
-def _create_upload_template_request(template_filename):
- with open(template_filename) as json_template:
- template_body = json.load(json_template)
- return TemplateUploadRequest(path_leaf(template_filename), template_body)
-
-
-def _create_scheduled_event_request(args):
- simulator_params = SimulatorParams(args.repeats, args.interval, args.ves_server_url)
- return PersistedEventRequest(simulator_params, args.name, json.loads(args.patch) if args.patch else {})
-
-
-def _create_one_time_event_request(event_filename, ves_server_url):
- with open(event_filename) as json_event:
- event_body = json.load(json_event)
- return FullEventRequest(event_body, ves_server_url)
-
-
-def __configure_persisted_send_parser(send_parser):
- send_parser.add_argument("--address", required=True, help="IP address of simulator")
- send_parser.add_argument("--name", required=True, help="Name of template file which should be used as a base for event")
- send_parser.add_argument("--patch", help="Json which should be merged into template to override parameters")
- send_parser.add_argument("--repeats", help="Number of events to be send", type=int)
- send_parser.add_argument("--interval", help="Interval between two consecutive events (in seconds)", type=int)
- send_parser.add_argument("--ves_server_url",
- help="Well-formed URL which will override current VES endpoint stored in simulator's DB")
- send_parser.add_argument("--verbose", action='store_true', help="Displays additional logs")
- send_parser.set_defaults(func=_perform_send_action)
-
-
-def __configure_one_time_send_parser(send_parser):
- send_parser.add_argument("--address", required=True, help="IP address of simulator")
- send_parser.add_argument("--filepath", required=True, help="Name of file with complete event for direct sending.")
- send_parser.add_argument("--ves_server_url",
- help="Well-formed URL which will override current VES endpoint stored in simulator's DB")
- send_parser.add_argument("--verbose", action='store_true', help="Displays additional logs")
- send_parser.set_defaults(func=_perform_one_time_send_action)
-
-
-def __configure_config_parser(config_parser):
- config_parser.add_argument("--address", required=True, help="IP address of simulator")
- config_parser.add_argument("--ves-server-url", required=True,
- help="Well-formed URL which should be set as a default VES Server URL in simulator")
- config_parser.add_argument("--verbose", action='store_true', help="Displays additional logs")
- config_parser.set_defaults(func=edit_configuration)
-
-
-def __configure_get_config_parser(get_config_parser):
- get_config_parser.add_argument("--address", required=True, help="IP address of simulator")
- get_config_parser.add_argument("--verbose", action='store_true', help="Displays additional logs")
- get_config_parser.set_defaults(func=get_configuration)
-
-
-def __configure_template_parser(template_config_parser):
- group = template_config_parser.add_mutually_exclusive_group(required=True)
- group.add_argument("--list", action='store_true', help="List all templates")
- group.add_argument("--get-content", help="Gets the template by name")
- group.add_argument("--upload", help="Uploads the template given in parameter file.")
-
- template_config_parser.add_argument("--override", action='store_true', help="Overwrites the template in case it exists.")
- template_config_parser.add_argument("--address", required=True, help="IP address of simulator")
- template_config_parser.add_argument("--verbose", action='store_true', help="Displays additional logs")
- template_config_parser.set_defaults(func=perform_template_action)
-
-
-def __configure_template_filter_parser(template_filter_parser):
- template_filter_parser.add_argument("--criteria", required=True, help="Json string with key-value search criteria")
- template_filter_parser.add_argument("--address", required=True, help="IP address of simulator")
- template_filter_parser.add_argument("--verbose", action='store_true', help="Displays additional logs")
- template_filter_parser.set_defaults(func=filter_templates)
-
-
-def path_leaf(path):
- head, tail = ntpath.split(path)
- return tail or ntpath.basename(head)
-
-
-if __name__ == "__main__":
- argument_parser = create_argument_parser()
- result = argument_parser.parse_args()
- if hasattr(result, 'func'):
- result.func(result)
- else:
- argument_parser.parse_args(['-h'])
diff --git a/test/mocks/pnfsimulator/simulator-cli/tests/resources/notification.json b/test/mocks/pnfsimulator/simulator-cli/tests/resources/notification.json
deleted file mode 100644
index bdba8ae59..000000000
--- a/test/mocks/pnfsimulator/simulator-cli/tests/resources/notification.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
- "commonEventHeader": {
- "domain": "notification",
- "eventName": "#RandomString(20)",
- "version": "4.0.1"
- },
- "notificationFields": {
- "arrayOfNamedHashMap": [{
- "name": "A20161221.1031-1041.bin.gz",
- "hashMap": {
- "fileformatType": "org.3GPP.32.435#measCollec"
- }
- }]
- }
-} \ No newline at end of file
diff --git a/test/mocks/pnfsimulator/simulator-cli/tests/test_netconf_simulator.py b/test/mocks/pnfsimulator/simulator-cli/tests/test_netconf_simulator.py
deleted file mode 100644
index 46ce84623..000000000
--- a/test/mocks/pnfsimulator/simulator-cli/tests/test_netconf_simulator.py
+++ /dev/null
@@ -1,165 +0,0 @@
-###
-# ============LICENSE_START=======================================================
-# Simulator
-# ================================================================================
-# Copyright (C) 2019 Nokia. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-###
-import logging
-import unittest
-import os
-from mock import patch
-
-from cli.netconf_simulator import create_argument_parser, NetconfSimulatorClient
-
-
-class TestArgumentParser(unittest.TestCase):
-
- def test_should_properly_parse_edit_config_with_all_params(self):
- parser = create_argument_parser()
- args = parser.parse_args(
- ['edit-config', '--address', '127.0.0.1', '--config', 'sample_path',
- "--verbose"]
- )
-
- self.assertEqual(args.address, '127.0.0.1')
- self.assertEqual(args.config, 'sample_path')
- self.assertTrue(args.verbose)
-
- def test_should_properly_parse_load_yang_model(self):
- parser = create_argument_parser()
-
- args = parser.parse_args(
- ['load-model', '--address', '127.0.0.1', '--module-name',
- 'sample_name', '--yang-model', 'sample_model', '--config',
- 'sample_config',
- "--verbose"]
- )
-
- self.assertEqual(args.address, '127.0.0.1')
- self.assertEqual(args.config, 'sample_config')
- self.assertEqual(args.yang_model, 'sample_model')
- self.assertEqual(args.module_name, 'sample_name')
- self.assertTrue(args.verbose)
-
- def test_should_properly_parse_delete_yang_model(self):
- parser = create_argument_parser()
-
- args = parser.parse_args(
- ['delete-model', '--address', '127.0.0.1', '--model-name',
- 'sample_name', "--verbose"]
- )
-
- self.assertEqual(args.address, '127.0.0.1')
- self.assertEqual(args.model_name, 'sample_name')
- self.assertTrue(args.verbose)
-
- def test_should_properly_parse_get_config(self):
- parser = create_argument_parser()
- args = parser.parse_args(
- ['get-config', '--address', '127.0.0.1', '--verbose']
- )
-
- self.assertEqual(args.address, '127.0.0.1')
- self.assertTrue(args.verbose)
-
-
-class TestNetconfSimulatorClient(unittest.TestCase):
-
- @classmethod
- def setUpClass(cls):
- with open("example", "w+") as f:
- f.write("sampleContent")
-
- @classmethod
- def tearDownClass(cls):
- os.remove("example")
-
- @patch('cli.netconf_simulator.requests')
- @patch('cli.netconf_simulator.NetconfSimulatorClient._configure_logger')
- def test_should_properly_get_config(self, logger, requests):
- client = NetconfSimulatorClient('localhost')
- client.logger = logging.getLogger()
-
- client.get_config()
-
- requests.get.assert_called_with('http://localhost:8080/netconf/get')
-
- @patch('cli.netconf_simulator.requests')
- @patch('cli.netconf_simulator.NetconfSimulatorClient._configure_logger')
- def test_should_properly_get_config_for_given_module(self, logger, requests):
- client = NetconfSimulatorClient('localhost')
- client.logger = logging.getLogger()
-
- client.get_config("module", "container")
-
- requests.get.assert_called_with('http://localhost:8080/netconf/get/module/container')
-
- @patch('cli.netconf_simulator.NetconfSimulatorClient._configure_logger')
- def test_should_raise_exception_when_module_is_present_and_container_is_absent(self, logger):
- client = NetconfSimulatorClient('localhost')
- client.logger = logging.getLogger()
-
- with self.assertRaises(AttributeError) as context:
- client.get_config(module_name="test")
-
- @patch('cli.netconf_simulator.NetconfSimulatorClient._configure_logger')
- def test_should_raise_exception_when_module_is_absent_and_container_is_present(self, logger):
- client = NetconfSimulatorClient('localhost')
- client.logger = logging.getLogger()
-
- with self.assertRaises(AttributeError) as context:
- client.get_config(container="test")
-
- @patch('cli.netconf_simulator.requests')
- @patch('cli.netconf_simulator.NetconfSimulatorClient._configure_logger')
- def test_should_properly_load_yang_model(self, logger, requests):
- client = NetconfSimulatorClient('localhost')
- client.logger = logging.getLogger()
-
- client.load_yang_model('sample_module_name', 'example', 'example')
-
- requests.post.assert_called()
-
- @patch('cli.netconf_simulator.requests')
- @patch('cli.netconf_simulator.NetconfSimulatorClient._configure_logger')
- def test_should_properly_delete_yang_model(self, logger, requests):
- client = NetconfSimulatorClient('localhost')
- client.logger = logging.getLogger()
-
- client.delete_yang_model('sample_model_name')
-
- requests.delete.assert_called()
-
- @patch('cli.netconf_simulator.requests')
- @patch('cli.netconf_simulator.NetconfSimulatorClient._configure_logger')
- def test_should_properly_edit_config(self, logger, requests):
- client = NetconfSimulatorClient('localhost')
- client.logger = logging.getLogger()
-
- client.edit_config('example')
-
- requests.post.assert_called()
-
- @patch('cli.netconf_simulator.requests')
- @patch('cli.netconf_simulator.NetconfSimulatorClient._configure_logger')
- def test_should_properly_run_less_like_mode(self, logger, requests):
- client = NetconfSimulatorClient('localhost')
- client.logger = logging.getLogger()
-
- client.less_like_func(100)
-
- requests.get.assert_called_with(
- params={"offset": 100}, url="http://localhost:8080/store/less")
diff --git a/test/mocks/pnfsimulator/simulator-cli/tests/test_pnf_simulator.py b/test/mocks/pnfsimulator/simulator-cli/tests/test_pnf_simulator.py
deleted file mode 100644
index 50f220038..000000000
--- a/test/mocks/pnfsimulator/simulator-cli/tests/test_pnf_simulator.py
+++ /dev/null
@@ -1,270 +0,0 @@
-###
-# ============LICENSE_START=======================================================
-# Simulator
-# ================================================================================
-# Copyright (C) 2019 Nokia. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-###
-import json
-import os
-import unittest
-from http.client import HTTPResponse, HTTPConnection
-from unittest import mock
-from unittest.mock import patch, Mock
-
-from cli.pnf_simulator import SimulatorClient, FullEventRequest, Messages
-from cli.pnf_simulator import create_argument_parser, SimulatorParams, PersistedEventRequest
-
-
-class TestArgumentParser(unittest.TestCase):
-
- def test_should_properly_parse_send_template_action_with_all_params(self):
- parser = create_argument_parser()
-
- result = parser.parse_args(
- ['send', 'template', '--address', '127.0.0.1', "--name", 'sample_template', '--patch', '"{}"', '--repeats', '2',
- "--interval", '5', '--verbose', '--ves_server_url', 'sample_url'])
-
- self.assertEqual(result.address, '127.0.0.1')
- self.assertEqual(result.name, "sample_template")
- self.assertEqual(result.patch, "\"{}\"")
- self.assertEqual(result.repeats, 2)
- self.assertEqual(result.interval, 5)
- self.assertEqual(result.ves_server_url, 'sample_url')
- self.assertTrue(result.verbose)
-
- def test_should_properly_parse_send_event_action_with_all_params(self):
- parser = create_argument_parser()
-
- result = parser.parse_args(
- ['send', 'event', '--address', '127.0.0.1', "--filepath", 'sample_filepath.json', '--verbose', '--ves_server_url', 'sample_url'])
-
- self.assertEqual(result.address, '127.0.0.1')
- self.assertEqual(result.filepath, "sample_filepath.json")
- self.assertEqual(result.ves_server_url, 'sample_url')
- self.assertTrue(result.verbose)
-
- def test_should_properly_parse_configure_action_with_all_params(self):
- parser = create_argument_parser()
- result = parser.parse_args(
- ['configure', '--address', '127.0.0.1', "--verbose", '--ves-server-url', 'sample_url']
- )
-
- self.assertEqual(result.address, '127.0.0.1')
- self.assertTrue(result.verbose)
- self.assertEqual(result.ves_server_url, 'sample_url')
-
- def test_should_properly_parse_get_config_action_with_all_params(self):
- parser = create_argument_parser()
- result = parser.parse_args(
- ['get-config', '--address', '127.0.0.1', '--verbose']
- )
-
- self.assertEqual(result.address, '127.0.0.1')
- self.assertTrue(result.verbose)
-
- def test_should_not_parse_arguments_when_mandatory_params_are_missing_for_template(self):
- parser = create_argument_parser()
-
- with self.assertRaises(SystemExit) as context:
- parser.parse_args(['send', 'template'])
- self.assertTrue('the following arguments are required: --address, --name' in context.exception)
-
- def test_should_not_parse_arguments_when_mandatory_params_are_missing_for_event(self):
- parser = create_argument_parser()
-
- with self.assertRaises(SystemExit) as context:
- parser.parse_args(['send', 'event'])
- self.assertTrue('the following arguments are required: --address, --filepath' in context.exception)
-
- def test_should_not_parse_arguments_when_mandatory_template_params_are_missing(self):
- parser = create_argument_parser()
-
- with self.assertRaises(SystemExit) as context:
- parser.parse_args(['template'])
- self.assertTrue('one of the arguments --list --get-content is required' in context.exception)
-
- def test_should_not_parse_template_action_with_all_params(self):
- parser = create_argument_parser()
- with self.assertRaises(SystemExit) as context:
- parser.parse_args(
- ['template', '--address', '127.0.0.1', "--list", '--get-content', 'sample']
- )
- self.assertTrue('argument --get-content: not allowed with argument --list' in context.exception)
-
- def test_should_properly_parse_template_action_with_list_param(self):
- parser = create_argument_parser()
- result = parser.parse_args(
- ['template', '--address', '127.0.0.1', "--list"]
- )
-
- self.assertTrue(result.list)
- self.assertEqual(result.address, '127.0.0.1')
- self.assertFalse(result.verbose)
-
- def test_should_properly_parse_template_action_with_get_content_param(self):
- parser = create_argument_parser()
- result = parser.parse_args(
- ['template', '--address', '127.0.0.1', "--get-content", "sample"]
- )
-
- self.assertTrue(result.get_content)
- self.assertEqual(result.address, '127.0.0.1')
- self.assertFalse(result.verbose)
-
- def test_should_not_parse_template_action_with_empty_get_content_param(self):
- parser = create_argument_parser()
- with self.assertRaises(SystemExit) as context:
- parser.parse_args(
- ['template', '--address', '127.0.0.1', "--list", '--get-content']
- )
- self.assertTrue('argument --get-content: expected one argument' in context.exception)
-
- def test_should_not_parse_template_action_when_only_override_is_given(self):
- parser = create_argument_parser()
- with self.assertRaises(SystemExit) as context:
- parser.parse_args(
- ['template', '--address', '127.0.0.1', "--override"]
- )
- self.assertTrue(Messages.OVERRIDE_VALID_ONLY_WITH_UPLOAD in context.exception)
-
- def test_should_parse_template_action_with_upload(self):
- parser = create_argument_parser()
- result = parser.parse_args(
- ['template', '--address', '127.0.0.1', "--upload", "resources/notification.json"]
- )
-
- self.assertFalse(result.override)
- self.assertEqual(result.upload, 'resources/notification.json')
-
- def test_should_parse_template_action_with_upload_and_override(self):
- parser = create_argument_parser()
- result = parser.parse_args(
- ['template', '--address', '127.0.0.1', "--upload", "resources/notification.json", "--override"]
- )
-
- self.assertTrue(result.override)
- self.assertEqual(result.upload, 'resources/notification.json')
-
-
- def test_should_properly_parse_filter_templates_action_with_all_params(self):
- parser = create_argument_parser()
-
- result = parser.parse_args(
- ['filter', '--address', '127.0.0.1', '--criteria', '"{}"', '--verbose'])
-
- self.assertEqual(result.address, '127.0.0.1')
- self.assertEqual(result.criteria, "\"{}\"")
- self.assertTrue(result.verbose)
-
-class TestSimulatorClient(unittest.TestCase):
-
- @patch('cli.pnf_simulator.http.client.HTTPConnection')
- def test_should_properly_send_event(self, http_connection):
- request = self._create_request()
- mocked_connection = Mock(HTTPConnection)
- http_connection.return_value = mocked_connection
- mocked_response = Mock(HTTPResponse)
- mocked_connection.getresponse.return_value = mocked_response
- mocked_response.status = '200'
- mocked_response.headers = {}
-
- client = SimulatorClient('localhost')
- client.send_event(request)
-
- mocked_connection.close.assert_called_with()
- mocked_connection.request.assert_called_with('POST', '/simulator/start',
- body=mock.ANY,
- headers={'Content-Type': 'application/json'})
-
- @patch('cli.pnf_simulator.http.client.HTTPConnection')
- def test_should_properly_send_one_time_event(self, http_connection):
- event_abs_filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)),"resources/notification.json")
- request = self._create_one_time_request(event_abs_filepath)
- mocked_connection = Mock(HTTPConnection)
- http_connection.return_value = mocked_connection
- mocked_response = Mock(HTTPResponse)
- mocked_connection.getresponse.return_value = mocked_response
- mocked_response.status = '202'
- mocked_response.headers = {}
-
- client = SimulatorClient('localhost')
- client.send_one_time_event(request)
-
- mocked_connection.close.assert_called_with()
- mocked_connection.request.assert_called_with('POST', '/simulator/event',
- body=mock.ANY,
- headers={'Content-Type': 'application/json'})
-
- @patch('cli.pnf_simulator.http.client.HTTPConnection')
- def test_should_properly_update_configuration(self, http_connection):
- mocked_connection = Mock(HTTPConnection)
- http_connection.return_value = mocked_connection
- mocked_response = Mock(HTTPResponse)
- mocked_connection.getresponse.return_value = mocked_response
- mocked_response.status = '200'
- mocked_response.headers = {}
-
- client = SimulatorClient('localhost')
- client.edit_configuration("sample_url")
-
- mocked_connection.close.assert_called_with()
- mocked_connection.request.assert_called_with('PUT', '/simulator/config',
- body=json.dumps({"vesServerUrl": "sample_url"}),
- headers={'Content-Type': 'application/json'})
-
- @patch('cli.pnf_simulator.http.client.HTTPConnection')
- def test_should_properly_retrieve_configuration(self, http_connection):
- mocked_connection = Mock(HTTPConnection)
- http_connection.return_value = mocked_connection
- mocked_response = Mock(HTTPResponse)
- mocked_connection.getresponse.return_value = mocked_response
- mocked_response.status = '200'
- mocked_response.headers = {}
-
- client = SimulatorClient('localhost')
- client.get_configuration()
- mocked_connection.close.assert_called_with()
- mocked_connection.request.assert_called_with('GET', '/simulator/config')
-
-
- @patch('cli.pnf_simulator.http.client.HTTPConnection')
- def test_should_properly_trigger_filter_template_action(self, http_connection):
- request = '{"sampleSearchString": "sampleSearchValue"}'
- mocked_connection = Mock(HTTPConnection)
- http_connection.return_value = mocked_connection
- mocked_response = Mock(HTTPResponse)
- mocked_connection.getresponse.return_value = mocked_response
- mocked_response.status = '200'
- mocked_response.headers = {}
-
- client = SimulatorClient('localhost')
- client.search_for_templates(request)
-
- mocked_connection.close.assert_called_with()
- mocked_connection.request.assert_called_with('POST', '/template/search',
- body=json.dumps({"searchExpr": {"sampleSearchString": "sampleSearchValue"}}),
- headers={'Content-Type': 'application/json'})
-
-
- @classmethod
- def _create_request(cls):
- return PersistedEventRequest(SimulatorParams(), 'sample_template')
-
- @classmethod
- def _create_one_time_request(cls, event_filepath):
- with open(event_filepath) as json_event:
- event_body = json.load(json_event)
- return FullEventRequest(event_body, 'sample_url')
diff --git a/test/mocks/pnfsimulator/simulator-cli/tests/test_tailf_client.py b/test/mocks/pnfsimulator/simulator-cli/tests/test_tailf_client.py
deleted file mode 100644
index da8bd624e..000000000
--- a/test/mocks/pnfsimulator/simulator-cli/tests/test_tailf_client.py
+++ /dev/null
@@ -1,47 +0,0 @@
-###
-# ============LICENSE_START=======================================================
-# Simulator
-# ================================================================================
-# Copyright (C) 2019 Nokia. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-###
-import unittest
-import asynctest
-
-from cli.client.tailf_client import TailfClient
-
-
-class TestTailfClient(unittest.TestCase):
-
- def __init__(self, methodName='runTest'):
- super().__init__(methodName)
- self._client = TailfClient('ws://localhost:9999')
-
- @asynctest.mock.patch('cli.client.tailf_client.websockets')
- def test_should_connect_to_server_and_receive_message(self, websockets_mock):
- recv_mock = asynctest.CoroutineMock(side_effect=self.interrupt)
- aenter_mock = asynctest.MagicMock()
- connection_mock = asynctest.MagicMock()
- websockets_mock.connect.return_value = aenter_mock
- aenter_mock.__aenter__.return_value = connection_mock
- connection_mock.recv = recv_mock
-
- self._client.tailf_messages()
-
- recv_mock.assert_awaited_once()
-
- def interrupt(self):
- self._client._is_running = False
- return 'test'
diff --git a/test/mocks/prov-mns-provider/Dockerfile b/test/mocks/prov-mns-provider/Dockerfile
new file mode 100644
index 000000000..fef09b239
--- /dev/null
+++ b/test/mocks/prov-mns-provider/Dockerfile
@@ -0,0 +1,13 @@
+FROM python:3.6
+
+WORKDIR /app
+
+COPY src/requirements.txt ./
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY src /app
+
+EXPOSE 8000
+
+CMD ["python", "ProvMnSProvider.py"]
diff --git a/test/mocks/prov-mns-provider/README.txt b/test/mocks/prov-mns-provider/README.txt
new file mode 100644
index 000000000..130d3b383
--- /dev/null
+++ b/test/mocks/prov-mns-provider/README.txt
@@ -0,0 +1,60 @@
+Python Dependency: python 3.6.x
+
+
+1. To specify the supported NRM function in DefinedNRMFunction.json
+
+
+2. To specify the HTTP server configuration info in ConfigInfo.json
+
+
+3. To specify the User info in UserInfo.json
+
+
+4. To specify the pre-set-MOI info in preSetMOI.json
+
+
+5. To run the HTTP EMS simulator: python ProvMnSProvider.py
+
+Build the image by using the command: docker build . -t prov-mns-provider
+Create the container and start the service by using the command: docker-compose up -d
+
+The default port number of ProvMnSProvider is : 8000
+
+The default username&password of ProvMnSProvider is : root&root
+
+ProvMnSProvider provides four RESTful APIs:
+
+1. Sample PUT request to Create MOI
+ PUT /ProvisioningMnS/v1500/GNBCUCPFunction/35c369d0-2681-4225-9755-daf98fd20805
+ {
+ "data": {
+ "attributes": {
+ "pLMNId": {
+ "mnc": "01",
+ "mcc": "001"
+ },
+ "gNBId": "1",
+ "gNBIdLength": "5",
+ "gNBCUName": "gnb-01"
+ },
+ "href": "/GNBCUCPFunction/35c369d0-2681-4225-9755-daf98fd20805",
+ "class": "GNBCUCPFunction",
+ "id": "35c369d0-2681-4225-9755-daf98fd20805"
+ }
+ }
+
+2. Sample GET request to get MOI attributes
+ GET /ProvisioningMnS/v1500/GNBCUCPFunction/35c369d0-2681-4225-9755-daf98fd20805?scope=BASE_ONLY&filter=GNBCUCPFunction&fields=gNBId&fields=gNBIdLength
+
+3. Sample PATCH request to modify MOI attributes
+ PATCH /ProvisioningMnS/v1500/GNBCUCPFunction/35c369d0-2681-4225-9755-daf98fd20805?scope=BASE_ONLY&filter=GNBCUCPFunction
+ {
+ "data": {
+ "pLMNId": "xxx",
+ "gNBId": "1234",
+ "gNBIdLength": "4"
+ }
+ }
+
+4. Sample DELETE request to delete MOI
+ DELETE /ProvisioningMnS/v1500/GNBCUCPFunction/35c369d0-2681-4225-9755-daf98fd20805?scope=BASE_ONLY&filter=GNBCUCPFunction
diff --git a/test/mocks/prov-mns-provider/docker-compose.yaml b/test/mocks/prov-mns-provider/docker-compose.yaml
new file mode 100644
index 000000000..c44f21b42
--- /dev/null
+++ b/test/mocks/prov-mns-provider/docker-compose.yaml
@@ -0,0 +1,9 @@
+version: '3.3'
+
+services:
+ ProvMnSProvider:
+ image: prov-mns-provider:latest
+ container_name: ProvMnSProvider
+ ports:
+ - "8000:8000"
+ restart: always
diff --git a/test/mocks/prov-mns-provider/src/ConfigInfo.json b/test/mocks/prov-mns-provider/src/ConfigInfo.json
new file mode 100644
index 000000000..e64a73142
--- /dev/null
+++ b/test/mocks/prov-mns-provider/src/ConfigInfo.json
@@ -0,0 +1,5 @@
+{
+ "ipAddress": "0.0.0.0",
+ "portNumber": 8000,
+ "prefix": "/ProvisioningMnS/v1500"
+}
diff --git a/test/mocks/prov-mns-provider/src/DefinedNRMFunction.json b/test/mocks/prov-mns-provider/src/DefinedNRMFunction.json
new file mode 100644
index 000000000..8c685a4a0
--- /dev/null
+++ b/test/mocks/prov-mns-provider/src/DefinedNRMFunction.json
@@ -0,0 +1,11 @@
+{
+ "NRMFunction": [
+ "GNBDUFunction",
+ "GNBCUCPFunction",
+ "GNBCUUPFunction",
+ "EP_XnU",
+ "EP_NgC",
+ "EP_NgU",
+ "EP_XnC"
+ ]
+}
diff --git a/test/mocks/prov-mns-provider/src/ProvMnSProvider.py b/test/mocks/prov-mns-provider/src/ProvMnSProvider.py
new file mode 100644
index 000000000..d61b4494e
--- /dev/null
+++ b/test/mocks/prov-mns-provider/src/ProvMnSProvider.py
@@ -0,0 +1,282 @@
+from http.server import HTTPServer, BaseHTTPRequestHandler
+import re
+import json
+import base64
+from urllib.parse import urlparse, parse_qs
+
+with open("DefinedNRMFunction.json",'r') as f:
+ jsonFile = json.loads(f.read())
+SupportingFunctionList = jsonFile["NRMFunction"]
+
+with open("UserInfo.json",'r') as f:
+ UserFile = json.loads(f.read())
+
+with open("ConfigInfo.json",'r') as f:
+ ConfigFile = json.loads(f.read())
+
+with open("preSetMOI.json",'r') as f:
+ Cretaed_MOIs = json.loads(f.read())
+Cretaed_MOIs_list = Cretaed_MOIs['preSetMOI']
+
+ipAddress = ConfigFile["ipAddress"]
+portNumber = ConfigFile["portNumber"]
+prefix = ConfigFile["prefix"]
+
+username = UserFile['userName']
+password = UserFile['password']
+Auth_str = username+":"+password
+print(Auth_str)
+base64string = base64.b64encode(bytes(Auth_str,'utf-8'))
+authheader = "Basic %s" % base64string.decode('utf-8')
+print(authheader)
+
+class ServerHTTP(BaseHTTPRequestHandler):
+ def do_GET(self):
+ path = self.path
+ print("\n**************************** NEW GET REQUEST ********************************")
+ request = urlparse(path)
+ print("the PATH of the received GET request:" + request.path)
+ pathlist = request.path.split('/')
+ prefix_check = True
+ try:
+ if "/" + pathlist[1] + "/"+ pathlist[2] != prefix:
+ prefix_check = False
+ className = pathlist[3]
+ idName = pathlist[4]
+ except IndexError:
+ prefix_check = False
+ response = {}
+ query_params = parse_qs(request.query)
+ if self.headers['Authorization'] == authheader and prefix_check is True:
+ if className in SupportingFunctionList:
+ try:
+ print("the value of the scope : "+ str(query_params['scope']))
+ print("the value of the filter : "+ str(query_params['filter']))
+ print("the value of the fields : "+ str(query_params['fields']))
+ except:
+ print("the request body doesn't follow the standard format")
+ response['error'] = "the request body doesn't follow the standard format"
+ print("Fail to get MOI object: "+'/' +className+'/'+idName)
+ self.send_response(406)
+ else:
+ find_moi = False
+ for MOI in Cretaed_MOIs_list:
+ if (idName == MOI['id'] and className == MOI['class']):
+ find_moi = True
+ try:
+ attributes = {}
+ for field in query_params['fields']:
+ attributes[field] = MOI['attributes'][field]
+ except:
+ print("the createed MOI doesn't contain the required attribute")
+ response['error'] = "the createed MOI doesn't contain the required attribute"
+ print("Fail to get MOI object: "+'/' +className+'/'+idName)
+ self.send_response(406)
+ else:
+ print("Successfully get MOI object: "+ className+'_'+idName)
+ response = {"data":[{"href":"/"+className+"/"+idName,"class":className,"id":idName,"attributes":attributes}]}
+ self.send_response(200)
+ if (find_moi is False):
+ response['error'] = {"errorInfo":"MOI does not exist"}
+ print("Fail to get MOI object: "+'/' +className+'/'+idName)
+ self.send_response(406)
+ else:
+ response['error'] = {"errorInfo":"MOI class not support"}
+ print("Fail to get MOI object: "+'/' +className+'/'+idName)
+ self.send_response(406)
+ else:
+ if prefix_check is True:
+ self.send_response(401)
+ response['error'] = {"errorInfo":"not Authorized"}
+ else:
+ self.send_response(404)
+ response['error'] = {"errorInfo":"wrong prefix"}
+ self.send_header("Content-type","application/json")
+ self.end_headers()
+ buf = json.dumps(response)
+ self.wfile.write(bytes(buf,'utf-8'))
+
+ def do_PATCH(self):
+ path = self.path
+ print("\n**************************** NEW PATCH REQUEST ********************************")
+ request = urlparse(path)
+ print("the PATH of the received GET request:" + request.path)
+ pathlist = request.path.split('/')
+ prefix_check = True
+ try:
+ if "/" + pathlist[1] + "/"+ pathlist[2] != prefix:
+ prefix_check = False
+ className = pathlist[3]
+ idName = pathlist[4]
+ except IndexError:
+ prefix_check = False
+ response = {}
+ query_params = parse_qs(request.query)
+ if self.headers['Authorization'] == authheader and prefix_check is True:
+ if className in SupportingFunctionList:
+ datas = self.rfile.read(int(self.headers['content-length']))
+ json_str = datas.decode('utf-8')
+ json_str = re.sub('\'','\"', json_str)
+ json_dict = json.loads(json_str)
+ try:
+ print("the value of the scope : "+ str(query_params['scope']))
+ print("the value of the filter : "+ str(query_params['filter']))
+ print("the modified attribute values : "+json.dumps(json_dict['data']))
+ except:
+ print("the request body doesn't follow the standard format")
+ response['error'] = "the request body doesn't follow the standard format"
+ print("Fail to modify MOI object: "+'/' +className+'/'+idName)
+ self.send_response(406)
+ else:
+ find_moi = False
+ for MOI in Cretaed_MOIs_list:
+ if (idName == MOI['id'] and className == MOI['class']):
+ find_moi = True
+ wrong_attribute = False
+ for key, value in json_dict['data'].items():
+ if (key in MOI['attributes']):
+ MOI['attributes'][key] = value
+ else:
+ wrong_attribute = True
+ if (wrong_attribute is True):
+ print("the createed MOI doesn't contain the required attribute")
+ response['error'] = "the createed MOI doesn't contain the required attribute"
+ print("Fail to get modify object: "+'/' +className+'/'+idName)
+ self.send_response(406)
+ else:
+ print("Successfully modify MOI object: "+ className+'_'+idName)
+ response = {"data":[MOI]}
+ self.send_response(200)
+ if (find_moi is False):
+ response['error'] = {"errorInfo":"MOI does not exist"}
+ print("Fail to get MOI object: "+'/' +className+'/'+idName)
+ self.send_response(406)
+ else:
+ response['error'] = {"errorInfo":"MOI class not support"}
+ print("Fail to modify MOI object: "+'/' +className+'/'+idName)
+ self.send_response(406)
+ else:
+ if prefix_check is True:
+ self.send_response(401)
+ response['error'] = {"errorInfo":"not Authorized"}
+ else:
+ self.send_response(404)
+ response['error'] = {"errorInfo":"wrong prefix"}
+ self.send_header("Content-type","application/json")
+ self.end_headers()
+ buf = json.dumps(response)
+ self.wfile.write(bytes(buf,'utf-8'))
+
+ def do_DELETE(self):
+ path = self.path
+ print("\n**************************** NEW DELETE REQUEST ********************************")
+ request = urlparse(path)
+ print("the PATH of the received DELETE request:" + request.path)
+ pathlist = request.path.split('/')
+ prefix_check = True
+ try:
+ if "/" + pathlist[1] + "/"+ pathlist[2] != prefix:
+ prefix_check = False
+ className = pathlist[3]
+ idName = pathlist[4]
+ except IndexError:
+ prefix_check = False
+ response = {}
+ query_params = parse_qs(request.query)
+ if self.headers['Authorization'] == authheader and prefix_check is True:
+ if className in SupportingFunctionList:
+ try:
+ print("the value of the scope : "+ str(query_params['scope']))
+ print("the value of the filter : "+ str(query_params['filter']))
+ except:
+ print("the request body doesn't follow the standard format")
+ response['error'] = "the request body doesn't follow the standard format"
+ print("Fail to delete MOI object: "+'/' +className+'/'+idName)
+ self.send_response(406)
+ else:
+ find_moi = False
+ for MOI in Cretaed_MOIs_list:
+ if (idName == MOI['id'] and className == MOI['class']):
+ find_moi = True
+ Cretaed_MOIs_list.remove(MOI)
+ print("Successfully delete MOI object: "+ className+'_'+idName)
+ response = {"data":["/"+className+"/"+idName]}
+ self.send_response(200)
+ if (find_moi is False):
+ response['error'] = {"errorInfo":"MOI does not exist"}
+ print("Fail to delete MOI object: "+'/' +className+'/'+idName)
+ self.send_response(406)
+ else:
+ response['error'] = {"errorInfo":"MOI class not support"}
+ print("Fail to delete MOI object: "+'/' +className+'/'+idName)
+ self.send_response(406)
+ else:
+ if prefix_check is True:
+ self.send_response(401)
+ response['error'] = {"errorInfo":"not Authorized"}
+ else:
+ self.send_response(404)
+ response['error'] = {"errorInfo":"wrong prefix"}
+ self.send_header("Content-type","application/json")
+ self.end_headers()
+ buf = json.dumps(response)
+ self.wfile.write(bytes(buf,'utf-8'))
+
+ def do_PUT(self):
+ path = self.path
+ print("\n**************************** NEW PUT REQUEST ********************************")
+ print("the PATH of the received PUT request:" + path)
+ pathlist = path.split('/')
+ prefix_check = True
+ try:
+ if "/" + pathlist[1] + "/"+ pathlist[2] != prefix:
+ prefix_check = False
+ className = pathlist[3]
+ idName = pathlist[4]
+ except IndexError:
+ prefix_check = False
+ response = {}
+ if self.headers['Authorization'] == authheader and prefix_check is True:
+ if className in SupportingFunctionList:
+ datas = self.rfile.read(int(self.headers['content-length']))
+ json_str = datas.decode('utf-8')
+ json_str = re.sub('\'','\"', json_str)
+ json_dict = json.loads(json_str)
+ try:
+ print("the class of the New MOI : "+json_dict['data']['class'])
+ print("the ID of the New MOI : "+json_dict['data']['id'])
+ print("the href of the New MOI : "+json_dict['data']['href'])
+ print("the attributes of the New MOI : "+json.dumps(json_dict['data']['attributes']))
+ except:
+ print("the request body doesn't follow the standard format")
+ response['error'] = "the request body doesn't follow the standard format"
+ print("Fail to create MOI object: "+'/' +className+'/'+idName)
+ self.send_response(406)
+ else:
+ print("Successfully create MOI object: "+ className+'/'+idName)
+ Cretaed_MOIs_list.append(json_dict['data'])
+ response = json_dict
+ self.send_response(201)
+ self.send_header("Location",path)
+ else:
+ response['error'] = {"errorInfo":"MOI class not support"}
+ print("Fail to create MOI object: "+'/' +className+'/'+idName)
+ self.send_response(406)
+ else:
+ if prefix_check is True:
+ self.send_response(401)
+ response['error'] = {"errorInfo":"not Authorized"}
+ else:
+ self.send_response(404)
+ response['error'] = {"errorInfo":"wrong prefix"}
+ self.send_header("Content-type","application/json")
+ self.end_headers()
+ buf = json.dumps(response)
+ self.wfile.write(bytes(buf,'utf-8'))
+
+def start_server(port):
+ http_server = HTTPServer((ipAddress, int(port)), ServerHTTP)
+ http_server.serve_forever()
+
+if __name__ == "__main__":
+ start_server(int(portNumber))
diff --git a/test/mocks/prov-mns-provider/src/UserInfo.json b/test/mocks/prov-mns-provider/src/UserInfo.json
new file mode 100644
index 000000000..f7f2ba2d8
--- /dev/null
+++ b/test/mocks/prov-mns-provider/src/UserInfo.json
@@ -0,0 +1,4 @@
+{
+ "userName": "root",
+ "password": "root"
+}
diff --git a/test/mocks/prov-mns-provider/src/preSetMOI.json b/test/mocks/prov-mns-provider/src/preSetMOI.json
new file mode 100644
index 000000000..7d78dbd3d
--- /dev/null
+++ b/test/mocks/prov-mns-provider/src/preSetMOI.json
@@ -0,0 +1,18 @@
+{
+ "preSetMOI": [
+ {
+ "attributes": {
+ "gNBCUName": "gnb-01",
+ "gNBId": "1",
+ "gNBIdLength": "5",
+ "pLMNId": {
+ "mcc": "001",
+ "mnc": "01"
+ }
+ },
+ "class": "GNBCUCPFunction",
+ "href": "/GNBCUCPFunction/e65d3f05-9558-4e58-aeb0-3a1eae1db742",
+ "id": "e65d3f05-9558-4e58-aeb0-3a1eae1db742"
+ }
+ ]
+}
diff --git a/test/mocks/prov-mns-provider/src/requirements.txt b/test/mocks/prov-mns-provider/src/requirements.txt
new file mode 100644
index 000000000..f622334ac
--- /dev/null
+++ b/test/mocks/prov-mns-provider/src/requirements.txt
@@ -0,0 +1,2 @@
+pip==20.0.2
+wheel==0.29.0
diff --git a/test/mocks/prov-mns-provider/src/tests/ConfigInfo.json b/test/mocks/prov-mns-provider/src/tests/ConfigInfo.json
new file mode 120000
index 000000000..66ede7d7a
--- /dev/null
+++ b/test/mocks/prov-mns-provider/src/tests/ConfigInfo.json
@@ -0,0 +1 @@
+../ConfigInfo.json \ No newline at end of file
diff --git a/test/mocks/prov-mns-provider/src/tests/DefinedNRMFunction.json b/test/mocks/prov-mns-provider/src/tests/DefinedNRMFunction.json
new file mode 120000
index 000000000..a56d54d9d
--- /dev/null
+++ b/test/mocks/prov-mns-provider/src/tests/DefinedNRMFunction.json
@@ -0,0 +1 @@
+../DefinedNRMFunction.json \ No newline at end of file
diff --git a/test/mocks/prov-mns-provider/src/tests/ProvMnSProvider.py b/test/mocks/prov-mns-provider/src/tests/ProvMnSProvider.py
new file mode 120000
index 000000000..96f10491e
--- /dev/null
+++ b/test/mocks/prov-mns-provider/src/tests/ProvMnSProvider.py
@@ -0,0 +1 @@
+../ProvMnSProvider.py \ No newline at end of file
diff --git a/test/mocks/prov-mns-provider/src/tests/UserInfo.json b/test/mocks/prov-mns-provider/src/tests/UserInfo.json
new file mode 120000
index 000000000..cc7354bc5
--- /dev/null
+++ b/test/mocks/prov-mns-provider/src/tests/UserInfo.json
@@ -0,0 +1 @@
+../UserInfo.json \ No newline at end of file
diff --git a/test/mocks/prov-mns-provider/src/tests/common.py b/test/mocks/prov-mns-provider/src/tests/common.py
new file mode 100644
index 000000000..62fd80702
--- /dev/null
+++ b/test/mocks/prov-mns-provider/src/tests/common.py
@@ -0,0 +1,37 @@
+import requests # pylint: disable=W0611
+from uuid import uuid4
+import ProvMnSProvider
+import logging
+from json import dumps # pylint: disable=W0611
+
+logging.basicConfig(level=logging.DEBUG)
+logger = logging.getLogger(__name__)
+
+MOI_ID = str(uuid4())
+MOI_CLASS = ProvMnSProvider.Cretaed_MOIs_list[0]['class']
+MOI_DATA_TMPL = { 'data': ProvMnSProvider.Cretaed_MOIs_list[0] }
+MOI_DATA_PATCH = { "data": { "pLMNId": "xxx", "gNBId": "1234", "gNBIdLength": "4" }}
+URI_SCHEMA = 'http'
+AUTH_STRING = (ProvMnSProvider.username, ProvMnSProvider.password)
+INVALID_AUTH_STRING = (str(uuid4()).split('-')[0], str(uuid4()).split('-')[0])
+URI_BASE_STRING = URI_SCHEMA + '://' + ProvMnSProvider.ipAddress + ':' + \
+ str(ProvMnSProvider.portNumber) + ProvMnSProvider.prefix + \
+ '/' + MOI_CLASS + '/' + MOI_ID
+URI_PUT_STRING = URI_BASE_STRING
+URI_GET_STRING = URI_BASE_STRING + '?scope=BASE_ONLY&filter=' + MOI_CLASS + \
+ '&fields=gNBId&fields=gNBIdLength'
+URI_PATCH_STRING = URI_BASE_STRING + '?scope=BASE_ONLY&filter=' + MOI_CLASS
+URI_DELETE_STRING = URI_PATCH_STRING
+BAD_CLASS_URI_BASE_STRING = URI_SCHEMA + '://' + ProvMnSProvider.ipAddress + \
+ ':' + str(ProvMnSProvider.portNumber) + \
+ ProvMnSProvider.prefix + '/' + 'invalidMoiClass' + \
+ '/' + MOI_ID
+BAD_PREFIX_URI_BASE_STRING = URI_SCHEMA + '://' + ProvMnSProvider.ipAddress + \
+ ':' + str(ProvMnSProvider.portNumber) + \
+ '/bad/prefix' + '/' + MOI_CLASS + '/' + MOI_ID
+BAD_PREFIX1_URI_BASE_STRING = URI_SCHEMA + '://' + ProvMnSProvider.ipAddress + \
+ ':' + str(ProvMnSProvider.portNumber) + \
+ '/badprefix' + '/' + MOI_CLASS + '/' + MOI_ID
+UNAUTHORIZED_MSG="not Authorized"
+INVALID_CLASS_MSG = "MOI class not support"
+INVALID_PREFIX_MSG = "wrong prefix"
diff --git a/test/mocks/prov-mns-provider/src/tests/preSetMOI.json b/test/mocks/prov-mns-provider/src/tests/preSetMOI.json
new file mode 120000
index 000000000..053256d80
--- /dev/null
+++ b/test/mocks/prov-mns-provider/src/tests/preSetMOI.json
@@ -0,0 +1 @@
+../preSetMOI.json \ No newline at end of file
diff --git a/test/mocks/prov-mns-provider/src/tests/test-requirements.txt b/test/mocks/prov-mns-provider/src/tests/test-requirements.txt
new file mode 100644
index 000000000..547de5c5b
--- /dev/null
+++ b/test/mocks/prov-mns-provider/src/tests/test-requirements.txt
@@ -0,0 +1,2 @@
+pytest
+requests
diff --git a/test/mocks/prov-mns-provider/src/tests/test_invalid_requests.py b/test/mocks/prov-mns-provider/src/tests/test_invalid_requests.py
new file mode 100644
index 000000000..6240c660a
--- /dev/null
+++ b/test/mocks/prov-mns-provider/src/tests/test_invalid_requests.py
@@ -0,0 +1,49 @@
+import pytest
+from common import * # pylint: disable=W0614
+
+@pytest.mark.parametrize(('req_method', 'url', 'req_params'), [
+ (getattr(requests, 'get'), URI_GET_STRING, {"auth": INVALID_AUTH_STRING}),
+ (getattr(requests, 'put'), URI_PUT_STRING, {"auth": INVALID_AUTH_STRING,
+ "json": MOI_DATA_TMPL}),
+ (getattr(requests, 'patch'), URI_PATCH_STRING, {"auth": INVALID_AUTH_STRING,
+ "json": MOI_DATA_PATCH}),
+ (getattr(requests, 'delete'), URI_DELETE_STRING, {"auth": INVALID_AUTH_STRING})
+ ])
+def test_unauthorized(req_method, url, req_params):
+ '''Check service denies access if
+ invalid credentials provided'''
+ req = req_method(url, **req_params)
+ assert req.status_code == requests.codes.unauthorized
+ assert UNAUTHORIZED_MSG in req.text
+
+@pytest.mark.parametrize(('req_method', 'req_params'), [
+ (getattr(requests, 'get'), {"auth": AUTH_STRING}),
+ (getattr(requests, 'put'), {"auth": AUTH_STRING, "json": MOI_DATA_TMPL}),
+ (getattr(requests, 'patch'), {"auth": AUTH_STRING, "json": MOI_DATA_PATCH}),
+ (getattr(requests, 'delete'), {"auth": AUTH_STRING})
+ ])
+def test_bad_moi_class(req_method, req_params):
+ '''Check service returns proper
+ http code and error msg if MOI class
+ is invalid'''
+ req = req_method(BAD_CLASS_URI_BASE_STRING, **req_params)
+ assert req.status_code == requests.codes.not_acceptable
+ assert INVALID_CLASS_MSG in req.text
+
+
+@pytest.mark.parametrize(('url'), [BAD_PREFIX_URI_BASE_STRING,
+ BAD_PREFIX1_URI_BASE_STRING])
+@pytest.mark.parametrize(('req_method', 'req_params'), [
+ (getattr(requests, 'get'), {"auth": AUTH_STRING}),
+ (getattr(requests, 'put'), {"auth": AUTH_STRING, "json": MOI_DATA_TMPL}),
+ (getattr(requests, 'patch'), {"auth": AUTH_STRING, "json": MOI_DATA_PATCH}),
+ (getattr(requests, 'delete'), {"auth": AUTH_STRING})
+ ])
+def test_bad_prefix(url, req_method, req_params):
+ '''Check service returns proper
+ http code and error msg if URI prefix
+ is invalid'''
+
+ req = req_method(url, **req_params)
+ assert req.status_code == requests.codes.not_found
+ assert INVALID_PREFIX_MSG in req.text
diff --git a/test/mocks/prov-mns-provider/src/tests/test_rest_api_endpoints.py b/test/mocks/prov-mns-provider/src/tests/test_rest_api_endpoints.py
new file mode 100644
index 000000000..155ed9dd5
--- /dev/null
+++ b/test/mocks/prov-mns-provider/src/tests/test_rest_api_endpoints.py
@@ -0,0 +1,35 @@
+import pytest
+from common import * # pylint: disable=W0614
+
+def test_put():
+ '''Validate PUT request'''
+
+ MOI_DATA = MOI_DATA_TMPL
+ MOI_DATA['data']['id'] = MOI_ID
+ MOI_DATA['data']['href'] = '/' + MOI_CLASS + '/' + MOI_ID
+ req_put = requests.put('{0}'.format(URI_PUT_STRING), auth=AUTH_STRING,
+ json=MOI_DATA)
+
+ if req_put.status_code != requests.codes.created:
+ logger.error('PUT request to {0} failed'.format(URI_PUT_STRING))
+ logger.debug('MOI data payload: {0}'.format(dumps(MOI_DATA,indent=2)))
+ logger.debug('Response content: {0}'.format(req_put.text))
+
+ assert req_put.status_code == requests.codes.created
+
+@pytest.mark.parametrize(('url', 'req_method', 'req_params'),[
+ (URI_GET_STRING, getattr(requests, 'get'), { "auth": AUTH_STRING }),
+ (URI_PATCH_STRING, getattr(requests, 'patch'), { "auth": AUTH_STRING,
+ "json": MOI_DATA_PATCH}),
+ (URI_DELETE_STRING, getattr(requests, 'delete'), { "auth": AUTH_STRING })
+ ])
+def test_api_methods(url, req_method, req_params):
+ '''Valide request'''
+ req = req_method(url, **req_params)
+
+ if req.status_code != requests.codes.ok:
+ logger.error('{0} request to {1} failed'.format(
+ req_method.__name__.upper(), url))
+ logger.debug('Response content: {0}'.format(req.text))
+
+ assert req.status_code == requests.codes.ok
diff --git a/test/mocks/prov-mns-provider/src/tox.ini b/test/mocks/prov-mns-provider/src/tox.ini
new file mode 100644
index 000000000..2f2976307
--- /dev/null
+++ b/test/mocks/prov-mns-provider/src/tox.ini
@@ -0,0 +1,10 @@
+[tox]
+envlist = py3
+skipsdist = True
+
+[testenv]
+deps = -r{toxinidir}/tests/test-requirements.txt
+commands = pytest
+
+[pytest]
+addopts = -v
diff --git a/test/mocks/ran-nssmf-simulator/.gitignore b/test/mocks/ran-nssmf-simulator/.gitignore
new file mode 100644
index 000000000..2b5a0df16
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/.gitignore
@@ -0,0 +1,4 @@
+__pycache__
+.tox
+*.pyc
+RanNssmfSimulator.egg-info/
diff --git a/test/mocks/ran-nssmf-simulator/Dockerfile b/test/mocks/ran-nssmf-simulator/Dockerfile
new file mode 100644
index 000000000..ed3656a95
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/Dockerfile
@@ -0,0 +1,25 @@
+FROM onap/integration-python:10.0.0
+
+USER root
+
+WORKDIR /home/onap
+COPY ./requirements.txt ./
+
+RUN python -m pip install --upgrade pip && \
+ pip install --no-cache-dir -r requirements.txt
+
+ENV user=onap group=onap
+USER onap
+
+ENV PATH=$PATH:/home/onap/.local/bin
+
+COPY --chown=onap:onap main.py /home/onap
+COPY --chown=onap:onap setup.py /home/onap
+COPY --chown=onap:onap RanNssmfSimulator /home/onap/RanNssmfSimulator
+
+RUN chmod 770 /home/onap/main.py && \
+ chmod 770 /home/onap/setup.py && \
+ chmod 770 -R /home/onap/RanNssmfSimulator
+
+CMD [ "python3", "main.py" ]
+
diff --git a/test/mocks/ran-nssmf-simulator/README.md b/test/mocks/ran-nssmf-simulator/README.md
new file mode 100644
index 000000000..5a8958740
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/README.md
@@ -0,0 +1,19 @@
+# External RAN NSSMF Simulator for Network Slicing Use Case
+
+There are two options to run the simulator:
+
+## Option 1. Directly run it in the current directory:
+
+```
+1. pip3 install -r requirements.txt
+
+2. python3 main.py
+```
+
+## Option 2. Install it using setuptools, and run it in any directory:
+
+```
+1. python3 setup.py install --user
+
+2. python3 -m RanNssmfSimulator.MainApp
+```
diff --git a/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/AuthManager.py b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/AuthManager.py
new file mode 100644
index 000000000..6a52f516f
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/AuthManager.py
@@ -0,0 +1,127 @@
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+"""
+ Used to get and check Access Token by SO NSSMF adapter.
+"""
+
+import json
+import uuid
+import time
+import sched
+import threading
+from schematics.types import StringType
+from schematics.models import Model
+
+from .utils import getLogger, AUTH_DB, TOKEN_EXPIRES_TIME, TOKEN_CLEAN_TIME
+
+
+logger = getLogger("AuthManager")
+lock = threading.Lock()
+
+
+class AuthRequest(Model):
+ grantType = StringType(required=True)
+ userName = StringType(required=True)
+ value = StringType(required=True)
+
+
+class AuthInfo(object):
+ def __init__(self, authRequest, expires):
+ self.authRequest = authRequest
+ self.expiredTime = int(time.time()) + expires * 60
+
+
+class AuthError(ValueError):
+ pass
+
+
+class TokenError(ValueError):
+ pass
+
+
+_AUTH_TOKEN = {}
+
+
+def cleanExpiredToken():
+ s = sched.scheduler(time.time, time.sleep)
+
+ def doCleanExpiredToken():
+ current_time = int(time.time())
+
+ expiredTokens = []
+ for authToken in _AUTH_TOKEN:
+ if current_time > _AUTH_TOKEN[authToken].expiredTime:
+ expiredTokens.append(authToken)
+ logger.debug("Auth token %s is expired and will be deleted" % authToken)
+
+ with lock:
+ for authToken in expiredTokens:
+ del _AUTH_TOKEN[authToken]
+
+ s.enter(TOKEN_CLEAN_TIME, 1, doCleanExpiredToken)
+
+ s.enter(TOKEN_CLEAN_TIME, 1, doCleanExpiredToken)
+
+ s.run()
+
+
+def checkAuth(authRequest):
+ with open(AUTH_DB) as f:
+ authDB = json.load(f)
+
+ if authRequest["grantType"].lower() != "password":
+ raise AuthError("Unsupported grantType %s" % authRequest["grantType"])
+
+ for authItem in authDB:
+ if authItem["userName"].lower() == authRequest["userName"].lower() \
+ and authItem["value"] == authRequest["value"]:
+ break
+ else:
+ raise AuthError("userName or password is error")
+
+
+def generateAuthToken(authRequest):
+ token = uuid.uuid4().hex
+ with lock:
+ _AUTH_TOKEN[token] = AuthInfo(authRequest, TOKEN_EXPIRES_TIME)
+
+ return {
+ "accessToken": token,
+ "expires": TOKEN_EXPIRES_TIME
+ }
+
+
+def checkAuthToken(requestHeaders):
+ authToken = requestHeaders.get("X-Auth-Token")
+ logger.debug("X-Auth-Token: %s" % authToken)
+
+ if not authToken:
+ raise TokenError("Auth token is missing")
+
+ if authToken not in _AUTH_TOKEN:
+ raise TokenError("Auth token is error")
+
+ current_time = int(time.time())
+ if current_time > _AUTH_TOKEN[authToken].expiredTime:
+ raise TokenError("Auth token is expired")
+
+
+def startAuthManagerJob():
+ cleanThread = threading.Thread(target=cleanExpiredToken)
+ cleanThread.daemon = True
+
+ cleanThread.start()
diff --git a/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/MainApp.py b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/MainApp.py
new file mode 100644
index 000000000..05edfa872
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/MainApp.py
@@ -0,0 +1,150 @@
+#! /usr/bin/python3
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# Contribution (C) 2022 Aarna Networks, Inc. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+import json
+from flask import Flask, request, Response
+from schematics.exceptions import DataError
+
+from .utils import REST_PORT, LOGGING_LEVEL
+from .SliceDataType import AllocateNssi, DeAllocateNssi, ActivateNssi, DeActivateNssi
+from . import AuthManager
+from . import NssManager
+
+
+app = Flask(__name__)
+app.logger.setLevel(LOGGING_LEVEL)
+
+
+@app.errorhandler(DataError)
+def handleRequestException(e):
+ app.logger.error(e)
+ response = Response()
+ response.status_code = 400
+ return response
+
+
+@app.errorhandler(AuthManager.AuthError)
+def handleAuthException(e):
+ app.logger.error(e)
+ response = Response()
+ response.status_code = 400
+ return response
+
+
+@app.errorhandler(AuthManager.TokenError)
+def handleAuthException(e):
+ app.logger.error(e)
+ response = Response()
+ response.status_code = 401
+ return response
+
+
+@app.errorhandler(NssManager.NssError)
+def handleNssException(e):
+ app.logger.error(e)
+ response = Response()
+ response.status_code = 400
+ return response
+
+
+@app.route("/api/rest/securityManagement/v1/oauth/token", methods=['POST'])
+def handleAuthToken():
+ """
+ Used to get Access Token by SO NSSMF adapter.
+ """
+ app.logger.debug("Receive request:\n%s" % json.dumps(request.json, indent=2))
+
+ AuthManager.AuthRequest(request.json).validate()
+ AuthManager.checkAuth(request.json)
+
+ return AuthManager.generateAuthToken(request.json), 201
+
+
+@app.route("/ObjectManagement/NSS/SliceProfiles", methods=['POST'])
+def handleAllocateNssi():
+ AuthManager.checkAuthToken(request.headers)
+
+ app.logger.info("Receive AllocateNssi request:\n%s" % json.dumps(request.json, indent=2))
+
+ AllocateNssi(request.json).validate()
+
+ return NssManager.allocateNssi(request.json), 200
+
+
+@app.route("/ObjectManagement/NSS/SliceProfiles/<string:sliceProfileId>", methods=['DELETE'])
+def handleDeallocateNssi(sliceProfileId):
+ AuthManager.checkAuthToken(request.headers)
+
+ app.logger.info("Receive DeallocateNssi request for sliceProfileId %s:\n%s"
+ % (sliceProfileId, json.dumps(request.json, indent=2)))
+
+ DeAllocateNssi(request.json).validate()
+
+ return NssManager.deallocateNssi(sliceProfileId, request.json), 200
+
+@app.route("/api/rest/provMns/v1/an/NSS/<string:snssai>/activations", methods=['PUT'])
+def handleActivateNssi(snssai):
+ """
+ Method: handleActivateNssi
+ This method handles slice activation event generated by SO NSSMF adapter.
+ As part of this event, SO NSSMF adapter will send the associated 'snssai'.
+ 'snssai' is string type value and example is: "01-2557D9". Wherein,
+ sst: "01" and sd: "2557D9".
+ Argument: snssai
+ 'sst': Identifies the service (e.g eMBB, URLLC,...)
+ 'sd' : service differentiator within sst.
+ Return value: http status 200
+ """
+ AuthManager.checkAuthToken(request.headers)
+
+ app.logger.info("Receive ActivateNssi request for snssai:%s\n%s"
+ % (snssai, json.dumps(request.json, indent=2)))
+
+ ActivateNssi(request.json).validate()
+
+ return NssManager.activateNssi(snssai, request.json), 200
+
+@app.route("/api/rest/provMns/v1/an/NSS/<string:snssai>/deactivation", methods=['PUT'])
+def handleDeActivateNssi(snssai):
+ """
+ Method: handleDeActivateNssi
+ This method handles slice deactivation event generated by SO NSSMF adapter.
+ As part of this event, SO NSSMF adapter will send the associated 'snssai'.
+ Example 'snssai' : "01-2557D9".
+ Argument: snssai
+ 'sst': Identifies the service (e.g eMBB, URLLC,...)
+ 'sd' : service differentiator within sst.
+ Return value: http status 200
+ """
+ AuthManager.checkAuthToken(request.headers)
+
+ app.logger.info("Receive DeActivateNssi request for snssai:%s\n%s"
+ % (snssai, json.dumps(request.json, indent=2)))
+
+ DeActivateNssi(request.json).validate()
+
+ return NssManager.deactivateNssi(snssai, request.json), 200
+
+def main():
+ AuthManager.startAuthManagerJob()
+ app.run("0.0.0.0", REST_PORT, False, ssl_context="adhoc")
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/NssManager.py b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/NssManager.py
new file mode 100644
index 000000000..817f5d92f
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/NssManager.py
@@ -0,0 +1,87 @@
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# Contribution (C) 2022 Aarna Networks, Inc. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+import uuid
+
+from .utils import getLogger
+
+
+logger = getLogger("NssManager")
+
+
+class NssError(ValueError):
+ pass
+
+
+def allocateNssi(requestBody):
+ sliceProfile = requestBody["attributeListIn"]
+ sliceProfileId = sliceProfile["sliceProfileId"]
+
+ nSSId = uuid.uuid4().hex
+
+ responseBody = {
+ "attributeListOut": {},
+ "href": nSSId
+ }
+
+ logger.info("Allocate NSSI for sliceProfileId %s success, nSSId: %s" % (sliceProfileId, nSSId))
+ return responseBody
+
+
+def deallocateNssi(sliceProfileId, requestBody):
+ nSSId = requestBody["nSSId"]
+
+ logger.info("Deallocate NSSI for sliceProfileId %s success, nSSId: %s" % (sliceProfileId, nSSId))
+ return ""
+
+def activateNssi(snssai, requestBody):
+ """
+ Method: activateNssi
+ This method is internal and invoked from handleActivateNssi()
+ callflow. As part of this, it logs the activate snssai, nssiId
+ values from incoming request.
+ Arguments: snssai, requestBody
+ snssai represents below:
+ 'sst': Identifies the service (e.g eMBB, URLLC,...)
+ 'sd' : service differentiator within sst.
+ requestBody: Incoming http request payload.
+ Return value: ''
+ """
+ nssiId = requestBody["nssiId"]
+ #nsiId = requestBody["nsiId"]
+
+ logger.info("Activate NSSI for snssai %s successful, nssiId: %s" % (snssai, nssiId))
+ return ""
+
+def deactivateNssi(snssai, requestBody):
+ """
+ Method: deactivateNssi
+ This method is internal and invoked from handleDeActivateNssi()
+ callflow. As part of this, it logs the deactivate snssai, nssiId
+ values from incoming request.
+ Argument: snssai, requestBody
+ snssai represents below:
+ 'sst': Identifies the service (e.g eMBB, URLLC,...)
+ 'sd' : service differentiator within sst.
+ requestBody: Incoming http request payload.
+ Return value: ''
+ """
+ nssiId = requestBody["nssiId"]
+ #nsiId = requestBody["nsiId"]
+
+ logger.info("DeActivate NSSI for snssai %s successful, nssiId: %s" % (snssai, nssiId))
+ return ""
diff --git a/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/SliceDataType.py b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/SliceDataType.py
new file mode 100644
index 000000000..37ec0a3af
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/SliceDataType.py
@@ -0,0 +1,75 @@
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# Contribution (C) 2022 Aarna Networks, Inc. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+from schematics.types import BaseType, StringType, IntType, LongType
+from schematics.types.compound import ModelType, ListType, DictType
+from schematics.models import Model
+
+
+class PerfReqEmbb(Model):
+ """Reference 3GPP TS 28.541 V16.5.0, Section 6.4.1."""
+ expDataRateDL = IntType()
+ expDataRateUL = IntType()
+ areaTrafficCapDL = IntType()
+ areaTrafficCapUL = IntType()
+ overallUserDensity = IntType()
+ activityFactor = IntType()
+
+
+class PerfReqUrllc(Model):
+ """TODO"""
+ pass
+
+
+class PerfReq(Model):
+ """Reference 3GPP TS 28.541 V16.5.0."""
+ perfReqEmbbList = ListType(ModelType(PerfReqEmbb))
+ # perfReqUrllcList = ListType(ModelType(PerfReqUrllc))
+ perfReqUrllcList = ListType(DictType(BaseType))
+
+
+class SliceProfile(Model):
+ """Reference 3GPP TS 28.541 V16.5.0, Section 6.3.4."""
+ sliceProfileId = StringType(required=True)
+ sNSSAIList = ListType(StringType(required=True))
+ pLMNIdList = ListType(StringType(required=True))
+ perfReq = ModelType(PerfReq, required=True)
+ maxNumberofUEs = LongType()
+ coverageAreaTAList = ListType(IntType())
+ latency = IntType()
+ uEMobilityLevel = StringType()
+ resourceSharingLevel = StringType()
+
+
+class AllocateNssi(Model):
+ """Reference 3GPP TS 28.531 V16.6.0."""
+ attributeListIn = ModelType(SliceProfile)
+
+
+class DeAllocateNssi(Model):
+ """Reference 3GPP TS 28.531 V16.6.0."""
+ nSSId = StringType(required=True)
+
+class ActivateNssi(Model):
+ """Reference 3GPP TS 28.531 V16.6.0."""
+ nsiId = StringType(required=True)
+ nssiId = StringType(required=True)
+
+class DeActivateNssi(Model):
+ """Reference 3GPP TS 28.531 V16.6.0."""
+ nsiId = StringType(required=True)
+ nssiId = StringType(required=True)
diff --git a/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/__init__.py b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/__init__.py
diff --git a/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/etc/auth.json b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/etc/auth.json
new file mode 100644
index 000000000..23e9376e0
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/etc/auth.json
@@ -0,0 +1,7 @@
+[
+ {
+ "grantType": "password",
+ "userName": "admin",
+ "value": "123456"
+ }
+]
diff --git a/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/utils.py b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/utils.py
new file mode 100644
index 000000000..f0b99119a
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/utils.py
@@ -0,0 +1,44 @@
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+import os
+import logging
+
+REST_PORT = int(os.getenv("RAN_NSSMF_REST_PORT", "8443"))
+LOGGING_LEVEL = os.getenv("RAN_NSSMF_LOGGING_LEVEL", "INFO")
+
+TOKEN_EXPIRES_TIME = int(os.getenv("RAN_NSSMF_TOKEN_EXPIRES_TIME", "30"))
+TOKEN_CLEAN_TIME = int(os.getenv("RAN_NSSMF_TOKEN_CLEAN_TIME", "180"))
+
+MAIN_DIR = os.path.dirname(os.path.abspath(__file__))
+AUTH_DB_FILE = os.path.join(MAIN_DIR, "etc", "auth.json")
+
+AUTH_DB = os.getenv("RAN_NSSMF_AUTH_DB", AUTH_DB_FILE)
+
+
+LOGGER_FORMAT = "[%(asctime)-15s] %(levelname)s in %(name)s: %(message)s"
+
+
+def getLogger(name, level=LOGGING_LEVEL, fmt=LOGGER_FORMAT):
+ logger = logging.getLogger(name)
+ logger.setLevel(level)
+
+ formatter = logging.Formatter(fmt)
+ cmd_handler = logging.StreamHandler()
+ cmd_handler.setFormatter(formatter)
+ logger.addHandler(cmd_handler)
+
+ return logger
diff --git a/test/mocks/ran-nssmf-simulator/container-tag.yaml b/test/mocks/ran-nssmf-simulator/container-tag.yaml
new file mode 100644
index 000000000..b7dea024a
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/container-tag.yaml
@@ -0,0 +1 @@
+tag: "1.0.0"
diff --git a/test/mocks/ran-nssmf-simulator/main.py b/test/mocks/ran-nssmf-simulator/main.py
new file mode 100644
index 000000000..c2473de09
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/main.py
@@ -0,0 +1,23 @@
+#! /usr/bin/python3
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+from RanNssmfSimulator.MainApp import main
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/mocks/ran-nssmf-simulator/requirements.txt b/test/mocks/ran-nssmf-simulator/requirements.txt
new file mode 100644
index 000000000..303f79e4f
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/requirements.txt
@@ -0,0 +1,3 @@
+Flask
+schematics
+cryptography
diff --git a/test/mocks/ran-nssmf-simulator/setup.py b/test/mocks/ran-nssmf-simulator/setup.py
new file mode 100644
index 000000000..65467ee13
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/setup.py
@@ -0,0 +1,35 @@
+#! /usr/bin/python3
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+from setuptools import setup, find_packages
+
+setup(
+ name="RanNssmfSimulator",
+ version="0.1.0",
+ description="RAN NSSMF Simulator",
+ license="Apache License, Version 2.0",
+ packages=find_packages(),
+ data_files=[
+ ('RanNssmfSimulator/etc', ['RanNssmfSimulator/etc/auth.json'])
+ ],
+ install_requires=[
+ 'Flask',
+ 'schematics',
+ 'cryptography'
+ ]
+)
diff --git a/test/mocks/ran-nssmf-simulator/test-requirements.txt b/test/mocks/ran-nssmf-simulator/test-requirements.txt
new file mode 100644
index 000000000..547de5c5b
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/test-requirements.txt
@@ -0,0 +1,2 @@
+pytest
+requests
diff --git a/test/mocks/ran-nssmf-simulator/test/conftest.py b/test/mocks/ran-nssmf-simulator/test/conftest.py
new file mode 100644
index 000000000..cfa00cd24
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/test/conftest.py
@@ -0,0 +1,13 @@
+import pytest
+from test_settings import TEST_AUTH_DB_FILE
+from json import load
+import requests
+from requests.packages.urllib3.exceptions import InsecureRequestWarning
+
+requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
+
+@pytest.fixture(scope="module")
+def auth_credentials():
+ '''A fixture returning credentials for the simulator request'''
+ with open(TEST_AUTH_DB_FILE) as creds:
+ return load(creds)
diff --git a/test/mocks/ran-nssmf-simulator/test/test_auth.json b/test/mocks/ran-nssmf-simulator/test/test_auth.json
new file mode 100644
index 000000000..b8f6f93bd
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/test/test_auth.json
@@ -0,0 +1,7 @@
+[
+ {
+ "grantType": "password",
+ "userName": "testuser",
+ "value": "Vue&W{ah0uch|ae&"
+ }
+]
diff --git a/test/mocks/ran-nssmf-simulator/test/test_main.py b/test/mocks/ran-nssmf-simulator/test/test_main.py
new file mode 100644
index 000000000..337b99997
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/test/test_main.py
@@ -0,0 +1,10 @@
+from requests import post, codes
+from test_settings import TEST_REST_URL, TEST_REST_GET_ACCESS_TOKEN_ENDPOINT, TEST_REST_HEADERS
+
+def test_get_auth_token(auth_credentials):
+ url = f"{TEST_REST_URL}{TEST_REST_GET_ACCESS_TOKEN_ENDPOINT}"
+ response = post(url, headers=TEST_REST_HEADERS, verify=False, json=auth_credentials[0])
+ json_response = response.json()
+ assert "accessToken" in json_response
+ assert "expires" in json_response
+ assert response.status_code == codes.created
diff --git a/test/mocks/ran-nssmf-simulator/test/test_settings.py b/test/mocks/ran-nssmf-simulator/test/test_settings.py
new file mode 100644
index 000000000..445d9728f
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/test/test_settings.py
@@ -0,0 +1,6 @@
+TEST_AUTH_DB_FILE = "test/test_auth.json"
+TEST_REST_PORT = 8443
+TEST_REST_IP = "127.0.0.1"
+TEST_REST_URL = f"https://{TEST_REST_IP}:{TEST_REST_PORT}"
+TEST_REST_GET_ACCESS_TOKEN_ENDPOINT = "/api/rest/securityManagement/v1/oauth/token"
+TEST_REST_HEADERS = { "Content-Type": "application/json" }
diff --git a/test/mocks/ran-nssmf-simulator/tox.ini b/test/mocks/ran-nssmf-simulator/tox.ini
new file mode 100644
index 000000000..0eae155a4
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/tox.ini
@@ -0,0 +1,10 @@
+[tox]
+envlist =nssmf
+
+[testenv]
+basepython = python3.8
+deps = -r{toxinidir}/test-requirements.txt
+
+[testenv:nssmf]
+commands_pre = /bin/bash -c "RAN_NSSMF_REST_PORT=8443 RAN_NSSMF_AUTH_DB=test/test_auth.json python main.py &"
+commands = pytest -v
diff --git a/test/onaptests_bench/MANIFEST.in b/test/onaptests_bench/MANIFEST.in
new file mode 100644
index 000000000..2ca3ee6e2
--- /dev/null
+++ b/test/onaptests_bench/MANIFEST.in
@@ -0,0 +1,2 @@
+recursive-include src/onaptests_bench/templates *
+recursive-include src/onaptests_bench/artifacts *
diff --git a/test/onaptests_bench/requirements.txt b/test/onaptests_bench/requirements.txt
new file mode 100644
index 000000000..61c203a86
--- /dev/null
+++ b/test/onaptests_bench/requirements.txt
@@ -0,0 +1,6 @@
+pyopenssl
+kubernetes
+matplotlib
+jinja2
+docker
+xtesting
diff --git a/test/onaptests_bench/setup.cfg b/test/onaptests_bench/setup.cfg
new file mode 100644
index 000000000..b4a62e2a9
--- /dev/null
+++ b/test/onaptests_bench/setup.cfg
@@ -0,0 +1,22 @@
+[metadata]
+name = onaptests_bench
+version = 0.1
+description = Addon to run simultaneously several pythonsdk_tests basic_* tests
+author = Orange OpenSource
+license = Apache 2.0
+classifiers =
+ Programming Language :: Python :: 3
+
+[options]
+zip_safe = False
+include_package_data = True
+package_dir=
+ =src
+packages=find_namespace:
+
+[options.packages.find]
+where=src
+
+[entry_points]
+console_scripts =
+ run_stability_tests = onaptests_bench.launcher:main
diff --git a/test/mocks/pnfsimulator/simulator-cli/tests/__init__.py b/test/onaptests_bench/setup.py
index aa8b4f995..0dea62494 100644
--- a/test/mocks/pnfsimulator/simulator-cli/tests/__init__.py
+++ b/test/onaptests_bench/setup.py
@@ -1,8 +1,7 @@
-###
+#!/usr/bin/env python3
+
# ============LICENSE_START=======================================================
-# Simulator
-# ================================================================================
-# Copyright (C) 2019 Nokia. All rights reserved.
+# Copyright (C) 2022 Orange, Ltd.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,5 +14,12 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
# ============LICENSE_END=========================================================
-###
+
+import setuptools
+setuptools.setup(
+ setup_requires=['pbr', 'setuptools'],
+ pbr=True,
+ include_package_data=True)
diff --git a/test/mocks/pnfsimulator/simulator-cli/cli/__init__.py b/test/onaptests_bench/src/onaptests_bench/__init__.py
index aa8b4f995..a6921067f 100644
--- a/test/mocks/pnfsimulator/simulator-cli/cli/__init__.py
+++ b/test/onaptests_bench/src/onaptests_bench/__init__.py
@@ -1,8 +1,5 @@
-###
# ============LICENSE_START=======================================================
-# Simulator
-# ================================================================================
-# Copyright (C) 2019 Nokia. All rights reserved.
+# Copyright (C) 2022 Orange, Ltd.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,5 +12,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-# ============LICENSE_END=========================================================
-###
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END========================================================= \ No newline at end of file
diff --git a/test/onaptests_bench/src/onaptests_bench/artifacts/settings.py b/test/onaptests_bench/src/onaptests_bench/artifacts/settings.py
new file mode 100644
index 000000000..79801a806
--- /dev/null
+++ b/test/onaptests_bench/src/onaptests_bench/artifacts/settings.py
@@ -0,0 +1,81 @@
+"""Specific settings module."""
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2022 Orange, Ltd.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+######################
+# #
+# ONAP INPUTS DATAS #
+# #
+######################
+
+# Variables to set logger information
+# Possible values for logging levels in onapsdk: INFO, DEBUG , WARNING, ERROR
+LOG_CONFIG = {
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "default": {
+ "class": "logging.Formatter",
+ "format": "%(asctime)s %(levelname)s %(lineno)d:%(filename)s(%(process)d) - %(message)s"
+ }
+ },
+ "handlers": {
+ "console": {
+ "level": "WARN",
+ "class": "logging.StreamHandler",
+ "formatter": "default"
+ },
+ "file": {
+ "level": "DEBUG",
+ "class": "logging.FileHandler",
+ "formatter": "default",
+ "filename": "/var/lib/xtesting/results/pythonsdk.debug.log",
+ "mode": "w"
+ }
+ },
+ "root": {
+ "level": "INFO",
+ "handlers": ["console", "file"]
+ }
+}
+CLEANUP_FLAG = False
+SDC_CLEANUP = False
+
+# SOCK_HTTP = "socks5h://127.0.0.1:8080"
+REPORTING_FILE_PATH = "/var/lib/xtesting/results/reporting.html"
+K8S_REGION_TYPE = "k8s"
+TILLER_HOST = "localhost"
+K8S_CONFIG = None # None means it will use default config (~/.kube/config)
+K8S_NAMESPACE = "onap" # Kubernetes namespace
+ORCHESTRATION_REQUEST_TIMEOUT = 60.0 * 30 # 30 minutes in seconds
+
+AAI_URL = "https://aai-api.simpledemo.onap.org"
+CDS_URL = "https://cds-blueprintsprocessor-api.simpledemo.onap.org"
+K8SPLUGIN_URL = "https://multicloud-k8s-api.simpledemo.onap.org"
+MSB_URL = "https://msb-iag-ui.simpledemo.onap.org"
+SDC_BE_URL = "https://sdc-be-api.simpledemo.onap.org"
+SDC_FE_URL = "https://sdc-fe-ui.simpledemo.onap.org"
+SDNC_URL = "https://sdnc-api.simpledemo.onap.org"
+SO_URL = "https://so-api.simpledemo.onap.org"
+CLAMP_URL = "https://policy-ui.simpledemo.onap.org"
+VES_URL = "https://dcae-ves-collector-api.simpledemo.onap.org"
+DMAAP_URL = "https://dmaap-mr-api.simpledemo.onap.org"
+NBI_URL = "https://nbi-api.simpledemo.onap.org"
+HOLMES_URL = "https://holmes-rule-mgmt-ui.simpledemo.onap.org"
+AAI_GUI_URL = "https://aai-sparkybe-api.simpledemo.onap.org"
diff --git a/test/onaptests_bench/src/onaptests_bench/launcher.py b/test/onaptests_bench/src/onaptests_bench/launcher.py
new file mode 100644
index 000000000..fda9699ad
--- /dev/null
+++ b/test/onaptests_bench/src/onaptests_bench/launcher.py
@@ -0,0 +1,287 @@
+#!/usr/bin/env python3
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2022 Orange, Ltd.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+#
+# Launch basic_* tests in parallel and report results
+# the possible basic tests are:
+# - basic_onboarding
+# - basic_vm
+# - basic_network
+# - basic_cnf
+# - ...
+
+# Dependencies:
+# See requirements.txt
+# The dashboard is based on bulma framework
+#
+# Environment:
+#
+# Example usage:
+# python launcher.py
+# -t <test>
+# -s <nb simultaneous occurences>
+# -d <duration>
+# -r <reporting path>
+#
+# the summary html page will be generated where the script is launched
+"""
+Launch basic_* onaptests in parallel and report the results
+"""
+import argparse
+import logging
+import os
+import sys
+import random
+import string
+import time
+import docker # pylint: disable=import-error
+
+import onaptests_bench.reporting as Reporting
+
+HOMEPATH = os.environ.get("HOME", "/home/ubuntu")
+
+sys.path.append(f"{HOMEPATH}/onaptests_bench/src/onaptests_bench")
+
+# Logger
+LOG_LEVEL = 'INFO'
+logging.basicConfig()
+LOGGER = logging.getLogger("onaptests_bench")
+LOGGER.setLevel(LOG_LEVEL)
+TEST_LIST = ['basic_onboard', 'basic_vm', 'basic_vm_macro',
+ 'basic_network', 'basic_cnf']
+DEFAULT_TEST = TEST_LIST[0]
+DEFAULT_SIMU_TESTS = 5
+DEFAULT_TEST_DURATION = 180 # duration in minutes
+RESULT_PATH = "/tmp"
+ONAPTEST_BENCH_WAIT_TIMER = 40
+ONAPTESTS_PATH = "/usr/lib/python3.8/site-packages/onaptests"
+ONAPTESTS_SETTINGS = f"{ONAPTESTS_PATH}/configuration/settings.py"
+ONAPTESTS_SERVICE_DIR = f"{ONAPTESTS_PATH}/templates/vnf-services"
+
+CLUSTER_IP = "127.0.0.1"
+
+# Get arguments
+PARSER = argparse.ArgumentParser()
+PARSER.add_argument(
+ '-t',
+ '--test',
+ choices=TEST_LIST,
+ help=('Select your test (basic_onboard, basic_vm, basic_network, basic_cnf).' +
+ 'If not set, basic_onboarding is considered'),
+ default=DEFAULT_TEST)
+PARSER.add_argument(
+ '-s',
+ '--simu',
+ type=int,
+ help='Number of simultaneous tests',
+ default=DEFAULT_SIMU_TESTS)
+PARSER.add_argument(
+ '-d',
+ '--duration',
+ type=int,
+ help='Test duration (in minutes)',
+ default=DEFAULT_TEST_DURATION)
+PARSER.add_argument(
+ '-r',
+ '--reporting',
+ help='Result directory',
+ default=RESULT_PATH)
+PARSER.add_argument(
+ '-i',
+ '--ip',
+ help='Cluster IP',
+ default=CLUSTER_IP)
+
+ARGS = PARSER.parse_args()
+
+def prepare_test_config():
+ """Check the test execution.
+    We assume that basic_vm tests are already available in /tmp/xtesting.
+    If not, the tests cannot be executed."""
+ LOGGER.info("Prepare the test, verify that the test can be run")
+
+def get_container_name():
+ """Set Container name."""
+ result_str = ''.join(random.choice(string.ascii_letters) for i in range(8))
+ container_name = ARGS.test + "_" + result_str
+ return container_name
+
+def clean_test_device(docker_client, test):
+ """Clean test resources."""
+ container_list = docker_client.containers.list(
+ all=True,
+ filters={'label':'test='+test})
+ LOGGER.info("Containers cleanup before: %s containers", len(container_list))
+
+ for container in container_list:
+ container.stop()
+ container.remove()
+
+def retrieve_onap_ip():
+ """Retrieve ONAP IP from /etc/hosts"""
+ filepath = '/etc/hosts'
+ with open(filepath) as fp_config:
+ line = fp_config.readline()
+ while line:
+ line = fp_config.readline()
+ if "so.api.simpledemo.onap.org" in line:
+ onap_ip = line.split()[0]
+ return onap_ip
+ return None
+
+def execute_test(serie_number, test_number,
+ docker_client):
+ """Start one xtesting container for the configured test case.
+
+ :param serie_number: 1-based index of the current serie; used to build
+ the per-test reporting directory path.
+ :param test_number: 0-based index of the test within the serie.
+ :param docker_client: docker SDK client used to run the container.
+ :return: the started Container object (detach=True, so this returns
+ immediately without waiting for the test to finish; completion
+ is checked later in get_terminated_serie_status()).
+ """
+ LOGGER.info("Execute test n° %s", test_number + 1)
+
+ # Results of each test land in <reporting>/serie<N>/test<M> on the host.
+ volume_reporting = (ARGS.reporting + '/serie' + str(serie_number) +
+ '/test' + str(test_number + 1))
+ # Resolve the cluster IP only when the user kept the default sentinel;
+ # an explicit --ip always wins over /etc/hosts lookup.
+ if ARGS.ip == CLUSTER_IP:
+ onap_ip = retrieve_onap_ip()
+ else:
+ onap_ip = ARGS.ip
+
+ # extra_hosts maps every simpledemo.onap.org service name to the cluster
+ # IP so the container can reach the ONAP APIs without external DNS.
+ this_container = docker_client.containers.run(
+ "nexus3.onap.org:10003/onap/xtesting-smoke-usecases-pythonsdk:master",
+ command="run_tests -t " + ARGS.test,
+ name=get_container_name(),
+ labels={"test":ARGS.test},
+ stdout=True,
+ stderr=True,
+ stream=False,
+ detach=True,
+ extra_hosts={'portal-ui.simpledemo.onap.org':onap_ip,
+ 'vid-ui.simpledemo.onap.org':onap_ip,
+ 'sdc-fe-ui.simpledemo.onap.org':onap_ip,
+ 'sdc-be-api.simpledemo.onap.org':onap_ip,
+ 'aai-api.simpledemo.onap.org':onap_ip,
+ 'so-api.simpledemo.onap.org':onap_ip,
+ 'sdnc-api.simpledemo.onap.org':onap_ip,
+ 'sdc.workflow.plugin.simpledemo.onap.org':onap_ip,
+ 'sdc.dcae.plugin.simpledemo.onap.org':onap_ip,
+ 'multicloud-k8s-api.simpledemo.onap.org':onap_ip},
+ volumes={'/tmp/xtesting/smoke-usecases/' + ARGS.test + '/env':{'bind': '/var/lib/xtesting/conf/env_file', 'mode': 'rw'}, # pylint: disable=line-too-long
+ f'{HOMEPATH}/.config/openstack/clouds.yaml':{'bind': '/root/.config/openstack/clouds.yaml', 'mode': 'rw'}, # pylint: disable=line-too-long
+ volume_reporting:{'bind':'/var/lib/xtesting/results', 'mode': 'rw'},
+ f'{HOMEPATH}/.kube/config':{'bind':'/root/.kube/config', 'mode': 'rw'},
+ os.path.dirname(os.path.abspath(__file__)) + '/artifacts/settings.py':{'bind': ONAPTESTS_SETTINGS, 'mode': 'rw'}, # pylint: disable=line-too-long
+ f'/tmp/xtesting/smoke-usecases/{ARGS.test}/{ARGS.test}-service.yaml': {'bind': f'{ONAPTESTS_SERVICE_DIR}/{ARGS.test}-service.yaml', 'mode': 'rw'}}) # pylint: disable=line-too-long
+
+ return this_container
+
+def launch_test_serie(serie_number,
+ docker_client, serie_containers):
+ """Launch one serie of ARGS.simu simultaneous tests.
+
+ Appends each started Container to serie_containers (mutated in place)
+ and also returns it for the caller's convenience."""
+ for test_number in range(ARGS.simu):
+ container = execute_test(serie_number, test_number,
+ docker_client)
+ serie_containers.append(container)
+ return serie_containers
+
+def get_terminated_serie_status(running_containers):
+ """Wait for each container of the serie and collect its exit code.
+
+ Blocks up to 60*ONAPTEST_BENCH_WAIT_TIMER seconds per container; a
+ container that does not finish in time is stopped and recorded with
+ exit code -1. Returns the list of exit codes, in container order."""
+ LOGGER.info("check terminated dockers")
+ exit_codes = []
+ # NOTE(review): exit_codes is a fresh local list, so this clear() is
+ # redundant — it always clears an already-empty list.
+ exit_codes.clear()
+
+ for container in running_containers:
+ try:
+ # wait for the container to finish within a certain time
+ result = container.wait(timeout=60*ONAPTEST_BENCH_WAIT_TIMER)
+ exit_code = result["StatusCode"]
+ except Exception as timeout: # pylint: disable=broad-except
+ #if the container didn't finish in the allocated time
+ # raise timeout exception and stop the container
+ LOGGER.error(timeout)
+ LOGGER.error("docker not terminating in allocated time")
+ container.stop()
+ exit_code = -1
+ LOGGER.info("exit code : %s", str(exit_code))
+ exit_codes.append(exit_code)
+ return exit_codes
+
+def generate_report():
+ """Build the HTML/PNG reporting from the collected xtesting results.
+
+ NOTE(review): res_dir_path and reporting_dir are both set to
+ ARGS.reporting, i.e. results are read from and the report is written
+ to the same directory — confirm this is intended."""
+ LOGGER.info("Generate the report")
+ test = Reporting.OnaptestBenchReporting(
+ nb_simultaneous_tests=ARGS.simu,
+ duration=ARGS.duration,
+ res_dir_path=ARGS.reporting,
+ reporting_dir=ARGS.reporting)
+ test.generate_reporting()
+
+def main():
+ """Entry point.
+
+ Runs series of ARGS.simu simultaneous tests in a loop until
+ ARGS.duration minutes have elapsed, then (always, via finally)
+ generates the aggregated report. Exits with status 1 on any error."""
+ # ***************************************************************************
+ # ***************************************************************************
+ # start of the test
+ # ***************************************************************************
+ # ***************************************************************************
+ test_client = docker.from_env()
+ serie_containers = []
+ exit_codes = []
+
+ prepare_test_config()
+
+ # Absolute wall-clock deadline: duration is expressed in minutes.
+ t_end = time.time() + 60 * float(ARGS.duration)
+
+ # clean previous container no longer used to avoid saturation
+
+
+ LOGGER.info("****************************")
+ LOGGER.info("Launch the tests")
+ LOGGER.info("Testcase: %s", ARGS.test)
+ LOGGER.info("Number of simultaneous tests : %s", ARGS.simu)
+ LOGGER.info("Test duration : %s m", ARGS.duration)
+ LOGGER.info("Reporting path : %s", ARGS.reporting)
+ LOGGER.info("****************************")
+
+ try:
+ # keep on launching series until we reached the duration expected by the tester
+ serie_number = 1
+ while time.time() < t_end:
+ # remove leftovers of the previous serie before starting a new one
+ clean_test_device(test_client, ARGS.test)
+ LOGGER.info("Serie : %s", str(serie_number))
+ serie_containers.clear()
+ # launch the serie
+ serie_containers = launch_test_serie(
+ serie_number,
+ test_client,
+ serie_containers)
+ LOGGER.info("Containers of serie %s created", str(serie_number))
+ # blocks until every container of the serie finished or timed out
+ exit_codes = get_terminated_serie_status(serie_containers)
+ LOGGER.info("Serie terminated")
+ LOGGER.debug(exit_codes)
+ remaining_time = int(t_end - time.time())
+ if remaining_time > 0:
+ LOGGER.info("%s s remaining, restart a serie...", remaining_time)
+ serie_number += 1
+
+ # broad except is deliberate: any failure maps to exit status 1
+ except Exception as error: # pylint: disable=broad-except
+ LOGGER.error(error)
+ LOGGER.error(">>>> Onaptests_bench FAIL")
+ LOGGER.error("do you have the correct env file?")
+ # NOTE(review): "correctcluster" is missing a space in this
+ # user-facing message; fixing it would change a runtime string.
+ LOGGER.error("do you have the correctcluster IP?")
+ sys.exit(1)
+
+ else:
+ LOGGER.info(">>>> Onaptests_bench successfully executed")
+
+ finally:
+ # the report is generated even on failure or sys.exit(1)
+ generate_report()
diff --git a/test/onaptests_bench/src/onaptests_bench/reporting.py b/test/onaptests_bench/src/onaptests_bench/reporting.py
new file mode 100644
index 000000000..f46465936
--- /dev/null
+++ b/test/onaptests_bench/src/onaptests_bench/reporting.py
@@ -0,0 +1,351 @@
+#!/usr/bin/env python3
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2022 Orange, Ltd.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+"""
+Aggregate test results
+"""
+import logging
+import os
+import re
+
+from dataclasses import dataclass
+from datetime import datetime
+import matplotlib.pyplot as plt # pylint: disable=import-error
+
+from jinja2 import Environment, select_autoescape, PackageLoader # pylint: disable=import-error
+
+# Logger
+LOG_LEVEL = 'INFO'
+logging.basicConfig()
+LOGGER = logging.getLogger("onaptests_bench")
+LOGGER.setLevel(LOG_LEVEL)
+
+RESULT_DIR_PATH = "/tmp/mytest"
+RESULT_LOG_FILE = "xtesting.log"
+RESULT_LOG_REPORTING_FILE = "reporting.html"
+FIGURE_NAME = "mygraph.png"
+USE_CASE_NAME = "unknwown" # could be checked with result parsing
+TIMEOUT_RUN = 1200 # parameter to be provided by the launcher
+TEST_DURATION = 120 # parameter to be provided by the launcher
+NB_SIMULTANEOUS_TESTS = 10 # parameter to be provided by the launcher
+REPORTING_DIR = "/tmp/"
+
+@dataclass
+class TestResult:
+ """Test results retrieved from xtesting."""
+ # xtesting test case name (e.g. basic_onboard)
+ case_name: str
+ # "PASS"/"FAIL" by declaration; parse_xtesting_results actually stores
+ # a success score (100 or 0) here
+ status: str = "FAIL"
+ # timestamp parsed from the xtesting log ('%Y-%m-%d %H:%M:%S,%f')
+ start_date: datetime = "2000-01-01 00:00:01,123"
+ # test duration in seconds
+ duration: int = 0
+
+@dataclass
+class SerieResult:
+ """Serie of tests."""
+ # serie directory name (e.g. serie1)
+ serie_id: str
+ # percentage of passing tests in the serie (0-100)
+ success_rate: int = 0
+ # min/max/mean/median of the test durations (seconds)
+ min: int = 0
+ max: int = 0
+ mean: float = 0.0
+ median: float = 0.0
+ # number of test results aggregated in this serie
+ nb_occurences: int = 0
+
+class OnaptestBenchReporting:
+ """Build html summary page."""
+
+ def __init__(self, nb_simultaneous_tests=NB_SIMULTANEOUS_TESTS,
+ duration=TEST_DURATION,
+ res_dir_path=RESULT_DIR_PATH,
+ reporting_dir=REPORTING_DIR) -> None:
+ """Initialization of the report.
+
+ :param nb_simultaneous_tests: tests launched in parallel per serie
+ :param duration: total bench duration in minutes (display only)
+ :param res_dir_path: root directory scanned for xtesting.log files
+ :param reporting_dir: directory where HTML/PNG output is written
+ """
+ # case name is refined later by parse_xtesting_results()
+ self._case_name = USE_CASE_NAME
+ self._nb_simultaneous_tests = nb_simultaneous_tests
+ self._test_duration = duration
+ self._result_dir_path = res_dir_path
+ self._reporting_dir = reporting_dir
+
+ def parse_xtesting_results(self, file_result):
+ """Retrieve data from a xtesting file.
+
+ :param file_result: path to one xtesting.log file
+ :return: TestResult with start date, status score (100=PASS, 0=FAIL)
+ and duration in seconds (TIMEOUT_RUN when no result table found)
+ """
+ # we need to retrieve:
+ # (- the name)
+ # - the start date
+ # - the status
+ # - the duration
+ # note Data could be in DB but let's aggregate based on the log to avoid
+ # dependency to the DB
+ # 2021-01-22 07:01:58,467 - xtesting.ci.run_tests - INFO - Test result:
+ #
+ # +------------------------+---------------------+------------------+----------------+
+ # | TEST CASE | PROJECT | DURATION | RESULT |
+ # +------------------------+---------------------+------------------+----------------+
+ # | basic_onboard | integration | 19:53 | PASS |
+ # +------------------------+---------------------+------------------+----------------+
+ #
+ # 2021-01-22 07:01:58 - xtesting.ci.run_tests - INFO - Execution exit value: Result.EX_OK
+ start_date = ""
+ case_name = ""
+ duration = TIMEOUT_RUN
+ status = 0
+ with open(file_result) as xtesting_result:
+ for cnt, line in enumerate(xtesting_result):
+ LOGGER.debug(cnt)
+
+ if "Running test case" in line:
+ start_date = line.split()[0] + " " + line.split()[1]
+ # NOTE(review): the parsed name is stored on self._case_name,
+ # never on the local case_name, so the returned
+ # TestResult.case_name is always "" — confirm intended.
+ self._case_name = (re.search('\'(.*)\'', line)).group(1)
+
+ # if test ends properly, overwrite start tile with end time
+ # for a better display
+ if "Execution exit value" in line:
+ start_date = line.split()[0] + " " + line.split()[1]
+
+ # Look for the result table
+ if "|" in line and self._case_name in line:
+ # DURATION column is MM:SS -> convert to seconds
+ duration_str = line.split()[5]
+ duration = int(
+ duration_str.split(":")[0])*60 + int(
+ duration_str.split(":")[1])
+ if line.split()[7] == "PASS":
+ status = 100
+ else:
+ status = 0
+
+ testresult = TestResult(
+ case_name=case_name,
+ status=status,
+ start_date=datetime.strptime(start_date, '%Y-%m-%d %H:%M:%S,%f'),
+ duration=duration)
+ return testresult
+
+ @staticmethod
+ def calculate_stats(durations):
+ """From a duration results, retrieve the min, max, mean & median value.
+
+ :param durations: non-empty list of durations (seconds)
+ :return: tuple (min, max, mean, median)
+ """
+
+ min_val = min(durations)
+ max_val = max(durations)
+
+ # Mean
+ total = sum(durations)
+ length = len(durations)
+ # NOTE(review): this loop iterates once over [durations] and only
+ # logs — it contributes nothing to the computation.
+ for nums in [durations]:
+ LOGGER.debug(nums)
+ mean_val = total / length
+
+ # Median
+ lst = sorted(durations)
+ # NOTE(review): lst is already sorted; this second sort is redundant.
+ med_val = sorted(lst)
+ lst_len = len(lst)
+ index = (lst_len - 1) // 2
+ median_val = 0
+ if lst_len % 2:
+ # odd count: middle element
+ median_val = med_val[index]
+ else:
+ # even count: average of the two middle elements
+ median_val = (med_val[index] + med_val[index + 1])/2.0
+
+ return min_val, max_val, mean_val, median_val
+
+ @staticmethod
+ def calculate_success_rate(criterias):
+ """Calculate Serie success rate.
+
+ :param criterias: list of per-test scores (100 for PASS, 0 for FAIL)
+ :return: arithmetic mean of the scores, i.e. the success percentage;
+ 0 when the list is empty
+ """
+ # calculate success rate
+ score = 0
+ for criteria in criterias:
+ score += criteria
+ try:
+ rate = score/len(criterias)
+ except ZeroDivisionError:
+ # empty serie: report 0% rather than crash
+ rate = 0
+ return rate
+
+
+ def parse_serie_durations(self): # pylint: disable=too-many-locals
+ """Find result series.
+
+ Walks the result directory tree, aggregates every xtesting.log by its
+ serie directory (grand-parent dir name), and returns a list of
+ SerieResult with stats and success rate per serie.
+ """
+ # from the res directory find all the subdirectory and build an array of results
+ series = []
+ serie_names = []
+ serie_durations = {}
+ serie_criteria = {}
+
+ for root, dirs, files in os.walk(self._result_dir_path):
+ try:
+ # numeric sort on the trailing digits of 'serieN'/'testN' dirs
+ dirs.sort(key=lambda x: int(x.split("/")[-1][5:]))
+ except ValueError:
+ LOGGER.debug("sort only what is sortable")
+
+ LOGGER.debug("Root: %s, Dirs: %s, Files: %s", root, dirs, files)
+
+ for name in files:
+ if name == RESULT_LOG_FILE:
+ # layout is <res_dir>/<serie>/<test>/xtesting.log,
+ # so the serie name is two levels up from the log file
+ serie_name = root.split("/")[-2]
+ # if new serie detected, initialize it
+ if serie_name not in serie_names:
+ serie_names.append(serie_name)
+ serie_durations[serie_name] = []
+ serie_criteria[serie_name] = []
+ serie_raw_results = self.parse_xtesting_results(
+ root + "/" + RESULT_LOG_FILE)
+ serie_durations[serie_name].append(
+ serie_raw_results.duration)
+ serie_criteria[serie_name].append(
+ serie_raw_results.status)
+ for serie in serie_names:
+ LOGGER.info("Calculate stats and success rate of serie %s", serie)
+ LOGGER.debug(serie_durations[serie])
+ LOGGER.debug(serie_criteria[serie])
+ # calculate stats
+ min_val, max_val, mean_val, med_val = self.calculate_stats(
+ serie_durations[serie])
+ success_rate = self.calculate_success_rate(
+ serie_criteria[serie])
+ series.append(SerieResult(
+ serie_id=serie,
+ min=min_val,
+ max=max_val,
+ mean=mean_val,
+ median=med_val,
+ success_rate=success_rate,
+ nb_occurences=len(serie_durations[serie])))
+
+ return series
+
+ def create_duration_time_serie(self):
+ """Create Histogram and scattered figure.
+
+ Writes two PNGs into the reporting dir:
+ - FIGURE_NAME: scatter of duration vs start time (PASS blue, FAIL red)
+ - histo_FIGURE_NAME: histogram of PASSing test durations only
+ """
+ # duration,success = f(time)
+ x_array_pass = []
+ x_array_fail = []
+ y_array_pass = []
+ y_array_fail = []
+ for root, dirs, files in os.walk(self._result_dir_path):
+ LOGGER.debug("Root: %s, Dirs: %s, Files: %s", root, dirs, files)
+ for name in files:
+ if name == RESULT_LOG_FILE:
+ serie_raw_results = self.parse_xtesting_results(
+ root + "/" + RESULT_LOG_FILE)
+ LOGGER.debug("Date %s", serie_raw_results.start_date)
+ LOGGER.debug("Status %s", serie_raw_results.status)
+ LOGGER.debug("Duration %s", serie_raw_results.duration)
+ # x_array.append(serie_raw_results.start_date)
+ # status is a score: 100 = PASS, anything lower = FAIL
+ if serie_raw_results.status < 100:
+ y_array_fail.append(serie_raw_results.duration)
+ x_array_fail.append(serie_raw_results.start_date)
+ else:
+ y_array_pass.append(serie_raw_results.duration)
+ x_array_pass.append(serie_raw_results.start_date)
+ plt.scatter(x_array_pass, y_array_pass, color='blue', label='PASS')
+ plt.scatter(x_array_fail, y_array_fail, color='red', label='FAIL')
+ plt.xlabel("time")
+ plt.ylabel("Duration of the test (s)")
+ plt.legend()
+ plt.savefig(self._reporting_dir + FIGURE_NAME)
+ plt.close()
+
+ # Create Histogram
+ plt.hist(y_array_pass)
+ plt.xlabel("Duration of the test")
+ plt.ylabel("Number of tests")
+ plt.savefig(self._reporting_dir + "histo_" + FIGURE_NAME)
+ plt.close()
+
+ def create_success_rate(self, series_bench):
+ """Draw success rate = f(serie ID).
+
+ Writes bar_FIGURE_NAME (bar chart, one bar per serie) into the
+ reporting dir.
+ :param series_bench: list of SerieResult from parse_serie_durations()
+ """
+ # Create a visualization of success rate
+ # success_rate = f(time)
+ x_array_success_rate = []
+ y_array_success_rate = []
+
+ for serie in series_bench:
+ # NOTE(review): x_array_success_rate is filled but unused below —
+ # bars are plotted against their positional index instead.
+ x_array_success_rate.append(serie.serie_id)
+ y_array_success_rate.append(int(serie.success_rate))
+ LOGGER.info(" Success rate vector: %s", y_array_success_rate)
+ plt.bar(range(len(y_array_success_rate)),
+ y_array_success_rate,
+ width=0.5,
+ color='blue')
+ # plt.plot(x_array_success_rate, y_array_success_rate, '-o', color='orange')
+ plt.xlabel("Series")
+ plt.ylabel("Success rate (%)")
+ plt.savefig(self._reporting_dir + "bar_" + FIGURE_NAME)
+ plt.close()
+
+ def create_cumulated_success_rate(self, series_bench):
+ """Draw success rate = f(nb executed tests).
+
+ Writes rate_FIGURE_NAME (line plot of the cumulative success rate as
+ more tests execute) into the reporting dir.
+ :param series_bench: list of SerieResult from parse_serie_durations()
+ """
+ # Create success_rate=f(nb test executed)
+ x_array_cumulated_success_rate = []
+ y_array_cumulated_success_rate = []
+ nb_test = 0
+ nb_success_test = 0
+ for serie in series_bench:
+ # calculate the number of tests
+ nb_test += self._nb_simultaneous_tests
+ # recalculate success rate
+ nb_success_test += int(serie.success_rate)*self._nb_simultaneous_tests
+ success_rate = nb_success_test / nb_test
+ x_array_cumulated_success_rate.append(nb_test)
+ y_array_cumulated_success_rate.append(success_rate)
+ plt.plot(
+ x_array_cumulated_success_rate,
+ y_array_cumulated_success_rate,
+ '-o', color='blue')
+ plt.xlabel("Nb of executed tests")
+ plt.ylabel("Success rate (%)")
+ plt.savefig(self._reporting_dir + "rate_" + FIGURE_NAME)
+ plt.close()
+
+
+ def generate_reporting(self):
+ """Generate Serie reporting.
+
+ Parses all series, renders onaptests_bench.html from the packaged
+ Jinja2 template and produces the four PNG figures in the reporting
+ directory.
+ """
+ series_bench = self.parse_serie_durations()
+ LOGGER.info(series_bench)
+
+ # create html page
+ jinja_env = Environment(
+ autoescape=select_autoescape(['html']),
+ loader=PackageLoader('onaptests_bench'))
+
+ page_info = {}
+ page_info['usecase_name'] = self._case_name
+ page_info['nb_series'] = str(len(series_bench))
+ page_info['nb_simu_tests'] = str(self._nb_simultaneous_tests)
+ page_info['test_duration'] = self._test_duration
+ page_info['nb_tests'] = self._nb_simultaneous_tests * len(series_bench)
+ success_rate_vector = []
+ min_durations = []
+ max_durations = []
+ mean_durations = []
+
+ for serie in series_bench:
+ success_rate_vector.append(int(serie.success_rate))
+ min_durations.append(int(serie.min))
+ max_durations.append(int(serie.max))
+ mean_durations.append(int(serie.mean))
+
+ page_info['global_success_rate'] = int(self.calculate_success_rate(
+ success_rate_vector))
+ page_info['min_duration'] = min(min_durations)
+ page_info['max_duration'] = max(max_durations)
+ # calculate_success_rate is reused here as a plain arithmetic-mean
+ # helper over the per-serie mean durations
+ page_info['mean_duration'] = int(
+ self.calculate_success_rate(mean_durations))
+ jinja_env.get_template(
+ 'onaptests_bench.html.j2').stream(
+ info=page_info,
+ data=series_bench).dump(
+ '{}/onaptests_bench.html'.format(self._reporting_dir))
+
+ self.create_duration_time_serie()
+ self.create_success_rate(series_bench)
+ self.create_cumulated_success_rate(series_bench)
diff --git a/test/onaptests_bench/src/onaptests_bench/templates/base.html.j2 b/test/onaptests_bench/src/onaptests_bench/templates/base.html.j2
new file mode 100644
index 000000000..cbb4e4428
--- /dev/null
+++ b/test/onaptests_bench/src/onaptests_bench/templates/base.html.j2
@@ -0,0 +1,231 @@
+{% macro color(failing, total) %}
+{% if failing == 0 %}
+is-success
+{% else %}
+{% if (failing / total) <= 0.1 %}
+is-warning
+{% else %}
+is-danger
+{% endif %}
+{% endif %}
+{% endmacro %}
+
+{% macro percentage(failing, total) %}
+{{ ((total - failing) / total) | round }}
+{% endmacro %}
+
+{% macro statistic(resource_name, failing, total) %}
+{% set success = total - failing %}
+<div class="level-item has-text-centered">
+ <div>
+ <p class="heading">{{ resource_name | capitalize }}</p>
+ <p class="title">{{ success }}/{{ total }}</p>
+ <progress class="progress {{ color(failing, total) }}" value="{{ success }}" max="{{ total }}">{{ percentage(failing, total) }}</progress>
+ </div>
+ </div>
+{% endmacro %}
+
+{% macro pods_table(pods) %}
+<div id="pods" class="table-container">
+ <table class="table is-fullwidth is-striped is-hoverable">
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th>Ready</th>
+ <th>Status</th>
+ <th>Reason</th>
+ <th>Restarts</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for pod in pods %}
+ <tr>
+ <td><a href="./pod-{{ pod.name }}.html" title="{{ pod.name }}">{{ pod.k8s.metadata.name }}</a></td>
+ {% if pod.init_done %}
+ <td>{{ pod.running_containers }}/{{ (pod.containers | length) }}</td>
+ {% else %}
+ <td>Init:{{ pod.runned_init_containers }}/{{ (pod.init_containers | length) }}</td>
+ {% endif %}
+ <td>{{ pod.k8s.status.phase }}</td>
+ <td>{{ pod.k8s.status.reason }}</td>
+ {% if pod.init_done %}
+ <td>{{ pod.restart_count }}</td>
+ {% else %}
+ <td>{{ pod.init_restart_count }}</td>
+ {% endif %}
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+</div>
+{% endmacro %}
+
+{% macro key_value_description_list(title, dict) %}
+<dt><strong>{{ title | capitalize }}:</strong></dt>
+<dd>
+ {% if dict %}
+ {% for key, value in dict.items() %}
+ {% if loop.first %}
+ <dl>
+ {% endif %}
+ <dt>{{ key }}:</dt>
+ <dd>{{ value }}</dd>
+ {% if loop.last %}
+ </dl>
+ {% endif %}
+ {% endfor %}
+ {% endif %}
+</dd>
+{% endmacro %}
+
+{% macro description(k8s) %}
+<div class="container">
+ <h1 class="title is-1">Description</h1>
+ <div class="content">
+ <dl>
+ {% if k8s.spec.type %}
+ <dt><strong>Type:</strong></dt>
+ <dd>{{ k8s.spec.type }}</dd>
+ {% if (k8s.spec.type | lower) == "clusterip" %}
+ <dt><strong>Headless:</strong></dt>
+ <dd>{% if (k8s.spec.cluster_ip | lower) == "none" %}Yes{% else %}No{% endif %}</dd>
+ {% endif %}
+ {% endif %}
+ {{ key_value_description_list('Labels', k8s.metadata.labels) | indent(width=6) }}
+ {{ key_value_description_list('Annotations', k8s.metadata.annotations) | indent(width=6) }}
+ {% if k8s.spec.selector %}
+ {% if k8s.spec.selector.match_labels %}
+ {{ key_value_description_list('Selector', k8s.spec.selector.match_labels) | indent(width=6) }}
+ {% else %}
+ {{ key_value_description_list('Selector', k8s.spec.selector) | indent(width=6) }}
+ {% endif %}
+ {% endif %}
+ {% if k8s.phase %}
+ <dt><strong>Status:</strong></dt>
+ <dd>{{ k8s.phase }}</dd>
+ {% endif %}
+ {% if k8s.metadata.owner_references %}
+ <dt><strong>Controlled By:</strong></dt>
+ <dd>{{ k8s.metadata.owner_references[0].kind }}/{{ k8s.metadata.owner_references[0].name }}</dd>
+ {% endif %}
+ </dl>
+ </div>
+</div>
+{% endmacro %}
+
+{% macro pods_container(pods, parent, has_title=True) %}
+<div class="container">
+ {% if has_title %}
+ <h1 class="title is-1">Pods</h1>
+ {% endif %}
+ {% if (pods | length) > 0 %}
+ {{ pods_table(pods) | indent(width=2) }}
+ {% else %}
+ <div class="notification is-warning">{{ parent }} has no pods!</div>
+ {% endif %}
+</div>
+{% endmacro %}
+
+{% macro two_level_breadcrumb(title, name) %}
+<section class="section">
+ <div class="container">
+ <nav class="breadcrumb" aria-label="breadcrumbs">
+ <ul>
+ <li><a href="./index.html">Summary</a></li>
+ <li class="is-active"><a href="#" aria-current="page">{{ title | capitalize }} {{ name }}</a></li>
+ </ul>
+ </nav>
+ </div>
+</section>
+{% endmacro %}
+
+{% macro pod_parent_summary(title, name, failed_pods, pods) %}
+{{ summary(title, name, [{'title': 'Pod', 'failing': failed_pods, 'total': (pods | length)}]) }}
+{% endmacro %}
+
+{% macro number_ok(number, none_value, total=None) %}
+{% if number %}
+{% if total and number < total %}
+<span class="tag is-warning">{{ number }}</span>
+{% else %}
+{{ number }}
+{% endif %}
+{% else %}
+<span class="tag is-warning">{{ none_value }}</span>
+{% endif %}
+{% endmacro %}
+
+{% macro summary(title, name, statistics) %}
+<section class="hero is-light">
+ <div class="hero-body">
+ <div class="container">
+ <h1 class="title is-1">
+ {{ title | capitalize }} {{ name }} Summary
+ </h1>
+ <nav class="level">
+ {% for stat in statistics %}
+ {% if stat.total > 0 %}
+ {{ statistic(stat.title, stat.failing, stat.total) | indent(width=8) }}
+ {% endif %}
+ {% endfor %}
+ </nav>
+ </div>
+ </div>
+</section>
+{% endmacro %}
+
+<!DOCTYPE html>
+<html>
+ <head>
+ <meta charset="utf-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1">
+ <title>Tests results - {% block title %}{% endblock %}</title>
+ <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bulma@0.9.0/css/bulma.min.css">
+ <script defer src="https://use.fontawesome.com/releases/v5.3.1/js/all.js"></script>
+ {% block more_head %}{% endblock %}
+ </head>
+ <body>
+ <nav class="navbar" role="navigation" aria-label="main navigation">
+ <div class="navbar-brand">
+ <a class="navbar-item" href="https://www.onap.org">
+ <img src="https://www.onap.org/wp-content/uploads/sites/20/2017/02/logo_onap_2017.png" width="234" height="50">
+ </a>
+
+ <a role="button" class="navbar-burger burger" aria-label="menu" aria-expanded="false" data-target="navbarBasicExample">
+ <span aria-hidden="true"></span>
+ <span aria-hidden="true"></span>
+ <span aria-hidden="true"></span>
+ </a>
+ </div>
+
+ <div id="navbarBasicExample" class="navbar-menu">
+ <div class="navbar-start">
+ <a class="navbar-item">
+ Summary
+ </a>
+ </div>
+ </div>
+ </nav>
+
+ {% block content %}{% endblock %}
+
+ <footer class="footer">
+ <div class="container">
+ <div class="columns">
+ <div class="column">
+ <p class="has-text-grey-light">
+ <a href="https://bulma.io/made-with-bulma/">
+ <img src="https://bulma.io/images/made-with-bulma.png" alt="Made with Bulma" width="128" height="24">
+ </a>
+ </p>
+ </div>
+ <div class="column">
+ <a class="has-text-grey" href="https://gitlab.com/Orange-OpenSource/lfn/tools/kubernetes-status" style="border-bottom: 1px solid currentColor;">
+ Improve this page on Gitlab
+ </a>
+ </div>
+ </div>
+ </div>
+ </footer>
+ </body>
+</html>
diff --git a/test/onaptests_bench/src/onaptests_bench/templates/onaptests_bench.html.j2 b/test/onaptests_bench/src/onaptests_bench/templates/onaptests_bench.html.j2
new file mode 100644
index 000000000..154bed285
--- /dev/null
+++ b/test/onaptests_bench/src/onaptests_bench/templates/onaptests_bench.html.j2
@@ -0,0 +1,79 @@
+{% extends "base.html.j2" %}
+{% block title %}ONAPTEST Bench{% endblock %}
+
+{% block content %}
+<h1 class="title is-1">ONAPTEST Bench</h1>
+<section class="section">
+ <div class="container">
+ <h3 class="subtitle">{{ info.usecase_name }}</h3>
+
+ <div class="block">
+ <div class="box">
+ Number of tests: {{ info.nb_tests }} <br>
+ Global success rate: {{ info.global_success_rate }} % <br>
+ Number of simultaneous tests: {{ info.nb_simu_tests }} <br>
+ Test duration: {{ info.test_duration }} m <br>
+ Number of executed series: {{ info.nb_series }} <br>
+ Min duration: {{ info.min_duration}} <br>
+ Max duration: {{ info.max_duration}} <br>
+ Mean duration: {{ info.mean_duration}} <br>
+ </div>
+</div>
+
+<div class="columns">
+ <div class="column">
+ <figure class="image">
+ <img src="./rate_mygraph.png">
+ </figure>
+ </div>
+ <div class="column">
+ <figure class="image">
+ <img src="./bar_mygraph.png">
+ </figure>
+ </div>
+ <div class="column">
+ <figure class="image">
+ <img src="./mygraph.png">
+ </figure>
+ </div>
+ <div class="column">
+ <figure class="image">
+ <img src="./histo_mygraph.png">
+ </figure>
+ </div>
+</div>
+
+<table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th><center>Serie</center></th>
+ <th><center>Success Rate</center></th>
+ <th><center>Min</center></th>
+ <th><center>Max</center></th>
+ <th><center>Mean</center></th>
+ <th><center>Median</center></th>
+ </tr>
+ </thead>
+
+ <tbody>
+
+ {% for serie in data %}
+ <tr {% if serie.success_rate >= 80 %} class="has-background-success-light" {%elif serie.success_rate > 0 %} class="has-background-warning-light" {% else %} class="has-background-danger-light" {% endif %}>
+ <td><center>{{ serie.serie_id }}</center></td>
+ <td><center>{{ serie.success_rate }}%</center></td>
+ <td><center>{{ serie.min }}</center></td>
+ <td><center>{{ serie.max }}</center></td>
+ <td><center>{{ serie.mean }}</center></td>
+ <td><center>{{ serie.median }}</center></td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+
+</div>
+
+</section>
+
+{% endblock %}
+</div>
+</section>
diff --git a/test/onaptests_bench/test-requirements.txt b/test/onaptests_bench/test-requirements.txt
new file mode 100644
index 000000000..a0679b703
--- /dev/null
+++ b/test/onaptests_bench/test-requirements.txt
@@ -0,0 +1,6 @@
+# The order of packages is significant, because pip processes them in the order
+# of appearance. Changing the order has an impact on the overall integration
+# process, which may cause wedges in the gate later.
+coverage!=4.4,>=4.0 # Apache-2.0
+flake8 # MIT
+pylint # GPLv2
diff --git a/test/onaptests_bench/tox.ini b/test/onaptests_bench/tox.ini
new file mode 100644
index 000000000..9745d4fe6
--- /dev/null
+++ b/test/onaptests_bench/tox.ini
@@ -0,0 +1,15 @@
+[tox]
+envlist = py3, pylint
+
+[testenv]
+deps =
+ -r{toxinidir}/requirements.txt
+
+[testenv:py3]
+commands = python {toxinidir}/setup.py develop
+
+[testenv:pylint]
+deps =
+ -r{toxinidir}/test-requirements.txt
+
+commands = pylint src
diff --git a/test/postman/01_Onboard_Vendor.postman_collection.json b/test/postman/01_Onboard_Vendor.postman_collection.json
deleted file mode 100644
index 17d23d320..000000000
--- a/test/postman/01_Onboard_Vendor.postman_collection.json
+++ /dev/null
@@ -1,363 +0,0 @@
-{
- "info": {
- "_postman_id": "338c7225-c4d5-40c8-8619-ae904f41a83d",
- "name": "01_Onboard_Vendor",
- "description": "onboard new vendor",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "Get Vendor before create",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "2249b072-2689-45f3-87ea-a75da5802752",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "var vendor_found = false;",
- "for (var i = 0; i < jsonData.results.length; i++) { ",
- " if (jsonData.results[i].name === pm.environment.get(\"vendor_name\")) {",
- " vendor_found = true;",
- " pm.environment.set(\"auto_vendor_id\", \"\"+jsonData.results[i].id+\"\");",
- " }",
- "}",
- "if (vendor_found === false) {",
- " tests[pm.environment.get(\"vendor_name\")+\" not yet declared\"] = true;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"vendor_name\")+\" already exists, we stop the run\"] = false;",
- " postman.setNextRequest(null);",
- "}",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/vendor-license-models",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "vendor-license-models"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Create Vendor",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "80c1d190-d480-4150-ae91-8ce4111c0f9d",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "pm.environment.set(\"auto_vendor_id\", \"\"+jsonData.itemId+\"\");",
- "pm.environment.set(\"auto_vendor_version_id\", \"\"+jsonData.version.id+\"\");",
- "",
- "tests[\"Vendor status is : \"+jsonData.version.status] = jsonData.version.status === \"Draft\";"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\r\n \"iconRef\": \"icon\",\r\n \"vendorName\": \"{{vendor_name}}\",\r\n \"description\": \"Vendor\"\r\n}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/vendor-license-models",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "vendor-license-models"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Get Vendor after create",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "aa6fd7c5-c941-43ce-8300-a492c2d62a6c",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "var vendor_found = false;",
- "for (var i = 0; i < jsonData.results.length; i++) { ",
- " if (jsonData.results[i].name === pm.environment.get(\"vendor_name\")) {",
- " vendor_found = true;",
- " pm.environment.set(\"auto_vendor_id\", \"\"+jsonData.results[i].id+\"\");",
- " }",
- "}",
- "tests[pm.environment.get(\"vendor_name\")+\" found\"] = vendor_found === vendor_found;",
- "",
- "",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/vendor-license-models",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "vendor-license-models"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Submit Vendor",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "db905800-2754-491a-9d78-1ffebc12fb18",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "PUT",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\"action\":\"Submit\"}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/vendor-license-models/{{auto_vendor_id}}/versions/{{auto_vendor_version_id}}/actions",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "vendor-license-models",
- "{{auto_vendor_id}}",
- "versions",
- "{{auto_vendor_version_id}}",
- "actions"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Get Vendor additional info after Submit",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "f9003c80-add1-4581-9a9d-661972f7d6d5",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "tests[\"Vendor is : \"+jsonData.status] = jsonData.status === \"Certified\";",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/items/{{auto_vendor_id}}/versions/{{auto_vendor_version_id}}",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "items",
- "{{auto_vendor_id}}",
- "versions",
- "{{auto_vendor_version_id}}"
- ]
- }
- },
- "response": []
- }
- ]
-} \ No newline at end of file
diff --git a/test/postman/02_Onboard_VSP_part1.postman_collection.json b/test/postman/02_Onboard_VSP_part1.postman_collection.json
deleted file mode 100644
index 0e461930e..000000000
--- a/test/postman/02_Onboard_VSP_part1.postman_collection.json
+++ /dev/null
@@ -1,424 +0,0 @@
-{
- "info": {
- "_postman_id": "7f61efa5-8b7f-4593-9d57-26da61bb8604",
- "name": "02_Onboard_VSP_part1",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "check VSP exists",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "dc866867-2909-4844-9072-01c9e2e4e856",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "var vsp_found = false;",
- "for (var i = 0; i < jsonData.results.length; i++) { ",
- " if (jsonData.results[i].name === pm.environment.get(\"vsp_name\")) {",
- " vsp_found = true;",
- " }",
- "}",
- "",
- "if (vsp_found === false) {",
- " tests[pm.environment.get(\"vsp_name\")+\" does not yet exist, we continue\"] = true;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"vsp_name\")+\" already exists, we stop the run\"] = false;",
- " postman.setNextRequest(null);",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/vendor-software-products",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "vendor-software-products"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Get Vendor infos",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "dbd0ea7f-5f66-4431-8a51-2e5d757647a0",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "",
- "var jsonData = pm.response.json();",
- "var vendor_found = false;",
- "for (var i = 0; i < jsonData.results.length; i++) { ",
- " if (jsonData.results[i].name === pm.environment.get(\"vendor_name\")) {",
- " vendor_found = true;",
- " pm.environment.set(\"auto_vendor_id\", \"\"+jsonData.results[i].id+\"\");",
- " }",
- "}",
- "if (vendor_found === false) {",
- " tests[pm.environment.get(\"vendor_name\")+\" does not exists : we stop the run\"] = false;",
- " postman.setNextRequest(null);",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"vendor_name\")+\" exists\"] = true;",
- "}",
- "",
- "",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/vendor-license-models",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "vendor-license-models"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Create VSP",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "739f040c-f4b1-4790-bfce-4f5dae60b637",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "pm.environment.set(\"auto_vsp_id\", \"\"+jsonData.itemId+\"\");",
- "pm.environment.set(\"auto_vsp_version_id\", \"\"+jsonData.version.id+\"\");",
- "pm.environment.set(\"auto_vsp_version_name\", \"\"+jsonData.version.name+\"\");",
- "",
- "tests[\"VSP version is : \"+jsonData.version.status] = jsonData.version.status === \"Draft\";"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\r\n \"name\": \"{{vsp_name}}\",\r\n \"description\": \"vendor software product\",\r\n \"icon\": \"icon\",\r\n \"category\": \"resourceNewCategory.generic\",\r\n \"subCategory\": \"resourceNewCategory.generic.abstract\",\r\n \"vendorName\": \"{{vendor_name}}\",\r\n \"vendorId\": \"{{auto_vendor_id}}\",\r\n \"licensingData\": {},\r\n \"onboardingMethod\": \"NetworkPackage\"\r\n}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/vendor-software-products",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "vendor-software-products"
- ]
- }
- },
- "response": [
- {
- "name": "Create Vendor Software Product",
- "originalRequest": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": ""
- }
- },
- "status": "OK",
- "code": 200,
- "_postman_previewlanguage": "json",
- "header": [
- {
- "key": "access-control-allow-credentials",
- "value": "true",
- "name": "access-control-allow-credentials",
- "description": "Indicates whether or not the response to the request can be exposed when the credentials flag is true. When used as part of a response to a preflight request, this indicates whether or not the actual request can be made using credentials."
- },
- {
- "key": "access-control-allow-origin",
- "value": "chrome-extension://fhbjgbiflinjbdggehcddcbncdddomop",
- "name": "access-control-allow-origin",
- "description": "Specifies a URI that may access the resource. For requests without credentials, the server may specify '*' as a wildcard, thereby allowing any origin to access the resource."
- },
- {
- "key": "access-control-expose-headers",
- "value": "",
- "name": "access-control-expose-headers",
- "description": "Lets a server whitelist headers that browsers are allowed to access."
- },
- {
- "key": "content-type",
- "value": "application/json",
- "name": "content-type",
- "description": "The mime type of this content"
- },
- {
- "key": "date",
- "value": "Wed, 06 Jun 2018 15:02:46 GMT",
- "name": "date",
- "description": "The date and time that the message was sent"
- },
- {
- "key": "server",
- "value": "Jetty(9.3.21.v20170918)",
- "name": "server",
- "description": "A name for the server"
- },
- {
- "key": "transfer-encoding",
- "value": "chunked",
- "name": "transfer-encoding",
- "description": "The form of encoding used to safely transfer the entity to the user. Currently defined methods are: chunked, compress, deflate, gzip, identity."
- }
- ],
- "cookie": [],
- "body": "{\"vspId\":\"b3267b0aefbd4e2ea52be0e414139b1c\"}"
- }
- ]
- },
- {
- "name": "Get VSP versions",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "5e55115d-c797-4b34-bfaf-d2af24d62c01",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/items/{{auto_vsp_id}}/versions",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "items",
- "{{auto_vsp_id}}",
- "versions"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Get VSP status",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "2492d06b-4b31-4d93-aaf2-3417419c6e14",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/items/{{auto_vsp_id}}/versions/{{auto_vsp_version_id}}",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "items",
- "{{auto_vsp_id}}",
- "versions",
- "{{auto_vsp_version_id}}"
- ]
- }
- },
- "response": []
- }
- ]
-} \ No newline at end of file
diff --git a/test/postman/03_Onboard_VSP_part2.postman_collection.json b/test/postman/03_Onboard_VSP_part2.postman_collection.json
deleted file mode 100644
index d4b3a2802..000000000
--- a/test/postman/03_Onboard_VSP_part2.postman_collection.json
+++ /dev/null
@@ -1,87 +0,0 @@
-{
- "info": {
- "_postman_id": "4779261e-f678-4383-b3a9-dfe63f080371",
- "name": "03_Onboard_VSP_part2",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "Upload zip file containing Heat files",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "033f8cad-de66-4de9-8858-f3c72629c838",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "if (jsonData.status === \"Success\") {",
- " tests[\"Upload Success\"] = true; ",
- "}",
- "",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "multipart/form-data"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "formdata",
- "formdata": [
- {
- "key": "upload",
- "type": "file",
- "src": ""
- }
- ]
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/vendor-software-products/{{auto_vsp_id}}/versions/{{auto_vsp_version_id}}/orchestration-template-candidate",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "vendor-software-products",
- "{{auto_vsp_id}}",
- "versions",
- "{{auto_vsp_version_id}}",
- "orchestration-template-candidate"
- ]
- }
- },
- "response": []
- }
- ]
-} \ No newline at end of file
diff --git a/test/postman/04_Onboard_VSP_part3.postman_collection.json b/test/postman/04_Onboard_VSP_part3.postman_collection.json
deleted file mode 100644
index 3f6514869..000000000
--- a/test/postman/04_Onboard_VSP_part3.postman_collection.json
+++ /dev/null
@@ -1,398 +0,0 @@
-{
- "info": {
- "_postman_id": "458aefd8-1c13-4d22-859b-38757f935cca",
- "name": "04_Onboard_VSP_part3",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "Process VSP zip file",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "1dceb509-a7e0-426f-b6fc-c50e9e9376e3",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "PUT",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/vendor-software-products/{{auto_vsp_id}}/versions/{{auto_vsp_version_id}}/orchestration-template-candidate/process",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "vendor-software-products",
- "{{auto_vsp_id}}",
- "versions",
- "{{auto_vsp_version_id}}",
- "orchestration-template-candidate",
- "process"
- ]
- }
- },
- "response": [
- {
- "name": "Checkin VSP",
- "originalRequest": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": ""
- }
- },
- "status": "OK",
- "code": 200,
- "_postman_previewlanguage": "json",
- "header": [
- {
- "key": "access-control-allow-credentials",
- "value": "true",
- "name": "access-control-allow-credentials",
- "description": "Indicates whether or not the response to the request can be exposed when the credentials flag is true. When used as part of a response to a preflight request, this indicates whether or not the actual request can be made using credentials."
- },
- {
- "key": "access-control-allow-origin",
- "value": "chrome-extension://fhbjgbiflinjbdggehcddcbncdddomop",
- "name": "access-control-allow-origin",
- "description": "Specifies a URI that may access the resource. For requests without credentials, the server may specify '*' as a wildcard, thereby allowing any origin to access the resource."
- },
- {
- "key": "access-control-expose-headers",
- "value": "",
- "name": "access-control-expose-headers",
- "description": "Lets a server whitelist headers that browsers are allowed to access."
- },
- {
- "key": "content-type",
- "value": "application/json",
- "name": "content-type",
- "description": "The mime type of this content"
- },
- {
- "key": "date",
- "value": "Wed, 06 Jun 2018 16:14:24 GMT",
- "name": "date",
- "description": "The date and time that the message was sent"
- },
- {
- "key": "server",
- "value": "Jetty(9.3.21.v20170918)",
- "name": "server",
- "description": "A name for the server"
- },
- {
- "key": "transfer-encoding",
- "value": "chunked",
- "name": "transfer-encoding",
- "description": "The form of encoding used to safely transfer the entity to the user. Currently defined methods are: chunked, compress, deflate, gzip, identity."
- }
- ],
- "cookie": [],
- "body": "{}"
- }
- ]
- },
- {
- "name": "Commit VSP",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "0f0417e6-2b03-44a4-9416-9f709077a525",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "PUT",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\"action\":\"Commit\",\"commitRequest\":{\"message\":\"ok\"}}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/items/{{auto_vsp_id}}/versions/{{auto_vsp_version_id}}/actions",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "items",
- "{{auto_vsp_id}}",
- "versions",
- "{{auto_vsp_version_id}}",
- "actions"
- ]
- }
- },
- "response": [
- {
- "name": "Checkin VSP",
- "originalRequest": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": ""
- }
- },
- "status": "OK",
- "code": 200,
- "_postman_previewlanguage": "json",
- "header": [
- {
- "key": "access-control-allow-credentials",
- "value": "true",
- "name": "access-control-allow-credentials",
- "description": "Indicates whether or not the response to the request can be exposed when the credentials flag is true. When used as part of a response to a preflight request, this indicates whether or not the actual request can be made using credentials."
- },
- {
- "key": "access-control-allow-origin",
- "value": "chrome-extension://fhbjgbiflinjbdggehcddcbncdddomop",
- "name": "access-control-allow-origin",
- "description": "Specifies a URI that may access the resource. For requests without credentials, the server may specify '*' as a wildcard, thereby allowing any origin to access the resource."
- },
- {
- "key": "access-control-expose-headers",
- "value": "",
- "name": "access-control-expose-headers",
- "description": "Lets a server whitelist headers that browsers are allowed to access."
- },
- {
- "key": "content-type",
- "value": "application/json",
- "name": "content-type",
- "description": "The mime type of this content"
- },
- {
- "key": "date",
- "value": "Wed, 06 Jun 2018 16:14:24 GMT",
- "name": "date",
- "description": "The date and time that the message was sent"
- },
- {
- "key": "server",
- "value": "Jetty(9.3.21.v20170918)",
- "name": "server",
- "description": "A name for the server"
- },
- {
- "key": "transfer-encoding",
- "value": "chunked",
- "name": "transfer-encoding",
- "description": "The form of encoding used to safely transfer the entity to the user. Currently defined methods are: chunked, compress, deflate, gzip, identity."
- }
- ],
- "cookie": [],
- "body": "{}"
- }
- ]
- },
- {
- "name": "Submit VSP",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "78d1f69f-e8aa-47aa-a456-45496847ace4",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "PUT",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\"action\": \"Submit\"}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/vendor-software-products/{{auto_vsp_id}}/versions/{{auto_vsp_version_id}}/actions",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "vendor-software-products",
- "{{auto_vsp_id}}",
- "versions",
- "{{auto_vsp_version_id}}",
- "actions"
- ]
- }
- },
- "response": []
- },
- {
- "name": "CSAR VSP",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "5900576b-42d4-4b68-ae67-770e76e6ba97",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "PUT",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\"action\": \"Create_Package\"}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/vendor-software-products/{{auto_vsp_id}}/versions/{{auto_vsp_version_id}}/actions",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "vendor-software-products",
- "{{auto_vsp_id}}",
- "versions",
- "{{auto_vsp_version_id}}",
- "actions"
- ]
- }
- },
- "response": []
- }
- ]
-} \ No newline at end of file
diff --git a/test/postman/05_Onboard_VF.postman_collection.json b/test/postman/05_Onboard_VF.postman_collection.json
deleted file mode 100644
index 50ae5f89f..000000000
--- a/test/postman/05_Onboard_VF.postman_collection.json
+++ /dev/null
@@ -1,632 +0,0 @@
-{
- "info": {
- "_postman_id": "8f0c4ff0-c762-4289-8547-daa0def307c1",
- "name": "05_Onboard_VF",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "check vf exists and get id",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "a7d64193-edcb-4540-bcac-dde0f33a637f",
- "exec": [
- "",
- "var vf_found = false;",
- "if (pm.response.code === 404) {",
- " tests[pm.environment.get(\"vf_name\")+\" does not exists\"] = true;",
- "}",
- "else {",
- " if (pm.response.code === 200) {",
- " tests[\"Status code is 200\"] = responseCode.code === 200;",
- " }",
- " var jsonData = pm.response.json();",
- " for (var i = 0; i < jsonData.length; i++) { ",
- " if (jsonData[i].name === pm.environment.get(\"vf_name\")) {",
- " vf_found = true;",
- " pm.environment.set(\"auto_vf_uuid\", \"\"+jsonData[i].uuid+\"\");",
- " pm.environment.set(\"auto_vf_invariant_uuid\", \"\"+jsonData[i].invariantUUID+\"\");",
- " }",
- " }",
- " if (vf_found === false) {",
- " tests[pm.environment.get(\"vf_name\")+\" does not yet exists, we continue the run\"] = true;",
- " }",
- " ",
- " else {",
- " tests[pm.environment.get(\"vf_name\")+\" already exists, stop the run\"] = false;",
- " postman.setNextRequest(null);",
- " }",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc}}/sdc/v1/catalog/resources?resourceType=VF",
- "host": [
- "{{url-sdc}}"
- ],
- "path": [
- "sdc",
- "v1",
- "catalog",
- "resources"
- ],
- "query": [
- {
- "key": "resourceType",
- "value": "VF"
- }
- ]
- }
- },
- "response": []
- },
- {
- "name": "check VSP exists and get infos",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "ff2f00ca-80fc-40d6-b6ec-aac08eb91759",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "var vsp_found = false;",
- "for (var i = 0; i < jsonData.results.length; i++) { ",
- " if (jsonData.results[i].name === pm.environment.get(\"vsp_name\")) {",
- " vsp_found = true;",
- " pm.environment.set(\"auto_vsp_id\", \"\"+jsonData.results[i].id+\"\");",
- "",
- " }",
- "}",
- "if (vsp_found === false) {",
- " tests[\"VSP \"+pm.environment.get(\"vsp_name\")+\" does not exists, we stop the run\"] = false;",
- " postman.setNextRequest(null); ",
- "}",
- "",
- "else {",
- " tests[\"VSP \"+pm.environment.get(\"vsp_name\")+\" exists, we can continue\"] = true;",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/vendor-software-products",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "vendor-software-products"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Get VSP versions",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "f16c158f-aceb-490c-924d-3ea83c2b9431",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "var vsp_found = false;",
- "for (var i = 0; i < jsonData.results.length; i++) { ",
- " if (jsonData.results[i].name === pm.environment.get(\"vsp_name\")) {",
- " vsp_found = true;",
- " pm.environment.set(\"auto_vsp_version_id\", \"\"+jsonData.results[i].id+\"\");",
- " pm.environment.set(\"auto_vsp_version_name\", \"\"+jsonData.results[i].name+\"\");",
- " }",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/items/{{auto_vsp_id}}/versions",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "items",
- "{{auto_vsp_id}}",
- "versions"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Get Vendor infos",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "ed512660-e7af-450f-aa7e-b503c7e21a72",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "var vendor_found = false;",
- "for (var i = 0; i < jsonData.results.length; i++) { ",
- " if (jsonData.results[i][\"name\"] === pm.environment.get(\"vendor_name\")) {",
- " vendor_found = true;",
- " pm.environment.set(\"auto_vendor_id\", \"\"+jsonData.results[i][\"id\"]+\"\");",
- " }",
- "}",
- "if (vendor_found === false) {",
- " tests[pm.environment.get(\"vendor_name\")+\" does not exists : we stop the run\"] = false;",
- " postman.setNextRequest(null);",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"vendor_name\")+\" exists\"] = true;",
- "}",
- "",
- "",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "robot-ete-ba84612d-c1c6-4c53-9967-7b1dff276c7a"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "robot-ete"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/onboarding-api/v1.0/vendor-license-models",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "onboarding-api",
- "v1.0",
- "vendor-license-models"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Create VF resource",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "78a4913a-db8c-4c41-8fae-28e5bf6d90b4",
- "exec": [
- "pm.test(\"Status code is 201\", function () {",
- " pm.response.to.have.status(201);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "pm.environment.set(\"auto_vf_invariant_uuid\", \"\"+jsonData.invariantUUID+\"\");",
- "pm.environment.set(\"auto_vf_uuid\", \"\"+jsonData.uuid+\"\");",
- "pm.environment.set(\"auto_vf_unique_id\", \"\"+jsonData.uniqueId+\"\");",
- "",
- "tests[\"VF status is : \"+jsonData.lifecycleState] = jsonData.lifecycleState === \"NOT_CERTIFIED_CHECKOUT\";"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\r\n\t\"artifacts\": {},\r\n\t\"toscaArtifacts\": {},\r\n\t\"contactId\": \"cs0008\",\r\n\t\"categories\": [{\r\n\t\t\"name\": \"Generic\",\r\n\t\t\"normalizedName\": \"generic\",\r\n\t\t\"uniqueId\": \"resourceNewCategory.generic\",\r\n\t\t\"icons\": null,\r\n\t\t\"subcategories\": [{\r\n\t\t\t\"name\": \"Abstract\",\r\n\t\t\t\"normalizedName\": \"abstract\",\r\n\t\t\t\"uniqueId\": \"resourceNewCategory.generic.abstract\",\r\n\t\t\t\"icons\": [\"objectStorage\", \"compute\"],\r\n\t\t\t\"groupings\": null,\r\n\t\t\t\"ownerId\": null,\r\n\t\t\t\"empty\": false\r\n\t\t}],\r\n\t\t\"ownerId\": null,\r\n\t\t\"empty\": false\r\n\t}],\r\n\t\"description\": \"VF named {{vf_name}}\",\r\n\t\"icon\": \"defaulticon\",\r\n\t\"componentInstancesProperties\": {},\r\n\t\"componentInstancesAttributes\": {},\r\n\t\"name\": \"{{vf_name}}\",\r\n\t\"tags\": [\"{{vf_name}}\"],\r\n\t\"capabilities\": {},\r\n\t\"requirements\": {},\r\n\t\"deploymentArtifacts\": {},\r\n\t\"componentType\": \"RESOURCE\",\r\n\t\"vendorName\": \"{{vendor_name}}\",\r\n\t\"vendorRelease\": \"1.0\",\r\n\t\"componentInstances\": [],\r\n\t\"properties\": [],\r\n\t\"attributes\": [],\r\n\t\"groups\": [],\r\n\t\"resourceType\": \"VF\",\r\n\t\"csarUUID\": \"{{auto_vsp_id}}\",\r\n\t\"csarVersion\": \"{{auto_vsp_version_name}}\"\r\n}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/resources",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "resources"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Checkin VF resource",
- "event": [
- {
- "listen": "test",
- "script": {
- "type": "text/javascript",
- "exec": [
- "var jsonData = pm.response.json();",
- "",
- "tests[\"VF status is : \"+jsonData.lifecycleState] = jsonData.lifecycleState === \"NOT_CERTIFIED_CHECKIN\";"
- ]
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "X-ECOMP-InstanceID",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\"userRemarks\": \"ONAP-Test checkin\"}"
- },
- "url": {
- "raw": "{{url-sdc}}/sdc/v1/catalog/resources/{{auto_vf_uuid}}/lifecycleState/checkin",
- "host": [
- "{{url-sdc}}"
- ],
- "path": [
- "sdc",
- "v1",
- "catalog",
- "resources",
- "{{auto_vf_uuid}}",
- "lifecycleState",
- "checkin"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Certify VF resource",
- "event": [
- {
- "listen": "test",
- "script": {
- "type": "text/javascript",
- "exec": [
- "var jsonData = pm.response.json();",
- "",
- "tests[\"VF status is : \"+jsonData.lifecycleState] = jsonData.lifecycleState === \"CERTIFIED\";"
- ]
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "X-ECOMP-InstanceID",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\"userRemarks\": \"certify\"}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/resources/{{auto_vf_unique_id}}/lifecycleState/certify",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "resources",
- "{{auto_vf_unique_id}}",
- "lifecycleState",
- "certify"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Get VF infos by uniqueId to get new vf_unique_Id (=new version id)",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "8d831aa8-7760-44b4-aab4-da4724e8dfc9",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "pm.environment.set(\"auto_vf_new_unique_id\", \"\"+jsonData[\"metadata\"][\"allVersions\"][\"1.0\"]+\"\");",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/resources/{{auto_vf_unique_id}}/filteredDataByParams?include=metadata",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "resources",
- "{{auto_vf_unique_id}}",
- "filteredDataByParams"
- ],
- "query": [
- {
- "key": "include",
- "value": "metadata"
- }
- ]
- }
- },
- "response": []
- }
- ]
-} \ No newline at end of file
diff --git a/test/postman/06_Onboard_Service.postman_collection.json b/test/postman/06_Onboard_Service.postman_collection.json
deleted file mode 100644
index eb4724c97..000000000
--- a/test/postman/06_Onboard_Service.postman_collection.json
+++ /dev/null
@@ -1,1410 +0,0 @@
-{
- "info": {
- "_postman_id": "dcfc4671-1cb4-453a-8fef-97a0ba533daa",
- "name": "06_Onboard_Service",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "check service exists and get infos",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "4f27b99e-23ac-4e7c-a1dc-c42ad63d6f65",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "var service_found = false;",
- "for (var i = 0; i < jsonData.length; i++) { ",
- " if (jsonData[i].name === pm.environment.get(\"service\")) {",
- " service_found = true;",
- " pm.environment.set(\"auto_service_uuid\", \"\"+jsonData[i].uuid+\"\");",
- " pm.environment.set(\"auto_service_invariant_uuid\", \"\"+jsonData[i].invariantUUID+\"\");",
- " }",
- "}",
- "if (service_found === false) {",
- " tests[pm.environment.get(\"service\")+\" does not exist yet, we continue the run\"] = true;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"service\")+\" already exists, stop the run\"] = false;",
- " postman.setNextRequest(null);",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc}}/sdc/v1/catalog/services",
- "host": [
- "{{url-sdc}}"
- ],
- "path": [
- "sdc",
- "v1",
- "catalog",
- "services"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Declare an aLaCarte Service",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "e6118e78-e476-4049-8e81-f81b53ba625a",
- "exec": [
- "pm.test(\"Status code is 201\", function () {",
- " pm.response.to.have.status(201);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "pm.environment.set(\"auto_service_invariant_uuid\", \"\"+jsonData[\"invariantUUID\"]+\"\");",
- "pm.environment.set(\"auto_service_uuid\", \"\"+jsonData[\"uuid\"]+\"\");",
- "pm.environment.set(\"auto_service_unique_id\", \"\"+jsonData[\"uniqueId\"]+\"\");",
- "",
- "tests[\"Service status is : \"+jsonData.lifecycleState] = jsonData.lifecycleState === \"NOT_CERTIFIED_CHECKOUT\";"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\r\n\t\"artifacts\": {},\r\n\t\"toscaArtifacts\": {},\r\n\t\"contactId\": \"cs0008\",\r\n\t\"categories\": [{\r\n\t\t\"name\": \"Network Service\",\r\n\t\t\"normalizedName\": \"network service\",\r\n\t\t\"uniqueId\": \"serviceNewCategory.network service\",\r\n\t\t\"icons\": [\"network_l_1-3\"],\r\n\t\t\"subcategories\": null,\r\n\t\t\"version\": null,\r\n\t\t\"ownerId\": null,\r\n\t\t\"empty\": false,\r\n\t\t\"type\": null\r\n\t}],\r\n\t\"description\": \"Service {{service}}\",\r\n\t\"icon\": \"defaulticon\",\r\n\t\"componentInstancesProperties\": {},\r\n\t\"componentInstancesAttributes\": {},\r\n\t\"name\": \"{{service}}\",\r\n\t\"tags\": [\"{{service}}\"],\r\n\t\"capabilities\": {},\r\n\t\"requirements\": {},\r\n\t\"deploymentArtifacts\": {},\r\n\t\"componentType\": \"SERVICE\",\r\n\t\"projectCode\": \"123456\",\r\n\t\"componentInstances\": [],\r\n\t\"properties\": [],\r\n\t\"attributes\": [],\r\n\t\"forwardingPaths\": {},\r\n\t\"ecompGeneratedNaming\": true,\r\n\t\"serviceApiArtifacts\": {},\r\n\t\"instantiationType\": \"A-la-carte\",\r\n\t\"environmentContext\": \"General_Revenue-Bearing\"\r\n}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check VF exists and get uuid",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "87b90661-aefb-4ec9-94fc-fb5277f8c283",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "var vf_found = false;",
- "if (jsonData.name === pm.environment.get(\"vf_name\")) {",
- " vf_found = true;",
- " pm.environment.set(\"auto_vf_uuid\", \"\"+jsonData.uuid+\"\");",
- " pm.environment.set(\"auto_vf_invariant_uuid\", \"\"+jsonData.invariantUUID+\"\");",
- " ",
- "}",
- "",
- "if (vf_found === false) {",
- " tests[\"VF \"+pm.environment.get(\"vf_name\")+\" does not exists, we stop the run\"] = false;",
- " postman.setNextRequest(null);",
- "",
- "}",
- "",
- "else {",
- " tests[\"VF \"+pm.environment.get(\"vf_name\")+\" exists, we continue the run\"] = true;",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc}}/sdc/v1/catalog/resources/{{auto_vf_uuid}}/metadata",
- "host": [
- "{{url-sdc}}"
- ],
- "path": [
- "sdc",
- "v1",
- "catalog",
- "resources",
- "{{auto_vf_uuid}}",
- "metadata"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Get VF uniqueId",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "16c73747-c973-40d9-ba3f-6061199386ec",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "var jsonData = pm.response.json();",
- "var vf_found = false;",
- "for (var i = 0; i < jsonData.resources.length; i++) { ",
- " if (jsonData.resources[i].name === pm.environment.get(\"vf_name\")) {",
- " vf_found = true;",
- " pm.environment.set(\"auto_vf_unique_id\", \"\"+jsonData.resources[i].uniqueId+\"\");",
- " }",
- "}",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/screen?excludeTypes=VFCMT&excludeTypes=Configuration",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "screen"
- ],
- "query": [
- {
- "key": "excludeTypes",
- "value": "VFCMT"
- },
- {
- "key": "excludeTypes",
- "value": "Configuration"
- }
- ]
- }
- },
- "response": []
- },
- {
- "name": "Add VF to Service",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "897b4588-b2e6-4af6-b469-09ae6ea3b797",
- "exec": [
- "pm.test(\"Status code is 201\", function () {",
- " pm.response.to.have.status(201);",
- "});",
- "var jsonData = pm.response.json();",
- "pm.environment.set(\"auto_vf_customization_uuid\", \"\"+jsonData.customizationUUID+\"\");",
- "pm.environment.set(\"auto_vf_name_for_model\", \"\"+jsonData.name+\"\");",
- "",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\r\n\t\"name\": \"{{vf_name}}\",\r\n\t\"componentVersion\": \"1.0\",\r\n\t\"posY\": 100,\r\n\t\"posX\": 200,\r\n\t\"uniqueId\": \"{{auto_vf_unique_id}}\",\r\n\t\"originType\": \"VF\",\r\n\t\"componentUid\": \"{{auto_vf_unique_id}}\",\r\n\t\"icon\": \"defaulticon\"\r\n}\r\n"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_unique_id}}/resourceInstance",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_unique_id}}",
- "resourceInstance"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Get Generic Neutron Virtual Link infos",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "8bc90be4-f039-44cb-af75-7f3addd63bdd",
- "exec": [
- "var vl_found = false;",
- "if (pm.response.code === 404) {",
- " tests[pm.environment.get(\"virtual_link_type\")+\" does not exists\"] = true;",
- "}",
- "else {",
- " if (pm.response.code === 200) {",
- " tests[\"Status code is 200\"] = pm.response.code === 200;",
- " }",
- " var jsonData = pm.response.json();",
- " for (var i = 0; i < jsonData.length; i++) { ",
- " if (jsonData[i].name === pm.environment.get(\"virtual_link_type\")) {",
- " vl_found = true;",
- " pm.environment.set(\"auto_virtual_link_uuid\", \"\"+jsonData[i].uuid+\"\");",
- " pm.environment.set(\"auto_virtual_link_invariant_uuid\", \"\"+jsonData[i].invariantUUID+\"\");",
- " }",
- " }",
- " if (vl_found === false) {",
- " tests[pm.environment.get(\"virtual_link_type\")+\" does not exists, we stop the run\"] = false;",
- " postman.setNextRequest(null);",
- " }",
- " ",
- " else {",
- " tests[pm.environment.get(\"virtual_link_type\")+\" exists, we continue the run\"] = true;",
- " }",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc}}/sdc/v1/catalog/resources?resourceType=VL",
- "host": [
- "{{url-sdc}}"
- ],
- "path": [
- "sdc",
- "v1",
- "catalog",
- "resources"
- ],
- "query": [
- {
- "key": "resourceType",
- "value": "VL"
- }
- ]
- }
- },
- "response": []
- },
- {
- "name": "Get Generic Neutron Virtual Link uniqueId",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "d906fb7e-04f9-445e-be5a-9644520171e1",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "var vl_found = false;",
- "for (var i = 0; i < jsonData.resources.length; i++) { ",
- " if (jsonData.resources[i].name === pm.environment.get(\"virtual_link_type\")) {",
- " vl_found = true;",
- " pm.environment.set(\"auto_virtual_link_unique_id\", \"\"+jsonData.resources[i].uniqueId+\"\");",
- " tests[\"Virtual Link : \"+pm.environment.get(\"virtual_link_type\")+\" found in SDC catalog with uniqueId = \"+pm.environment.get(\"auto_virtual_link_unique_id\")] = true;",
- " }",
- "}",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/screen?excludeTypes=VFCMT&excludeTypes=Configuration",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "screen"
- ],
- "query": [
- {
- "key": "excludeTypes",
- "value": "VFCMT"
- },
- {
- "key": "excludeTypes",
- "value": "Configuration"
- }
- ]
- }
- },
- "response": []
- },
- {
- "name": "Add a Generic Neutron VirtualLink to Service",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "0f07c2d5-49a1-4bf8-a3b5-6502674821af",
- "exec": [
- "pm.test(\"Status code is 201\", function () {",
- " pm.response.to.have.status(201);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "pm.environment.set(\"auto_virtual_link_customization_uuid\", \"\"+jsonData.customizationUUID+\"\");",
- "pm.environment.set(\"auto_virtual_link_name_for_model\", \"\"+jsonData.name+\"\");",
- "",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\r\n\t\"name\": \"{{virtual_link_type}}\",\r\n\t\"componentVersion\": \"1.0\",\r\n\t\"posY\": 200,\r\n\t\"posX\": 300,\r\n\t\"uniqueId\": \"{{auto_virtual_link_unique_id}}\",\r\n\t\"originType\": \"VL\",\r\n\t\"componentUid\": \"{{auto_virtual_link_unique_id}}\",\r\n\t\"icon\": \"defaulticon\"\r\n}\r\n"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_unique_id}}/resourceInstance",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_unique_id}}",
- "resourceInstance"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Submit for testing",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "f3215da0-5a6c-474c-9cc2-6733b36f1347",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "tests[\"Service status is : \"+jsonData.lifecycleState] = jsonData.lifecycleState === \"READY_FOR_CERTIFICATION\";",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\"userRemarks\":\"please test\"}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_unique_id}}/lifecycleState/certificationRequest",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_unique_id}}",
- "lifecycleState",
- "certificationRequest"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Start Certification",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "e9143951-e32e-4d93-b970-20b54ea64fd4",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "tests[\"Service status is : \"+jsonData.lifecycleState] = jsonData.lifecycleState === \"CERTIFICATION_IN_PROGRESS\";"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "jm0007"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_unique_id}}/lifecycleState/startCertification",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_unique_id}}",
- "lifecycleState",
- "startCertification"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Certify",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "2ca11efa-040f-4927-b30f-e58912f0db83",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "tests[\"Service status is : \"+jsonData.lifecycleState] = jsonData.lifecycleState === \"CERTIFIED\";",
- "",
- "pm.environment.set(\"auto_service_new_unique_id\", \"\"+jsonData.uniqueId+\"\");",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "jm0007"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\"userRemarks\":\"ok\"}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_unique_id}}/lifecycleState/certify",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_unique_id}}",
- "lifecycleState",
- "certify"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Get Service infos by uniqueID to get new_service_unique_id",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "cecdbf68-2176-4dc1-8321-6285d57a2e8f",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "pm.environment.set(\"auto_service_new_unique_id\", \"\"+jsonData[\"metadata\"][\"allVersions\"][\"1.0\"]+\"\");",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_unique_id}}/filteredDataByParams?include=metadata",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_unique_id}}",
- "filteredDataByParams"
- ],
- "query": [
- {
- "key": "include",
- "value": "metadata"
- }
- ]
- }
- },
- "response": []
- },
- {
- "name": "Approve distribution",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "ef5105ec-e9ea-454a-954a-3398364ecb89",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "tests[\"Distribution status is : \"+jsonData.distributionStatus] = jsonData.distributionStatus === \"DISTRIBUTION_APPROVED\";",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "gv0001"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\"userRemarks\":\"ok\"}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_new_unique_id}}/distribution-state/approve",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_new_unique_id}}",
- "distribution-state",
- "approve"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Distribute service",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "cf7fe737-3858-4235-88b8-73640775d871",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "tests[\"Distribution status is : \"+jsonData.distributionStatus] = jsonData.distributionStatus === \"DISTRIBUTED\";",
- "",
- "pm.environment.set(\"auto_vf_module_model_name\", \"\"+jsonData.componentInstances[0].groupInstances[0].groupName+\"\");",
- "pm.environment.set(\"auto_vf_module_model_customization_name\", \"\"+jsonData.componentInstances[0].groupInstances[0].groupName+\"\");",
- "pm.environment.set(\"auto_vf_module_model_uuid\", \"\"+jsonData.componentInstances[0].groupInstances[0].groupUUID+\"\");",
- "pm.environment.set(\"auto_vf_module_model_invariant_uuid\", \"\"+jsonData.componentInstances[0].groupInstances[0].invariantUUID+\"\");",
- "pm.environment.set(\"auto_vf_module_model_customization_uuid\", \"\"+jsonData.componentInstances[0].groupInstances[0].customizationUUID+\"\");",
- "pm.environment.set(\"auto_virtual_link_model_customization_uuid\", \"\"+jsonData.componentInstances[1].customizationUUID+\"\");"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "op0001"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_new_unique_id}}/distribution/PROD/activate",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_new_unique_id}}",
- "distribution",
- "PROD",
- "activate"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check distribution",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "401f90b3-649f-4f3f-a2bd-ef996a54e47b",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "var distrib_done = false;",
- "for (var i = 0; i < jsonData.distributionStatusOfServiceList.length; i++) { ",
- " if (jsonData.distributionStatusOfServiceList[i].deployementStatus === \"Distributed\") {",
- " distrib_done = true;",
- " pm.environment.set(\"auto_distribution_id\", \"\"+jsonData.distributionStatusOfServiceList[i].distributionID+\"\");",
- "",
- " }",
- "}",
- "if (distrib_done === false) {",
- " tests[pm.environment.get(\"service\")+\" not distributed, we stop the run\"] = false;",
- " postman.setNextRequest(null);",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"service\")+\" distributed\"] = true;",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "op0001"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YmVlcDpib29w"
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_uuid}}/distribution",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_uuid}}",
- "distribution"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check distribution fully completed after 60s",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "86932912-3b35-409d-bdbf-003550969f10",
- "exec": [
- "tests[\"Status code is 200\"] = responseCode.code === 200;",
- "",
- "var jsonData = pm.response.json();",
- "var distrib_SO = false;",
- "var distrib_AAI = false;",
- "var distrib_SDNC = false;",
- "var distrib_MultiCloud = false;",
- "var distrib_CDS = false;",
- "var distrib_policy = false;",
- "",
- "for (var i = 0; i < jsonData.distributionStatusList.length; i++) { ",
- " if ((jsonData.distributionStatusList[i].omfComponentID === \"SO-COpenSource-Env11\") && (jsonData.distributionStatusList[i].status === \"DOWNLOAD_OK\"))",
- " {",
- " distrib_SO = true;",
- " }",
- " if ((jsonData.distributionStatusList[i].omfComponentID === \"aai-ml\") && (jsonData.distributionStatusList[i].status === \"DOWNLOAD_OK\"))",
- " {",
- " distrib_AAI = true;",
- " }",
- " if ((jsonData.distributionStatusList[i].omfComponentID === \"sdc-COpenSource-Env11-sdnc-dockero\") && (jsonData.distributionStatusList[i].status === \"DOWNLOAD_OK\"))",
- " {",
- " distrib_SDNC = true;",
- " }",
- " if ((jsonData.distributionStatusList[i].omfComponentID === \"multicloud-k8s-id\") && (jsonData.distributionStatusList[i].status === \"DOWNLOAD_OK\"))",
- " {",
- " distrib_MultiCloud = true;",
- " }",
- " if ((jsonData.distributionStatusList[i].omfComponentID === \"cds\") && (jsonData.distributionStatusList[i].status === \"DOWNLOAD_OK\"))",
- " {",
- " distrib_CDS = true;",
- " }",
- " if ((jsonData.distributionStatusList[i].omfComponentID === \"policy-id\") && (jsonData.distributionStatusList[i].status === \"DOWNLOAD_OK\"))",
- " {",
- " distrib_policy = true;",
- " } ",
- "}",
- "",
- "",
- "if (distrib_SO === false) {",
- " tests[pm.globals.get(\"service\")+\" not distributed to SO\"] = false;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"service\")+\" distributed to SO\"] = true;",
- "}",
- "",
- "if (distrib_AAI === false) {",
- " tests[pm.environment.get(\"service\")+\" not distributed to AAI\"] = false;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"service\")+\" distributed to AAI\"] = true;",
- "}",
- "",
- "if (distrib_SDNC === false) {",
- " tests[pm.environment.get(\"service\")+\" not distributed to SDNC\"] = false;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"service\")+\" distributed to SDNC\"] = true;",
- "}",
- "if (distrib_MultiCloud === false) {",
- " tests[pm.environment.get(\"service\")+\" not distributed to MultiCloud\"] = false;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"service\")+\" distributed to MultiCloud\"] = true;",
- "}",
- "if (distrib_CDS === false) {",
- " tests[pm.environment.get(\"service\")+\" not distributed to CDS\"] = false;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"service\")+\" distributed to CDS\"] = true;",
- "}",
- "if (distrib_policy === false) {",
- " tests[pm.environment.get(\"service\")+\" not distributed to Policy\"] = false;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"service\")+\" distributed to Policy\"] = true;",
- "}",
- ""
- ],
- "type": "text/javascript"
- }
- },
- {
- "listen": "prerequest",
- "script": {
- "id": "9a1093c2-5c2d-47f6-892e-1a049cff8931",
- "exec": [
- " var date = new Date();",
- " var curDate = null;",
- " do {",
- " curDate = new Date();",
- " }",
- " while (curDate - date < 10000);"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "op0001"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YmVlcDpib29w"
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/distribution/{{auto_distribution_id}}",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "distribution",
- "{{auto_distribution_id}}"
- ]
- }
- },
- "response": []
- }
- ]
-} \ No newline at end of file
diff --git a/test/postman/06_Onboard_Service_Macro.postman_collection.json b/test/postman/06_Onboard_Service_Macro.postman_collection.json
deleted file mode 100644
index 0514a72c5..000000000
--- a/test/postman/06_Onboard_Service_Macro.postman_collection.json
+++ /dev/null
@@ -1,1149 +0,0 @@
-{
- "info": {
- "_postman_id": "aa0955ae-1dd8-48b8-ac63-01117a1c6c2e",
- "name": "06_Onboard_Service_Macro",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "check service exists and get infos",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "4f27b99e-23ac-4e7c-a1dc-c42ad63d6f65",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "var service_found = false;",
- "for (var i = 0; i < jsonData.length; i++) { ",
- " if (jsonData[i].name === pm.environment.get(\"service_name_macro\")) {",
- " service_found = true;",
- " pm.environment.set(\"auto_service_uuid\", \"\"+jsonData[i].uuid+\"\");",
- " pm.environment.set(\"auto_service_invariant_uuid\", \"\"+jsonData[i].invariantUUID+\"\");",
- " }",
- "}",
- "if (service_found === false) {",
- " tests[pm.environment.get(\"service_name_macro\")+\" does not exist yet, we continue the run\"] = true;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"service_name_macro\")+\" already exists, stop the run\"] = false;",
- " postman.setNextRequest(null);",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc}}/sdc/v1/catalog/services",
- "host": [
- "{{url-sdc}}"
- ],
- "path": [
- "sdc",
- "v1",
- "catalog",
- "services"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Declare a Service in Macro mode",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "e6118e78-e476-4049-8e81-f81b53ba625a",
- "exec": [
- "pm.test(\"Status code is 201\", function () {",
- " pm.response.to.have.status(201);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "pm.environment.set(\"auto_service_invariant_uuid\", \"\"+jsonData[\"invariantUUID\"]+\"\");",
- "pm.environment.set(\"auto_service_uuid\", \"\"+jsonData[\"uuid\"]+\"\");",
- "pm.environment.set(\"auto_service_unique_id\", \"\"+jsonData[\"uniqueId\"]+\"\");",
- "",
- "tests[\"Service status is : \"+jsonData.lifecycleState] = jsonData.lifecycleState === \"NOT_CERTIFIED_CHECKOUT\";"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\r\n\t\"artifacts\": {},\r\n\t\"toscaArtifacts\": {},\r\n\t\"contactId\": \"cs0008\",\r\n\t\"categories\": [{\r\n\t\t\"name\": \"Network Service\",\r\n\t\t\"normalizedName\": \"network service\",\r\n\t\t\"uniqueId\": \"serviceNewCategory.network service\",\r\n\t\t\"icons\": [\"network_l_1-3\"],\r\n\t\t\"subcategories\": null,\r\n\t\t\"version\": null,\r\n\t\t\"ownerId\": null,\r\n\t\t\"empty\": false,\r\n\t\t\"type\": null\r\n\t}],\r\n\t\"description\": \"Service {{service_name_macro}}\",\r\n\t\"icon\": \"defaulticon\",\r\n\t\"componentInstancesProperties\": {},\r\n\t\"componentInstancesAttributes\": {},\r\n\t\"name\": \"{{service_name_macro}}\",\r\n\t\"tags\": [\"{{service_name_macro}}\"],\r\n\t\"capabilities\": {},\r\n\t\"requirements\": {},\r\n\t\"deploymentArtifacts\": {},\r\n\t\"componentType\": \"SERVICE\",\r\n\t\"projectCode\": \"123456\",\r\n\t\"componentInstances\": [],\r\n\t\"properties\": [],\r\n\t\"attributes\": [],\r\n\t\"forwardingPaths\": {},\r\n\t\"ecompGeneratedNaming\": true,\r\n\t\"serviceApiArtifacts\": {},\r\n\t\"instantiationType\": \"Macro\",\r\n\t\"environmentContext\": \"General_Revenue-Bearing\"\r\n}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check VF exists and get uuid",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "87b90661-aefb-4ec9-94fc-fb5277f8c283",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "var vf_found = false;",
- "if (jsonData.name === pm.environment.get(\"vf_name\")) {",
- " vf_found = true;",
- " pm.environment.set(\"auto_vf_uuid\", \"\"+jsonData.uuid+\"\");",
- " pm.environment.set(\"auto_vf_invariant_uuid\", \"\"+jsonData.invariantUUID+\"\");",
- " ",
- "}",
- "",
- "if (vf_found === false) {",
- " tests[\"VF \"+pm.environment.get(\"vf_name\")+\" does not exists, we stop the run\"] = false;",
- " postman.setNextRequest(null);",
- "",
- "}",
- "",
- "else {",
- " tests[\"VF \"+pm.environment.get(\"vf_name\")+\" exists, we continue the run\"] = true;",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc}}/sdc/v1/catalog/resources/{{auto_vf_uuid}}/metadata",
- "host": [
- "{{url-sdc}}"
- ],
- "path": [
- "sdc",
- "v1",
- "catalog",
- "resources",
- "{{auto_vf_uuid}}",
- "metadata"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Get VF uniqueId",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "16c73747-c973-40d9-ba3f-6061199386ec",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "var jsonData = pm.response.json();",
- "var vf_found = false;",
- "for (var i = 0; i < jsonData.resources.length; i++) { ",
- " if (jsonData.resources[i].name === pm.environment.get(\"vf_name\")) {",
- " vf_found = true;",
- " pm.environment.set(\"auto_vf_unique_id\", \"\"+jsonData.resources[i].uniqueId+\"\");",
- " }",
- "}",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/screen?excludeTypes=VFCMT&excludeTypes=Configuration",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "screen"
- ],
- "query": [
- {
- "key": "excludeTypes",
- "value": "VFCMT"
- },
- {
- "key": "excludeTypes",
- "value": "Configuration"
- }
- ]
- }
- },
- "response": []
- },
- {
- "name": "Add VF to Service",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "897b4588-b2e6-4af6-b469-09ae6ea3b797",
- "exec": [
- "pm.test(\"Status code is 201\", function () {",
- " pm.response.to.have.status(201);",
- "});",
- "var jsonData = pm.response.json();",
- "pm.environment.set(\"auto_vf_customization_uuid\", \"\"+jsonData.customizationUUID+\"\");",
- "pm.environment.set(\"auto_vf_name_for_model\", \"\"+jsonData.name+\"\");",
- "",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\r\n\t\"name\": \"{{vf_name}}\",\r\n\t\"componentVersion\": \"1.0\",\r\n\t\"posY\": 100,\r\n\t\"posX\": 200,\r\n\t\"uniqueId\": \"{{auto_vf_unique_id}}\",\r\n\t\"originType\": \"VF\",\r\n\t\"componentUid\": \"{{auto_vf_unique_id}}\",\r\n\t\"icon\": \"defaulticon\"\r\n}\r\n"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_unique_id}}/resourceInstance",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_unique_id}}",
- "resourceInstance"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Submit for testing",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "f3215da0-5a6c-474c-9cc2-6733b36f1347",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "tests[\"Service status is : \"+jsonData.lifecycleState] = jsonData.lifecycleState === \"READY_FOR_CERTIFICATION\";",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\"userRemarks\":\"please test\"}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_unique_id}}/lifecycleState/certificationRequest",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_unique_id}}",
- "lifecycleState",
- "certificationRequest"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Start Certification",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "e9143951-e32e-4d93-b970-20b54ea64fd4",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "tests[\"Service status is : \"+jsonData.lifecycleState] = jsonData.lifecycleState === \"CERTIFICATION_IN_PROGRESS\";"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "jm0007"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_unique_id}}/lifecycleState/startCertification",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_unique_id}}",
- "lifecycleState",
- "startCertification"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Certify",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "2ca11efa-040f-4927-b30f-e58912f0db83",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "tests[\"Service status is : \"+jsonData.lifecycleState] = jsonData.lifecycleState === \"CERTIFIED\";",
- "",
- "pm.environment.set(\"auto_service_new_unique_id\", \"\"+jsonData.uniqueId+\"\");",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "jm0007"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\"userRemarks\":\"ok\"}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_unique_id}}/lifecycleState/certify",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_unique_id}}",
- "lifecycleState",
- "certify"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Get Service infos by uniqueID to get new_service_unique_id",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "cecdbf68-2176-4dc1-8321-6285d57a2e8f",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "pm.environment.set(\"auto_service_new_unique_id\", \"\"+jsonData[\"metadata\"][\"allVersions\"][\"1.0\"]+\"\");",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_unique_id}}/filteredDataByParams?include=metadata",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_unique_id}}",
- "filteredDataByParams"
- ],
- "query": [
- {
- "key": "include",
- "value": "metadata"
- }
- ]
- }
- },
- "response": []
- },
- {
- "name": "Approve distribution",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "ef5105ec-e9ea-454a-954a-3398364ecb89",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "tests[\"Distribution status is : \"+jsonData.distributionStatus] = jsonData.distributionStatus === \"DISTRIBUTION_APPROVED\";",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "gv0001"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\"userRemarks\":\"ok\"}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_new_unique_id}}/distribution-state/approve",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_new_unique_id}}",
- "distribution-state",
- "approve"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Distribute service",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "cf7fe737-3858-4235-88b8-73640775d871",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "tests[\"Distribution status is : \"+jsonData.distributionStatus] = jsonData.distributionStatus === \"DISTRIBUTED\";",
- "",
- "pm.environment.set(\"auto_vf_module_model_name\", \"\"+jsonData.componentInstances[0].groupInstances[0].groupName+\"\");",
- "pm.environment.set(\"auto_vf_module_model_customization_name\", \"\"+jsonData.componentInstances[0].groupInstances[0].groupName+\"\");",
- "pm.environment.set(\"auto_vf_module_model_uuid\", \"\"+jsonData.componentInstances[0].groupInstances[0].groupUUID+\"\");",
- "pm.environment.set(\"auto_vf_module_model_invariant_uuid\", \"\"+jsonData.componentInstances[0].groupInstances[0].invariantUUID+\"\");",
- "pm.environment.set(\"auto_vf_module_model_customization_uuid\", \"\"+jsonData.componentInstances[0].groupInstances[0].customizationUUID+\"\");",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "op0001"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{}"
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_new_unique_id}}/distribution/PROD/activate",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_new_unique_id}}",
- "distribution",
- "PROD",
- "activate"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check distribution",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "401f90b3-649f-4f3f-a2bd-ef996a54e47b",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "var distrib_done = false;",
- "for (var i = 0; i < jsonData.distributionStatusOfServiceList.length; i++) { ",
- " if (jsonData.distributionStatusOfServiceList[i].deployementStatus === \"Distributed\") {",
- " distrib_done = true;",
- " pm.environment.set(\"auto_distribution_id\", \"\"+jsonData.distributionStatusOfServiceList[i].distributionID+\"\");",
- "",
- " }",
- "}",
- "if (distrib_done === false) {",
- " tests[pm.environment.get(\"service\")+\" not distributed, we stop the run\"] = false;",
- " postman.setNextRequest(null);",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"service\")+\" distributed\"] = true;",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "op0001"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YmVlcDpib29w"
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/{{auto_service_uuid}}/distribution",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "{{auto_service_uuid}}",
- "distribution"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check distribution fully completed after 60s",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "86932912-3b35-409d-bdbf-003550969f10",
- "exec": [
- "tests[\"Status code is 200\"] = responseCode.code === 200;",
- "",
- "var jsonData = pm.response.json();",
- "var distrib_SO = false;",
- "var distrib_AAI = false;",
- "var distrib_SDNC = false;",
- "var distrib_MultiCloud = false;",
- "var distrib_CDS = false;",
- "var distrib_policy = false;",
- "",
- "for (var i = 0; i < jsonData.distributionStatusList.length; i++) { ",
- " if ((jsonData.distributionStatusList[i].omfComponentID === \"SO-COpenSource-Env11\") && (jsonData.distributionStatusList[i].status === \"DOWNLOAD_OK\"))",
- " {",
- " distrib_SO = true;",
- " }",
- " if ((jsonData.distributionStatusList[i].omfComponentID === \"aai-ml\") && (jsonData.distributionStatusList[i].status === \"DOWNLOAD_OK\"))",
- " {",
- " distrib_AAI = true;",
- " }",
- " if ((jsonData.distributionStatusList[i].omfComponentID === \"sdc-COpenSource-Env11-sdnc-dockero\") && (jsonData.distributionStatusList[i].status === \"DOWNLOAD_OK\"))",
- " {",
- " distrib_SDNC = true;",
- " }",
- " if ((jsonData.distributionStatusList[i].omfComponentID === \"multicloud-k8s-id\") && (jsonData.distributionStatusList[i].status === \"DOWNLOAD_OK\"))",
- " {",
- " distrib_MultiCloud = true;",
- " }",
- " if ((jsonData.distributionStatusList[i].omfComponentID === \"cds\") && (jsonData.distributionStatusList[i].status === \"DOWNLOAD_OK\"))",
- " {",
- " distrib_CDS = true;",
- " }",
- " if ((jsonData.distributionStatusList[i].omfComponentID === \"policy-id\") && (jsonData.distributionStatusList[i].status === \"DOWNLOAD_OK\"))",
- " {",
- " distrib_policy = true;",
- " } ",
- "}",
- "",
- "",
- "if (distrib_SO === false) {",
- " tests[pm.globals.get(\"service\")+\" not distributed to SO\"] = false;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"service\")+\" distributed to SO\"] = true;",
- "}",
- "",
- "if (distrib_AAI === false) {",
- " tests[pm.environment.get(\"service\")+\" not distributed to AAI\"] = false;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"service\")+\" distributed to AAI\"] = true;",
- "}",
- "",
- "if (distrib_SDNC === false) {",
- " tests[pm.environment.get(\"service\")+\" not distributed to SDNC\"] = false;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"service\")+\" distributed to SDNC\"] = true;",
- "}",
- "if (distrib_MultiCloud === false) {",
- " tests[pm.environment.get(\"service\")+\" not distributed to MultiCloud\"] = false;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"service\")+\" distributed to MultiCloud\"] = true;",
- "}",
- "if (distrib_CDS === false) {",
- " tests[pm.environment.get(\"service\")+\" not distributed to CDS\"] = false;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"service\")+\" distributed to CDS\"] = true;",
- "}",
- "if (distrib_policy === false) {",
- " tests[pm.environment.get(\"service\")+\" not distributed to Policy\"] = false;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"service\")+\" distributed to Policy\"] = true;",
- "}",
- ""
- ],
- "type": "text/javascript"
- }
- },
- {
- "listen": "prerequest",
- "script": {
- "id": "9a1093c2-5c2d-47f6-892e-1a049cff8931",
- "exec": [
- " var date = new Date();",
- " var curDate = null;",
- " do {",
- " curDate = new Date();",
- " }",
- " while (curDate - date < 10000);"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "op0001"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YmVlcDpib29w"
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc2}}/sdc1/feProxy/rest/v1/catalog/services/distribution/{{auto_distribution_id}}",
- "host": [
- "{{url-sdc2}}"
- ],
- "path": [
- "sdc1",
- "feProxy",
- "rest",
- "v1",
- "catalog",
- "services",
- "distribution",
- "{{auto_distribution_id}}"
- ]
- }
- },
- "response": []
- }
- ]
-} \ No newline at end of file
diff --git a/test/postman/07_Declare_owningEntity_LineOfBusiness_project_platform.postman_collection.json b/test/postman/07_Declare_owningEntity_LineOfBusiness_project_platform.postman_collection.json
deleted file mode 100644
index 840693516..000000000
--- a/test/postman/07_Declare_owningEntity_LineOfBusiness_project_platform.postman_collection.json
+++ /dev/null
@@ -1,302 +0,0 @@
-{
- "info": {
- "_postman_id": "4d04e8e7-d495-4dcc-a800-1cf4ab643403",
- "name": "07_Declare_owningEntity_LineOfBusiness_project_platform",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "Declare owningEntity in VID",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "0643260b-1c9a-450e-aa72-ac2115244a97",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept-Encoding",
- "value": "gzip, deflate"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"options\": [\"{{owning_entity_name}}\"]\n}"
- },
- "url": {
- "raw": "{{url-vid}}/vid/maintenance/category_parameter/owningEntity",
- "host": [
- "{{url-vid}}"
- ],
- "path": [
- "vid",
- "maintenance",
- "category_parameter",
- "owningEntity"
- ]
- }
- },
- "response": []
- },
- {
- "name": "declare Owning-Entity in AAI",
- "request": {
- "method": "PUT",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"owning-entity-name\": \"{{owning_entity_name}}\",\n \"owning-entity-id\": \"{{owning_entity_id}}\"\n}"
- },
- "url": {
- "raw": "{{url-aai}}/aai/v13/business/owning-entities/owning-entity/{{owning_entity_id}}",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v13",
- "business",
- "owning-entities",
- "owning-entity",
- "{{owning_entity_id}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Get owning-entities",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "294b8cb1-67a3-435d-817f-7fd5497183db",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "var oe_found = false;",
- "for (var i = 0; i < jsonData[\"owning-entity\"].length; i++) { ",
- " if (jsonData[\"owning-entity\"][i][\"owning-entity-name\"] === pm.environment.get(\"owning_entity\")) {",
- " oe_found = true;",
- " pm.environment.set(\"auto_owning_entity_id\", \"\"+jsonData[\"owning-entity\"][i][\"owning-entity-id\"]+\"\");",
- " }",
- "}",
- "tests[\"Owning-Entity \"+pm.environment.get(\"owning_entity\")+\" found and id saved\"] = oe_found === true;"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/business/owning-entities",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "business",
- "owning-entities"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Declare platform",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "6d0d2a8b-b756-4386-9113-d39722218bf5",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"options\": [\"{{platform}}\"]\n}"
- },
- "url": {
- "raw": "{{url-vid}}/vid/maintenance/category_parameter/platform",
- "host": [
- "{{url-vid}}"
- ],
- "path": [
- "vid",
- "maintenance",
- "category_parameter",
- "platform"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Declare lineOfBusiness",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "e177b994-db59-4b13-b003-b6fc0dda6907",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"options\": [\"{{lineofbusiness}}\"]\n}"
- },
- "url": {
- "raw": "{{url-vid}}/vid/maintenance/category_parameter/lineOfBusiness",
- "host": [
- "{{url-vid}}"
- ],
- "path": [
- "vid",
- "maintenance",
- "category_parameter",
- "lineOfBusiness"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Declare project",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "a5eb2421-a949-44f9-a8f7-786b672aede3",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"options\": [\"{{project}}\"]\n}"
- },
- "url": {
- "raw": "{{url-vid}}/vid/maintenance/category_parameter/project",
- "host": [
- "{{url-vid}}"
- ],
- "path": [
- "vid",
- "maintenance",
- "category_parameter",
- "project"
- ]
- }
- },
- "response": []
- }
- ]
-} \ No newline at end of file
diff --git a/test/postman/08_Declare_Customer_Service_Subscription_Cloud.postman_collection.json b/test/postman/08_Declare_Customer_Service_Subscription_Cloud.postman_collection.json
deleted file mode 100644
index d5e3bbf89..000000000
--- a/test/postman/08_Declare_Customer_Service_Subscription_Cloud.postman_collection.json
+++ /dev/null
@@ -1,1647 +0,0 @@
-{
- "info": {
- "_postman_id": "4e86ffd4-736b-441d-8ff2-56a584a96573",
- "name": "08_Declare_Customer_Service_Subscription_Cloud",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "List Customers",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "5c0ee107-621b-4b01-9506-cd8628b01179",
- "exec": [
- "var customer_found = false;",
- "if (pm.response.code === 404) {",
- " tests[pm.environment.get(\"customer_name\")+\" does not exists\"] = true;",
- "}",
- "else {",
- " if (pm.response.code === 200) {",
- " tests[\"Status code is 200\"] = pm.response.code === 200;",
- " }",
- " var jsonData = pm.response.json();",
- " for (var i = 0; i < jsonData.customer.length; i++) { ",
- " if (jsonData.customer[i][\"global-customer-id\"] === pm.environment.get(\"customer_name\")) {",
- " customer_found = true;",
- " pm.environment.set(\"auto_customer_id\", \"\"+jsonData.customer[i][\"global-customer-id\"]+\"\");",
- " }",
- " }",
- " if (customer_found === false) {",
- " tests[pm.environment.get(\"customer_name\")+\" does not exists\"] = true;",
- " }",
- " ",
- " else {",
- " tests[pm.environment.get(\"customer_name\")+\" already exists, we skip creation\"] = true;",
- " postman.setNextRequest(\"List Services from SDC catalog\");",
- " }",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/business/customers",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "business",
- "customers"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Declare a Customer",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "3f8fbd67-36f6-4ae2-a2ab-d23f1f690133",
- "exec": [
- "pm.test(\"Status code is 201\", function () {",
- " pm.response.to.have.status(201);",
- "});"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "PUT",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"global-customer-id\": \"{{customer_name}}\",\n \"subscriber-name\": \"{{customer_name}}\",\n \"subscriber-type\": \"INFRA\"\n}"
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/business/customers/customer/{{customer_name}}",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "business",
- "customers",
- "customer",
- "{{customer_name}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Check customer creation",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "e459dbcf-bf32-4aef-a9b1-5d1adfb525fc",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "var customer_found = false;",
- "for (var i = 0; i < jsonData.customer.length; i++) { ",
- " if (jsonData.customer[i][\"global-customer-id\"] === pm.environment.get(\"customer_name\")) {",
- " customer_found = true;",
- " pm.environment.set(\"auto_customer_id\", \"\"+jsonData.customer[i][\"global-customer-id\"]+\"\");",
- " }",
- "}",
- "tests[pm.environment.get(\"customer_name\")+\" found\"] = customer_found === true;"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/business/customers",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "business",
- "customers"
- ]
- }
- },
- "response": []
- },
- {
- "name": "List Services from SDC catalog",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "a496d4dc-e343-42d4-8377-6d18d3570c82",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "var service_found = false;",
- "for (var i = 0; i < jsonData.length; i++) { ",
- " if (jsonData[i].name === pm.environment.get(\"service\")) {",
- " service_found = true;",
- " pm.environment.set(\"auto_service_invariantUUID\", \"\"+jsonData[i].invariantUUID+\"\");",
- " }",
- "}",
- "",
- "if (service_found === false) {",
- " tests[\"Service : \"+pm.environment.get(\"service\")+\" does not exist in SDC catalog, we stop the run\"] = true;",
- " postman.setNextRequest(null);",
- "}",
- "",
- "else {",
- " tests[\"Service : \"+pm.environment.get(\"service\")+\" exists in SDC catalog, we can continue the run\"] = true;",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "ONAP-Test"
- },
- {
- "key": "USER_ID",
- "value": "cs0008"
- },
- {
- "key": "X-FromAppId",
- "value": "ONAP-Test"
- },
- {
- "key": "Authorization",
- "value": "Basic YWFpOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="
- },
- {
- "key": "x-ecomp-instanceid",
- "value": "ONAP-Test"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-sdc}}/sdc/v1/catalog/services",
- "host": [
- "{{url-sdc}}"
- ],
- "path": [
- "sdc",
- "v1",
- "catalog",
- "services"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Check Subscription in AAI",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "654cc91a-920b-4b79-9177-15b622749cb4",
- "exec": [
- "var service_found = false;",
- "if (pm.response.code === 404) {",
- " tests[pm.environment.get(\"service\")+\" does not exists in AAI\"] = true;",
- "}",
- "else {",
- " if (pm.response.code === 200) {",
- " tests[\"Status code is 200\"] = pm.response.code === 200;",
- " }",
- " var jsonData = pm.response.json();",
- " for (var i = 0; i < jsonData.service.length; i++) { ",
- " if (jsonData.service[i][\"service-description\"] === pm.environment.get(\"service\")) {",
- " service_found = true;",
- " }",
- " }",
- " ",
- " if (service_found === false) {",
- " tests[\"Service subscription : \"+pm.environment.get(\"service\")+\" does not exists in AAI\"] = true;",
- " }",
- " ",
- " else {",
- " tests[\"Service subscription : \"+pm.environment.get(\"service\")+\" already exists in AAI, we skip creation\"] = true;",
- " postman.setNextRequest(\"check cloud-region exists\");",
- " }",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/service-design-and-creation/services",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "service-design-and-creation",
- "services"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Declare subscription in AAI",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "c82062d1-c394-47d8-ab3d-14f777c32971",
- "exec": [
- "pm.test(\"Status code is 201\", function () {",
- " pm.response.to.have.status(201);",
- "});"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "PUT",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\r\n\"service-id\": \"{{auto_service_invariantUUID}}\",\r\n\"service-description\": \"{{service}}\"\r\n}"
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/service-design-and-creation/services/service/{{auto_service_invariantUUID}}",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "service-design-and-creation",
- "services",
- "service",
- "{{auto_service_invariantUUID}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Check Subscription creation in AAI",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "ddb1ab24-cbf4-4d50-a237-614143e66a66",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "var jsonData = pm.response.json();",
- "var service_found = false;",
- "for (var i = 0; i < jsonData.service.length; i++) { ",
- " if (jsonData.service[i][\"service-description\"] === pm.environment.get(\"service\")) {",
- " service_found = true;",
- " pm.environment.set(\"auto_service_resource_version\", \"\"+jsonData.service[i][\"resource-version\"]+\"\");",
- " }",
- "}",
- "tests[\"Service subscription : \"+pm.environment.get(\"service\")+\" found in AAI\"] = service_found === true;"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/service-design-and-creation/services",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "service-design-and-creation",
- "services"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check complex exists",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "12df16bd-e361-45ec-8663-b8a404bb13ce",
- "exec": [
- "var complex_found = false;",
- "if (pm.response.code === 404) {",
- " tests[pm.environment.get(\"complex_name\")+\" does not exists in AAI\"] = true;",
- "}",
- "else {",
- " if (pm.response.code === 200) {",
- " tests[\"Status code is 200\"] = pm.response.code === 200;",
- " }",
- "var jsonData = pm.response.json();",
- "for (var i = 0; i < jsonData[\"complex\"].length; i++) { ",
- " if (jsonData[\"complex\"][i][\"complex-name\"] === pm.environment.get(\"complex_name\")) {",
- " complex_found = true;",
- " }",
- "}",
- "",
- "if (complex_found === false) {",
- " tests[pm.environment.get(\"complex_name\")+\" does not exist yet\"] = true;",
- "}",
- "else {",
- " tests[pm.environment.get(\"complex_name\")+\" already exists, we skip creation\"] = true;",
- " postman.setNextRequest(\"check cloud-region exists\");",
- "}",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/cloud-infrastructure/complexes",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "cloud-infrastructure",
- "complexes"
- ]
- }
- },
- "response": []
- },
- {
- "name": "create Complex",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "c6021ddb-13b1-472b-acdd-55c4f550461f",
- "exec": [
- "pm.test(\"Status code is 201\", function () {",
- " pm.response.to.have.status(201);",
- "});"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "PUT",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"physical-location-id\": \"{{complex_name}}\",\n \"data-center-code\": \"\",\n \"complex-name\": \"{{complex_name}}\",\n \"identity-url\": \"\",\n \"physical-location-type\": \"\",\n \"street1\": \"\",\n \"street2\": \"\",\n \"city\": \"\",\n \"state\": \"\",\n \"postal-code\": \"\",\n \"country\": \"\",\n \"region\": \"\",\n \"latitude\": \"\",\n \"longitude\": \"\",\n \"elevation\": \"\",\n \"lata\": \"\"\n}"
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/cloud-infrastructure/complexes/complex/{{complex_name}}",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "cloud-infrastructure",
- "complexes",
- "complex",
- "{{complex_name}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Check complex creation",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "eed67b4e-d3b5-4ba7-a254-6e8d44e10be0",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "var complex_found = false;",
- "for (var i = 0; i < jsonData[\"complex\"].length; i++) { ",
- " if (jsonData[\"complex\"][i][\"complex-name\"] === pm.environment.get(\"complex_name\")) {",
- " complex_found = true;",
- " }",
- "}",
- "tests[\"Complex : \"+pm.environment.get(\"complex_name\")+\" found\"] = complex_found === true;",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/cloud-infrastructure/complexes",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "cloud-infrastructure",
- "complexes"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check cloud-region exists",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "705695aa-21b8-4b3f-9e00-4f46178f37d0",
- "exec": [
- "var region_found = false;",
- "if (pm.response.code === 404) {",
- " tests[pm.environment.get(\"onap_cloud_region_id\")+\" does not exists in AAI\"] = true;",
- "}",
- "else {",
- " if (pm.response.code === 200) {",
- " tests[\"Status code is 200\"] = pm.response.code === 200;",
- " }",
- "var jsonData = pm.response.json();",
- "for (var i = 0; i < jsonData[\"cloud-region\"].length; i++) { ",
- " if ((jsonData[\"cloud-region\"][i][\"cloud-region-id\"] === pm.environment.get(\"onap_cloud_region_id\")) && (jsonData[\"cloud-region\"][i][\"cloud-owner\"] === pm.environment.get(\"cloud_owner_name\"))) {",
- " region_found = true;",
- " }",
- "}",
- "",
- "if (region_found === false) {",
- " tests[pm.environment.get(\"onap_cloud_region_id\")+\" does not exist yet for cloudOwner \"+pm.environment.get(\"cloud_owner_name\")] = true;",
- "}",
- "else {",
- " tests[pm.environment.get(\"onap_cloud_region_id\")+\" already exists, we skip creation\"] = true;",
- " postman.setNextRequest(\"check tenant in cloud region\");",
- "}",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/cloud-infrastructure/cloud-regions",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "cloud-infrastructure",
- "cloud-regions"
- ]
- }
- },
- "response": []
- },
- {
- "name": "create cloud-region",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "41f77bea-f6cd-4bd9-961d-b02f42751db0",
- "exec": [
- "pm.test(\"Status code is 201\", function () {",
- " pm.response.to.have.status(201);",
- "});"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "PUT",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\r\n \"cloud-owner\": \"{{cloud_owner_name}}\",\r\n \"cloud-region-id\": \"{{onap_cloud_region_id}}\",\r\n \"cloud-type\": \"{{cloud_type}}\",\r\n \"owner-defined-type\": \"\",\r\n \"cloud-region-version\": \"{{cloud_region_version}}\",\r\n \"cloud-zone\": \"\",\r\n \"complex-name\": \"{{complex_name}}\",\r\n \"identity-url\": \"WillBeUpdatedByMultiCloud\",\r\n \"sriov-automation\": false,\r\n \"cloud-extra-info\": \"{\\\"openstack-region-id\\\":\\\"{{openstack_region_id}}\\\"}\",\r\n \"esr-system-info-list\": {\r\n \t\"esr-system-info\": [\r\n {\r\n \"esr-system-info-id\": \"{{random_uuid}}\",\r\n \"service-url\": \"{{keystone_url}}\",\r\n \"user-name\": \"{{user_name}}\",\r\n \"password\": \"{{keystone_password}}\",\r\n \"system-type\": \"VIM\",\r\n \"ssl-insecure\": true,\r\n \"cloud-domain\": \"Default\",\r\n \"default-tenant\": \"{{tenant_name}}\",\r\n \"system-status\": \"active\"\r\n }\r\n ]\r\n }\r\n}"
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/cloud-infrastructure/cloud-regions/cloud-region/{{cloud_owner_name}}/{{onap_cloud_region_id}}",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "cloud-infrastructure",
- "cloud-regions",
- "cloud-region",
- "{{cloud_owner_name}}",
- "{{onap_cloud_region_id}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Check Cloud-region creation",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "a386bb95-c0fa-49b2-9837-efcdc6b40a86",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "var jsonData = pm.response.json();",
- "var region_found = false;",
- "for (var i = 0; i < jsonData[\"cloud-region\"].length; i++) { ",
- " if (jsonData[\"cloud-region\"][i][\"cloud-region-id\"] === pm.environment.get(\"onap_cloud_region_id\")) {",
- " region_found = true;",
- " }",
- "}",
- "tests[\"Cloud-Region : \"+pm.environment.get(\"onap_cloud_region_id\")+\" found\"] = region_found === true;",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/cloud-infrastructure/cloud-regions",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "cloud-infrastructure",
- "cloud-regions"
- ]
- }
- },
- "response": []
- },
- {
- "name": "associate Cloud-region to a complex",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "ad16fc86-7954-434f-bc45-25710ebeee57",
- "exec": [
- "tests[\"Status code is 200 : cloud-region associated to complex\"] = pm.response.code === 200;",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "PUT",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"related-to\": \"complex\",\n \"related-link\": \"/aai/v16/cloud-infrastructure/complexes/complex/{{complex_name}}\",\n \"relationship-data\": [\n {\n \"relationship-key\": \"complex.physical-location-id\",\n \"relationship-value\": \"{{complex_name}}\"\n }\n ]\n }"
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/cloud-infrastructure/cloud-regions/cloud-region/{{cloud_owner_name}}/{{onap_cloud_region_id}}/relationship-list/relationship",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "cloud-infrastructure",
- "cloud-regions",
- "cloud-region",
- "{{cloud_owner_name}}",
- "{{onap_cloud_region_id}}",
- "relationship-list",
- "relationship"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check availability-zone in cloud region",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "4367f5c4-0bf0-4e09-bc58-9a8bd6578685",
- "exec": [
- "var availability_zone_found = false;",
- "if (pm.response.code === 404) {",
- " tests[pm.environment.get(\"availability_zone_name\")+\" does not exists\"] = true;",
- "}",
- "else {",
- " if (pm.response.code === 200) {",
- " tests[\"Status code is 200\"] = pm.response.code === 200;",
- " }",
- " var jsonData = pm.response.json();",
- " for (var i = 0; i < jsonData[\"availability-zone\"].length; i++) { ",
- " if (jsonData[\"availability-zone\"][i][\"availability-zone-name\"] === pm.environment.get(\"availability_zone_name\")) {",
- " availability_zone_found = true;",
- " }",
- " } ",
- " tests[\"Availability Zone : \"+pm.environment.get(\"availability_zone_name\")+\" found\"] = availability_zone_found === true;",
- "}",
- "",
- "",
- "",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/cloud-infrastructure/cloud-regions/cloud-region/{{cloud_owner_name}}/{{onap_cloud_region_id}}/availability-zones",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "cloud-infrastructure",
- "cloud-regions",
- "cloud-region",
- "{{cloud_owner_name}}",
- "{{onap_cloud_region_id}}",
- "availability-zones"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Add availability-zone in cloud region",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "4d1eee0e-6afd-44e8-b1cc-269a1249232d",
- "exec": [
- "pm.test(\"Status code is 201\", function () {",
- " pm.response.to.have.status(201);",
- "});"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "PUT",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"availability-zone-name\": \"{{availability_zone_name}}\",\n \"hypervisor-type\": \"{{hypervisor_type}}\"\n}"
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/cloud-infrastructure/cloud-regions/cloud-region/{{cloud_owner_name}}/{{onap_cloud_region_id}}/availability-zones/availability-zone/{{availability_zone_name}}",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "cloud-infrastructure",
- "cloud-regions",
- "cloud-region",
- "{{cloud_owner_name}}",
- "{{onap_cloud_region_id}}",
- "availability-zones",
- "availability-zone",
- "{{availability_zone_name}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check tenant in cloud region",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "4367f5c4-0bf0-4e09-bc58-9a8bd6578685",
- "exec": [
- "var tenant_found = false;",
- "if (pm.response.code === 404) {",
- " tests[pm.environment.get(\"tenant_name\")+\" does not exists in AAI\"] = true;",
- "}",
- "else {",
- " if (pm.response.code === 200) {",
- " tests[\"Status code is 200\"] = pm.response.code === 200;",
- " }",
- "var jsonData = pm.response.json();",
- "",
- "if(jsonData.hasOwnProperty('tenant'))",
- " {",
- " for (var i = 0; i < jsonData.tenant.length; i++) ",
- " { ",
- " if (jsonData.tenant[i]['tenant-id'] === pm.environment.get(\"tenant_id\")) ",
- " {",
- " tenant_found = true;",
- " }",
- " }",
- " if (tenant_found === true) ",
- " {",
- " tests[pm.environment.get(\"tenant_name\")+\" already exists, we skip creation\"] = true;",
- " postman.setNextRequest(\"check customer-service-tenant relations\");",
- " }",
- "",
- "if (tenant_found === false)",
- " {",
- " tests[pm.environment.get(\"tenant_name\")+\" does not exists\"] = true;",
- " }",
- "}",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/cloud-infrastructure/cloud-regions/cloud-region/{{cloud_owner_name}}/{{onap_cloud_region_id}}/tenants",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "cloud-infrastructure",
- "cloud-regions",
- "cloud-region",
- "{{cloud_owner_name}}",
- "{{onap_cloud_region_id}}",
- "tenants"
- ]
- }
- },
- "response": []
- },
- {
- "name": "add tenant to region",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "4d1eee0e-6afd-44e8-b1cc-269a1249232d",
- "exec": [
- "pm.test(\"Status code is 201\", function () {",
- " pm.response.to.have.status(201);",
- "});"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "PUT",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"tenant-id\": \"{{tenant_id}}\",\n \"tenant-name\": \"{{tenant_name}}\"\n}"
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/cloud-infrastructure/cloud-regions/cloud-region/{{cloud_owner_name}}/{{onap_cloud_region_id}}/tenants/tenant/{{tenant_id}}",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "cloud-infrastructure",
- "cloud-regions",
- "cloud-region",
- "{{cloud_owner_name}}",
- "{{onap_cloud_region_id}}",
- "tenants",
- "tenant",
- "{{tenant_id}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check tenant creation",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "716f0a8a-37b3-412f-8d7b-acef34597bee",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "var jsonData = pm.response.json();",
- "var tenant_found = false;",
- "for (var i = 0; i < jsonData.tenant.length; i++) { ",
- " if (jsonData.tenant[i][\"tenant-id\"] === pm.environment.get(\"tenant_id\")) {",
- " tenant_found = true;",
- " }",
- "}",
- "tests[pm.environment.get(\"tenant_name\")+\" found\"] = tenant_found === true;",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/cloud-infrastructure/cloud-regions/cloud-region/{{cloud_owner_name}}/{{onap_cloud_region_id}}/tenants",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "cloud-infrastructure",
- "cloud-regions",
- "cloud-region",
- "{{cloud_owner_name}}",
- "{{onap_cloud_region_id}}",
- "tenants"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check customer-service-tenant relations",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "587ed4e3-e181-49c1-9f21-08d366a52587",
- "exec": [
- "var relation_found = false;",
- "if (pm.response.code === 404) {",
- " tests[pm.environment.get(\"tenant_name\")+\" / \"+pm.environment.get(\"service\")+ \" relation does not exists\"] = true;",
- "}",
- "else {",
- " if (pm.response.code === 200) {",
- " tests[\"Status code is 200\"] = pm.response.code === 200;",
- " }",
- "",
- "var jsonData = pm.response.json();",
- "for (var i = 0; i < jsonData[\"service-subscription\"].length; i++) { ",
- " if (jsonData[\"service-subscription\"][i][\"service-type\"] === pm.environment.get(\"service\")) {",
- " relation_found = true;",
- " }",
- "}",
- "",
- "if (relation_found === false) {",
- " tests[pm.environment.get(\"tenant_name\")+\" / \"+pm.environment.get(\"service\")+ \" relation does not exists\"] = true;",
- "}",
- "",
- "else {",
- " tests[pm.environment.get(\"tenant_name\")+\" / \"+pm.environment.get(\"service\")+ \" already exists, we skip relation creation\"] = true;",
- " postman.setNextRequest(\"check customer-service-tenant relation creation\");",
- "}",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/business/customers/customer/{{customer_name}}/service-subscriptions?depth=all",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "business",
- "customers",
- "customer",
- "{{customer_name}}",
- "service-subscriptions"
- ],
- "query": [
- {
- "key": "depth",
- "value": "all"
- }
- ]
- }
- },
- "response": []
- },
- {
- "name": "Add service to customer",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "9a9da50d-49ce-4f9e-ae18-6131dce516eb",
- "exec": [
- "pm.test(\"Status code is 201\", function () {",
- " pm.response.to.have.status(201);",
- "});"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "PUT",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"service-id\": \"{{auto_service_id}}\"\n}"
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/business/customers/customer/{{customer_name}}/service-subscriptions/service-subscription/{{service}}",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "business",
- "customers",
- "customer",
- "{{customer_name}}",
- "service-subscriptions",
- "service-subscription",
- "{{service}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Add tenant to service-customer",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "9a9da50d-49ce-4f9e-ae18-6131dce516eb",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(201);",
- "});"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "PUT",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"related-to\": \"tenant\",\n \"related-link\": \"/aai/v16/cloud-infrastructure/cloud-regions/cloud-region/{{cloud_owner_name}}/{{onap_cloud_region_id}}/tenants/tenant/{{tenant_id}}\",\n \"relationship-data\": [\n {\n \"relationship-key\": \"cloud-region.cloud-owner\",\n \"relationship-value\": \"{{cloud_owner_name}}\"\n },\n {\n \"relationship-key\": \"cloud-region.cloud-region-id\",\n \"relationship-value\": \"{{onap_cloud_region_id}}\"\n },\n {\n \"relationship-key\": \"tenant.tenant-id\",\n \"relationship-value\": \"{{tenant_id}}\"\n }\n ],\n \"related-to-property\": [\n {\n \"property-key\": \"tenant.tenant-name\",\n \"property-value\": \"{{tenant_name}}\"\n }\n ]\n}"
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/business/customers/customer/{{customer_name}}/service-subscriptions/service-subscription/{{service}}/relationship-list/relationship",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "business",
- "customers",
- "customer",
- "{{customer_name}}",
- "service-subscriptions",
- "service-subscription",
- "{{service}}",
- "relationship-list",
- "relationship"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check customer-service-tenant relation creation",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "10e61ec5-243b-4ce6-b9b8-a29054df2504",
- "exec": [
- "tests[\"Status code is 200\"] = responseCode.code === 200;",
- "",
- "var jsonData = pm.response.json();",
- "var service_found = false;",
- "for (var i = 0; i < jsonData[\"service-subscription\"].length; i++) { ",
- " if (jsonData[\"service-subscription\"][i][\"service-type\"] === pm.environment.get(\"service\")) {",
- " service_found = true;",
- " }",
- "}",
- "tests[\"Service subscription : \"+pm.environment.get(\"service\")+\" found for customer : \"+pm.environment.get(\"customer_name\")] = service_found === true;",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "808b54e3-e563-4144-a1b9-e24e2ed93d4f"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/business/customers/customer/{{customer_name}}/service-subscriptions?depth=all",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "business",
- "customers",
- "customer",
- "{{customer_name}}",
- "service-subscriptions"
- ],
- "query": [
- {
- "key": "depth",
- "value": "all"
- }
- ]
- }
- },
- "response": []
- }
- ]
-} \ No newline at end of file
diff --git a/test/postman/10_instantiate_service_vnf_vfmodule.postman_collection.json b/test/postman/10_instantiate_service_vnf_vfmodule.postman_collection.json
deleted file mode 100644
index ff2a040d2..000000000
--- a/test/postman/10_instantiate_service_vnf_vfmodule.postman_collection.json
+++ /dev/null
@@ -1,1360 +0,0 @@
-{
- "info": {
- "_postman_id": "200601a1-b07a-4a07-ba56-a3fc357e9f55",
- "name": "10_instantiate_service_vnf_vfmodule",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "GET ExternalAPI-NBI component Status (HealthCheck)",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "ddeb2e4b-2ef6-4b58-8e5c-cc202fbb232a",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "var NBI_status = false;",
- "var jsonData = pm.response.json();",
- "",
- "if (jsonData.status === \"ok\") {",
- " NBI_status = true;",
- "}",
- "tests[\"NBI status OK\"] = NBI_status === true;",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-nbi}}/nbi/api/v4/status",
- "host": [
- "{{url-nbi}}"
- ],
- "path": [
- "nbi",
- "api",
- "v4",
- "status"
- ]
- }
- },
- "response": []
- },
- {
- "name": "GET all service models from Service Catalog API",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "d020751d-1ca5-4c47-af33-40faea633e62",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "var jsonData = pm.response.json();",
- "var service_found = false;",
- "for (var i = 0; i < jsonData.length; i++) { ",
- " if (jsonData[i].name === pm.environment.get(\"service\")) {",
- " service_found = true;",
- " pm.environment.set(\"auto_service_id\", \"\"+jsonData[i].id+\"\");",
- " }",
- "}",
- "tests[\"Service : \"+pm.environment.get(\"service\")+\" exists\"] = service_found === true;",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-nbi}}/nbi/api/v4/serviceSpecification",
- "host": [
- "{{url-nbi}}"
- ],
- "path": [
- "nbi",
- "api",
- "v4",
- "serviceSpecification"
- ]
- }
- },
- "response": []
- },
- {
- "name": "GET all service instances via ServiceInventory API",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "79e173a3-297e-467f-8c9b-be90035a91aa",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "var jsonData = pm.response.json();",
- "var service_instance_found = false;",
- "for (var i = 0; i < jsonData.length; i++) { ",
- " if (jsonData[i].name === pm.environment.get(\"service_instance_name\")) {",
- " service_instance_found = true;",
- " }",
- "}",
- "tests[\"Service instance : \"+pm.environment.get(\"service_instance_name\")+\" does not exist\"] = service_instance_found === false;"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-nbi}}/nbi/api/v4/service?relatedParty.id={{customer_name}}",
- "host": [
- "{{url-nbi}}"
- ],
- "path": [
- "nbi",
- "api",
- "v4",
- "service"
- ],
- "query": [
- {
- "key": "relatedParty.id",
- "value": "{{customer_name}}"
- }
- ]
- }
- },
- "response": []
- },
- {
- "name": "Add service instance via ServiceOrder API",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "9e3790c5-456a-44aa-9579-de3e9be2b61a",
- "exec": [
- "pm.test(\"Status code is 201\", function () {",
- " pm.response.to.have.status(201);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "tests[pm.environment.get(\"externalId\")+\" exists\"] = jsonData.externalId === pm.environment.get(\"externalId\");",
- "",
- "pm.environment.set(\"auto_service_order_id\", jsonData.id);",
- " "
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"externalId\": \"{{externalId}}\",\n \"priority\": \"1\",\n \"description\": \"{{service}} order for generic customer via Postman\",\n \"category\": \"Consumer\",\n \"requestedStartDate\": \"2018-04-26T08:33:37.299Z\",\n \"requestedCompletionDate\": \"2018-04-26T08:33:37.299Z\",\n \"relatedParty\": [\n {\n \"id\": \"{{customer_name}}\",\n \"role\": \"ONAPcustomer\",\n \"name\": \"{{customer_name}}\"\n }\n ],\n \"orderItem\": [\n {\n \"id\": \"1\",\n \"action\": \"add\",\n \"service\": {\n \"name\": \"{{service_instance_name}}\",\n \"serviceState\": \"active\",\n \"serviceSpecification\": {\n \"id\": \"{{auto_service_id}}\"\n }\n }\n }\n ]\n}"
- },
- "url": {
- "raw": "{{url-nbi}}/nbi/api/v4/serviceOrder",
- "host": [
- "{{url-nbi}}"
- ],
- "path": [
- "nbi",
- "api",
- "v4",
- "serviceOrder"
- ]
- }
- },
- "response": []
- },
- {
- "name": "GET previous serviceOrder after 10s",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "dc4fc1f9-cd6a-41c8-a972-06694d869384",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "tests[pm.environment.get(\"externalId\")+\" exists\"] = jsonData.externalId === pm.environment.get(\"externalId\");",
- "tests[\"service_order_id exists\"] = jsonData.id === pm.environment.get(\"auto_service_order_id\");",
- "tests[\"service Order state is : \"+jsonData.state] = jsonData.state === \"completed\";",
- "tests[\"Service Instance state is : \"+jsonData.orderItem[0].service.serviceState] = jsonData.orderItem[0].service.serviceState === \"active\";",
- "",
- "",
- "if(jsonData.state == \"completed\"){",
- " pm.environment.set(\"auto_service_instance_id\", \"\"+jsonData.orderItem[0].service.id+\"\");",
- "}",
- "if (jsonData.orderMessage !== null){",
- " tests[\"order message is : \" +jsonData.orderMessage[0].messageInformation] = jsonData.orderMessage[0].messageInformation === \"\";",
- " pm.environment.set(\"auto_service_messageInformation\", \"\"+jsonData.orderMessage[0].messageInformation+\"\");",
- " }",
- ""
- ],
- "type": "text/javascript"
- }
- },
- {
- "listen": "prerequest",
- "script": {
- "id": "b2473649-e045-465e-84cc-0725d83d57ed",
- "exec": [
- " var date = new Date();",
- " var curDate = null;",
- " do {",
- " curDate = new Date();",
- " }",
- " while (curDate - date < 10000);"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-nbi}}/nbi/api/v4/serviceOrder/{{auto_service_order_id}}",
- "host": [
- "{{url-nbi}}"
- ],
- "path": [
- "nbi",
- "api",
- "v4",
- "serviceOrder",
- "{{auto_service_order_id}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "SO orchestrationRequests",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "1536607a-13f7-4c9a-97a6-af7f5dbc0593",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "var jsonData = pm.response.json();",
- "",
- "if (jsonData.requestList.length > 0){",
- " tests[\"requestState is : \"+jsonData.requestList[0].request.requestStatus.requestState] = jsonData.requestList[0].request.requestStatus.requestState === \"COMPLETE\";",
- " tests[\"statusMessage is : \"+jsonData.requestList[0].request.requestStatus.statusMessage] = jsonData.requestList[0].request.requestStatus.statusMessage === \"Service Instance was created successfully.\";",
- "}",
- "else {",
- " tests[\"no order for that service instance\"] = true === false;",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Authorization",
- "value": "Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA=="
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-so}}/onap/so/infra/orchestrationRequests/v7?filter=serviceInstanceName:EQUALS:{{service_instance_name}}",
- "host": [
- "{{url-so}}"
- ],
- "path": [
- "onap",
- "so",
- "infra",
- "orchestrationRequests",
- "v7"
- ],
- "query": [
- {
- "key": "filter",
- "value": "serviceInstanceName:EQUALS:{{service_instance_name}}"
- }
- ]
- }
- },
- "response": []
- },
- {
- "name": "Check service instance in inventory via NBI request",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "f1cbf2f1-1ed2-4d21-9483-46602241705d",
- "exec": [
- "tests[\"Service Instance : \"+ pm.environment.get(\"service_instance_name\") +\" exists in AAI inventory\"] = pm.response.code === 200;",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-nbi}}/nbi/api/v4/service/{{auto_service_instance_id}}?relatedParty.id={{customer_name}}&serviceSpecification.name={{service}}",
- "host": [
- "{{url-nbi}}"
- ],
- "path": [
- "nbi",
- "api",
- "v4",
- "service",
- "{{auto_service_instance_id}}"
- ],
- "query": [
- {
- "key": "relatedParty.id",
- "value": "{{customer_name}}"
- },
- {
- "key": "serviceSpecification.name",
- "value": "{{service}}"
- }
- ]
- }
- },
- "response": []
- },
- {
- "name": "Instantiate vnf",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "e779f618-dd36-474f-802d-a36abdf69708",
- "exec": [
- "pm.test(\"Status code is 202\", function () {",
- " pm.response.to.have.status(202);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "pm.environment.set(\"auto_so_request_id\", \"\"+jsonData.requestReferences.requestId+\"\");",
- "pm.environment.set(\"auto_vnf_instance_id\", \"\"+jsonData.requestReferences.instanceId+\"\");",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Accept",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Content-Type",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Authorization",
- "value": "Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA==",
- "type": "text"
- },
- {
- "key": "X-ONAP-PartnerName",
- "value": "NBI",
- "type": "text"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"requestDetails\": {\n \"requestInfo\": {\n \"instanceName\": \"{{vnf_instance_name}}\",\n \"source\": \"VID\",\n \"suppressRollback\": false,\n \"requestorId\": \"test\",\n\t \"productFamilyId\": \"1234\"\n },\n\t\"modelInfo\": {\n\t\t\"modelType\": \"vnf\",\n\t\t\"modelInvariantId\": \"{{auto_vf_invariant_uuid}}\",\n\t\t\t\"modelVersionId\": \"{{auto_vf_uuid}}\",\n\t\t\t\"modelName\": \"{{vf_name}}\",\n\t\t\t\"modelVersion\": \"1.0\",\n\t\t\t\"modelCustomizationId\": \"{{auto_vf_customization_uuid}}\",\n\t\t\t\"modelCustomizationName\": \"{{auto_vf_name_for_model}}\"\n\t\t},\n \"requestParameters\": {\n \"userParams\": [],\n \"aLaCarte\": true,\n\t \"testApi\": \"VNF_API\"\n },\n \"cloudConfiguration\": {\n \"lcpCloudRegionId\": \"{{onap_cloud_region_id}}\",\n \"cloudOwner\": \"{{cloud_owner_name}}\",\n \"tenantId\": \"{{tenant_id}}\"\n },\n\t\t\"lineOfBusiness\": {\n\t\t\t\"lineOfBusinessName\": \"{{lineofbusiness}}\"\n\t\t},\n\t\t\"platform\": {\n\t\t\t\"platformName\": \"{{platform}}\"\n\t\t},\n\t\t\"relatedInstanceList\": [{\n\t\t\t\"relatedInstance\": {\n\t\t\t\t\"instanceId\": \"{{auto_service_instance_id}}\",\n\t\t\t\t\"modelInfo\": {\n\t\t\t\t\t\"modelType\": \"service\",\n\t\t\t\t\t\"modelName\": \"{{service}}\",\n\t\t\t\t\t\"modelInvariantId\": \"{{auto_service_invariant_uuid}}\",\n\t\t\t\t\t\"modelVersion\": \"1.0\",\n\t\t\t\t\t\"modelVersionId\": \"{{auto_service_uuid}}\"\n\t\t\t\t}\n\t\t\t}\n\t\t}]\n }\n}\n\n"
- },
- "url": {
- "raw": "{{url-so}}/onap/so/infra/serviceInstantiation/v7/serviceInstances/{{auto_service_instance_id}}/vnfs",
- "host": [
- "{{url-so}}"
- ],
- "path": [
- "onap",
- "so",
- "infra",
- "serviceInstantiation",
- "v7",
- "serviceInstances",
- "{{auto_service_instance_id}}",
- "vnfs"
- ]
- }
- },
- "response": []
- },
- {
- "name": "SO orchestrationRequests after 10s",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "750de5df-76ee-42c6-8f04-bfb9e1a9ea33",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "var jsonData = pm.response.json();",
- "",
- "pm.environment.set(\"auto_vnf_instance_id\", \"\"+jsonData.request.instanceReferences.vnfInstanceId+\"\");",
- "tests[\"requestState is : \"+jsonData.request.requestStatus.requestState] = jsonData.request.requestStatus.requestState === \"COMPLETE\";",
- "tests[\"statusMessage is : \"+jsonData.request.requestStatus.statusMessage] = jsonData.request.requestStatus.statusMessage === \"Vnf has been created successfully.\";",
- ""
- ],
- "type": "text/javascript"
- }
- },
- {
- "listen": "prerequest",
- "script": {
- "id": "15a5b018-1d8a-4326-9810-cf94a1f4d80f",
- "exec": [
- " var date = new Date();",
- " var curDate = null;",
- " do {",
- " curDate = new Date();",
- " }",
- " while (curDate - date < 10000);"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Authorization",
- "value": "Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA=="
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-so}}/onap/so/infra/orchestrationRequests/v7/{{auto_so_request_id}}",
- "host": [
- "{{url-so}}"
- ],
- "path": [
- "onap",
- "so",
- "infra",
- "orchestrationRequests",
- "v7",
- "{{auto_so_request_id}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check vnf instance in inventory via AAI request",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "a08edc1f-6b8c-4a7e-91ce-6d774d0dfb1e",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "var jsonData = pm.response.json();",
- "var vnf_instance_found = false;",
- "for (var i = 0; i < jsonData[\"generic-vnf\"].length; i++) { ",
- " if (jsonData[\"generic-vnf\"][i][\"vnf-name\"] === pm.environment.get(\"vnf_instance_name\")) {",
- " vnf_instance_found = true;",
- " }",
- "}",
- "tests[\"VNF Instance : \"+ pm.environment.get(\"vnf_instance_name\") +\" exists in AAI inventory\"] = vnf_instance_found === true;"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/network/generic-vnfs",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "network",
- "generic-vnfs"
- ]
- }
- },
- "response": []
- },
- {
- "name": "preload for VFmodule",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "62a8ef7f-7fde-423c-8b70-5f74bb6bc00b",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "0a3f6713-ba96-4971-a6f8-c2da85a3176e"
- },
- {
- "key": "X-FromAppId",
- "value": "API client"
- },
- {
- "key": "Authorization",
- "value": "Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ=="
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"input\": {\n \"request-information\": {\n \"notification-url\": \"onap.org\",\n \"order-number\": \"1\",\n \"order-version\": \"1\",\n \"request-action\": \"PreloadVNFRequest\",\n \"request-id\": \"test\"\n },\n \"sdnc-request-header\": {\n \"svc-action\": \"reserve\",\n \"svc-notification-url\": \"http:\\/\\/onap.org:8080\\/adapters\\/rest\\/SDNCNotify\",\n \"svc-request-id\": \"test\"\n },\n \"vnf-topology-information\": {\n \"vnf-assignments\": {\n \"availability-zones\": [],\n \"vnf-networks\": [],\n \"vnf-vms\": []\n },\n \"vnf-parameters\": [],\n \"vnf-topology-identifier\": {\n \"generic-vnf-name\": \"{{vnf_instance_name}}\",\n \"generic-vnf-type\": \"{{auto_vf_name_for_model}}\",\n \"service-type\": \"{{auto_service_instance_id}}\",\n \"vnf-name\": \"{{vfmodule_instance_name}}\",\n \"vnf-type\": \"{{auto_vf_module_model_name}}\"\n }\n }\n }\n}\n\n"
- },
- "url": {
- "raw": "{{url-sdnc}}/restconf/operations/VNF-API:preload-vnf-topology-operation",
- "host": [
- "{{url-sdnc}}"
- ],
- "path": [
- "restconf",
- "operations",
- "VNF-API:preload-vnf-topology-operation"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Instantiate vf-module",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "61781baf-f6d4-4229-95d0-32e85cde3e82",
- "exec": [
- "pm.test(\"Status code is 202\", function () {",
- " pm.response.to.have.status(202);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "if (responseCode.code === 202){",
- " pm.environment.set(\"auto_so_request_id\", \"\"+jsonData.requestReferences.requestId+\"\");",
- " pm.environment.set(\"auto_vfmodule_instance_id\", \"\"+jsonData.requestReferences.instanceId+\"\");",
- "}"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Accept",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Content-Type",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Authorization",
- "value": "Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA==",
- "type": "text"
- },
- {
- "key": "X-ONAP-PartnerName",
- "value": "NBI",
- "type": "text"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"requestDetails\": {\n \"requestInfo\": {\n \"instanceName\": \"{{vfmodule_instance_name}}\",\n \"source\": \"VID\",\n \"suppressRollback\": false,\n \"requestorId\": \"test\"\n },\n\t\"modelInfo\": {\n\t\t\"modelType\": \"vfModule\",\n\t\t\"modelInvariantId\": \"{{auto_vf_module_model_invariant_uuid}}\",\n\t\t\"modelVersionId\": \"{{auto_vf_module_model_uuid}}\",\n\t\t\"modelName\": \"{{auto_vf_module_model_name}}\",\n\t\t\"modelVersion\": \"1\",\n\t\t\"modelCustomizationId\": \"{{auto_vf_module_model_customization_uuid}}\",\n\t\t\"modelCustomizationName\": \"{{auto_vf_module_model_name}}\"\n\t},\n \t\"requestParameters\": {\n \t\t\"userParams\": [],\n \t\t\"testApi\": \"VNF_API\",\n \t\t\"usePreload\": true\n \t},\n \"cloudConfiguration\": {\n \"lcpCloudRegionId\": \"{{onap_cloud_region_id}}\",\n \"cloudOwner\": \"{{cloud_owner_name}}\",\n \"tenantId\": \"{{tenant_id}}\"\n },\n\t\t\"relatedInstanceList\": [{\n\t\t\t\"relatedInstance\": {\n\t\t\t\t\"instanceId\": \"{{auto_service_instance_id}}\",\n\t\t\t\t\"modelInfo\": {\n\t\t\t\t\t\"modelType\": \"service\",\n\t\t\t\t\t\"modelName\": \"{{service}}\",\n\t\t\t\t\t\"modelInvariantId\": \"{{auto_service_invariant_uuid}}\",\n\t\t\t\t\t\"modelVersion\": \"1.0\",\n\t\t\t\t\t\"modelVersionId\": \"{{auto_service_uuid}}\"\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\t{\n\t\t\t\"relatedInstance\": {\n\t\t\t\t\"instanceId\": \"{{auto_vnf_instance_id}}\",\n\t\t\t\t\"modelInfo\": {\n\t\t\t\t\t\"modelType\": \"vnf\",\n\t\t\t\t\t\"modelName\": \"{{vf_name}}\",\n\t\t\t\t\t\"modelInvariantId\": \"{{auto_vf_invariant_uuid}}\",\n\t\t\t\t\t\"modelVersion\": \"1.0\",\n\t\t\t\t\t\"modelVersionId\": \"{{auto_vf_uuid}}\",\n\t\t\t\t\t\"modelCustomizationId\": \"{{auto_vf_customization_uuid}}\",\n\t\t\t\t\t\"modelCustomizationName\": \"{{auto_vf_name_for_model}}\"\n\t\t\t\t}\n\t\t\t}\n\t\t}]\n }\n}\n\n"
- },
- "url": {
- "raw": "{{url-so}}/onap/so/infra/serviceInstantiation/v7/serviceInstances/{{auto_service_instance_id}}/vnfs/{{auto_vnf_instance_id}}/vfModules",
- "host": [
- "{{url-so}}"
- ],
- "path": [
- "onap",
- "so",
- "infra",
- "serviceInstantiation",
- "v7",
- "serviceInstances",
- "{{auto_service_instance_id}}",
- "vnfs",
- "{{auto_vnf_instance_id}}",
- "vfModules"
- ]
- }
- },
- "response": []
- },
- {
- "name": "SO orchestrationRequests after 120s",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "ef3be415-7453-4d2d-91ce-de6e2df05dbe",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "var jsonData = pm.response.json();",
- "",
- "pm.environment.set(\"auto_vfmodule_instance_id\", \"\"+jsonData.request.instanceReferences.vfModuleInstanceId+\"\");",
- "tests[\"requestState is : \"+jsonData.request.requestStatus.requestState] = jsonData.request.requestStatus.requestState === \"COMPLETE\";",
- "tests[\"statusMessage is : \"+jsonData.request.requestStatus.statusMessage] = jsonData.request.requestStatus.statusMessage === \"Vf Module has been created successfully.\";"
- ],
- "type": "text/javascript"
- }
- },
- {
- "listen": "prerequest",
- "script": {
- "id": "8f7cbf38-cb21-45af-9648-1915c92bcb12",
- "exec": [
- " var date = new Date();",
- " var curDate = null;",
- " do {",
- " curDate = new Date();",
- " }",
- " while (curDate - date < 10000);"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Authorization",
- "value": "Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA=="
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-so}}/onap/so/infra/orchestrationRequests/v7/{{auto_so_request_id}}",
- "host": [
- "{{url-so}}"
- ],
- "path": [
- "onap",
- "so",
- "infra",
- "orchestrationRequests",
- "v7",
- "{{auto_so_request_id}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check vfmodule instance in inventory via AAI request",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "81203009-3d68-426d-9491-1276983693e5",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "var jsonData = pm.response.json();",
- "var vfmodule_instance_found = false;",
- "for (var i = 0; i < jsonData[\"vf-module\"].length; i++) { ",
- " if (jsonData[\"vf-module\"][i][\"vf-module-name\"] === pm.environment.get(\"vfmodule_instance_name\")) {",
- " vfmodule_instance_found = true;",
- " }",
- "}",
- "tests[\"vfmodule Instance : \"+ pm.environment.get(\"vfmodule_instance_name\") +\" exists in AAI inventory\"] = vfmodule_instance_found === true;"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/network/generic-vnfs/generic-vnf/{{auto_vnf_instance_id}}/vf-modules",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "network",
- "generic-vnfs",
- "generic-vnf",
- "{{auto_vnf_instance_id}}",
- "vf-modules"
- ]
- }
- },
- "response": []
- },
- {
- "name": "preload for Virtual Link",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "62a8ef7f-7fde-423c-8b70-5f74bb6bc00b",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "0a3f6713-ba96-4971-a6f8-c2da85a3176e"
- },
- {
- "key": "X-FromAppId",
- "value": "API client"
- },
- {
- "key": "Authorization",
- "value": "Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ=="
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"input\": {\n \"request-information\": {\n \"request-id\": \"postman001\",\n \"notification-url\": \"http://so.onap.org\",\n \"order-number\": \"postman001\",\n \"request-sub-action\": \"SUPP\",\n \"request-action\": \"PreloadNetworkRequest\",\n \"source\": \"postman\",\n \"order-version\": \"1.0\"\n },\n \"network-topology-information\": {\n \"network-policy\": [],\n \"route-table-reference\": [],\n \"vpn-bindings\": [],\n \"network-topology-identifier\": {\n \"network-role\": \"integration_test_net\",\n \"network-technology\": \"neutron\",\n \"service-type\": \"{{service}}\",\n \"network-name\": \"rr01\",\n \"network-type\": \"Generic NeutronNet\"\n },\n \"provider-network-information\": {\n \"is-external-network\": \"false\",\n \"is-provider-network\": \"false\",\n \"is-shared-network\": \"false\"\n },\n \"subnets\": [\n {\n\t\t \"subnet-name\": \"rr01\",\n\t\t \"subnet-role\": \"OAM\",\n \"start-address\": \"192.168.90.0\",\n \"cidr-mask\": \"24\",\n \"ip-version\": \"4\",\n \"dhcp-enabled\": \"Y\",\n\t\t \"dhcp-start-address\": \"\",\n\t\t \"dhcp-end-address\": \"\",\n \"gateway-address\": \"192.168.90.1\",\n\t\t \"host-routes\":[]\n }\n ]\n },\n \"sdnc-request-header\": {\n \"svc-action\": \"reserve\",\n \"svc-notification-url\": \"http://so.onap.org\",\n \"svc-request-id\": \"postman001\"\n }\n }\n}\n\n"
- },
- "url": {
- "raw": "{{url-sdnc}}/restconf/operations/VNF-API:preload-network-topology-operation",
- "host": [
- "{{url-sdnc}}"
- ],
- "path": [
- "restconf",
- "operations",
- "VNF-API:preload-network-topology-operation"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Instantiate Generic Neutron Virtual Link",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "030a6341-9ba8-4dc4-bbe4-9818c34d5f1c",
- "exec": [
- "pm.test(\"Status code is 202\", function () {",
- " pm.response.to.have.status(202);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "pm.environment.set(\"auto_so_request_id\", \"\"+jsonData.requestReferences.requestId+\"\");",
- "pm.environment.set(\"auto_virtual_link_instance_id\", \"\"+jsonData.requestReferences.instanceId+\"\");",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Accept",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Content-Type",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Authorization",
- "value": "Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA==",
- "type": "text"
- },
- {
- "key": "X-ONAP-PartnerName",
- "value": "NBI",
- "type": "text"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n\t\"requestDetails\": {\n\t\t\"requestInfo\": {\n\t\t\t\"instanceName\": \"{{virtual_link_instance_name}}\",\n\t\t\t\"source\": \"VID\",\n\t\t\t\"suppressRollback\": false,\n\t\t\t\"requestorId\": \"demo\"\n\t\t},\n \t\t\"modelInfo\": {\n \t\t\t\"modelType\": \"network\",\n \t\t\t\"modelInvariantId\": \"{{auto_virtual_link_invariant_uuid}}\",\n \t\t\t\"modelVersionId\": \"{{auto_virtual_link_uuid}}\",\n \t\t\t\"modelName\": \"Generic NeutronNet\",\n \t\t\t\"modelVersion\": \"1.0\",\n \t\t\t\"modelCustomizationId\": \"{{auto_virtual_link_customization_uuid}}\",\n \t\t\t\"modelCustomizationName\": \"Generic NeutronNet 0\"\n \t\t},\n \"requestParameters\": {\n \"userParams\": [],\n \"aLaCarte\": true,\n\t \"testApi\": \"VNF_API\"\n },\n \"cloudConfiguration\": {\n \"cloudOwner\": \"{{cloud_owner_name}}\",\n \"lcpCloudRegionId\": \"{{onap_cloud_region_id}}\",\n \"tenantId\": \"{{tenant_id}}\"\n },\n\t\t\"lineOfBusiness\": {\n\t\t\t\"lineOfBusinessName\": \"{{lineofbusiness}}\"\n\t\t},\n\t\t\"platform\": {\n\t\t\t\"platformName\": \"{{platform}}\"\n\t\t},\n \t\t\"relatedInstanceList\": [{\n \t\t\t\"relatedInstance\": {\n \t\t\t\t\"instanceId\": \"{{auto_service_instance_id}}\",\n \t\t\t\t\"modelInfo\": {\n \t\t\t\t\t\"modelType\": \"service\",\n \t\t\t\t\t\"modelName\": \"{{service}}\",\n \t\t\t\t\t\"modelInvariantId\": \"{{auto_service_invariant_uuid}}\",\n \t\t\t\t\t\"modelVersion\": \"1.0\",\n \t\t\t\t\t\"modelVersionId\": \"{{auto_service_uuid}}\"\n \t\t\t\t}\n \t\t\t}\n \t\t}]\n }\n}\n\n"
- },
- "url": {
- "raw": "{{url-so}}/onap/so/infra/serviceInstantiation/v7/serviceInstances/{{auto_service_instance_id}}/networks",
- "host": [
- "{{url-so}}"
- ],
- "path": [
- "onap",
- "so",
- "infra",
- "serviceInstantiation",
- "v7",
- "serviceInstances",
- "{{auto_service_instance_id}}",
- "networks"
- ]
- }
- },
- "response": []
- },
- {
- "name": "SO orchestrationRequests after 10s",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "ef3be415-7453-4d2d-91ce-de6e2df05dbe",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "var jsonData = pm.response.json();",
- "",
- "pm.environment.set(\"auto_virtual_link_instance_id\", \"\"+jsonData.request.instanceReferences.networkInstanceId+\"\");",
- "tests[\"requestState is : \"+jsonData.request.requestStatus.requestState] = jsonData.request.requestStatus.requestState === \"COMPLETE\";",
- "tests[\"statusMessage is : \"+jsonData.request.requestStatus.statusMessage] = jsonData.request.requestStatus.statusMessage === \"Network has been created successfully.\";"
- ],
- "type": "text/javascript"
- }
- },
- {
- "listen": "prerequest",
- "script": {
- "id": "8f7cbf38-cb21-45af-9648-1915c92bcb12",
- "exec": [
- " var date = new Date();",
- " var curDate = null;",
- " do {",
- " curDate = new Date();",
- " }",
- " while (curDate - date < 10000);"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Authorization",
- "value": "Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA=="
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-so}}/onap/so/infra/orchestrationRequests/v7/{{auto_so_request_id}}",
- "host": [
- "{{url-so}}"
- ],
- "path": [
- "onap",
- "so",
- "infra",
- "orchestrationRequests",
- "v7",
- "{{auto_so_request_id}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "GET virtual link in AAI by id",
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/network/l3-networks/l3-network/{{auto_virtual_link_instance_id}}",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "network",
- "l3-networks",
- "l3-network",
- "{{auto_virtual_link_instance_id}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "GET subnets of network AAI by id",
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/network/l3-networks/l3-network/{{auto_virtual_link_instance_id}}/subnets",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "network",
- "l3-networks",
- "l3-network",
- "{{auto_virtual_link_instance_id}}",
- "subnets"
- ]
- }
- },
- "response": []
- },
- {
- "name": "SO Instantiate service",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "e779f618-dd36-474f-802d-a36abdf69708",
- "exec": [
- "pm.test(\"Status code is 202\", function () {",
- " pm.response.to.have.status(202);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "pm.environment.set(\"auto_so_request_id\", \"\"+jsonData.requestReferences.requestId+\"\");",
- "pm.environment.set(\"auto_service_instance_id\", \"\"+jsonData.requestReferences.instanceId+\"\");",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Accept",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Content-Type",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Authorization",
- "value": "Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA==",
- "type": "text"
- },
- {
- "key": "X-ONAP-PartnerName",
- "value": "NBI",
- "type": "text"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n\t\"requestDetails\": {\n\t\t\"requestInfo\": {\n\t\t\t\"instanceName\": \"{{service_instance_name}}\",\n\t\t\t\"source\": \"VID\",\n\t\t\t\"suppressRollback\": false,\n\t\t\t\"requestorId\": \"demo\"\n\t\t},\n\t\t\"modelInfo\": {\n\t\t\t\"modelType\": \"service\",\n\t\t\t\"modelInvariantId\": \"{{auto_service_invariant_uuid}}\",\n\t\t\t\"modelVersionId\": \"{{auto_service_uuid}}\",\n\t\t\t\"modelName\": \"{{service}}\",\n\t\t\t\"modelVersion\": \"1.0\"\n\t\t},\n \"cloudConfiguration\": {\n \"tenantId\": \"{{tenant_id}}\",\n \"cloudOwner\": \"{{cloud_owner_name}}\",\n \"lcpCloudRegionId\": \"{{onap_cloud_region_id}}\"\n },\n\t\t\"requestParameters\": {\n\t\t\t\"userParams\": [],\n\t\t\t\"testApi\": \"VNF_API\",\n\t\t\t\"subscriptionServiceType\": \"{{service}}\",\n\t\t\t\"aLaCarte\": true\n\t\t},\n\t\t\"subscriberInfo\": {\n\t\t\t\"globalSubscriberId\": \"{{customer_name}}\"\n\t\t},\n\t\t\"project\": {\n\t\t\t\"projectName\": \"{{project}}\"\n\t\t},\n\t\t\"owningEntity\": {\n\t\t\t\"owningEntityId\": \"{{auto_owning_entity_id}}\",\n\t\t\t\"owningEntityName\": \"{{owning_entity}}\"\n\t\t}\n\t}\n}"
- },
- "url": {
- "raw": "{{url-so}}/onap/so/infra/serviceInstantiation/v7/serviceInstances",
- "host": [
- "{{url-so}}"
- ],
- "path": [
- "onap",
- "so",
- "infra",
- "serviceInstantiation",
- "v7",
- "serviceInstances"
- ]
- }
- },
- "response": []
- },
- {
- "name": "SO orchestrationRequests after 10s",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "ef3be415-7453-4d2d-91ce-de6e2df05dbe",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "var jsonData = pm.response.json();",
- "",
- "pm.environment.set(\"auto_virtual_link_instance_id\", \"\"+jsonData.request.instanceReferences.networkInstanceId+\"\");",
- "tests[\"requestState is : \"+jsonData.request.requestStatus.requestState] = jsonData.request.requestStatus.requestState === \"COMPLETE\";",
- "tests[\"statusMessage is : \"+jsonData.request.requestStatus.statusMessage] = jsonData.request.requestStatus.statusMessage === \"Network has been created successfully.\";"
- ],
- "type": "text/javascript"
- }
- },
- {
- "listen": "prerequest",
- "script": {
- "id": "8f7cbf38-cb21-45af-9648-1915c92bcb12",
- "exec": [
- " var date = new Date();",
- " var curDate = null;",
- " do {",
- " curDate = new Date();",
- " }",
- " while (curDate - date < 10000);"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Authorization",
- "value": "Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA=="
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-so}}/onap/so/infra/orchestrationRequests/v7/{{auto_so_request_id}}",
- "host": [
- "{{url-so}}"
- ],
- "path": [
- "onap",
- "so",
- "infra",
- "orchestrationRequests",
- "v7",
- "{{auto_so_request_id}}"
- ]
- }
- },
- "response": []
- }
- ]
-} \ No newline at end of file
diff --git a/test/postman/11_delete_instances.postman_collection.json b/test/postman/11_delete_instances.postman_collection.json
deleted file mode 100644
index 9fc09b687..000000000
--- a/test/postman/11_delete_instances.postman_collection.json
+++ /dev/null
@@ -1,759 +0,0 @@
-{
- "info": {
- "_postman_id": "7e53b9c4-273b-4012-81f3-0634173626c2",
- "name": "11_delete_instances",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "SO Delete vf-module",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "2c20c606-a034-47e0-8a66-fc4f1b8ed1c8",
- "exec": [
- "pm.test(\"Status code is 202\", function () {",
- " pm.response.to.have.status(202);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "pm.environment.set(\"auto_so_request_id\", \"\"+jsonData.requestReferences.requestId+\"\");",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "DELETE",
- "header": [
- {
- "key": "Accept",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Content-Type",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Authorization",
- "value": "Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA==",
- "type": "text"
- },
- {
- "key": "X-ONAP-PartnerName",
- "value": "NBI",
- "type": "text"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"requestDetails\": {\n \"requestInfo\": {\n \"source\": \"VID\",\n \"requestorId\": \"test\"\n },\n \"modelInfo\": {\n \"modelType\": \"vfModule\",\n \"modelInvariantId\": \"{{auto_vf_module_model_invariant_uuid}}\",\n \"modelVersionId\": \"{{auto_vf_module_model_uuid}}\",\n \"modelName\": \"{{auto_vf_module_model_name}}\",\n \"modelVersion\": \"1\",\n \"modelCustomizationId\": \"{{auto_vf_module_model_customization_uuid}}\",\n \"modelCustomizationName\": \"{{auto_vf_module_model_name}}\"\n },\n \"requestParameters\": {\n \"testApi\": \"VNF_API\"\n },\n \"cloudConfiguration\": {\n \"cloudOwner\": \"{{cloud_owner_name}}\",\n \"lcpCloudRegionId\": \"{{onap_cloud_region_id}}\",\n \"tenantId\": \"{{tenant_id}}\"\n }\n }\n}"
- },
- "url": {
- "raw": "{{url-so}}/onap/so/infra/serviceInstantiation/v7/serviceInstances/{{auto_service_instance_id}}/vnfs/{{auto_vnf_instance_id}}/vfModules/{{auto_vfmodule_instance_id}}",
- "host": [
- "{{url-so}}"
- ],
- "path": [
- "onap",
- "so",
- "infra",
- "serviceInstantiation",
- "v7",
- "serviceInstances",
- "{{auto_service_instance_id}}",
- "vnfs",
- "{{auto_vnf_instance_id}}",
- "vfModules",
- "{{auto_vfmodule_instance_id}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "SO orchestrationRequests after 30s",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "2ca20cdd-fbb0-46a3-b6af-eb36d388448c",
- "exec": [
- "",
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "var jsonData = pm.response.json();",
- "",
- "tests[\"requestState is : \"+jsonData.request.requestStatus.requestState] = jsonData.request.requestStatus.requestState === \"COMPLETE\";",
- "",
- "tests[\"statusMessage is : \"+jsonData.request.requestStatus.statusMessage] = jsonData.request.requestStatus.statusMessage === \"Vf Module has been deleted successfully.\";"
- ],
- "type": "text/javascript"
- }
- },
- {
- "listen": "prerequest",
- "script": {
- "id": "0fc6bd97-ca45-4caa-bdc5-346d661a5409",
- "exec": [
- " var date = new Date();",
- " var curDate = null;",
- " do {",
- " curDate = new Date();",
- " }",
- " while (curDate - date < 10000);"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Authorization",
- "value": "Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA=="
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-so}}/onap/so/infra/orchestrationRequests/v7/{{auto_so_request_id}}",
- "host": [
- "{{url-so}}"
- ],
- "path": [
- "onap",
- "so",
- "infra",
- "orchestrationRequests",
- "v7",
- "{{auto_so_request_id}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check vfmodule instance in inventory via AAI request",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "6244395e-23bd-4d4b-9f31-cd9b1e4102f6",
- "exec": [
- "",
- "var jsonData = pm.response.json();",
- "",
- "",
- "var vfmodule_instance_found = false;",
- "",
- "if (pm.response.code === 200){",
- " for (var i = 0; i < jsonData[\"vf-module\"].length; i++) { ",
- " if (jsonData[\"vf-module\"][i][\"vf-module-name\"] === pm.environment.get(\"vfmodule_instance_name\")) {",
- " vfmodule_instance_found = true;",
- " }",
- " }",
- "}",
- "",
- "",
- "tests[\"vfmodule Instance : \"+ pm.environment.get(\"vfmodule_instance_name\") +\" no longer exists in AAI inventory\"] = vfmodule_instance_found === false;"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/network/generic-vnfs/generic-vnf/{{auto_vnf_instance_id}}/vf-modules",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "network",
- "generic-vnfs",
- "generic-vnf",
- "{{auto_vnf_instance_id}}",
- "vf-modules"
- ]
- }
- },
- "response": []
- },
- {
- "name": "SO Delete vnf_instance",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "eab54e34-7898-452e-ad8e-6ac68c3efd36",
- "exec": [
- "pm.test(\"Status code is 202\", function () {",
- " pm.response.to.have.status(202);",
- "});",
- "var jsonData = pm.response.json();",
- "",
- "pm.environment.set(\"auto_so_request_id\", \"\"+jsonData.requestReferences.requestId+\"\");",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "DELETE",
- "header": [
- {
- "key": "Accept",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Content-Type",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Authorization",
- "value": "Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA==",
- "type": "text"
- },
- {
- "key": "X-ONAP-PartnerName",
- "value": "NBI",
- "type": "text"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n\t\"requestDetails\": {\n\t\t\"requestInfo\": {\n\t\t\t\"source\": \"VID\",\n\t\t\t\"requestorId\": \"test\"\n\t\t},\n\t\t\"modelInfo\": {\n\t\t\t\"modelType\": \"vnf\",\n\t\t\t\"modelInvariantId\": \"{{auto_vf_invariant_uuid}}\",\n\t\t\t\"modelVersionId\": \"{{auto_vf_uuid}}\",\n\t\t\t\"modelName\": \"{{vf_name}}\",\n\t\t\t\"modelVersion\": \"1.0\",\n\t\t\t\"modelCustomizationId\": \"{{auto_vf_customization_id}}\",\n\t\t\t\"modelCustomizationName\": \"{{vf_name}} 0\"\n\t\t},\n\t\t\"requestParameters\": {\n\t\t\t\"testApi\": \"VNF_API\"\n\t\t},\n \"cloudConfiguration\": {\n \"cloudOwner\": \"{{cloud_owner_name}}\",\n \"lcpCloudRegionId\": \"{{onap_cloud_region_id}}\",\n \"tenantId\": \"{{tenant_id}}\"\n }\n\t}\n}\n\n"
- },
- "url": {
- "raw": "{{url-so}}/onap/so/infra/serviceInstantiation/v7/serviceInstances/{{auto_service_instance_id}}/vnfs/{{auto_vnf_instance_id}}",
- "host": [
- "{{url-so}}"
- ],
- "path": [
- "onap",
- "so",
- "infra",
- "serviceInstantiation",
- "v7",
- "serviceInstances",
- "{{auto_service_instance_id}}",
- "vnfs",
- "{{auto_vnf_instance_id}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "SO Delete network",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "eab54e34-7898-452e-ad8e-6ac68c3efd36",
- "exec": [
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "DELETE",
- "header": [
- {
- "key": "Accept",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Content-Type",
- "value": "application/json",
- "type": "text"
- },
- {
- "key": "Authorization",
- "value": "Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA==",
- "type": "text"
- },
- {
- "key": "X-ONAP-PartnerName",
- "value": "NBI",
- "type": "text"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": " {\n \t\"requestDetails\": {\n \t\t\"modelInfo\": {\n \t\t\t\"modelType\": \"network\"\n \t\t},\n \t\t\"requestInfo\": {\n \t\t\t\"source\": \"VID\",\n \t\t\t\"requestorId\": \"demo\"\n \t\t},\n \t\t\"requestParameters\": {\n \t\t\t\"testApi\": \"VNF_API\"\n \t\t},\n \"cloudConfiguration\": {\n \"cloudOwner\": \"{{cloud_owner_name}}\",\n \"lcpCloudRegionId\": \"{{onap_cloud_region_id}}\",\n \"tenantId\": \"{{tenant_id}}\"\n }\n\t}\n}\n\n"
- },
- "url": {
- "raw": "{{url-so}}/onap/so/infra/serviceInstantiation/v7/serviceInstances/{{auto_service_instance_id}}/networks/{{auto_virtual_link_instance_id}}",
- "host": [
- "{{url-so}}"
- ],
- "path": [
- "onap",
- "so",
- "infra",
- "serviceInstantiation",
- "v7",
- "serviceInstances",
- "{{auto_service_instance_id}}",
- "networks",
- "{{auto_virtual_link_instance_id}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "SO orchestrationRequests after 10s",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "68a5392d-2904-416b-9cf0-a89c971aa341",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "tests[\"requestState is : \"+jsonData.request.requestStatus.requestState] = jsonData.request.requestStatus.requestState === \"COMPLETE\";",
- "",
- "tests[\"statusMessage is : \"+jsonData.request.requestStatus.statusMessage] = jsonData.request.requestStatus.statusMessage === \"Vnf has been deleted successfully.\";"
- ],
- "type": "text/javascript"
- }
- },
- {
- "listen": "prerequest",
- "script": {
- "id": "0f9f067e-d8d4-4b69-a7eb-71f1e73b8196",
- "exec": [
- " var date = new Date();",
- " var curDate = null;",
- " do {",
- " curDate = new Date();",
- " }",
- " while (curDate - date < 10000);"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Authorization",
- "value": "Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA=="
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-so}}/onap/so/infra/orchestrationRequests/v7/{{auto_so_request_id}}",
- "host": [
- "{{url-so}}"
- ],
- "path": [
- "onap",
- "so",
- "infra",
- "orchestrationRequests",
- "v7",
- "{{auto_so_request_id}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "check vnf instance in inventory via AAI request",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "851c2dff-10fb-4f6a-a346-4bab48868d3a",
- "exec": [
- "",
- "var jsonData = pm.response.json();",
- "",
- "var vnf_instance_found = false;",
- "",
- "if (pm.response.code === 200){",
- "for (var i = 0; i < jsonData[\"generic-vnf\"].length; i++) { ",
- " if (jsonData[\"generic-vnf\"][i][\"vnf-name\"] === pm.environment.get(\"vnf_instance_name\")) {",
- " vnf_instance_found = true;",
- " }",
- "}",
- "}",
- "tests[\"VNF Instance : \"+ pm.environment.get(\"vnf_instance_name\") +\" no longer exists in AAI inventory\"] = vnf_instance_found === false;"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/network/generic-vnfs",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "network",
- "generic-vnfs"
- ]
- }
- },
- "response": []
- },
- {
- "name": "Delete service Instance via ServiceOrder API",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "764a92a8-efb5-48f8-999a-2cf34d9364b2",
- "exec": [
- "pm.test(\"Status code is 201\", function () {",
- " pm.response.to.have.status(201);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "pm.environment.set(\"auto_service_order_id\", jsonData.id);",
- "",
- "",
- "",
- ""
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\n \"externalId\": \"{{externalId}}\",\n \"priority\": \"1\",\n \"description\": \"{{service}} ordering on generic customer via Postman\",\n \"category\": \"Consumer\",\n \"requestedStartDate\": \"2018-04-26T08:33:37.299Z\",\n \"requestedCompletionDate\": \"2018-04-26T08:33:37.299Z\",\n \"relatedParty\": [\n {\n \"id\": \"{{customer_name}}\",\n \"role\": \"ONAPcustomer\",\n \"name\": \"{{customer_name}}\"\n }\n ],\n \"orderItem\": [\n {\n \"id\": \"1\",\n \"action\": \"delete\",\n \"service\": {\n \"id\": \"{{auto_service_instance_id}}\",\n \"serviceState\": \"active\",\n \"serviceSpecification\": {\n \"id\": \"{{auto_service_id}}\"\n }\n }\n }\n ]\n}"
- },
- "url": {
- "raw": "{{url-nbi}}/nbi/api/v4/serviceOrder",
- "host": [
- "{{url-nbi}}"
- ],
- "path": [
- "nbi",
- "api",
- "v4",
- "serviceOrder"
- ]
- }
- },
- "response": []
- },
- {
- "name": "GET previous serviceOrder after 10s",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "de2c349d-b58d-42ae-8454-3afc1084cef1",
- "exec": [
- "pm.test(\"Status code is 200\", function () {",
- " pm.response.to.have.status(200);",
- "});",
- "",
- "var jsonData = pm.response.json();",
- "",
- "tests[pm.environment.get(\"externalId\")+\" exists\"] = jsonData.externalId === pm.environment.get(\"externalId\");",
- "tests[\"service_order_id exists\"] = jsonData.id === pm.environment.get(\"auto_service_order_id\");",
- "tests[\"service Order state is : \"+jsonData.state] = jsonData.state === \"completed\";",
- "tests[\"Service Instance state is : \"+jsonData.orderItem[0].service.serviceState] = jsonData.orderItem[0].service.serviceState === \"active\";",
- "",
- "",
- "if(jsonData.state == \"completed\"){",
- " pm.environment.set(\"auto_service_instance_id\", \"\"+jsonData.orderItem[0].service.id+\"\");",
- "}",
- "if (jsonData.orderMessage !== null){",
- " tests[\"order message is : \" +jsonData.orderMessage[0].messageInformation] = jsonData.orderMessage[0].messageInformation === \"\";",
- " pm.environment.set(\"auto_service_messageInformation\", \"\"+jsonData.orderMessage[0].messageInformation+\"\");",
- " }",
- "",
- ""
- ],
- "type": "text/javascript"
- }
- },
- {
- "listen": "prerequest",
- "script": {
- "id": "b9cdde4f-bf25-4922-a8c0-b378bb0b9535",
- "exec": [
- " var date = new Date();",
- " var curDate = null;",
- " do {",
- " curDate = new Date();",
- " }",
- " while (curDate - date < 10000);"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-nbi}}/nbi/api/v4/serviceOrder/{{auto_service_order_id}}",
- "host": [
- "{{url-nbi}}"
- ],
- "path": [
- "nbi",
- "api",
- "v4",
- "serviceOrder",
- "{{auto_service_order_id}}"
- ]
- }
- },
- "response": []
- },
- {
- "name": "GET all service instance via ServiceInventory API",
- "event": [
- {
- "listen": "test",
- "script": {
- "id": "c7353dda-5afc-4c1c-a9d2-06036a9c984c",
- "exec": [
- "var jsonData = pm.response.json();",
- "",
- "var service_instance_found = false;",
- "",
- "if (pm.response.code === 200){",
- "for (var i = 0; i < jsonData.length; i++) { ",
- " if (jsonData[i].name === pm.environment.get(\"service_instance_name\")) {",
- " service_instance_found = true;",
- " }",
- "}",
- "}",
- "tests[pm.environment.get(\"service_instance_name\")+\" no longer in list\"] = service_instance_found === false;"
- ],
- "type": "text/javascript"
- }
- }
- ],
- "request": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-nbi}}/nbi/api/v4/service?relatedParty.id={{customer_name}}",
- "host": [
- "{{url-nbi}}"
- ],
- "path": [
- "nbi",
- "api",
- "v4",
- "service"
- ],
- "query": [
- {
- "key": "relatedParty.id",
- "value": "{{customer_name}}"
- }
- ]
- }
- },
- "response": []
- },
- {
- "name": "DELETE Virtual Link",
- "request": {
- "method": "DELETE",
- "header": [
- {
- "key": "Accept",
- "value": "application/json"
- },
- {
- "key": "Content-Type",
- "value": "application/json"
- },
- {
- "key": "X-FromAppId",
- "value": "AAI"
- },
- {
- "key": "X-TransactionId",
- "value": "get_aai_subscr"
- },
- {
- "key": "Authorization",
- "value": "Basic QUFJOkFBSQ=="
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "{{url-aai}}/aai/v16/network/l3-networks/l3-network/{{auto_virtual_link_instance_id}}?resource-version=1564496333018",
- "host": [
- "{{url-aai}}"
- ],
- "path": [
- "aai",
- "v16",
- "network",
- "l3-networks",
- "l3-network",
- "{{auto_virtual_link_instance_id}}"
- ],
- "query": [
- {
- "key": "resource-version",
- "value": "1564496333018"
- }
- ]
- }
- },
- "response": []
- }
- ]
-} \ No newline at end of file
diff --git a/test/postman/12_NS_Instance.postman_collection.json b/test/postman/12_NS_Instance.postman_collection.json
deleted file mode 100644
index 78a1798b8..000000000
--- a/test/postman/12_NS_Instance.postman_collection.json
+++ /dev/null
@@ -1,234 +0,0 @@
-{
- "info": {
- "_postman_id": "305afb68-6632-49a7-8a6d-9d9d57699f6e",
- "name": "Ns_Instance",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "ns_instance",
- "item": [
- {
- "name": "ns_create",
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "content-type",
- "value": " application/json",
- "type": "text"
- },
- {
- "key": "accept",
- "value": " application/json",
- "type": "text"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\r\n \"context\": {\r\n \"globalCustomerId\": \"global-customer-id-test1\",\r\n \"serviceType\": \"service-type-test1\"\r\n },\r\n \"csarId\": \"d5d678dc-80ef-461e-8630-d105f43b0a18\",\r\n \"nsName\": \"ns_vsn\",\r\n \"description\": \"description\"\r\n}"
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/nslcm/v1/ns",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "nslcm",
- "v1",
- "ns"
- ]
- }
- },
- "response": []
- },
- {
- "name": "ns_inst",
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "content-type",
- "value": " application/json",
- "type": "text"
- },
- {
- "key": "accept",
- "value": " application/json",
- "type": "text"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\r\n \"additionalParamForNs\": {\r\n \"sdnControllerId\": \"2\"\r\n },\r\n \"locationConstraints\": [{\r\n \"vnfProfileId\": \"45711f40-3f43-415b-bb45-46e5c6940735\",\r\n \"locationConstraints\": {\r\n \"vimId\": \"CPE-DC_RegionOne\"\r\n }\r\n }]\r\n}\r\n"
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/nslcm/v1/ns/f0b4c09f-c653-438a-b091-5218b0f806ec/instantiate",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "nslcm",
- "v1",
- "ns",
- "f0b4c09f-c653-438a-b091-5218b0f806ec",
- "instantiate"
- ]
- }
- },
- "response": []
- },
- {
- "name": "ns_terminate",
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "content-type",
- "value": " application/json",
- "type": "text"
- },
- {
- "key": "accept",
- "value": " application/json",
- "type": "text"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\r\n \"gracefulTerminationTimeout\": 600, \r\n \"terminationType\": \"FORCEFUL\"\r\n}"
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/nslcm/v1/ns/f0b4c09f-c653-438a-b091-5218b0f806ec/terminate",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "nslcm",
- "v1",
- "ns",
- "f0b4c09f-c653-438a-b091-5218b0f806ec",
- "terminate"
- ]
- },
- "description": "generate:\"jobId\": \"NS-terminate_ns-f0b4c09f-c653-438a-b091-5218b0f806ec-6f12ca34-f7a2-11e8-8c7e-02bf457e51f9\"\n\ninstid:f0b4c09f-c653-438a-b091-5218b0f806ec"
- },
- "response": []
- },
- {
- "name": "ns_delete",
- "request": {
- "method": "DELETE",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/nslcm/v1/ns/f0b4c09f-c653-438a-b091-5218b0f806ec",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "nslcm",
- "v1",
- "ns",
- "f0b4c09f-c653-438a-b091-5218b0f806ec"
- ]
- },
- "description": "ns/instid:f0b4c09f-c653-438a-b091-5218b0f806ec\nInstid after ns, fill in according to the actual situation"
- },
- "response": []
- },
- {
- "name": "ns_heal",
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "content-type",
- "value": " application/json",
- "type": "text"
- },
- {
- "key": "accept",
- "value": " application/json",
- "type": "text"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": "{\r\n \"vnfInstanceId\": \"044b705c-e44d-4733-ab64-987f46d9b567\",\r\n \"cause\": \"restartvm\",\r\n \"additionalParams\": {\r\n \"action\": \"restartvm\",\r\n \"actionvminfo\": {\r\n \"vmid\": \"1623cd25-ae6f-4880-8132-15914367e47b\",\r\n \"vduid\": \"\",\r\n \"vmname\": \"1623cd25-ae6f-4880-8132-15914367e47b\"\r\n }\r\n }"
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/nslcm/v1/ns/5443e9f8-7993-479d-8d7a-34e422eabc8f/heal",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "nslcm",
- "v1",
- "ns",
- "5443e9f8-7993-479d-8d7a-34e422eabc8f",
- "heal"
- ]
- }
- },
- "response": []
- }
- ],
- "event": [
- {
- "listen": "prerequest",
- "script": {
- "id": "debc308f-9109-41b1-adf3-9d288b2061e6",
- "type": "text/javascript",
- "exec": [
- ""
- ]
- }
- },
- {
- "listen": "test",
- "script": {
- "id": "0a37542a-0a02-4e84-970a-f8f32b0bf713",
- "type": "text/javascript",
- "exec": [
- ""
- ]
- }
- }
- ]
- }
- ]
-}
diff --git a/test/postman/13_Ns_package_onboard.postman_collection.json b/test/postman/13_Ns_package_onboard.postman_collection.json
deleted file mode 100644
index 0182c132c..000000000
--- a/test/postman/13_Ns_package_onboard.postman_collection.json
+++ /dev/null
@@ -1,223 +0,0 @@
-{
- "info": {
- "_postman_id": "ce6ead69-bd22-4d59-b711-d5acf1afd243",
- "name": "Ns_package_onboard",
- "description": "The VFC NS package onboard",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "ns",
- "item": [
- {
- "name": "msb_download",
- "request": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/nsd/v1/ns_descriptors/03c8e541-77a5-4594-b6a1-f955f6460dca/nsd_content",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "nsd",
- "v1",
- "ns_descriptors",
- "03c8e541-77a5-4594-b6a1-f955f6460dca",
- "nsd_content"
- ]
- },
- "description": "nsdinfo:03c8e541-77a5-4594-b6a1-f955f6460dca\nns_descriptors 后面跟nsdinfoid,根据实际情况填写"
- },
- "response": []
- },
- {
- "name": "msb_get",
- "request": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/nsd/v1/ns_descriptors/8acc4e46-08c1-46f3-a594-2910490930de",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "nsd",
- "v1",
- "ns_descriptors",
- "8acc4e46-08c1-46f3-a594-2910490930de"
- ]
- },
- "description": "pick up information:\n[\n {\n \"id\": \"609d8ba3-9a94-4cc7-b67d-2ece3ac6e185\",\n \"nsdId\": \"ffdddc5d-a44b-45ae-8fc3-e6551cce350f\",\n \"nsdName\": \"vcpe\",\n \"nsdVersion\": \"1.0.0\",\n \"nsdDesigner\": \"ONAP\",\n \"nsdInvariantId\": \"c3887fb3-d1b2-477f-86c4-fd641ee9a7cf\",\n \"vnfPkgIds\": [\n \"386d6fb2-e1d9-4152-9bd6-606e33594ade\",\n \"511fb4ff-6a10-4699-864f-f3e7ad9b34b3\",\n \"39eb297c-4988-4902-9c37-55e4fd5f1c38\",\n \"f660da08-cc7c-4a1f-8196-05d4db415e1d\",\n \"1109773b-0cab-4f11-a11e-0cc22b1a29a3\"\n ],\n \"pnfdInfoIds\": [],\n \"nestedNsdInfoIds\": [],\n \"nsdOnboardingState\": \"ONBOARDED\",\n \"onboardingFailureDetails\": null,\n \"nsdOperationalState\": \"ENABLED\",\n \"nsdUsageState\": \"NOT_IN_USE\",\n \"userDefinedData\": {},\n \"_links\": null\n }\n]"
- },
- "response": []
- },
- {
- "name": "msb_get_one",
- "request": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/nsd/v1/ns_descriptors/8acc4e46-08c1-46f3-a594-2910490930de",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "nsd",
- "v1",
- "ns_descriptors",
- "8acc4e46-08c1-46f3-a594-2910490930de"
- ]
- },
- "description": "nsdinfoid:8acc4e46-08c1-46f3-a594-2910490930de"
- },
- "response": []
- },
- {
- "name": "msb_delete",
- "request": {
- "method": "DELETE",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/nsd/v1/ns_descriptors/03c8e541-77a5-4594-b6a1-f955f6460dca",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "nsd",
- "v1",
- "ns_descriptors",
- "03c8e541-77a5-4594-b6a1-f955f6460dca"
- ]
- },
- "description": "nsinstid:03c8e541-77a5-4594-b6a1-f955f6460dca"
- },
- "response": []
- },
- {
- "name": "msb_create",
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "content-type",
- "value": " application/json",
- "type": "text"
- },
- {
- "key": "accept",
- "value": " application/json",
- "type": "text"
- }
- ],
- "body": {
- "mode": "formdata",
- "formdata": [
- {
- "key": "userDefinedData",
- "value": " \"key1\": \"value1\"",
- "type": "text"
- }
- ]
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/nsd/v1/ns_descriptors",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "nsd",
- "v1",
- "ns_descriptors"
- ]
- }
- },
- "response": []
- },
- {
- "name": "msb_upload",
- "request": {
- "method": "PUT",
- "header": [],
- "body": {
- "mode": "formdata",
- "formdata": [
- {
- "key": "",
- "type": "file",
- "src": ""
- }
- ]
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/nsd/v1/ns_descriptors/79ca81ec-10e0-44e4-bc85-ba968f345711/nsd_content",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "nsd",
- "v1",
- "ns_descriptors",
- "79ca81ec-10e0-44e4-bc85-ba968f345711",
- "nsd_content"
- ]
- }
- },
- "response": []
- }
- ]
- }
- ]
-}
diff --git a/test/postman/14_Vnf_package_onboard.postman_collection.json b/test/postman/14_Vnf_package_onboard.postman_collection.json
deleted file mode 100644
index ef7fe7395..000000000
--- a/test/postman/14_Vnf_package_onboard.postman_collection.json
+++ /dev/null
@@ -1,185 +0,0 @@
-{
- "info": {
- "_postman_id": "ce6ead69-bd22-4d59-b711-d5acf1afd243",
- "name": "Vnf_package_onboard",
- "description": "The VFC VNF package onboard",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "VNF",
- "item": [
- {
- "name": "msb_create",
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "content-type",
- "value": " application/json ",
- "type": "text"
- },
- {
- "key": "accept",
- "value": " application/json",
- "type": "text"
- }
- ],
- "body": {
- "mode": "formdata",
- "formdata": [
- {
- "key": "userDefinedData",
- "value": " \"key2\": \"value2\"",
- "type": "text"
- }
- ]
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/vnfpkgm/v1/vnf_packages",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "vnfpkgm",
- "v1",
- "vnf_packages"
- ]
- }
- },
- "response": []
- },
- {
- "name": "msb_get",
- "request": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/vnfpkgm/v1/vnf_packages",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "vnfpkgm",
- "v1",
- "vnf_packages"
- ]
- },
- "description": "pick up information:\n[\n {\n \"id\": \"1109773b-0cab-4f11-a11e-0cc22b1a29a3\",\n \"vnfdId\": \"b1bb0ce7-2222-4fa7-95ed-4840d70a1101\",\n \"vnfProductName\": \"vcpe_vbng\",\n \"vnfSoftwareVersion\": \"1.0.0\",\n \"vnfdVersion\": \"1.0\",\n \"softwareImages\": null,\n \"additionalArtifacts\": null,\n \"onboardingState\": \"ONBOARDED\",\n \"operationalState\": \"ENABLED\",\n \"usageState\": \"NOT_IN_USE\",\n \"_links\": null\n },\n {\n \"id\": \"386d6fb2-e1d9-4152-9bd6-606e33594ade\",\n \"vnfdId\": \"b1bb0ce7-2222-4fa7-95ed-4840d70a1100\",\n \"vnfProductName\": \"vcpe_infra\",\n \"vnfSoftwareVersion\": \"1.0.0\",\n \"vnfdVersion\": \"1.0\",\n \"softwareImages\": null,\n \"additionalArtifacts\": null,\n \"onboardingState\": \"ONBOARDED\",\n \"operationalState\": \"ENABLED\",\n \"usageState\": \"NOT_IN_USE\",\n \"_links\": null\n },\n {\n \"id\": \"39eb297c-4988-4902-9c37-55e4fd5f1c38\",\n \"vnfdId\": \"0408f076-e6c0-4c82-9940-272fddbb82de\",\n \"vnfProductName\": \"vcpe_vgmux\",\n \"vnfSoftwareVersion\": \"1.0.0\",\n \"vnfdVersion\": \"1.0\",\n \"softwareImages\": null,\n \"additionalArtifacts\": null,\n \"onboardingState\": \"ONBOARDED\",\n \"operationalState\": \"ENABLED\",\n \"usageState\": \"NOT_IN_USE\",\n \"_links\": null\n },\n {\n \"id\": \"511fb4ff-6a10-4699-864f-f3e7ad9b34b3\",\n \"vnfdId\": \"3fca3543-07f5-492f-812c-ed462e4f94f4\",\n \"vnfProductName\": \"vcpe_vgw\",\n \"vnfSoftwareVersion\": \"1.0.0\",\n \"vnfdVersion\": \"1.0\",\n \"softwareImages\": null,\n \"additionalArtifacts\": null,\n \"onboardingState\": \"ONBOARDED\",\n \"operationalState\": \"ENABLED\",\n \"usageState\": \"NOT_IN_USE\",\n \"_links\": null\n },\n {\n \"id\": \"e924a06e-2550-4700-9e72-3a2db75925ac\",\n \"vnfdId\": null,\n \"vnfProductName\": null,\n \"vnfSoftwareVersion\": null,\n \"vnfdVersion\": null,\n \"softwareImages\": null,\n \"additionalArtifacts\": null,\n \"onboardingState\": \"CREATED\",\n \"operationalState\": \"DISABLED\",\n \"usageState\": 
\"NOT_IN_USE\",\n \"userDefinedData\": {},\n \"_links\": null\n },\n {\n \"id\": \"f660da08-cc7c-4a1f-8196-05d4db415e1d\",\n \"vnfdId\": \"b1bb0ce7-2222-4fa7-95ed-4840d70a1102\",\n \"vnfProductName\": \"vcpe_vbrgemu\",\n \"vnfSoftwareVersion\": \"1.0.0\",\n \"vnfdVersion\": \"1.0\",\n \"softwareImages\": null,\n \"additionalArtifacts\": null,\n \"onboardingState\": \"ONBOARDED\",\n \"operationalState\": \"ENABLED\",\n \"usageState\": \"NOT_IN_USE\",\n \"_links\": null\n }\n]"
- },
- "response": []
- },
- {
- "name": "msb_get_one",
- "request": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/nsd/v1/ns_descriptors/03c8e541-77a5-4594-b6a1-f955f6460dca",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "nsd",
- "v1",
- "ns_descriptors",
- "03c8e541-77a5-4594-b6a1-f955f6460dca"
- ]
- },
- "description": "Id:03c8e541-77a5-4594-b6a1-f955f6460dca\nAccording to obtaining different vnfinstid"
- },
- "response": []
- },
- {
- "name": "msb_delete",
- "request": {
- "method": "DELETE",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/vnfpkgm/v1/vnf_packages/396b889f-529f-426a-97ab-65b00720b308",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "vnfpkgm",
- "v1",
- "vnf_packages",
- "396b889f-529f-426a-97ab-65b00720b308"
- ]
- },
- "description": "Different deletion, different filling of vnfinstid"
- },
- "response": []
- },
- {
- "name": "msb_download",
- "protocolProfileBehavior": {
- "disableBodyPruning": true
- },
- "request": {
- "method": "GET",
- "header": [],
- "body": {},
- "url": {
- "raw": "http://172.30.3.104:30280/api/vnfpkgm/v1/vnf_packages/396b889f-529f-426a-97ab-65b00720b308/package_content",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "vnfpkgm",
- "v1",
- "vnf_packages",
- "396b889f-529f-426a-97ab-65b00720b308",
- "package_content"
- ]
- },
- "description": "msb_download:\nget The acquired data needs to be written to the CSAR file, and there is no write operation here; \ nthe vnf package tested here does not exist.\n"
- },
- "response": []
- }
- ]
- }
- ]
-} \ No newline at end of file
diff --git a/test/postman/15_Pnf_package_onboard.postman_collection.json b/test/postman/15_Pnf_package_onboard.postman_collection.json
deleted file mode 100644
index 798a780e4..000000000
--- a/test/postman/15_Pnf_package_onboard.postman_collection.json
+++ /dev/null
@@ -1,244 +0,0 @@
-{
- "info": {
- "_postman_id": "ce6ead69-bd22-4d59-b711-d5acf1afd243",
- "name": "Pnf_package_onboard",
- "description": "The VFC pnf package onboard",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "PNF",
- "item": [
- {
- "name": "msb_create",
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "content-type",
- "value": " application/json",
- "type": "text"
- },
- {
- "key": "accept",
- "value": " application/json",
- "type": "text"
- }
- ],
- "body": {
- "mode": "formdata",
- "formdata": [
- {
- "key": "userDefinedData",
- "value": " \"key3\": \"value3\"",
- "type": "text"
- }
- ]
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/nsd/v1/pnf_descriptors",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "nsd",
- "v1",
- "pnf_descriptors"
- ]
- },
- "description": "get data:\n{\n \"id\": \"844d254e-5dc8-4a28-82a8-694b6a60bee3\",\n \"pnfdOnboardingState\": \"CREATED\",\n \"pnfdUsageState\": \"NOT_IN_USE\",\n \"userDefinedData\": {},\n \"_links\": null\n}"
- },
- "response": []
- },
- {
- "name": "msb_get_one",
- "request": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/nsd/v1/pnf_descriptors/5478150b-493c-4a66-a808-05387ebab874",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "nsd",
- "v1",
- "pnf_descriptors",
- "5478150b-493c-4a66-a808-05387ebab874"
- ]
- }
- },
- "response": []
- },
- {
- "name": "msb_get",
- "request": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/nsd/v1/pnf_descriptors",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "nsd",
- "v1",
- "pnf_descriptors"
- ]
- },
- "description": "get data:\n[\n {\n \"id\": \"844d254e-5dc8-4a28-82a8-694b6a60bee3\",\n \"pnfdId\": null,\n \"pnfdName\": null,\n \"pnfdVersion\": null,\n \"pnfdProvider\": null,\n \"pnfdInvariantId\": null,\n \"pnfdOnboardingState\": \"CREATED\",\n \"onboardingFailureDetails\": null,\n \"pnfdUsageState\": \"NOT_IN_USE\",\n \"userDefinedData\": {},\n \"_links\": null\n }\n]"
- },
- "response": []
- },
- {
- "name": "msb_delete",
- "request": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/nsd/v1/pnf_descriptors/03c39754-46e7-44bd-ad4a-9a6de771d8bd",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "nsd",
- "v1",
- "pnf_descriptors",
- "03c39754-46e7-44bd-ad4a-9a6de771d8bd"
- ]
- }
- },
- "response": []
- },
- {
- "name": "msb_upload",
- "request": {
- "method": "PUT",
- "header": [],
- "body": {
- "mode": "formdata",
- "formdata": [
- {
- "key": "",
- "type": "file",
- "src": ""
- }
- ]
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/vnfpkgm/v1/vnf_packages/38037a12-a0d4-4aa4-ac50-cd6b05ce0b24/package_content",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "vnfpkgm",
- "v1",
- "vnf_packages",
- "38037a12-a0d4-4aa4-ac50-cd6b05ce0b24",
- "package_content"
- ]
- },
- "description": "Invalid vnf package, need to change"
- },
- "response": []
- },
- {
- "name": "msb_download",
- "request": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/vnfpkgm/v1/vnf_packages/396b889f-529f-426a-97ab-65b00720b308/package_content",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "vnfpkgm",
- "v1",
- "vnf_packages",
- "396b889f-529f-426a-97ab-65b00720b308",
- "package_content"
- ]
- },
- "description": "The acquired data needs to be written to the CSAR file. Not added here. Get data only"
- },
- "response": []
- }
- ],
- "event": [
- {
- "listen": "prerequest",
- "script": {
- "id": "7ed5956a-ebaa-4de8-b2d2-a18027de85f7",
- "type": "text/javascript",
- "exec": [
- ""
- ]
- }
- },
- {
- "listen": "test",
- "script": {
- "id": "d9830571-314d-49ee-a2a8-8be045a51469",
- "type": "text/javascript",
- "exec": [
- ""
- ]
- }
- }
- ]
- }
- ]
-} \ No newline at end of file
diff --git a/test/postman/16_Catalog_package_onboard.postman_collection.json b/test/postman/16_Catalog_package_onboard.postman_collection.json
deleted file mode 100644
index 70e7ed9e6..000000000
--- a/test/postman/16_Catalog_package_onboard.postman_collection.json
+++ /dev/null
@@ -1,163 +0,0 @@
-{
- "info": {
- "_postman_id": "ce6ead69-bd22-4d59-b711-d5acf1afd243",
- "name": "Catalog_package_onboard",
- "description": "The VFC CATALOG package onboard",
- "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
- },
- "item": [
- {
- "name": "catalog",
- "item": [
- {
- "name": "nspackage_rc_get",
- "request": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/catalog/v1/nspackages",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "catalog",
- "v1",
- "nspackages"
- ]
- }
- },
- "response": []
- },
- {
- "name": "nspackage_rc_post",
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "content-type",
- "value": " application/json ",
- "type": "text"
- },
- {
- "key": "accept",
- "value": " application/json",
- "type": "text"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/catalog/v1/nspackages/4a43c1d7-29e9-4b67-968d-aa62c4562286",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "catalog",
- "v1",
- "nspackages",
- "4a43c1d7-29e9-4b67-968d-aa62c4562286"
- ]
- },
- "description": "Data is csarid. This test failed due to environmental problems. Casrid is just looking for it. Need to be replaced during actual test"
- },
- "response": []
- },
- {
- "name": "nfpackages_rc_get",
- "request": {
- "method": "GET",
- "header": [],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/catalog/v1/api/catalog/v1/vnfpackages",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "catalog",
- "v1",
- "api",
- "catalog",
- "v1",
- "vnfpackages"
- ]
- }
- },
- "response": []
- },
- {
- "name": "nfpackages_rc_post",
- "request": {
- "method": "POST",
- "header": [
- {
- "key": "content-type",
- "value": " application/json ",
- "type": "text"
- },
- {
- "key": "accept",
- "value": " application/json",
- "type": "text"
- }
- ],
- "body": {
- "mode": "raw",
- "raw": ""
- },
- "url": {
- "raw": "http://172.30.3.104:30280/api/catalog/v1/api/catalog/v1/vnfpackages/4a43c1d7-29e9-4b67-968d-aa62c4562286",
- "protocol": "http",
- "host": [
- "172",
- "30",
- "3",
- "104"
- ],
- "port": "30280",
- "path": [
- "api",
- "catalog",
- "v1",
- "api",
- "catalog",
- "v1",
- "vnfpackages",
- "4a43c1d7-29e9-4b67-968d-aa62c4562286"
- ]
- },
- "description": "The environment failed the test, and casrid was replaced according to the actual situation. Data is csarid"
- },
- "response": []
- }
- ]
- }
- ]
-} \ No newline at end of file
diff --git a/test/postman/globals.postman_globals.json b/test/postman/globals.postman_globals.json
deleted file mode 100644
index adf368f48..000000000
--- a/test/postman/globals.postman_globals.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "id": "146e52f0-fd32-4814-8e58-8a3c0f4d5eb7",
- "values": [],
- "name": "My Workspace Globals",
- "_postman_variable_scope": "globals",
- "_postman_exported_at": "2019-10-22T09:09:26.620Z",
- "_postman_exported_using": "Postman/6.7.5"
-} \ No newline at end of file
diff --git a/test/postman/images/collection-detail-test.png b/test/postman/images/collection-detail-test.png
deleted file mode 100755
index b64cd7ffa..000000000
--- a/test/postman/images/collection-detail-test.png
+++ /dev/null
Binary files differ
diff --git a/test/postman/images/collection-detail.png b/test/postman/images/collection-detail.png
deleted file mode 100755
index 59b7db266..000000000
--- a/test/postman/images/collection-detail.png
+++ /dev/null
Binary files differ
diff --git a/test/postman/images/collections.png b/test/postman/images/collections.png
deleted file mode 100755
index 4469cdb9d..000000000
--- a/test/postman/images/collections.png
+++ /dev/null
Binary files differ
diff --git a/test/postman/images/globals.png b/test/postman/images/globals.png
deleted file mode 100755
index f3ae23978..000000000
--- a/test/postman/images/globals.png
+++ /dev/null
Binary files differ
diff --git a/test/postman/images/import.png b/test/postman/images/import.png
deleted file mode 100755
index 122c867ce..000000000
--- a/test/postman/images/import.png
+++ /dev/null
Binary files differ
diff --git a/test/postman/images/run.png b/test/postman/images/run.png
deleted file mode 100755
index 35c4d5b1f..000000000
--- a/test/postman/images/run.png
+++ /dev/null
Binary files differ
diff --git a/test/postman/images/zipfile.png b/test/postman/images/zipfile.png
deleted file mode 100755
index a7cfa89ea..000000000
--- a/test/postman/images/zipfile.png
+++ /dev/null
Binary files differ
diff --git a/test/postman/integration_test_urls.postman_environment.json b/test/postman/integration_test_urls.postman_environment.json
deleted file mode 100644
index 1a134cb2c..000000000
--- a/test/postman/integration_test_urls.postman_environment.json
+++ /dev/null
@@ -1,410 +0,0 @@
-{
- "id": "0ab0cae5-3627-447a-a322-aca5a6e764b0",
- "name": "integration_test_urls",
- "values": [
- {
- "key": "url-sdc",
- "value": "http://sdc.api.fe.simpledemo.onap.org:30205",
- "enabled": true
- },
- {
- "key": "url-sdc2",
- "value": "http://sdc.api.fe.simpledemo.onap.org:30206",
- "enabled": true
- },
- {
- "key": "url-vid",
- "value": "http://vid.api.simpledemo.onap.org:30238",
- "enabled": true
- },
- {
- "key": "url-aai",
- "value": "https://aai.api.sparky.simpledemo.onap.org:30233",
- "enabled": true
- },
- {
- "key": "url-sdnc",
- "value": "https://sdnc.api.simpledemo.onap.org:30267",
- "enabled": true
- },
- {
- "key": "url-nbi",
- "value": "http://nbi.api.simpledemo.onap.org:30274/nbi/api/v4",
- "enabled": true
- },
- {
- "key": "url-so",
- "value": "http://so.api.simpledemo.onap.org:30277",
- "enabled": true
- },
- {
- "key": "vendor_name",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "vsp_name",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "vf_name",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "service",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "virtual_link_type",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "owning_entity_name",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "customer_name",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "complex_name",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "cloud_owner_name",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "onap_cloud_region_id",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "tenant_id",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "service_instance_name",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "vnf_instance_name",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "vfmodule_instance_name",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "availability_zone_name",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "hypervisor_type",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "platform",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "project",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "lineofbusiness",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "tenant_name",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "externalId",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "openstack_region_id",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "cloud_type",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "cloud_region_version",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "keystone_url",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "user_name",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "keystone_password",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "random_uuid",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "virtual_link_instance_name",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "service_name_macro",
- "value": "",
- "description": "",
- "enabled": true
- },
- {
- "key": "auto_vendor_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vendor_version_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vsp_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vsp_version_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vsp_version_name",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vf_invariant_uuid",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vf_unique_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vf_uuid",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vf_new_unique_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_service_invariant_uuid",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_service_unique_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_service_uuid",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vf_customization_uuid",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vf_name_for_model",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_virtual_link_invariant_uuid",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_virtual_link_uuid",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_virtual_link_unique_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_virtual_link_customization_uuid",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_virtual_link_name_for_model",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_service_new_unique_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vf_module_model_customization_name",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vf_module_model_customization_uuid",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vf_module_model_invariant_uuid",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vf_module_model_name",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vf_module_model_uuid",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_virtual_link_model_customization_uuid",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_distribution_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_customer_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_service_invariantUUID",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_service_resource_version",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_owning_entity_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_service_instance_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_so_request_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_virtual_link_instance_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vnf_instance_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "auto_vfmodule_instance_id",
- "value": "",
- "enabled": true
- },
- {
- "key": "owning_entity_id",
- "value": "",
- "description": "",
- "enabled": true
- }
- ],
- "_postman_variable_scope": "environment",
- "_postman_exported_at": "2019-10-22T15:18:32.148Z",
- "_postman_exported_using": "Postman/6.7.5"
-} \ No newline at end of file
diff --git a/test/s3p/collector/get_resource_stats.py b/test/s3p/collector/get_resource_stats.py
index 8ad22c575..db377a766 100755
--- a/test/s3p/collector/get_resource_stats.py
+++ b/test/s3p/collector/get_resource_stats.py
@@ -6,7 +6,6 @@ import datetime
import collections
import re
import tzlocal
-from decimal import Decimal
sys.path.append('../util')
import docker_util
@@ -20,7 +19,7 @@ AAI2_IP = "10.0.1.2"
SO_IP = "10.0.5.1"
SDNC_IP = "10.0.7.1"
-def aai1():
+def aai1():
containers = docker_util.get_container_list(AAI1_IP)
run(AAI1_NAME, AAI1_IP, containers)
@@ -39,7 +38,7 @@ def sdnc():
def run(component, ip, containers):
cmd = ["ssh", "-i", "onap_dev"]
cmd.append("ubuntu@" + ip)
- cmd.append("sudo docker stats --no-stream")
+ cmd.append("sudo docker stats --no-stream")
for c in containers:
cmd.append(c)
ssh = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@@ -80,7 +79,7 @@ def get_memory_number(s):
return f
file = open("resource.log", "w+")
-while True:
+while True:
so()
sdnc()
aai1()
diff --git a/test/s3p/generator/locustfile.py b/test/s3p/generator/locustfile.py
index 63031cd78..54fee1d3e 100644
--- a/test/s3p/generator/locustfile.py
+++ b/test/s3p/generator/locustfile.py
@@ -1,16 +1,15 @@
+import collections
+import datetime
+import fcntl
+import json
+import os
import random
import string
import time
-import datetime
-import sys
-import collections
-import json
+from decimal import Decimal
+
import tzlocal
-import os
-import fcntl
-import logging
from locust import HttpLocust, TaskSet, task
-from decimal import Decimal
class UserBehavior(TaskSet):
@@ -26,7 +25,7 @@ class UserBehavior(TaskSet):
self.init()
def init(self):
- pass
+ pass
def myconverter(self, o):
if isinstance(o, datetime.datetime):
@@ -34,16 +33,16 @@ class UserBehavior(TaskSet):
@task(1)
def create_service(self):
- # Post a E2E service instantiation request to SO
+ # Post a E2E service instantiation request to SO
method = "POST"
- url = self.base
- service_instance_name = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))
- data = self.service_creation_body % service_instance_name
+ url = self.base
+ service_instance_name = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))
+ data = self.service_creation_body % service_instance_name
- t1 = datetime.datetime.now(tzlocal.get_localzone())
+ t1 = datetime.datetime.now(tzlocal.get_localzone())
response = self.client.request(method, url, headers=self.headers, data=data)
- t2 = datetime.datetime.now(tzlocal.get_localzone())
- delta = t2 - t1
+ t2 = datetime.datetime.now(tzlocal.get_localzone())
+ delta = t2 - t1
data = collections.OrderedDict()
data['datetime'] = datetime.datetime.now(tzlocal.get_localzone()).strftime("%Y-%m-%dT%H:%M:%S%Z")
data['method'] = method
@@ -55,21 +54,21 @@ class UserBehavior(TaskSet):
self.transaction_file.flush()
os.fsync(self.transaction_file)
fcntl.flock(self.transaction_file, fcntl.LOCK_UN)
- serviceId = response.json()['service']['serviceId']
- operationId = response.json()['service']['operationId']
+ serviceId = response.json()['service']['serviceId']
+ operationId = response.json()['service']['operationId']
- # Get the request status
- method = "GET"
- url = self.base + "/" + serviceId + "/operations/" + operationId
- url1 = "/ecomp/mso/infra/e2eServiceInstances/v3/{serviceId}/operations/{operationId}"
- count = 1
- while count < 50:
- tt1 = datetime.datetime.now()
- response = self.client.request(method, url, name=url1, headers=self.headers)
- tt2 = datetime.datetime.now()
+ # Get the request status
+ method = "GET"
+ url = self.base + "/" + serviceId + "/operations/" + operationId
+ url1 = "/ecomp/mso/infra/e2eServiceInstances/v3/{serviceId}/operations/{operationId}"
+ count = 1
+ while count < 50:
+ tt1 = datetime.datetime.now()
+ response = self.client.request(method, url, name=url1, headers=self.headers)
+ tt2 = datetime.datetime.now()
delta = tt2 - tt1
- result = response.json()['operationStatus']['result']
- progress = response.json()['operationStatus']['progress']
+ result = response.json()['operationStatus']['result']
+ progress = response.json()['operationStatus']['progress']
data = collections.OrderedDict()
data['datetime'] = datetime.datetime.now(tzlocal.get_localzone()).strftime("%Y-%m-%dT%H:%M:%S%Z")
data['method'] = method
@@ -84,18 +83,18 @@ class UserBehavior(TaskSet):
self.transaction_file.flush()
os.fsync(self.transaction_file)
fcntl.flock(self.transaction_file, fcntl.LOCK_UN)
- if result == "finished" or result == "error":
+ if result == "finished" or result == "error":
break
- else:
- time.sleep(1)
- count = count + 1
-
+ else:
+ time.sleep(1)
+ count = count + 1
+
if result == "finished":
result = "success"
else:
result = "failure"
- t3 = datetime.datetime.now(tzlocal.get_localzone())
- delta = t3 - t1
+ t3 = datetime.datetime.now(tzlocal.get_localzone())
+ delta = t3 - t1
data = collections.OrderedDict()
data['datetime'] = t1.strftime("%Y-%m-%dT%H:%M:%S%Z")
data['operation'] = "volte_create"
@@ -107,16 +106,16 @@ class UserBehavior(TaskSet):
os.fsync(self.operation_file)
fcntl.flock(self.operation_file, fcntl.LOCK_UN)
- self.delete_service(serviceId)
+ self.delete_service(serviceId)
def delete_service(self, serviceId):
- method = "DELETE"
- url = self.base + "/" + serviceId
- data = "{\"globalSubscriberId\":\"Demonstration\", \"serviceType\":\"vIMS\"}"
- t1 = datetime.datetime.now(tzlocal.get_localzone())
+ method = "DELETE"
+ url = self.base + "/" + serviceId
+ data = "{\"globalSubscriberId\":\"Demonstration\", \"serviceType\":\"vIMS\"}"
+ t1 = datetime.datetime.now(tzlocal.get_localzone())
response = self.client.request(method, url, name=self.base, headers=self.headers, data=data)
- t2 = datetime.datetime.now(tzlocal.get_localzone())
- delta = t2 - t1
+ t2 = datetime.datetime.now(tzlocal.get_localzone())
+ delta = t2 - t1
data = collections.OrderedDict()
data['datetime'] = datetime.datetime.now(tzlocal.get_localzone()).strftime("%Y-%m-%dT%H:%M:%S%Z")
data['method'] = method
@@ -128,20 +127,20 @@ class UserBehavior(TaskSet):
self.transaction_file.flush()
os.fsync(self.transaction_file)
fcntl.flock(self.transaction_file, fcntl.LOCK_UN)
- operationId = response.json()['operationId']
+ operationId = response.json()['operationId']
- # Get the request status
- method = "GET"
- url = self.base + "/" + serviceId + "/operations/" + operationId
- url1 = "/ecomp/mso/infra/e2eServiceInstances/v3/{serviceId}/operations/{operationId}"
- count = 1
- while count < 50:
- tt1 = datetime.datetime.now(tzlocal.get_localzone())
- response = self.client.request(method, url, name=url1, headers=self.headers)
- tt2 = datetime.datetime.now(tzlocal.get_localzone())
+ # Get the request status
+ method = "GET"
+ url = self.base + "/" + serviceId + "/operations/" + operationId
+ url1 = "/ecomp/mso/infra/e2eServiceInstances/v3/{serviceId}/operations/{operationId}"
+ count = 1
+ while count < 50:
+ tt1 = datetime.datetime.now(tzlocal.get_localzone())
+ response = self.client.request(method, url, name=url1, headers=self.headers)
+ tt2 = datetime.datetime.now(tzlocal.get_localzone())
delta = tt2 - tt1
- result = response.json()['operationStatus']['result']
- progress = response.json()['operationStatus']['progress']
+ result = response.json()['operationStatus']['result']
+ progress = response.json()['operationStatus']['progress']
data = collections.OrderedDict()
data['datetime'] = datetime.datetime.now(tzlocal.get_localzone()).strftime("%Y-%m-%dT%H:%M:%S%Z")
data['method'] = method
@@ -156,18 +155,18 @@ class UserBehavior(TaskSet):
self.transaction_file.flush()
os.fsync(self.transaction_file)
fcntl.flock(self.transaction_file, fcntl.LOCK_UN)
- if result == "finished" or result == "error":
- break
- else:
- time.sleep(1)
- count = count + 1
-
+ if result == "finished" or result == "error":
+ break
+ else:
+ time.sleep(1)
+ count = count + 1
+
if result == "finished":
result = "success"
else:
result = "failure"
- t3 = datetime.datetime.now(tzlocal.get_localzone())
- delta = t3 - t1
+ t3 = datetime.datetime.now(tzlocal.get_localzone())
+ delta = t3 - t1
data = collections.OrderedDict()
data['datetime'] = t1.strftime("%Y-%m-%dT%H:%M:%S%Z")
data['operation'] = "volte_delete"
diff --git a/test/s3p/util/docker_util.py b/test/s3p/util/docker_util.py
index a0e688199..021e48c98 100644
--- a/test/s3p/util/docker_util.py
+++ b/test/s3p/util/docker_util.py
@@ -22,7 +22,7 @@ def get_container_list(ip):
Args:
param1 (str): host ip
- Returns:
+ Returns:
list of containers in string
"""
@@ -35,7 +35,7 @@ def get_container_list(ip):
containers = []
if result == []:
error = ssh.stderr.readlines()
- print error
+ print(error)
else:
for line in result:
token = line.decode('ascii').strip()
@@ -47,12 +47,12 @@ def get_container_list(ip):
def get_container_volume_size(ip, container):
"""
Get container total volume usage
- Args:
+ Args:
param1 (str): host ip
param2 (str): container name
-
+
Returns:
- float number in GB if the container has volume(s), None otherwise
+ float number in GB if the container has volume(s), None otherwise
"""
cmd = ['ssh', '-i', 'onap_dev']
@@ -65,7 +65,7 @@ def get_container_volume_size(ip, container):
total = None
if result == []:
error = ssh.stderr.readlines()
- print error
+ print(error)
else:
data = json.loads(result[0])
for entry in data:
@@ -83,10 +83,10 @@ def get_container_volume_size(ip, container):
def get_volume_size(ip, volume):
"""
Get a volume size
- Args:
+ Args:
param1 (str): host ip
param2 (str): volume name
-
+
Returns:
float number in GB
"""
@@ -98,7 +98,7 @@ def get_volume_size(ip, volume):
p2 = subprocess.Popen(['grep', volume], stdin=p1.stdout,
stdout=subprocess.PIPE)
p1.stdout.close()
- (output, err) = p2.communicate()
+ (output, err) = p2.communicate() # pylint: disable=W0612
size = output.split()[2]
return convert_to_GB(size)
@@ -109,7 +109,7 @@ def convert_to_GB(s):
Args:
param1 (str): volume size with unit
- Returns:
+ Returns:
float number representing volume size in GB
"""
@@ -122,5 +122,3 @@ def convert_to_GB(s):
d = round(Decimal(float(re.sub('[^0-9\\.]', '', s))
/ 1000000.0), 1)
return d
-
-
diff --git a/test/security/check_certificates/MANIFEST.in b/test/security/check_certificates/MANIFEST.in
new file mode 100644
index 000000000..02c7aaf32
--- /dev/null
+++ b/test/security/check_certificates/MANIFEST.in
@@ -0,0 +1 @@
+include check_certificates/templates/*.j2
diff --git a/test/security/check_certificates/check_certificates/__init__.py b/test/security/check_certificates/check_certificates/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/test/security/check_certificates/check_certificates/__init__.py
diff --git a/test/security/check_certificates/check_certificates/check_certificates_validity.py b/test/security/check_certificates/check_certificates/check_certificates_validity.py
new file mode 100644
index 000000000..5d19a7390
--- /dev/null
+++ b/test/security/check_certificates/check_certificates/check_certificates_validity.py
@@ -0,0 +1,326 @@
+#!/usr/bin/env python3
+# COPYRIGHT NOTICE STARTS HERE
+#
+# Copyright 2020 Orange, Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# COPYRIGHT NOTICE ENDS HERE
+
+# Check all the kubernetes pods, evaluate the certificate and build a
+# certificate dashboard.
+#
+# Dependencies:
+# See requirements.txt
+# The dashboard is based on bulma framework
+#
+# Environment:
+# This script should be run on the local machine which has network access to
+# the onap K8S cluster.
+# It requires k8s cluster config file on local machine
+# It requires also the ONAP IP provided through an env variable ONAP_IP
+# ONAP_NAMESPACE env variable is also considered
+# if not set we set it to onap
+# Example usage:
+# python check_certificates_validity.py
+# the summary html page will be generated where the script is launched
+"""
+Check ONAP certificates
+"""
+import argparse
+import logging
+import os
+import ssl
+import sys
+import OpenSSL
+from datetime import datetime
+from kubernetes import client, config
+from jinja2 import Environment, FileSystemLoader, select_autoescape
+from socket import * # pylint: disable=W0614
+
+# Set SSL timeout
+setdefaulttimeout(10)
+
+# Logger
+LOG_LEVEL = 'INFO'
+logging.basicConfig()
+LOGGER = logging.getLogger("Gating-Index")
+LOGGER.setLevel(LOG_LEVEL)
+CERT_MODES = ['nodeport', 'ingress', 'internal']
+EXP_CRITERIA_MIN = 30
+EXP_CRITERIA_MAX = 389
+EXPECTED_CERT_STRING = "C=US;O=ONAP;OU=OSAAF;CN=intermediateCA_9"
+EXPECTED_STRIMZI_CA_CERT_STRING = "O=io.strimzi;CN=cluster-ca v0"
+RESULT_PATH = "."
+
+
+# Get arguments
+parser = argparse.ArgumentParser()
+parser.add_argument(
+ '-m',
+ '--mode',
+ choices=CERT_MODES,
+ help='Mode (nodeport, ingress, internal). If not set all modes are tried',
+ default='nodeport')
+parser.add_argument(
+ '-i',
+ '--ip',
+ help='ONAP IP needed (for nodeport mode)',
+ default=os.environ.get('ONAP_IP'))
+parser.add_argument(
+ '-n',
+ '--namespace',
+ help='ONAP namespace',
+ default='onap')
+parser.add_argument(
+ '-d',
+ '--dir',
+ help='Result directory',
+ default=RESULT_PATH)
+
+args = parser.parse_args()
+
+# Get the ONAP namespace
+onap_namespace = args.namespace
+LOGGER.info("Verification of the %s certificates started", onap_namespace)
+
+# Create the target dir (in case it does not exist)
+if os.pardir not in args.dir:
+ os.makedirs(args.dir, exist_ok=True)
+
+# Nodeport specific section
+# Retrieve the kubernetes IP for mode nodeport
+if args.mode == "nodeport":
+ if args.ip is None:
+ LOGGER.error(
+ "In nodeport mode, the IP of the ONAP cluster is required." +
+ "The value can be set using -i option " +
+ "or retrieved from the ONAP_IP env variable")
+ exit(parser.print_usage())
+ try:
+ nodeports_xfail_list = []
+ with open('nodeports_xfail.txt', 'r') as f:
+ first_line = f.readline()
+ for line in f:
+ nodeports_xfail_list.append(
+ line.split(" ", 1)[0].strip('\n'))
+ LOGGER.info("nodeports xfail list: %s",
+ nodeports_xfail_list)
+ except KeyError:
+ LOGGER.error("Please set the environment variable ONAP_IP")
+ sys.exit(1)
+ except FileNotFoundError:
+ LOGGER.warning("Nodeport xfail list not found")
+
+# Kubernetes section
+# retrieve the candidate ports first
+if args.mode == "internal":
+ k8s_config = config.load_incluster_config()
+else:
+ k8s_config = config.load_kube_config()
+
+core = client.CoreV1Api()
+api_instance = client.NetworkingV1Api(
+ client.ApiClient(k8s_config))
+k8s_services = core.list_namespaced_service(onap_namespace).items
+k8s_ingress = api_instance.list_namespaced_ingress(onap_namespace).items
+
+
+def get_certifificate_info(host, port):
+ LOGGER.debug("Host: %s", host)
+ LOGGER.debug("Port: %s", port)
+ cert = ssl.get_server_certificate(
+ (host, port))
+ LOGGER.debug("get certificate")
+ x509 = OpenSSL.crypto.load_certificate(
+ OpenSSL.crypto.FILETYPE_PEM, cert)
+
+ LOGGER.debug("get certificate")
+ exp_date = datetime.strptime(
+ x509.get_notAfter().decode('ascii'), '%Y%m%d%H%M%SZ')
+ LOGGER.debug("Expiration date retrieved %s", exp_date)
+ issuer = x509.get_issuer().get_components()
+
+ issuer_info = ''
+ # format issuer nicely
+ for issuer_info_key, issuer_info_val in issuer:
+ issuer_info += (issuer_info_key.decode('utf-8') + "=" +
+ issuer_info_val.decode('utf-8') + ";")
+ cert_validity = False
+ if issuer_info[:-1] in [EXPECTED_CERT_STRING, EXPECTED_STRIMZI_CA_CERT_STRING]:
+ cert_validity = True
+
+ return {'expiration_date': exp_date,
+ 'issuer': issuer_info[:-1],
+ 'validity': cert_validity}
+
+
+def test_services(k8s_services, mode):
+ success_criteria = True # success criteria per scan
+ # looks for the certificates
+ node_ports_list = []
+ node_ports_ssl_error_list = []
+ node_ports_connection_error_list = []
+ node_ports_type_error_list = []
+ node_ports_reset_error_list = []
+
+ # for node ports and internal we consider the services
+ # for the ingress we consider the ingress
+ for service in k8s_services:
+ try:
+ for port in service.spec.ports:
+ # For nodeport mode, we consider
+ # - the IP of the cluster
+ # - spec.port.node_port
+ #
+ # For internal mode, we consider
+ # - spec.selector.app
+ # - spec.port.port
+ test_name = service.metadata.name
+ test_port = None
+ error_waiver = False # waiver per port
+ if mode == 'nodeport':
+ test_url = args.ip
+ test_port = port.node_port
+
+ # Retrieve the nodeport xfail list
+ # to consider SECCOM waiver if needed
+ if test_port in nodeports_xfail_list:
+ error_waiver = True
+ else: # internal mode
+ test_port = port.port
+ test_url = ''
+ # in Internal mode there are 2 types
+ # app
+ # app.kubernetes.io/name
+ try:
+ test_url = service.spec.selector['app']
+ except KeyError:
+ test_url = service.spec.selector['app.kubernetes.io/name']
+
+ if test_port is not None:
+ LOGGER.info(
+ "Look for certificate %s (%s:%s)",
+ test_name,
+ test_url,
+ test_port)
+ cert_info = get_certifificate_info(test_url, test_port)
+ exp_date = cert_info['expiration_date']
+ LOGGER.info("Expiration date retrieved %s", exp_date)
+ # calculate the remaining time
+ delta_time = (exp_date - datetime.now()).days
+
+ # Test criteria
+ if error_waiver:
+ LOGGER.info("Port found in the xfail list," +
+ "do not consider it for success criteria")
+ else:
+ if (delta_time < EXP_CRITERIA_MIN or
+ delta_time > EXP_CRITERIA_MAX):
+ success_criteria = False
+ if cert_info['validity'] is False:
+ success_criteria = False
+ # add certificate to the list
+ node_ports_list.append(
+ {'pod_name': test_name,
+ 'pod_port': test_port,
+ 'expiration_date': str(exp_date),
+ 'remaining_days': delta_time,
+ 'cluster_ip': service.spec.cluster_ip,
+ 'issuer': cert_info['issuer'],
+ 'validity': cert_info['validity']})
+ else:
+ LOGGER.debug("Port value retrieved as None")
+ except ssl.SSLError as e:
+ LOGGER.exception("Bad certificate for port %s" % port)
+ node_ports_ssl_error_list.append(
+ {'pod_name': test_name,
+ 'pod_port': test_port,
+ 'error_details': str(e)})
+ except ConnectionRefusedError as e:
+ LOGGER.exception("ConnectionRefusedError for port %s" % port)
+ node_ports_connection_error_list.append(
+ {'pod_name': test_name,
+ 'pod_port': test_port,
+ 'error_details': str(e)})
+ except TypeError as e:
+ LOGGER.exception("Type Error for port %s" % port)
+ node_ports_type_error_list.append(
+ {'pod_name': test_name,
+ 'pod_port': test_port,
+ 'error_details': str(e)})
+ except ConnectionResetError as e:
+ LOGGER.exception("ConnectionResetError for port %s" % port)
+ node_ports_reset_error_list.append(
+ {'pod_name': test_name,
+ 'pod_port': test_port,
+ 'error_details': str(e)})
+ except:
+ LOGGER.error("Unknown error")
+
+ # Create html summary
+ jinja_env = Environment(
+ autoescape=select_autoescape(['html']),
+ loader=FileSystemLoader('./templates'))
+ if args.mode == 'nodeport':
+ jinja_env.get_template('cert-nodeports.html.j2').stream(
+ node_ports_list=node_ports_list,
+ node_ports_ssl_error_list=node_ports_ssl_error_list,
+ node_ports_connection_error_list=node_ports_connection_error_list,
+ node_ports_type_error_list=node_ports_type_error_list,
+ node_ports_reset_error_list=node_ports_reset_error_list).dump(
+ '{}/certificates.html'.format(args.dir))
+ else:
+ jinja_env.get_template('cert-internal.html.j2').stream(
+ node_ports_list=node_ports_list,
+ node_ports_ssl_error_list=node_ports_ssl_error_list,
+ node_ports_connection_error_list=node_ports_connection_error_list,
+ node_ports_type_error_list=node_ports_type_error_list,
+ node_ports_reset_error_list=node_ports_reset_error_list).dump(
+ '{}/certificates.html'.format(args.dir))
+
+ return success_criteria
+
+
+def test_ingress(k8s_ingress, mode):
+ LOGGER.debug('Test %s mode', mode)
+ for ingress in k8s_ingress:
+ LOGGER.debug(ingress)
+ return True
+
+
+# ***************************************************************************
+# ***************************************************************************
+# start of the test
+# ***************************************************************************
+# ***************************************************************************
+test_status = True
+if args.mode == "ingress":
+ test_routine = test_ingress
+ test_param = k8s_ingress
+else:
+ test_routine = test_services
+ test_param = k8s_services
+
+LOGGER.info(">>>> Test certificates: mode = %s", args.mode)
+if test_routine(test_param, args.mode):
+ LOGGER.warning(">>>> Test PASS")
+else:
+ LOGGER.warning(">>>> Test FAIL")
+ test_status = False
+
+if test_status:
+ LOGGER.info(">>>> Test Check certificates PASS")
+else:
+ LOGGER.error(">>>> Test Check certificates FAIL")
+ sys.exit(1)
diff --git a/test/security/check_certificates/check_certificates/templates/base.html.j2 b/test/security/check_certificates/check_certificates/templates/base.html.j2
new file mode 100644
index 000000000..cbb4e4428
--- /dev/null
+++ b/test/security/check_certificates/check_certificates/templates/base.html.j2
@@ -0,0 +1,231 @@
+{% macro color(failing, total) %}
+{% if failing == 0 %}
+is-success
+{% else %}
+{% if (failing / total) <= 0.1 %}
+is-warning
+{% else %}
+is-danger
+{% endif %}
+{% endif %}
+{% endmacro %}
+
+{% macro percentage(failing, total) %}
+{{ ((total - failing) / total) | round }}
+{% endmacro %}
+
+{% macro statistic(resource_name, failing, total) %}
+{% set success = total - failing %}
+<div class="level-item has-text-centered">
+ <div>
+ <p class="heading">{{ resource_name | capitalize }}</p>
+ <p class="title">{{ success }}/{{ total }}</p>
+ <progress class="progress {{ color(failing, total) }}" value="{{ success }}" max="{{ total }}">{{ percentage(failing, total) }}</progress>
+ </div>
+ </div>
+{% endmacro %}
+
+{% macro pods_table(pods) %}
+<div id="pods" class="table-container">
+ <table class="table is-fullwidth is-striped is-hoverable">
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th>Ready</th>
+ <th>Status</th>
+ <th>Reason</th>
+ <th>Restarts</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for pod in pods %}
+ <tr>
+ <td><a href="./pod-{{ pod.name }}.html" title="{{ pod.name }}">{{ pod.k8s.metadata.name }}</a></td>
+ {% if pod.init_done %}
+ <td>{{ pod.running_containers }}/{{ (pod.containers | length) }}</td>
+ {% else %}
+ <td>Init:{{ pod.runned_init_containers }}/{{ (pod.init_containers | length) }}</td>
+ {% endif %}
+ <td>{{ pod.k8s.status.phase }}</td>
+ <td>{{ pod.k8s.status.reason }}</td>
+ {% if pod.init_done %}
+ <td>{{ pod.restart_count }}</td>
+ {% else %}
+ <td>{{ pod.init_restart_count }}</td>
+ {% endif %}
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+</div>
+{% endmacro %}
+
+{% macro key_value_description_list(title, dict) %}
+<dt><strong>{{ title | capitalize }}:</strong></dt>
+<dd>
+ {% if dict %}
+ {% for key, value in dict.items() %}
+ {% if loop.first %}
+ <dl>
+ {% endif %}
+ <dt>{{ key }}:</dt>
+ <dd>{{ value }}</dd>
+ {% if loop.last %}
+ </dl>
+ {% endif %}
+ {% endfor %}
+ {% endif %}
+</dd>
+{% endmacro %}
+
+{% macro description(k8s) %}
+<div class="container">
+ <h1 class="title is-1">Description</h1>
+ <div class="content">
+ <dl>
+ {% if k8s.spec.type %}
+ <dt><strong>Type:</strong></dt>
+ <dd>{{ k8s.spec.type }}</dd>
+ {% if (k8s.spec.type | lower) == "clusterip" %}
+ <dt><strong>Headless:</strong></dt>
+ <dd>{% if (k8s.spec.cluster_ip | lower) == "none" %}Yes{% else %}No{% endif %}</dd>
+ {% endif %}
+ {% endif %}
+ {{ key_value_description_list('Labels', k8s.metadata.labels) | indent(width=6) }}
+ {{ key_value_description_list('Annotations', k8s.metadata.annotations) | indent(width=6) }}
+ {% if k8s.spec.selector %}
+ {% if k8s.spec.selector.match_labels %}
+ {{ key_value_description_list('Selector', k8s.spec.selector.match_labels) | indent(width=6) }}
+ {% else %}
+ {{ key_value_description_list('Selector', k8s.spec.selector) | indent(width=6) }}
+ {% endif %}
+ {% endif %}
+ {% if k8s.phase %}
+ <dt><strong>Status:</strong></dt>
+ <dd>{{ k8s.phase }}</dd>
+ {% endif %}
+ {% if k8s.metadata.owner_references %}
+ <dt><strong>Controlled By:</strong></dt>
+ <dd>{{ k8s.metadata.owner_references[0].kind }}/{{ k8s.metadata.owner_references[0].name }}</dd>
+ {% endif %}
+ </dl>
+ </div>
+</div>
+{% endmacro %}
+
+{% macro pods_container(pods, parent, has_title=True) %}
+<div class="container">
+ {% if has_title %}
+ <h1 class="title is-1">Pods</h1>
+ {% endif %}
+ {% if (pods | length) > 0 %}
+ {{ pods_table(pods) | indent(width=2) }}
+ {% else %}
+ <div class="notification is-warning">{{ parent }} has no pods!</div>
+ {% endif %}
+</div>
+{% endmacro %}
+
+{% macro two_level_breadcrumb(title, name) %}
+<section class="section">
+ <div class="container">
+ <nav class="breadcrumb" aria-label="breadcrumbs">
+ <ul>
+ <li><a href="./index.html">Summary</a></li>
+ <li class="is-active"><a href="#" aria-current="page">{{ title | capitalize }} {{ name }}</a></li>
+ </ul>
+ </nav>
+ </div>
+</section>
+{% endmacro %}
+
+{% macro pod_parent_summary(title, name, failed_pods, pods) %}
+{{ summary(title, name, [{'title': 'Pod', 'failing': failed_pods, 'total': (pods | length)}]) }}
+{% endmacro %}
+
+{% macro number_ok(number, none_value, total=None) %}
+{% if number %}
+{% if total and number < total %}
+<span class="tag is-warning">{{ number }}</span>
+{% else %}
+{{ number }}
+{% endif %}
+{% else %}
+<span class="tag is-warning">{{ none_value }}</span>
+{% endif %}
+{% endmacro %}
+
+{% macro summary(title, name, statistics) %}
+<section class="hero is-light">
+ <div class="hero-body">
+ <div class="container">
+ <h1 class="title is-1">
+ {{ title | capitalize }} {{ name }} Summary
+ </h1>
+ <nav class="level">
+ {% for stat in statistics %}
+ {% if stat.total > 0 %}
+ {{ statistic(stat.title, stat.failing, stat.total) | indent(width=8) }}
+ {% endif %}
+ {% endfor %}
+ </nav>
+ </div>
+ </div>
+</section>
+{% endmacro %}
+
+<!DOCTYPE html>
+<html>
+ <head>
+ <meta charset="utf-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1">
+ <title>Tests results - {% block title %}{% endblock %}</title>
+ <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bulma@0.9.0/css/bulma.min.css">
+ <script defer src="https://use.fontawesome.com/releases/v5.3.1/js/all.js"></script>
+ {% block more_head %}{% endblock %}
+ </head>
+ <body>
+ <nav class="navbar" role="navigation" aria-label="main navigation">
+ <div class="navbar-brand">
+ <a class="navbar-item" href="https://www.onap.org">
+ <img src="https://www.onap.org/wp-content/uploads/sites/20/2017/02/logo_onap_2017.png" width="234" height="50">
+ </a>
+
+ <a role="button" class="navbar-burger burger" aria-label="menu" aria-expanded="false" data-target="navbarBasicExample">
+ <span aria-hidden="true"></span>
+ <span aria-hidden="true"></span>
+ <span aria-hidden="true"></span>
+ </a>
+ </div>
+
+ <div id="navbarBasicExample" class="navbar-menu">
+ <div class="navbar-start">
+ <a class="navbar-item">
+ Summary
+ </a>
+ </div>
+ </div>
+ </nav>
+
+ {% block content %}{% endblock %}
+
+ <footer class="footer">
+ <div class="container">
+ <div class="columns">
+ <div class="column">
+ <p class="has-text-grey-light">
+ <a href="https://bulma.io/made-with-bulma/">
+ <img src="https://bulma.io/images/made-with-bulma.png" alt="Made with Bulma" width="128" height="24">
+ </a>
+ </p>
+ </div>
+ <div class="column">
+ <a class="has-text-grey" href="https://gitlab.com/Orange-OpenSource/lfn/tools/kubernetes-status" style="border-bottom: 1px solid currentColor;">
+ Improve this page on Gitlab
+ </a>
+ </div>
+ </div>
+ </div>
+ </footer>
+ </body>
+</html>
diff --git a/test/security/check_certificates/check_certificates/templates/cert-internal.html.j2 b/test/security/check_certificates/check_certificates/templates/cert-internal.html.j2
new file mode 100644
index 000000000..f9049807a
--- /dev/null
+++ b/test/security/check_certificates/check_certificates/templates/cert-internal.html.j2
@@ -0,0 +1,129 @@
+{% extends "base.html.j2" %}
+{% block title %}ONAP Certificates expiration page{% endblock %}
+
+{% block content %}
+<h1 class="title is-1">ONAP Certificates</h1>
+<section class="section">
+ <div class="container">
+ <h3 class="subtitle">Cluster Internal Ports</h3>
+
+<table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Port</th>
+ <th>Expected Expiration Date</th>
+ <th>Remaining Days</th>
+ <th>Root CA</th>
+ <th>Root CA Validity</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for cert in node_ports_list %}
+ <tr {% if cert.remaining_days < 0 %} class="has-background-danger" {%elif cert.remaining_days < 30 %} class="has-background-warning" {%elif cert.remaining_days < 60 %} class="has-background-warning-light " {%elif cert.remaining_days > 389 %} class="has-background-warning-light" {%elif cert.remaining_days == 364 and cert.validity %} class="has-background-success-light" {% endif %}>
+ <td>{{ cert.pod_name }}</td>
+ <td>{{ cert.pod_port }}</td>
+ <td>{{ cert.expiration_date }}</td>
+ <td>{{ cert.remaining_days }}</td>
+ <td>{{ cert.issuer }}</td>
+ <td>{% if cert.validity %}
+ <span class="icon is-large has-text-success">
+ <i class="fas fa-check-square"></i>
+ </span>
+ {% else %}
+ <span class="icon is-large has-text-danger">
+ <i class="fas fa-ban"></i>
+ </span>
+ {% endif %}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+
+ {% if node_ports_ssl_error_list|length > 0 %}
+ <h3 class="subtitle">Cluster Internal Port SSL errors</h3>
+ <table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Port</th>
+ <th>Error Details</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for cert in node_ports_ssl_error_list %}
+ <td>{{ cert.pod_name }}</td>
+ <td>{{ cert.pod_port }}</td>
+ <td>{{ cert.error_details }}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+{% endif %}
+
+{% if node_ports_connection_error_list|length > 0 %}
+ <h3 class="subtitle">Cluster Internal Ports Connection errors</h3>
+ <table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Port</th>
+ <th>Error Details</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for cert in node_ports_connection_error_list %}
+ <td>{{ cert.pod_name }}</td>
+ <td>{{ cert.pod_port }}</td>
+ <td>{{ cert.error_details }}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+{% endif %}
+
+{% if node_ports_type_error_list|length > 0 %}
+ <h3 class="subtitle">Cluster Internal Ports Type Error</h3>
+ <table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Port</th>
+ <th>Error Details</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for cert in node_ports_type_error_list %}
+ <td>{{ cert.pod_name }}</td>
+ <td>{{ cert.pod_port }}</td>
+ <td>{{ cert.error_details }}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+{% endif %}
+
+{% if node_ports_reset_error_list|length > 0 %}
+ <h3 class="subtitle">Cluster Internal Ports Connection Reset Errors</h3>
+ <table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Port</th>
+ <th>Error Details</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for cert in node_ports_reset_error_list %}
+ <td>{{ cert.pod_name }}</td>
+ <td>{{ cert.pod_port }}</td>
+ <td>{{ cert.error_details }}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+</table>
+{% endif %}
+
+{% endblock %}
+</div>
+</section>
diff --git a/test/security/check_certificates/check_certificates/templates/cert-nodeports.html.j2 b/test/security/check_certificates/check_certificates/templates/cert-nodeports.html.j2
new file mode 100644
index 000000000..df37c3da9
--- /dev/null
+++ b/test/security/check_certificates/check_certificates/templates/cert-nodeports.html.j2
@@ -0,0 +1,129 @@
+{% extends "base.html.j2" %}
+{% block title %}ONAP Certificates expiration page{% endblock %}
+
+{% block content %}
+<h1 class="title is-1">ONAP Certificates</h1>
+<section class="section">
+ <div class="container">
+ <h3 class="subtitle">Node ports</h3>
+
+<table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Port</th>
+ <th>Expected Expiration Date</th>
+ <th>Remaining Days</th>
+ <th>Root CA</th>
+ <th>Root CA Validity</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for cert in node_ports_list %}
+ <tr {% if cert.remaining_days < 0 %} class="has-background-danger" {%elif cert.remaining_days < 30 %} class="has-background-warning" {%elif cert.remaining_days < 60 %} class="has-background-warning-light " {%elif cert.remaining_days > 389 %} class="has-background-warning-light" {%elif cert.remaining_days == 364 and cert.validity %} class="has-background-success-light" {% endif %}>
+ <td>{{ cert.pod_name }}</td>
+ <td>{{ cert.pod_port }}</td>
+ <td>{{ cert.expiration_date }}</td>
+ <td>{{ cert.remaining_days }}</td>
+ <td>{{ cert.issuer }}</td>
+ <td>{% if cert.validity %}
+ <span class="icon is-large has-text-success">
+ <i class="fas fa-check-square"></i>
+ </span>
+ {% else %}
+ <span class="icon is-large has-text-danger">
+ <i class="fas fa-ban"></i>
+ </span>
+ {% endif %}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+
+ {% if node_ports_ssl_error_list|length > 0 %}
+ <h3 class="subtitle">Node ports SSL errors</h3>
+ <table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Port</th>
+ <th>Error Details</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for cert in node_ports_ssl_error_list %}
+ <td>{{ cert.pod_name }}</td>
+ <td>{{ cert.pod_port }}</td>
+ <td>{{ cert.error_details }}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+{% endif %}
+
+{% if node_ports_connection_error_list|length > 0 %}
+ <h3 class="subtitle">Node ports Connection errors</h3>
+ <table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Port</th>
+ <th>Error Details</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for cert in node_ports_connection_error_list %}
+ <td>{{ cert.pod_name }}</td>
+ <td>{{ cert.pod_port }}</td>
+ <td>{{ cert.error_details }}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+{% endif %}
+
+{% if node_ports_type_error_list|length > 0 %}
+ <h3 class="subtitle">Node ports Type Error</h3>
+ <table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Port</th>
+ <th>Error Details</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for cert in node_ports_type_error_list %}
+ <td>{{ cert.pod_name }}</td>
+ <td>{{ cert.pod_port }}</td>
+ <td>{{ cert.error_details }}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+{% endif %}
+
+{% if node_ports_reset_error_list|length > 0 %}
+ <h3 class="subtitle">Node ports Connection Reset Errors</h3>
+ <table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Port</th>
+ <th>Error Details</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for cert in node_ports_reset_error_list %}
+ <td>{{ cert.pod_name }}</td>
+ <td>{{ cert.pod_port }}</td>
+ <td>{{ cert.error_details }}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+</table>
+{% endif %}
+
+{% endblock %}
+</div>
+</section>
diff --git a/test/security/check_certificates/requirements.txt b/test/security/check_certificates/requirements.txt
new file mode 100644
index 000000000..15d50c44c
--- /dev/null
+++ b/test/security/check_certificates/requirements.txt
@@ -0,0 +1,3 @@
+pyopenssl
+kubernetes
+jinja2
diff --git a/test/security/check_certificates/setup.cfg b/test/security/check_certificates/setup.cfg
new file mode 100644
index 000000000..72966f837
--- /dev/null
+++ b/test/security/check_certificates/setup.cfg
@@ -0,0 +1,3 @@
+[metadata]
+name = check_certificates
+version = 0.1
diff --git a/test/security/check_certificates/setup.py b/test/security/check_certificates/setup.py
new file mode 100644
index 000000000..f5154f282
--- /dev/null
+++ b/test/security/check_certificates/setup.py
@@ -0,0 +1,5 @@
+import setuptools
+setuptools.setup(
+ setup_requires=['pbr', 'setuptools'],
+ pbr=True,
+ include_package_data=True)
diff --git a/test/security/check_certificates/test-requirements.txt b/test/security/check_certificates/test-requirements.txt
new file mode 100644
index 000000000..a0679b703
--- /dev/null
+++ b/test/security/check_certificates/test-requirements.txt
@@ -0,0 +1,6 @@
+# The order of packages is significant, because pip processes them in the order
+# of appearance. Changing the order has an impact on the overall integration
+# process, which may cause wedges in the gate later.
+coverage!=4.4,>=4.0 # Apache-2.0
+flake8 # MIT
+pylint # GPLv2
diff --git a/test/security/check_certificates/tox.ini b/test/security/check_certificates/tox.ini
new file mode 100644
index 000000000..2172bbc96
--- /dev/null
+++ b/test/security/check_certificates/tox.ini
@@ -0,0 +1,9 @@
+[tox]
+envlist = py3
+
+[testenv]
+deps =
+ -r{toxinidir}/requirements.txt
+
+[testenv:py3]
+commands = python {toxinidir}/setup.py test
diff --git a/test/security/check_for_http_endpoints.sh b/test/security/check_for_http_endpoints.sh
deleted file mode 100755
index 5c2ba20c3..000000000
--- a/test/security/check_for_http_endpoints.sh
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/env bash
-
-# COPYRIGHT NOTICE STARTS HERE
-#
-# Copyright 2019 Samsung Electronics Co., Ltd.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# COPYRIGHT NOTICE ENDS HERE
-
-# Check all ports exposed outside of kubernetes cluster looking for plain http
-# endpoints.
-#
-# Dependencies:
-# nmap
-# kubectl + config
-#
-# Return value: Number of discovered http ports
-# Output: List of pods exposing http endpoints
-#
-
-#Prerequisities commands list
-REQ_APPS=(kubectl nmap awk column sort paste grep wc)
-
-# Check for prerequisites apps
-for cmd in "${REQ_APPS[@]}"; do
- if ! [ -x "$(command -v "$cmd")" ]; then
- echo "Error: command $cmd is not installed"
- exit 1
- fi
-done
-
-if [ "$#" -lt 1 ]; then
- echo "Usage: $0 <k8s-namespace>"
- exit 1
-fi
-
-K8S_NAMESPACE=$1
-
-# Get both values on single call as this may get slow
-PORTS_SVCS=`kubectl get svc --namespace=$K8S_NAMESPACE -o go-template='{{range $item := .items}}{{range $port := $item.spec.ports}}{{if .nodePort}}{{.nodePort}}{{"\t"}}{{$item.metadata.name}}{{"\n"}}{{end}}{{end}}{{end}}' | column -t | sort -n`
-
-# Split port number and service name
-PORTS=`awk '{print $1}' <<<"$PORTS_SVCS"`
-SVCS=`awk '{print $2}' <<<"$PORTS_SVCS"`
-
-# Create a list in nmap-compatible format
-PORT_LIST=`tr "\\n" "," <<<"$PORTS" | sed 's/,$//'; echo ''`
-
-# Get IP addres of some cluster node
-K8S_NODE=`kubectl describe nodes \`kubectl get nodes | grep -v NAME | head -n 1 | awk '{print $1}'\` | grep external-ip | awk '{print $2}'`
-
-# perform scan
-SCAN_RESULT=`nmap $K8S_NODE -sV -p $PORT_LIST 2>/dev/null | grep \tcp`
-
-# Concatenate scan result with service name
-RESULTS=`paste <(printf %s "$SVCS") <(printf %s "$SCAN_RESULT") | column -t`
-
-# Find all plain http ports
-HTTP_PORTS=`grep http <<< "$RESULTS" | grep -v ssl/http`
-
-# Count them
-N_HTTP=`wc -l <<<"$HTTP_PORTS"`
-
-if [ "$N_HTTP" -gt 0 ]; then
- echo "$HTTP_PORTS"
-fi
-
-exit $N_HTTP
diff --git a/test/security/check_for_ingress_and_nodeports.py b/test/security/check_for_ingress_and_nodeports.py
new file mode 100755
index 000000000..e7950a0d7
--- /dev/null
+++ b/test/security/check_for_ingress_and_nodeports.py
@@ -0,0 +1,339 @@
+#!/usr/bin/env python3
+# COPYRIGHT NOTICE STARTS HERE
+#
+# Copyright 2020 Samsung Electronics Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# COPYRIGHT NOTICE ENDS HERE
+
+# Check all node ports exposed outside of the kubernetes cluster looking for plain http and https.
+# Check all ingress controller services exposed outside of the kubernetes cluster
+# looking for plain http and https. This script looks for K8S NodePorts and ingress services declared
+# in the K8S cluster configuration and checks whether each service is alive or not.
+# It automatically detects the nodeport or ingress protocol (HTTP or HTTPS) and also detects if a
+# particular service uses HTTPS with a self-signed certificate (HTTPU).
+# The verbose option retrieves the HTTP header and prints it for each service.
+#
+# To setup runtime environment execute:
+#
+# $ tox
+# $ source .tox/security/bin/activate
+#
+# Environment:
+# This script should be run on a host with access to the Onap K8S cluster API.
+# It requires k8s cluster config file on local machine.
+#
+# Example usage:
+# Display exposed nodeport and ingress resources declared in the K8S cluster without scanning:
+# check_for_ingress_and_nodeports.py
+# Scan declared nodeports:
+# check_for_ingress_and_nodeports.py --scan-nodeport
+# Scan declared exposed ingress resources:
+# check_for_ingress_and_nodeports.py --scan-ingress
+
+from kubernetes import client, config
+import http.client
+import ssl
+import socket
+from enum import Enum
+import argparse
+import sys
+import colorama
+from colorama import Fore
+import urllib.parse
+from os import path
+
+""" List all nodeports """
+def list_nodeports(v1):
+ ret = {}
+ svc = v1.list_namespaced_service(K8S_NAMESPACE)
+ for i in svc.items:
+ if i.spec.ports:
+ ports = [ j.node_port for j in i.spec.ports if j.node_port ]
+ if ports:
+ ret[i.metadata.name] = ports
+ return ret
+
+# Class enum for returning current http mode
+class ScanMode(Enum):
+ HTTPS = 0 #Safe https
+ HTTPU = 1 #Unsafe https
+ HTTP = 2 #Pure http
+ def __str__(self):
+ return self.name
+
+#Read the ingress controller http and https ports from the kubernetes cluster
+def find_ingress_ports(v1):
+ svc = v1.list_namespaced_service(K8S_INGRESS_NS)
+ http_port = 0
+ https_port = 0
+ for item in svc.items:
+ if item.metadata.name == K8S_INGRESS_NS:
+ for pinfo in item.spec.ports:
+ if pinfo and pinfo.name == 'http':
+ http_port = pinfo.node_port
+ elif pinfo and pinfo.name == 'https':
+ https_port = pinfo.node_port
+
+ return http_port,https_port
+ else: return(80,443)
+
+# List all ingress devices
+def list_ingress(xv1b):
+ SSL_ANNOTATION = 'nginx.ingress.kubernetes.io/ssl-redirect'
+ inglist = xv1b.list_namespaced_ingress(K8S_NAMESPACE)
+ svc_list = {}
+ for ing in inglist.items:
+ svc_name = ing.metadata.labels['app']
+ arr = []
+ annotations = ing.metadata.annotations
+ for host in ing.spec.rules:
+ arr.append(host.host)
+ if (SSL_ANNOTATION in annotations) and annotations[SSL_ANNOTATION]=="true":
+ smode = ScanMode.HTTPS
+ else: smode = ScanMode.HTTP
+ svc_list[svc_name] = [ arr, smode ]
+ return svc_list
+
+# Scan single port
+def scan_single_port(host,port,scanmode):
+ ssl_unverified = ssl._create_unverified_context()
+ if scanmode==ScanMode.HTTP:
+ conn = http.client.HTTPConnection(host,port,timeout=10)
+ elif scanmode==ScanMode.HTTPS:
+ conn = http.client.HTTPSConnection(host,port,timeout=10)
+ elif scanmode==ScanMode.HTTPU:
+ conn = http.client.HTTPSConnection(host,port,timeout=10,context=ssl_unverified)
+ outstr = None
+ retstatus = False
+ try:
+ conn.request("GET","/")
+ outstr = conn.getresponse()
+ except http.client.BadStatusLine as exc:
+ outstr = "Non HTTP proto" + str(exc)
+ retstatus = exc
+ except ConnectionRefusedError as exc:
+ outstr = "Connection refused" + str(exc)
+ retstatus = exc
+ except ConnectionResetError as exc:
+ outstr = "Connection reset" + str(exc)
+ retstatus = exc
+ except socket.timeout as exc:
+ outstr = "Connection timeout" + str(exc)
+ retstatus = exc
+ except ssl.SSLError as exc:
+ outstr = "SSL error" + str(exc)
+ retstatus = exc
+ except OSError as exc:
+ outstr = "OS error" + str(exc)
+ retstatus = exc
+ conn.close()
+ return retstatus,outstr
+
+# Scan port
+def scan_portn(port):
+ host = urllib.parse.urlsplit(v1c.host).hostname
+ for mode in ScanMode:
+ retstatus, out = scan_single_port(host,port,mode)
+ if not retstatus:
+ result = port, mode, out.getcode(), out.read().decode('utf-8'),mode
+ break
+ else:
+ result = port, retstatus, out, None,mode
+ return result
+
+
+def scan_port(host, http, https, mode):
+ if mode==ScanMode.HTTP:
+ retstatus, out = scan_single_port(host,http,ScanMode.HTTP)
+ if not retstatus:
+ return host, ScanMode.HTTP, out.getcode(), out.read().decode('utf-8'), mode
+ else:
+ return host, retstatus, out, None, mode
+ elif mode==ScanMode.HTTPS:
+ retstatus, out = scan_single_port(host,https,ScanMode.HTTPS)
+ if not retstatus:
+ return host, ScanMode.HTTPS, out.getcode(), out.read().decode('utf-8'), mode
+ else:
+ retstatus, out = scan_single_port(host,https,ScanMode.HTTPU)
+ if not retstatus:
+ return host, ScanMode.HTTPU, out.getcode(), out.read().decode('utf-8'), mode
+ else:
+ return host, retstatus, out, None, mode
+
+
+# Visualise scan result
+def console_visualisation(cname, name, retstatus, httpcode, out, mode, httpcodes = None):
+ if httpcodes is None: httpcodes=[]
+ print(Fore.YELLOW,end='')
+ print( cname,name, end='\t',sep='\t')
+ if isinstance(retstatus,ScanMode):
+ if httpcode in httpcodes: estr = Fore.RED + '[ERROR '
+ else: estr = Fore.GREEN + '[OK '
+ print( estr, retstatus, str(httpcode)+ ']'+Fore.RESET,end='')
+ if VERBOSE: print( '\t',str(out) )
+ else: print()
+ else:
+ if not out: out = str(retstatus)
+ print( Fore.RED, '[ERROR ' +str(mode) +']', Fore.RESET,'\t', str(out))
+
+# Visualize compare results
+def console_compare_visualisation(cname,d1,d2):
+ print(Fore.YELLOW,end='')
+ print(cname, end='\t',sep='\t')
+ if d1!=d2:
+ print(Fore.RED + '[ERROR] '+ Fore.RESET)
+ if d1[0]!=d2[0]:
+ print('\tCode:',d1[0],'!=',d2[0])
+ if d1[1]!=d2[1]:
+ print('\t******** Response #1 ********\n',d1[1])
+ print('\t******** Response #2 ********\n',d2[1])
+ else:
+ print(Fore.GREEN + '[OK ',d1[0],']', Fore.RESET,sep='')
+ if VERBOSE and d1[1]:
+ print(d1[1])
+
+
+# Port detector type
+def check_onap_ports():
+ print("Scanning onap NodePorts")
+ check_list = list_nodeports(v1)
+ if not check_list:
+ print(Fore.RED + 'Unable to find any declared node port in the K8S cluster', Fore.RESET)
+ for k,v in check_list.items():
+ for port in v:
+ console_visualisation(k,*scan_portn(port) )
+
+#Check ONAP ingress
+def check_onap_ingress():
+ print("Scanning onap ingress services")
+ ihttp,ihttps = find_ingress_ports(v1)
+ check_list = list_ingress(v1b)
+ if not check_list:
+ print(Fore.RED+ 'Unable to find any declared ingress service in the K8S cluster', Fore.RESET)
+ for k,v in check_list.items():
+ for host in v[0]:
+ console_visualisation(k,*scan_port(host,ihttp,ihttps,v[1]),httpcodes=[404])
+
+#Print onap all ingress ports and node ports
+def onap_list_all():
+ ihttp,ihttps = find_ingress_ports(v1)
+ host = urllib.parse.urlsplit(v1c.host).hostname
+ print( 'Cluster IP' + Fore.YELLOW, host, Fore.RESET )
+ print('Ingress ' + Fore.RED + 'HTTP' + Fore.RESET + ' port:',Fore.YELLOW, ihttp, Fore.RESET)
+ print('Ingress ' + Fore.RED + 'HTTPS' + Fore.RESET + ' port:',Fore.YELLOW, ihttps, Fore.RESET)
+ print(Fore.YELLOW+"Onap NodePorts list:",Fore.RESET)
+ check_list = list_nodeports(v1)
+ for name,ports in check_list.items():
+ print(Fore.GREEN, name,Fore.RESET,":", *ports)
+ print(Fore.YELLOW+"Onap ingress controler services list:",Fore.RESET)
+ check_list = list_ingress(v1b)
+ for name,hosts in check_list.items():
+ print(Fore.GREEN, name + Fore.RESET,":", *hosts[0], Fore.RED+':', hosts[1],Fore.RESET)
+
+#Scan and compare nodeports and ingress check for results
+def compare_nodeports_and_ingress():
+ ihttp,ihttps = find_ingress_ports(v1)
+ print('Scanning nodeport services ...')
+ check_list = list_nodeports(v1)
+ if not check_list:
+ print(Fore.RED + 'Unable to find any declared node port in the K8S cluster', Fore.RESET)
+ valid_results = {}
+ for k,v in check_list.items():
+ for port in v:
+ nodeport_results = scan_portn(port)
+ if isinstance(nodeport_results[1],ScanMode) and nodeport_results[2] != 404:
+ valid_results[k] = nodeport_results
+ if VERBOSE: console_visualisation(k,*nodeport_results)
+ check_list = list_ingress(v1b)
+ if not check_list:
+ print(Fore.RED+ 'Unable to find any declared ingress service in the K8S cluster', Fore.RESET)
+ print('Scanning ingress services ...')
+ ing_valid_results = {}
+ for k,v in check_list.items():
+ for host in v[0]:
+ ingress_results = scan_port(host,ihttp,ihttps,v[1])
+ if isinstance(ingress_results[1],ScanMode) and ingress_results[2]!=404:
+ ing_valid_results[k] = ingress_results
+ if VERBOSE: console_visualisation(k,*ingress_results,httpcodes=[404])
+ ks1 = set(valid_results.keys())
+ ks2 = set(ing_valid_results.keys())
+ diff_keys = (ks1 - ks2) | (ks2 - ks1)
+ common_keys = ks1 & ks2
+ if VERBOSE and diff_keys:
+ print(Fore.BLUE + '[WARNING] Non matching nodes and ingress list:')
+ for key in diff_keys: print(key,sep='\t')
+ print(Fore.RESET + 'Please check is it correct.')
+ print('Matching ingress and nodeport host scan results:')
+ for scan_key in common_keys:
+ s1 = valid_results[scan_key][2:4]
+ s2 = ing_valid_results[scan_key][2:4]
+ num_failures = 0
+ if s1!=s2: ++num_failures
+ console_compare_visualisation(scan_key,s1,s2)
+ return num_failures
+
+def kube_config_exists(conf):
+ try:
+ assert path.exists(conf)
+ except AssertionError:
+ raise argparse.ArgumentTypeError(f'Fatal! K8S config {conf} does not exist')
+ else:
+ return conf
+
+if __name__ == "__main__":
+ colorama.init()
+ parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ command_group = parser.add_mutually_exclusive_group()
+ command_group.add_argument("--scan-nodeport",
+ default=False, action='store_true',
+ help='Scan onap for node services'
+ )
+ command_group.add_argument("--scan-ingress",
+ default=False, action='store_true',
+ help='Scan onap for ingress services'
+ )
+ command_group.add_argument("--scan-and-compare",
+ default=False, action='store_true',
+ help='Scan nodeports and ingress and compare results'
+ )
+ parser.add_argument( "--namespace",
+ default='onap', action='store',
+ help = 'kubernetes onap namespace'
+ )
+ parser.add_argument( "--ingress-namespace",
+ default='ingress-nginx', action='store',
+ help = 'kubernetes ingress namespace'
+ )
+ parser.add_argument( "--conf",
+ default='~/.kube/config', action='store',
+ help = 'kubernetes config file',
+ type = kube_config_exists
+ )
+ parser.add_argument("--verbose",
+ default=False, action='store_true',
+ help='Verbose output'
+ )
+ args = parser.parse_args()
+ K8S_NAMESPACE = args.namespace
+ K8S_INGRESS_NS = args.ingress_namespace
+ VERBOSE = args.verbose
+ config.load_kube_config(config_file=args.conf)
+ v1 = client.CoreV1Api()
+ v1b = client.ExtensionsV1beta1Api()
+ v1c = client.Configuration()
+ if args.scan_nodeport: check_onap_ports()
+ elif args.scan_ingress: check_onap_ingress()
+ elif args.scan_and_compare: sys.exit(compare_nodeports_and_ingress())
+ else: onap_list_all()
diff --git a/test/security/check_for_jdwp.sh b/test/security/check_for_jdwp.sh
index e79f712bf..ec5b5cb16 100755
--- a/test/security/check_for_jdwp.sh
+++ b/test/security/check_for_jdwp.sh
@@ -28,68 +28,122 @@
# Return value: Number of discovered JDWP ports
# Output: List of pods and exposing JDWP interface
#
+usage() {
+ cat <<EOF
+Usage: $(basename $0) <k8s-namespace> [-l <white list file>]
+ -l: jdpw white list ports file
+EOF
+ exit ${1:-0}
+}
if [ "$#" -lt 1 ]; then
- echo "Usage: $0 <k8s-namespace>"
+ usage
exit 1
fi
K8S_NAMESPACE=$1
LOCAL_PORT=12543
+FILTERED_PORTS_LIST=$(mktemp jdpw_ports_XXXXXX)
+WL_RAW_FILE_PATH=$(mktemp raw_filtered_ports_XXXXXX)
+
+manage_white_list() {
+ # init filtered port list file
+ if [ ! -f $WL_FILE_PATH ];then
+ echo "File not found"
+ usage
+ fi
+ grep -o '^[^#]*' $WL_FILE_PATH > $WL_RAW_FILE_PATH
+}
+
+### getopts
+while :
+do
+ case $2 in
+ -h|--help|help) usage;;
+ -l) WL_FILE_PATH=$3;manage_white_list;shift;;
+ -*) usage 1 ;;
+ *) break ;;
+ esac
+done
list_pods() {
- kubectl get po --namespace=$K8S_NAMESPACE | grep Running | awk '{print $1}' | grep -v NAME
+ kubectl get po --namespace=$K8S_NAMESPACE | grep Running | awk '{print $1}' | grep -v NAME
}
do_jdwp_handshake() {
- local ip="127.0.0.1"
- local port=$1
- local jdwp_challenge="JDWP-Handshake\n"
- local jdwp_response="JDWP-Handshake"
-
- # 10s timeout to avoid hangs when service doesn't answer at all
- local response=`nc -w 10 $ip $port <<<$jdwp_challenge | tr '\0' '\n'`
- local n_response_lines=`echo "$response" | wc -l`
- if [[ "$n_response_lines" -le 1 ]] && [[ $response == *"$jdwp_response"* ]]; then
- return 0
- fi
-
- return 1
+ local ip="127.0.0.1"
+ local port=$1
+ local jdwp_challenge="JDWP-Handshake\n"
+ local jdwp_response="JDWP-Handshake"
+
+ # 10s timeout to avoid hangs when service doesn't answer at all
+ local response=`nc -w 10 $ip $port <<<$jdwp_challenge | tr '\0' '\n'`
+ local n_response_lines=`echo "$response" | wc -l`
+ if [[ "$n_response_lines" -le 1 ]] && [[ $response == *"$jdwp_response"* ]]; then
+ return 0
+ fi
+
+ return 1
}
# get open ports from procfs as netstat is not always available
get_open_ports_on_pod() {
- local pod=$1
- local open_ports_hex=`kubectl exec --namespace=$K8S_NAMESPACE $pod cat /proc/net/tcp 2>/dev/null| grep -v "local_address" | awk '{ print $2" "$4 }' | grep '0A$' | tr ":" " " | awk '{ print $2 }' | sort | uniq`
- for hex_port in $open_ports_hex; do
- echo $((16#$hex_port))
- done
+ local pod=$1
+ local open_ports_hex=`kubectl exec --namespace=$K8S_NAMESPACE $pod cat /proc/net/tcp 2>/dev/null| grep -v "local_address" | awk '{ print $2" "$4 }' | grep '0A$' | tr ":" " " | awk '{ print $2 }' | sort | uniq`
+ for hex_port in $open_ports_hex; do
+ echo $((16#$hex_port))
+ done
}
+echo "------------------------------------------------------------------------"
+# Display the waivers
+if [ -s $XL_FILE_PATH ]; then
+ echo "-------------------- *** WARNING XFail List *** ------------------------"
+ cat $WL_FILE_PATH
+ echo "------------------------------------------------------------------------"
+fi
+
N_PORTS=0
# go through all pods
for pod in `list_pods`; do
- open_ports=`get_open_ports_on_pod $pod`
- # if there is no open ports just go to next pod
- if [ -z "$open_ports" ]; then
- continue
- fi
-
- # let's setup a proxy and check every open port
- for port in $open_ports; do
- # run proxy
- kubectl port-forward --namespace=$K8S_NAMESPACE $pod $LOCAL_PORT:$port &>/dev/null &
- sleep 1
- proxy_pid=$!
-
- do_jdwp_handshake $LOCAL_PORT
- if [ $? -eq 0 ]; then
- echo $pod $port
- ((++N_PORTS))
- fi
- kill $proxy_pid 2>/dev/null
- wait $proxy_pid 2>/dev/null
- done
+ open_ports=`get_open_ports_on_pod $pod`
+ # if there is no open ports just go to next pod
+ if [ -z "$open_ports" ]; then
+ continue
+ fi
+
+ # let's setup a proxy and check every open port
+ for port in $open_ports; do
+ # run proxy
+ kubectl port-forward --namespace=$K8S_NAMESPACE $pod $LOCAL_PORT:$port &>/dev/null &
+ sleep 1
+ proxy_pid=$!
+
+ do_jdwp_handshake $LOCAL_PORT
+ if [ $? -eq 0 ]; then
+ echo $pod $port | tee $FILTERED_PORTS_LIST
+ ((++N_PORTS))
+ fi
+ kill $proxy_pid 2>/dev/null
+ wait $proxy_pid 2>/dev/null
+ done
done
-exit $N_PORTS
+while IFS= read -r line; do
+ # for each line we test if it is in the white list with a regular expression
+ while IFS= read -r wl_line; do
+ wl_name=$(echo $wl_line | awk {'print $1'})
+ wl_port=$(echo $wl_line | awk {'print $2'})
+ if grep -e $wl_name.*$wl_port <<< "$line";then
+ # Found in white list, exclude it
+ sed -i "/$line/d" $FILTERED_PORTS_LIST
+ fi
+ done < $WL_RAW_FILE_PATH
+done < $FILTERED_PORTS_LIST
+
+N_FILTERED_PORTS_LIST=$(cat $FILTERED_PORTS_LIST |wc -l)
+echo "------------------------------------"
+echo "Nb error pod(s): $N_FILTERED_PORTS_LIST"
+cat $FILTERED_PORTS_LIST
+
+exit $N_FILTERED_PORTS_LIST
diff --git a/test/security/check_for_nonssl_endpoints.sh b/test/security/check_for_nonssl_endpoints.sh
new file mode 100755
index 000000000..446792dea
--- /dev/null
+++ b/test/security/check_for_nonssl_endpoints.sh
@@ -0,0 +1,126 @@
+#!/usr/bin/env bash
+
+# COPYRIGHT NOTICE STARTS HERE
+#
+# Copyright 2019 Samsung Electronics Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# COPYRIGHT NOTICE ENDS HERE
+
+# Check all ports exposed outside of kubernetes cluster looking for non-SSL
+# endpoints.
+#
+# Dependencies:
+# nmap
+# kubectl + config
+#
+# Return value: Number of discovered non-SSL ports
+# Output: List of pods exposing non-SSL endpoints
+#
+
+usage() {
+ cat <<EOF
+Usage: $(basename $0) <k8s-namespace> [-l <list of non-SSL endpoints expected to fail this test>]
+ -l: list of non-SSL endpoints expected to fail this test
+EOF
+ exit ${1:-0}
+}
+
+# Prerequisite commands list
+REQ_APPS=(kubectl nmap awk column sort paste grep wc mktemp sed cat)
+
+# Check for prerequisites apps
+for cmd in "${REQ_APPS[@]}"; do
+ if ! [ -x "$(command -v "$cmd")" ]; then
+ echo "Error: command $cmd is not installed"
+ exit 1
+ fi
+done
+
+if [ "$#" -lt 1 ]; then
+ usage 1
+fi
+
+K8S_NAMESPACE=$1
+FILTERED_PORTS_LIST=$(mktemp nonssl_endpoints_XXXXXX)
+XF_RAW_FILE_PATH=$(mktemp raw_filtered_nonssl_endpoints_XXXXXX)
+
+strip_white_list() {
+ if [ ! -f $XF_FILE_PATH ]; then
+ echo "File not found"
+ usage 1
+ fi
+ grep -o '^[^#]*' $XF_FILE_PATH > $XF_RAW_FILE_PATH
+}
+
+### getopts
+while :
+do
+ case $2 in
+ -h|--help|help) usage ;;
+ -l) XF_FILE_PATH=$3; strip_white_list; shift ;;
+ -*) usage 1 ;;
+ *) break ;;
+ esac
+done
+
+echo "------------------------------------------------------------------------"
+# Display the waivers
+if [ -s $XF_FILE_PATH ]; then
+ echo "-------------------- *** WARNING XFail List *** ------------------------"
+ cat $XF_FILE_PATH
+ echo "------------------------------------------------------------------------"
+fi
+
+# Get both values on single call as this may get slow
+PORTS_SVCS=`kubectl get svc --namespace=$K8S_NAMESPACE -o go-template='{{range $item := .items}}{{range $port := $item.spec.ports}}{{if .nodePort}}{{.nodePort}}{{"\t"}}{{$item.metadata.name}}{{"\n"}}{{end}}{{end}}{{end}}' | column -t | sort -n`
+
+# Split port number and service name
+PORTS=`awk '{print $1}' <<<"$PORTS_SVCS"`
+SVCS=`awk '{print $2}' <<<"$PORTS_SVCS"`
+
+# Create a list in nmap-compatible format
+PORT_LIST=`tr "\\n" "," <<<"$PORTS" | sed 's/,$//'; echo ''`
+
+# Get IP address of some cluster node (both "external-ip" and "ExternalIP" labels are matched)
+K8S_NODE=`kubectl describe nodes \`kubectl get nodes | grep -v NAME | head -n 1 | awk '{print $1}'\` | grep "external-ip\|ExternalIP" | awk '{print $2}'`
+
+# perform scan
+SCAN_RESULT=`nmap $K8S_NODE -sV -p $PORT_LIST 2>/dev/null | grep \tcp`
+
+# Concatenate scan result with service name
+RESULTS=`paste <(printf %s "$SVCS") <(printf %s "$SCAN_RESULT") | column -t`
+
+# Find all non-SSL ports
+HTTP_PORTS=`grep -v ssl <<< "$RESULTS" | grep open | tee "$FILTERED_PORTS_LIST"`
+
+# Filter out whitelisted endpoints
+while IFS= read -r line; do
+ # for each line we test if it is in the white list with a regular expression
+ while IFS= read -r wl_line; do
+ wl_name=$(echo $wl_line | awk {'print $1'})
+ wl_port=$(echo $wl_line | awk {'print $2'})
+ if grep -e $wl_name.*$wl_port <<< "$line"; then
+ # Found in white list, exclude it
+ sed -i "/^$wl_name.*$wl_port/d" $FILTERED_PORTS_LIST
+ fi
+ done < $XF_RAW_FILE_PATH
+done < $FILTERED_PORTS_LIST
+
+# Count them
+N_FILTERED_PORTS_LIST=$(cat $FILTERED_PORTS_LIST | wc -l)
+echo "------------------------------------"
+echo "Nb error pod(s): $N_FILTERED_PORTS_LIST"
+cat $FILTERED_PORTS_LIST
+exit $N_FILTERED_PORTS_LIST
diff --git a/test/security/check_versions/.gitignore b/test/security/check_versions/.gitignore
new file mode 100644
index 000000000..2b574f8c0
--- /dev/null
+++ b/test/security/check_versions/.gitignore
@@ -0,0 +1,4 @@
+.pytest_cache/
+__pycache__/
+/temp/
+/.tox/
diff --git a/test/security/check_versions/README.md b/test/security/check_versions/README.md
new file mode 100644
index 000000000..399d10443
--- /dev/null
+++ b/test/security/check_versions/README.md
@@ -0,0 +1,92 @@
+# Kubernetes Binaries Versions Inspector
+
+**Kubernetes Binaries Versions Inspector** (`k8s_bin_versions_inspector`) is a
+python module for verifying versions of CPython and OpenJDK binaries installed
+in the kubernetes cluster containers.
+
+## Commands
+
+### Install dependencies
+
+To install dependencies for normal usage of script, run this command.
+
+```bash
+pip3 install -r requirements.txt
+```
+
+### Code formatting
+
+```bash
+black src tests
+```
+
+### Code static analysis
+
+```bash
+pylint -d C0330 src
+```
+
+### Automatic tests
+
+Running the automated tests requires a properly configured
+kubernetes cluster, available inside the virtual machine that
+contains the development environment.
+
+```bash
+PYTHONPATH=src pytest -vv -s tests
+```
+
+### Removing caches
+
+```bash
+find -name __pycache__ -exec rm -Rf {} +
+find -name .pytest_cache -exec rm -Rf {} +
+```
+
+## Acceptable format
+
+Example of the acceptable file format:
+
+```yaml
+python:
+ - 3.6.9
+ - 3.7.3
+java:
+ - 11.0.7
+```
+
+## Paths research
+
+Commands to research for the paths
+of the software binaries in multiple docker images:
+
+```bash
+docker run --entrypoint /bin/sh python:buster -c "which python"
+docker run --entrypoint /bin/sh python:alpine -c "which python"
+docker run --entrypoint /bin/sh python:slim -c "which python"
+docker run --entrypoint /bin/sh python:2-buster -c "which python"
+docker run --entrypoint /bin/sh python:2-alpine -c "which python"
+docker run --entrypoint /bin/sh python:2-slim -c "which python"
+docker run --entrypoint /bin/sh ubuntu:bionic -c "apt-get update && apt-get install -y python && which python"
+docker run --entrypoint /bin/sh ubuntu:bionic -c "apt-get update && apt-get install -y python3 && which python3"
+docker run --entrypoint /bin/sh openjdk -c "type java"
+```
+
+## Todo
+
+List of features, that should be implemented:
+
+- Complete the license and copyright variables.
+- Find a safe way to search the container files from the Kubernetes API.
+- Parallelization of executing binaries on a single container.
+- Parallelization of versions determination in multiple containers.
+- Support for determining old versions of OpenJDK (`-version` attribute).
+- Deleting a namespace from the cluster in the development environment (for example,
+  during cluster reset) causes hanging in the namespace terminating state.
+- Find a nicer way to extract the exit code from the execution result.
+
+## Links
+
+- <https://github.com/kubernetes-client/python>
+- <https://github.com/kubernetes-client/python/issues/812>
+- <https://success.docker.com/article/kubernetes-namespace-stuck-in-terminating>
diff --git a/test/security/check_versions/pyproject.toml b/test/security/check_versions/pyproject.toml
new file mode 100644
index 000000000..2c235c7b8
--- /dev/null
+++ b/test/security/check_versions/pyproject.toml
@@ -0,0 +1,24 @@
+[project]
+name = "check_versions"
+readme = "README.md"
+version = "1.0"
+requires-python = ">=3.7"
+dependencies = [
+ "kubernetes",
+ "jinja2",
+ "xtesting",
+ "tabulate",
+ "cerberus",
+ "packaging",
+ "wget"
+]
+
+[build-system]
+requires = ["setuptools"]
+build-backend = "setuptools.build_meta"
+
+[project.entry-points."xtesting.testcase"]
+versions = "versions.k8s_bin_versions_inspector_test_case:Inspector"
+
+[tool.setuptools.package-data]
+versions = ["templates/*.j2"]
diff --git a/test/security/check_versions/requirements.txt b/test/security/check_versions/requirements.txt
new file mode 100644
index 000000000..8e46a3acf
--- /dev/null
+++ b/test/security/check_versions/requirements.txt
@@ -0,0 +1,7 @@
+kubernetes
+jinja2
+xtesting
+tabulate
+cerberus
+packaging
+wget
diff --git a/test/security/check_versions/tests/conftest.py b/test/security/check_versions/tests/conftest.py
new file mode 100644
index 000000000..7c3e2e171
--- /dev/null
+++ b/test/security/check_versions/tests/conftest.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python3
+
+import pytest
+
+
+def pod_name_trimmer_fun(pod_name):
+ return "-".join(pod_name.split("-")[:-2])
+
+
+@pytest.fixture
+def pod_name_trimmer():
+ return pod_name_trimmer_fun
diff --git a/test/security/check_versions/tests/test_gather_containers_informations.py b/test/security/check_versions/tests/test_gather_containers_informations.py
new file mode 100644
index 000000000..63401721e
--- /dev/null
+++ b/test/security/check_versions/tests/test_gather_containers_informations.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python3
+
+import k8s_bin_versions_inspector as kbvi
+import kubernetes
+
+
+def test_gather_containers_informations(pod_name_trimmer):
+ kubernetes.config.load_kube_config()
+ api = kubernetes.client.CoreV1Api()
+ containers = kbvi.gather_containers_informations(api, "", False)
+ data = [
+ (
+ c.namespace,
+ pod_name_trimmer(c.pod),
+ c.container,
+ c.versions.python,
+ c.versions.java,
+ )
+ for c in containers
+ ]
+ sorted_data = sorted(data)
+ assert sorted_data == [
+ ("default", "kbvi-test-java-keycloak", "keycloak", [], ["11.0.8"]),
+ ("default", "kbvi-test-java-keycloak-old", "keycloak-old", [], ["11.0.5"]),
+ (
+ "default",
+ "kbvi-test-java-keycloak-very-old",
+ "keycloak-very-old",
+ ["2.7.5"],
+ [],
+ ), # TODO
+ ("default", "kbvi-test-python-jupyter", "jupyter", ["3.8.4"], []),
+ ("default", "kbvi-test-python-jupyter-old", "jupyter-old", ["3.6.6"], []),
+ ("default", "kbvi-test-python-stderr-filebeat", "filebeat", ["2.7.5"], []),
+ ("default", "kbvi-test-terminated", "python", [], []), # TODO
+ ("ingress-nginx", "kbvi-test-ingress-nginx", "echo-server", [], []),
+ ("kube-system", "kbvi-test-kube-system", "echo-server", [], []),
+ ]
diff --git a/test/security/check_versions/tests/test_list_all_containers.py b/test/security/check_versions/tests/test_list_all_containers.py
new file mode 100644
index 000000000..4178077c3
--- /dev/null
+++ b/test/security/check_versions/tests/test_list_all_containers.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python3
+
+import k8s_bin_versions_inspector as kbvi
+import kubernetes
+
+
+def exec_list_all_containers(pod_name_trimmer, field_selector):
+ kubernetes.config.load_kube_config()
+ api = kubernetes.client.CoreV1Api()
+ containers = kbvi.list_all_containers(api, field_selector)
+ extracted = ((c.namespace, c.pod, c.container) for c in containers)
+ trimmed = ((n, pod_name_trimmer(p), c) for n, p, c in extracted)
+ result = sorted(trimmed)
+ return result
+
+
+def test_list_all_containers(pod_name_trimmer):
+ result = exec_list_all_containers(pod_name_trimmer, "")
+ assert result == [
+ ("default", "kbvi-test-java-keycloak", "keycloak"),
+ ("default", "kbvi-test-java-keycloak-old", "keycloak-old"),
+ ("default", "kbvi-test-java-keycloak-very-old", "keycloak-very-old"),
+ ("default", "kbvi-test-python-jupyter", "jupyter"),
+ ("default", "kbvi-test-python-jupyter-old", "jupyter-old"),
+ ("default", "kbvi-test-python-stderr-filebeat", "filebeat"),
+ ("default", "kbvi-test-terminated", "python"),
+ ("ingress-nginx", "kbvi-test-ingress-nginx", "echo-server"),
+ ("kube-system", "kbvi-test-kube-system", "echo-server"),
+ ]
+
+
+def test_list_all_containers_not_default(pod_name_trimmer):
+ field_selector = "metadata.namespace!=default"
+ result = exec_list_all_containers(pod_name_trimmer, field_selector)
+ assert result == [
+ ("ingress-nginx", "kbvi-test-ingress-nginx", "echo-server"),
+ ("kube-system", "kbvi-test-kube-system", "echo-server"),
+ ]
+
+
+def test_list_all_containers_conjunction(pod_name_trimmer):
+ field_selector = "metadata.namespace!=kube-system,metadata.namespace!=ingress-nginx"
+ result = exec_list_all_containers(pod_name_trimmer, field_selector)
+ assert result == [
+ ("default", "kbvi-test-java-keycloak", "keycloak"),
+ ("default", "kbvi-test-java-keycloak-old", "keycloak-old"),
+ ("default", "kbvi-test-java-keycloak-very-old", "keycloak-very-old"),
+ ("default", "kbvi-test-python-jupyter", "jupyter"),
+ ("default", "kbvi-test-python-jupyter-old", "jupyter-old"),
+ ("default", "kbvi-test-python-stderr-filebeat", "filebeat"),
+ ("default", "kbvi-test-terminated", "python"),
+ ]
diff --git a/test/security/check_versions/tests/test_main.py b/test/security/check_versions/tests/test_main.py
new file mode 100644
index 000000000..37ad45ee3
--- /dev/null
+++ b/test/security/check_versions/tests/test_main.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python3
+
+import k8s_bin_versions_inspector as kbvi
+import json
+import tempfile
+import yaml
+
+
+def exec_main(pod_name_trimmer, acceptable_data):
+ with tempfile.NamedTemporaryFile() as output_temp, tempfile.NamedTemporaryFile() as acceptable_temp:
+ with open(acceptable_temp.name, "w") as stream:
+ yaml.safe_dump(acceptable_data, stream)
+
+ result = kbvi.main(
+ [
+ "--quiet",
+ "--output-file",
+ output_temp.name,
+ "--output-format",
+ "json",
+ "--acceptable",
+ acceptable_temp.name,
+ ]
+ )
+
+ with open(output_temp.name, "r") as stream:
+ output_data = json.load(stream)
+ output_extracted = (
+ (
+ item["namespace"],
+ pod_name_trimmer(item["pod"]),
+ item["container"],
+ item["versions"]["python"],
+ item["versions"]["java"],
+ )
+ for item in output_data
+ )
+ output_sorted = sorted(output_extracted)
+
+ assert output_sorted == [
+ ("default", "kbvi-test-java-keycloak", "keycloak", [], ["11.0.8"]),
+ ("default", "kbvi-test-java-keycloak-old", "keycloak-old", [], ["11.0.5"]),
+ (
+ "default",
+ "kbvi-test-java-keycloak-very-old",
+ "keycloak-very-old",
+ ["2.7.5"],
+ [],
+ ),
+ ("default", "kbvi-test-python-jupyter", "jupyter", ["3.8.4"], []),
+ ("default", "kbvi-test-python-jupyter-old", "jupyter-old", ["3.6.6"], []),
+ ("default", "kbvi-test-python-stderr-filebeat", "filebeat", ["2.7.5"], []),
+ ("default", "kbvi-test-terminated", "python", [], []),
+ ("ingress-nginx", "kbvi-test-ingress-nginx", "echo-server", [], []),
+ ("kube-system", "kbvi-test-kube-system", "echo-server", [], []),
+ ]
+
+ return result
+
+
+def test_main(pod_name_trimmer):
+ acceptable_data = {
+ "python": ["2.7.5", "3.6.6", "3.8.4"],
+ "java": ["11.0.5", "11.0.8"],
+ }
+
+ result = exec_main(pod_name_trimmer, acceptable_data)
+
+ assert result == 0
+
+
+def test_main_neg(pod_name_trimmer):
+ acceptable_data = {
+ "python": ["3.6.6", "3.8.4"],
+ "java": ["11.0.5", "11.0.8"],
+ }
+
+ result = exec_main(pod_name_trimmer, acceptable_data)
+
+ assert result == 1
diff --git a/test/security/check_versions/tests/test_sync_post_namespaced_pod_exec.py b/test/security/check_versions/tests/test_sync_post_namespaced_pod_exec.py
new file mode 100644
index 000000000..50620d3a7
--- /dev/null
+++ b/test/security/check_versions/tests/test_sync_post_namespaced_pod_exec.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python3
+
+import k8s_bin_versions_inspector as kbvi
+import kubernetes
+
+
+def exec_sync_post_namespaced_pod_exec(pod, command):
+ kubernetes.config.load_kube_config()
+ api = kubernetes.client.CoreV1Api()
+ containers = kbvi.list_all_containers(api, "")
+ container = next(c for c in containers if c.pod.startswith(pod))
+ result = kbvi.sync_post_namespaced_pod_exec(api, container, command)
+ return result
+
+
+def test_sync_post_namespaced_pod_exec():
+    # Happy path: `id` succeeds, stdout captured, error channel reports Success.
+    pod = "kbvi-test-python-jupyter"
+    result = exec_sync_post_namespaced_pod_exec(pod, "id")
+    assert result == {
+        "stdout": "uid=1000(jovyan) gid=100(users) groups=100(users)\n",
+        "stderr": "",
+        "error": {"status": "Success", "metadata": {}},
+        "code": 0,
+    }
+
+
+def test_sync_post_namespaced_pod_exec_not_running():
+    # Exec against a terminated pod: empty output, code -1 (sentinel for
+    # "container not running" in sync_post_namespaced_pod_exec).
+    pod = "kbvi-test-terminated"
+    result = exec_sync_post_namespaced_pod_exec(pod, "id")
+    assert result == {"stdout": "", "stderr": "", "error": {}, "code": -1}
+
+
+def test_sync_post_namespaced_pod_exec_not_found():
+    # Nonexistent binary: API reports Failure/InternalError, code -2 sentinel.
+    pod = "kbvi-test-python-jupyter"
+    command = "/command/not/found"
+    result = exec_sync_post_namespaced_pod_exec(pod, command)
+    assert result["stdout"] == ""
+    assert result["stderr"] == ""
+    assert result["error"]["status"] == "Failure"
+    assert result["error"]["reason"] == "InternalError"
+    assert result["code"] == -2
+
+
+def test_sync_post_namespaced_pod_exec_exit_code():
+    # Binary exists but exits non-zero: the ExitCode cause ("2") is propagated
+    # as the integer `code` field.
+    pod = "kbvi-test-python-jupyter"
+    command = ["python3", "--invalid-attribute"]
+    result = exec_sync_post_namespaced_pod_exec(pod, command)
+    assert result == {
+        "stdout": "",
+        "stderr": "unknown option --invalid-attribute\n"
+        "usage: python3 [option] ... [-c cmd | -m mod | file | -] [arg] ...\n"
+        "Try `python -h' for more information.\n",
+        "error": {
+            "status": "Failure",
+            "reason": "NonZeroExitCode",
+            "message": "command terminated with non-zero exit code: error "
+            "executing command [python3 --invalid-attribute], exit code 2",
+            "details": {"causes": [{"message": "2", "reason": "ExitCode"}]},
+            "metadata": {},
+        },
+        "code": 2,
+    }
+
+
+def test_sync_post_namespaced_pod_exec_stderr():
+    # Python 2 prints its version banner to stderr; exit code is still 0.
+    pod = "kbvi-test-python-stderr-filebeat"
+    command = ["python", "--version"]
+    result = exec_sync_post_namespaced_pod_exec(pod, command)
+    assert result == {
+        "stdout": "",
+        "stderr": "Python 2.7.5\n",
+        "error": {"status": "Success", "metadata": {}},
+        "code": 0,
+    }
diff --git a/test/security/check_versions/tests/test_verify_versions_acceptability.py b/test/security/check_versions/tests/test_verify_versions_acceptability.py
new file mode 100644
index 000000000..1cb931679
--- /dev/null
+++ b/test/security/check_versions/tests/test_verify_versions_acceptability.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+
+import k8s_bin_versions_inspector as kbvi
+import yaml
+import tempfile
+import pathlib
+
+
+def exec_verify_versions_acceptability(containers):
+    # Helper: write an acceptable-versions YAML to a temp file and run the
+    # verification against it.
+    # NOTE(review): k8s_bin_versions_inspector looks up "python3"/"java11"
+    # keys in the acceptable file; confirm that these "python"/"java" keys
+    # are actually honoured by the module.
+    config = {
+        "python": ["1.1.1", "2.2.2"],
+        "java": ["3.3.3"],
+    }
+
+    with tempfile.NamedTemporaryFile() as temp:
+        with open(temp.name, "w") as stream:
+            yaml.safe_dump(config, stream)
+        acceptable = pathlib.Path(temp.name)
+        # quiet=True suppresses printing of the rejected-versions list.
+        result = kbvi.verify_versions_acceptability(containers, acceptable, True)
+
+    return result
+
+
+def test_verify_versions_acceptability():
+    # Containers with no versions, or with only acceptable versions -> 0.
+    containers = [
+        kbvi.ContainerInfo("a", "b", "c", None, kbvi.ContainerVersions([], [])),
+        kbvi.ContainerInfo(
+            "a", "b", "c", None, kbvi.ContainerVersions(["1.1.1"], ["3.3.3"])
+        ),
+    ]
+
+    result = exec_verify_versions_acceptability(containers)
+
+    assert result == 0
+
+
+def test_verify_versions_acceptability_neg_1():
+    # "3.3.3" is a java-acceptable version, not a python one -> rejected.
+    containers = [
+        kbvi.ContainerInfo("a", "b", "c", None, kbvi.ContainerVersions(["3.3.3"], []))
+    ]
+
+    result = exec_verify_versions_acceptability(containers)
+
+    assert result == 1
+
+
+def test_verify_versions_acceptability_neg_2():
+    # "1.1.1" is a python-acceptable version, not a java one -> rejected.
+    containers = [
+        kbvi.ContainerInfo("a", "b", "c", None, kbvi.ContainerVersions([], ["1.1.1"]))
+    ]
+
+    result = exec_verify_versions_acceptability(containers)
+
+    assert result == 1
diff --git a/test/security/check_versions/tox.ini b/test/security/check_versions/tox.ini
new file mode 100644
index 000000000..d2a007160
--- /dev/null
+++ b/test/security/check_versions/tox.ini
@@ -0,0 +1,19 @@
+; tox.ini for test/security/check_versions — read by tox (configparser dialect)
+[tox]
+envlist = black, pylint, pytest
+skipsdist = true
+
+[testenv]
+basepython = python3.8
+deps = -r{toxinidir}/requirements.txt
+
+[testenv:black]
+commands = black {toxinidir}/versions {toxinidir}/tests
+deps = black
+
+[testenv:pylint]
+commands = pylint -d C0330,W0511 {toxinidir}/versions
+deps = pylint
+
+[testenv:pytest]
+; tests do "import k8s_bin_versions_inspector", which lives in versions/,
+; not src/ — point PYTHONPATH at the actual package directory
+setenv = PYTHONPATH = {toxinidir}/versions
+commands = pytest -vv -s tests
diff --git a/test/security/check_versions/versions/__init__.py b/test/security/check_versions/versions/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/test/security/check_versions/versions/__init__.py
diff --git a/test/security/check_versions/versions/k8s_bin_versions_inspector.py b/test/security/check_versions/versions/k8s_bin_versions_inspector.py
new file mode 100644
index 000000000..bd3041d63
--- /dev/null
+++ b/test/security/check_versions/versions/k8s_bin_versions_inspector.py
@@ -0,0 +1,769 @@
+#!/usr/bin/env python3
+
+# COPYRIGHT NOTICE STARTS HERE
+#
+# Copyright 2020 Samsung Electronics Co., Ltd.
+# Copyright 2023 Deutsche Telekom AG
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# COPYRIGHT NOTICE ENDS HERE
+
+"""
+k8s_bin_versions_inspector is a module for verifying versions of CPython and
+OpenJDK binaries installed in the kubernetes cluster containers.
+"""
+
+__title__ = "k8s_bin_versions_inspector"
+__summary__ = (
+ "Module for verifying versions of CPython and OpenJDK binaries installed"
+ " in the kubernetes cluster containers."
+)
+__version__ = "0.1.0"
+__author__ = "kkkk.k@samsung.com"
+__license__ = "Apache-2.0"
+__copyright__ = "Copyright 2020 Samsung Electronics Co., Ltd."
+
+from typing import Iterable, List, Optional, Pattern, Union
+
+import argparse
+import dataclasses
+import itertools
+import json
+import logging
+import pathlib
+import pprint
+import re
+import string
+import sys
+from typing import Iterable, List, Optional, Pattern, Union
+import tabulate
+import yaml
+
+import kubernetes
+
+RECOMMENDED_VERSIONS_FILE = "/tmp/recommended_versions.yaml"
+WAIVER_LIST_FILE = "/tmp/versions_xfail.txt"
+
+# Logger
+logging.basicConfig()
+LOGGER = logging.getLogger("onap-versions-status-inspector")
+LOGGER.setLevel("INFO")
+
+
+def parse_argv(argv: Optional[List[str]] = None) -> argparse.Namespace:
+ """Function for parsing command line arguments.
+
+ Args:
+ argv: Unparsed list of command line arguments.
+
+ Returns:
+ Namespace with values from parsed arguments.
+ """
+
+ epilog = (
+ f"Author: {__author__}\n"
+ f"License: {__license__}\n"
+ f"Copyright: {__copyright__}\n"
+ )
+
+ parser = argparse.ArgumentParser(
+ formatter_class=argparse.RawTextHelpFormatter,
+ prog=__title__,
+ description=__summary__,
+ epilog=epilog,
+ add_help=False,
+ )
+
+ parser.add_argument("-c", "--config-file", help="Name of the kube-config file.")
+
+ parser.add_argument(
+ "-s",
+ "--field-selector",
+ default="",
+ help="Kubernetes field selector, to filter out containers objects.",
+ )
+
+ parser.add_argument(
+ "-o",
+ "--output-file",
+ type=pathlib.Path,
+ help="Path to file, where output will be saved.",
+ )
+
+ parser.add_argument(
+ "-f",
+ "--output-format",
+ choices=("tabulate", "pprint", "json"),
+ default="tabulate",
+ help="Format of the output file (tabulate, pprint, json).",
+ )
+
+ parser.add_argument(
+ "-i",
+ "--ignore-empty",
+ action="store_true",
+ help="Ignore containers without any versions.",
+ )
+
+ parser.add_argument(
+ "-a",
+ "--acceptable",
+ type=pathlib.Path,
+ help="Path to YAML file, with list of acceptable software versions.",
+ )
+
+ parser.add_argument(
+ "-n",
+ "--namespace",
+ help="Namespace to use to list pods."
+ "If empty pods are going to be listed from all namespaces",
+ )
+
+ parser.add_argument(
+ "--check-istio-sidecar",
+ action="store_true",
+ help="Add if you want to check istio sidecars also",
+ )
+
+ parser.add_argument(
+ "--istio-sidecar-name",
+ default="istio-proxy",
+ help="Name of istio sidecar to filter out",
+ )
+
+ parser.add_argument(
+ "-d",
+ "--debug",
+ action="store_true",
+ help="Enable debugging mode in the k8s API.",
+ )
+
+ parser.add_argument(
+ "-q",
+ "--quiet",
+ action="store_true",
+ help="Suppress printing text on standard output.",
+ )
+
+ parser.add_argument(
+ "-w",
+ "--waiver",
+ type=pathlib.Path,
+ help="Path of the waiver xfail file.",
+ )
+
+ parser.add_argument(
+ "-V",
+ "--version",
+ action="version",
+ version=f"{__title__} {__version__}",
+ help="Display version information and exit.",
+ )
+
+ parser.add_argument(
+ "-h", "--help", action="help", help="Display this help text and exit."
+ )
+
+ args = parser.parse_args(argv)
+
+ return args
+
+
+@dataclasses.dataclass
+class ContainerExtra:
+    "Data class to store extra information about a container."
+
+    # True when the container status reports the running state.
+    running: bool
+    # Image reference reported by the container status.
+    image: str
+    # Unique container id reported by the container status.
+    identifier: str
+
+
+@dataclasses.dataclass
+class ContainerVersions:
+    "Data class to store software versions found in a container."
+
+    # Sorted list of CPython version strings (may be empty).
+    python: list
+    # Sorted list of OpenJDK version strings (may be empty).
+    java: list
+
+
+@dataclasses.dataclass
+class ContainerInfo:
+    "Data class to store several pieces of information about a container."
+
+    namespace: str
+    pod: str
+    container: str
+    extra: ContainerExtra
+    # Filled in later by gather_containers_informations; None until then.
+    versions: Optional[ContainerVersions] = None
+
+
+def is_container_running(
+    status: kubernetes.client.models.v1_container_status.V1ContainerStatus,
+) -> bool:
+    """Function to determine if k8s cluster container is in running state.
+
+    Args:
+        status: Single item from container_statuses list, that represents container status.
+
+    Returns:
+        If container is in running state.
+    """
+
+    # terminated/waiting are checked explicitly before running, so a status
+    # object with several state fields set is still reported as not running.
+    if status.state.terminated:
+        return False
+
+    if status.state.waiting:
+        return False
+
+    if not status.state.running:
+        return False
+
+    return True
+
+
+def list_all_containers(
+ api: kubernetes.client.api.core_v1_api.CoreV1Api,
+ field_selector: str,
+ namespace: Union[None, str],
+ check_istio_sidecars: bool,
+ istio_sidecar_name: str,
+) -> Iterable[ContainerInfo]:
+ """Get list of all containers names.
+
+ Args:
+ api: Client of the k8s cluster API.
+ field_selector: Kubernetes field selector, to filter out containers objects.
+ namespace: Namespace to limit reading pods from
+ check_istio_sidecars: Flag to enable/disable istio sidecars check.
+ Default to False
+ istio_sidecar_name: If checking istio sidecars is disabled the name to filter
+ containers out
+
+ Yields:
+ Objects for all containers in k8s cluster.
+ """
+
+ if namespace:
+ pods = api.list_namespaced_pod(namespace, field_selector=field_selector).items
+ else:
+ pods = api.list_pod_for_all_namespaces(field_selector=field_selector).items
+
+ # Filtering to avoid testing integration or replica pods
+ pods = [
+ pod
+ for pod in pods
+ if "replica" not in pod.metadata.name and "integration" not in pod.metadata.name
+ ]
+
+ containers_statuses = (
+ (pod.metadata.namespace, pod.metadata.name, pod.status.container_statuses)
+ for pod in pods
+ if pod.status.container_statuses
+ )
+
+ containers_status = (
+ itertools.product([namespace], [pod], statuses)
+ for namespace, pod, statuses in containers_statuses
+ )
+
+ containers_chained = itertools.chain.from_iterable(containers_status)
+
+ containers_fields = (
+ (
+ namespace,
+ pod,
+ status.name,
+ is_container_running(status),
+ status.image,
+ status.container_id,
+ )
+ for namespace, pod, status in containers_chained
+ )
+
+ container_items = (
+ ContainerInfo(
+ namespace, pod, container, ContainerExtra(running, image, identifier)
+ )
+ for namespace, pod, container, running, image, identifier in containers_fields
+ )
+
+ if not check_istio_sidecars:
+ container_items = filter(
+ lambda container: container.container != istio_sidecar_name, container_items
+ )
+
+ yield from container_items
+
+
+def sync_post_namespaced_pod_exec(
+ api: kubernetes.client.api.core_v1_api.CoreV1Api,
+ container: ContainerInfo,
+ command: Union[List[str], str],
+) -> dict:
+ """Function to execute command on selected container.
+
+ Args:
+ api: Client of the k8s cluster API.
+ container: Object, that represents container in k8s cluster.
+ command: Command to execute as a list of arguments or single string.
+
+ Returns:
+ Dictionary that store informations about command execution.
+ * stdout - Standard output captured from execution.
+ * stderr - Standard error captured from execution.
+ * error - Error object that was received from kubernetes API.
+ * code - Exit code returned by executed process
+ or -1 if container is not running
+ or -2 if other failure occurred.
+ """
+
+ stdout = ""
+ stderr = ""
+ error = {}
+ code = -1
+ LOGGER.debug("sync_post_namespaced_pod_exec container= %s", container.pod)
+ try:
+ client_stream = kubernetes.stream.stream(
+ api.connect_post_namespaced_pod_exec,
+ namespace=container.namespace,
+ name=container.pod,
+ container=container.container,
+ command=command,
+ stderr=True,
+ stdin=False,
+ stdout=True,
+ tty=False,
+ _request_timeout=1.0,
+ _preload_content=False,
+ )
+ client_stream.run_forever(timeout=5)
+ stdout = client_stream.read_stdout()
+ stderr = client_stream.read_stderr()
+ error = yaml.safe_load(
+ client_stream.read_channel(kubernetes.stream.ws_client.ERROR_CHANNEL)
+ )
+
+ code = (
+ 0
+ if error["status"] == "Success"
+ else -2
+ if error["reason"] != "NonZeroExitCode"
+ else int(error["details"]["causes"][0]["message"])
+ )
+ except (
+ kubernetes.client.rest.ApiException,
+ kubernetes.client.exceptions.ApiException,
+ ):
+ LOGGER.debug("Discard unexpected k8s client Error..")
+ except TypeError:
+ LOGGER.debug("Type Error, no error status")
+ pass
+
+ return {
+ "stdout": stdout,
+ "stderr": stderr,
+ "error": error,
+ "code": code,
+ }
+
+
+def generate_python_binaries() -> List[str]:
+ """Function to generate list of names and paths for CPython binaries.
+
+ Returns:
+ List of names and paths, to CPython binaries.
+ """
+
+ dirnames = ["", "/usr/bin/", "/usr/local/bin/"]
+
+ majors_minors = [
+ f"{major}.{minor}" for major, minor in itertools.product("23", string.digits)
+ ]
+
+ suffixes = ["", "2", "3"] + majors_minors
+
+ basenames = [f"python{suffix}" for suffix in suffixes]
+
+ binaries = [f"{dir}{base}" for dir, base in itertools.product(dirnames, basenames)]
+
+ return binaries
+
+
+def generate_java_binaries() -> List[str]:
+    """Function to generate list of names and paths for OpenJDK binaries.
+
+    Returns:
+        List of names and paths, to OpenJDK binaries.
+    """
+
+    # Fixed probe list of common java locations; missing paths are harmless
+    # because exec failures are ignored by the caller.
+    binaries = [
+        "java",
+        "/usr/bin/java",
+        "/usr/local/bin/java",
+        "/etc/alternatives/java",
+        "/usr/java/openjdk-14/bin/java",
+    ]
+
+    return binaries
+
+
+def determine_versions_abstraction(
+    api: kubernetes.client.api.core_v1_api.CoreV1Api,
+    container: ContainerInfo,
+    binaries: List[str],
+    extractor: Pattern,
+) -> List[str]:
+    """Function to determine list of software versions, that are installed in
+    given container.
+
+    Args:
+        api: Client of the k8s cluster API.
+        container: Object, that represents container in k8s cluster.
+        binaries: List of names and paths to the abstract software binaries.
+        extractor: Pattern to extract the version string from the output of the binary execution.
+
+    Returns:
+        List of installed software versions.
+    """
+
+    # Each binary is probed with both "--version" and the older "-version"
+    # single-dash form (used by java).
+    commands = ([binary, "--version"] for binary in binaries)
+    commands_old = ([binary, "-version"] for binary in binaries)
+    commands_all = itertools.chain(commands, commands_old)
+
+    # TODO: This generator pipeline should be parallelized
+    results = (
+        sync_post_namespaced_pod_exec(api, container, command)
+        for command in commands_all
+    )
+
+    # stdout and stderr are concatenated because some interpreters (e.g.
+    # Python 2) print their version banner to stderr.
+    successes = (
+        f"{result['stdout']}{result['stderr']}"
+        for result in results
+        if result["code"] == 0
+    )
+
+    extractions = (extractor.search(success) for success in successes)
+
+    # Deduplicate and sort the captured version strings.
+    versions = sorted(
+        set(extraction.group(1) for extraction in extractions if extraction)
+    )
+
+    return versions
+
+
+def determine_versions_of_python(
+    api: kubernetes.client.api.core_v1_api.CoreV1Api, container: ContainerInfo
+) -> List[str]:
+    """Function to determine list of CPython versions,
+    that are installed in given container.
+
+    Args:
+        api: Client of the k8s cluster API.
+        container: Object, that represents container in k8s cluster.
+
+    Returns:
+        List of installed CPython versions.
+    """
+
+    # Captures the dotted version from banners such as "Python 3.8.4".
+    extractor = re.compile("Python ([0-9.]+)")
+
+    binaries = generate_python_binaries()
+
+    versions = determine_versions_abstraction(api, container, binaries, extractor)
+
+    return versions
+
+
+def determine_versions_of_java(
+    api: kubernetes.client.api.core_v1_api.CoreV1Api, container: ContainerInfo
+) -> List[str]:
+    """Function to determine list of OpenJDK versions,
+    that are installed in given container.
+
+    Args:
+        api: Client of the k8s cluster API.
+        container: Object, that represents container in k8s cluster.
+
+    Returns:
+        List of installed OpenJDK versions.
+    """
+
+    # Matches banners such as 'openjdk version "11.0.8"' or "openjdk 11.0.8".
+    extractor = re.compile('openjdk [version" ]*([0-9._]+)')
+
+    binaries = generate_java_binaries()
+
+    versions = determine_versions_abstraction(api, container, binaries, extractor)
+
+    return versions
+
+
+def gather_containers_informations(
+ api: kubernetes.client.api.core_v1_api.CoreV1Api,
+ field_selector: str,
+ ignore_empty: bool,
+ namespace: Union[None, str],
+ check_istio_sidecars: bool,
+ istio_sidecar_name: str,
+) -> List[ContainerInfo]:
+ """Get list of all containers names.
+
+ Args:
+ api: Client of the k8s cluster API.
+ field_selector: Kubernetes field selector, to filter out containers objects.
+ ignore_empty: Determines, if containers with empty versions should be ignored.
+ namespace: Namespace to limit reading pods from
+ check_istio_sidecars: Flag to enable/disable istio sidecars check.
+ Default to False
+ istio_sidecar_name: If checking istio sidecars is disabled the name to filter
+ containers out
+
+ Returns:
+ List of initialized objects for containers in k8s cluster.
+ """
+
+ containers = list(
+ list_all_containers(
+ api, field_selector, namespace, check_istio_sidecars, istio_sidecar_name
+ )
+ )
+ LOGGER.info("List of containers: %s", containers)
+
+ # TODO: This loop should be parallelized
+ for container in containers:
+ LOGGER.info("Container -----------------> %s", container)
+ python_versions = determine_versions_of_python(api, container)
+ java_versions = determine_versions_of_java(api, container)
+ container.versions = ContainerVersions(python_versions, java_versions)
+ LOGGER.info("Container versions: %s", container.versions)
+
+ if ignore_empty:
+ containers = [c for c in containers if c.versions.python or c.versions.java]
+
+ return containers
+
+
+def generate_output_tabulate(containers: Iterable[ContainerInfo]) -> str:
+    """Function for generate output string in tabulate format.
+
+    Args:
+        containers: List of items, that represents containers in k8s cluster.
+
+    Returns:
+        Output string formatted by tabulate module.
+    """
+
+    headers = [
+        "Namespace",
+        "Pod",
+        "Container",
+        "Running",
+        "CPython",
+        "OpenJDK",
+    ]
+
+    # Multiple detected versions are rendered space-separated in one cell.
+    rows = [
+        [
+            container.namespace,
+            container.pod,
+            container.container,
+            container.extra.running,
+            " ".join(container.versions.python),
+            " ".join(container.versions.java),
+        ]
+        for container in containers
+    ]
+
+    output = tabulate.tabulate(rows, headers=headers)
+
+    return output
+
+
+def generate_output_pprint(containers: Iterable[ContainerInfo]) -> str:
+    """Function for generate output string in pprint format.
+
+    Args:
+        containers: List of items, that represents containers in k8s cluster.
+
+    Returns:
+        Output string formatted by pprint module.
+    """
+
+    # pformat of the dataclass list (relies on dataclass __repr__).
+    output = pprint.pformat(containers)
+
+    return output
+
+
+def generate_output_json(containers: Iterable[ContainerInfo]) -> str:
+    """Function for generate output string in JSON format.
+
+    Args:
+        containers: List of items, that represents containers in k8s cluster.
+
+    Returns:
+        Output string formatted by json module.
+    """
+
+    # Flatten dataclasses into plain dicts; this shape is what the unit tests
+    # and reporting.py consume from the output file.
+    data = [
+        {
+            "namespace": container.namespace,
+            "pod": container.pod,
+            "container": container.container,
+            "extra": {
+                "running": container.extra.running,
+                "image": container.extra.image,
+                "identifier": container.extra.identifier,
+            },
+            "versions": {
+                "python": container.versions.python,
+                "java": container.versions.java,
+            },
+        }
+        for container in containers
+    ]
+
+    output = json.dumps(data, indent=4)
+
+    return output
+
+
+def generate_and_handle_output(
+    containers: List[ContainerInfo],
+    output_format: str,
+    output_file: pathlib.Path,
+    quiet: bool,
+) -> None:
+    """Generate and handle the output of the containers software versions.
+
+    Args:
+        containers: List of items, that represents containers in k8s cluster.
+        output_format: String that will determine output format (tabulate, pprint, json).
+        output_file: Path to file, where output will be saved.
+        quiet: Determines if output should be printed, to stdout.
+    """
+
+    # Dispatch table: output_format is one of the argparse choices, so a
+    # KeyError here would indicate a caller bug.
+    output_generators = {
+        "tabulate": generate_output_tabulate,
+        "pprint": generate_output_pprint,
+        "json": generate_output_json,
+    }
+    LOGGER.debug("output_generators: %s", output_generators)
+
+    output = output_generators[output_format](containers)
+
+    if output_file:
+        try:
+            output_file.write_text(output)
+        except AttributeError:
+            # output_file may not be a pathlib.Path (e.g. a plain string).
+            LOGGER.error("Not possible to write_text")
+
+    if not quiet:
+        # NOTE(review): emitted via LOGGER.info rather than print(), so
+        # visibility depends on logger configuration — confirm intended.
+        LOGGER.info(output)
+
+
+def verify_versions_acceptability(
+ containers: List[ContainerInfo], acceptable: pathlib.Path, quiet: bool
+) -> bool:
+ """Function for verification of software versions installed in containers.
+
+ Args:
+ containers: List of items, that represents containers in k8s cluster.
+ acceptable: Path to the YAML file, with the software verification parameters.
+ quiet: Determines if output should be printed, to stdout.
+
+ Returns:
+ 0 if the verification was successful or 1 otherwise.
+ """
+
+ if not acceptable:
+ return 0
+
+ try:
+ acceptable.is_file()
+ except AttributeError:
+ LOGGER.error("No acceptable file found")
+ return -1
+
+ if not acceptable.is_file():
+ raise FileNotFoundError(
+ "File with configuration for acceptable does not exists!"
+ )
+
+ with open(acceptable) as stream:
+ data = yaml.safe_load(stream)
+
+ python_acceptable = data.get("python3", [])
+ java_acceptable = data.get("java11", [])
+
+ python_not_acceptable = [
+ (container, "python3", version)
+ for container in containers
+ for version in container.versions.python
+ if version not in python_acceptable
+ ]
+
+ java_not_acceptable = [
+ (container, "java11", version)
+ for container in containers
+ for version in container.versions.java
+ if version not in java_acceptable
+ ]
+
+ if not python_not_acceptable and not java_not_acceptable:
+ return 0
+
+ if quiet:
+ return 1
+
+ LOGGER.error("List of not acceptable versions")
+ pprint.pprint(python_not_acceptable)
+ pprint.pprint(java_not_acceptable)
+
+ return 1
+
+
+def main(argv: Optional[List[str]] = None) -> int:
+    """Main entrypoint of the module for verifying versions of CPython and
+    OpenJDK installed in k8s cluster containers.
+
+    Args:
+        argv: List of command line arguments.
+
+    Returns:
+        Process exit code from verify_versions_acceptability (0 on success).
+    """
+
+    args = parse_argv(argv)
+
+    kubernetes.config.load_kube_config(args.config_file)
+
+    api = kubernetes.client.CoreV1Api()
+    api.api_client.configuration.debug = args.debug
+
+    containers = gather_containers_informations(
+        api,
+        args.field_selector,
+        args.ignore_empty,
+        args.namespace,
+        args.check_istio_sidecar,
+        args.istio_sidecar_name,
+    )
+
+    generate_and_handle_output(
+        containers, args.output_format, args.output_file, args.quiet
+    )
+
+    # NOTE(review): args.waiver is parsed by parse_argv but never used here —
+    # confirm whether waiver handling was intended.
+    code = verify_versions_acceptability(containers, args.acceptable, args.quiet)
+
+    return code
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/test/security/check_versions/versions/k8s_bin_versions_inspector_test_case.py b/test/security/check_versions/versions/k8s_bin_versions_inspector_test_case.py
new file mode 100644
index 000000000..30e46cad5
--- /dev/null
+++ b/test/security/check_versions/versions/k8s_bin_versions_inspector_test_case.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+
+# COPYRIGHT NOTICE STARTS HERE
+#
+# Copyright 2020 Samsung Electronics Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# COPYRIGHT NOTICE ENDS HERE
+
+import logging
+import pathlib
+import time
+import os
+import wget
+from kubernetes import client, config
+from xtesting.core import testcase # pylint: disable=import-error
+
+import versions.reporting as Reporting
+from versions.k8s_bin_versions_inspector import (
+ gather_containers_informations,
+ generate_and_handle_output,
+ verify_versions_acceptability,
+)
+
+RECOMMENDED_VERSIONS_FILE = "/tmp/recommended_versions.yaml"
+WAIVER_LIST_FILE = "/tmp/versions_xfail.txt"
+
+# Logger
+logging.basicConfig()
+LOGGER = logging.getLogger("onap-versions-status-inspector")
+LOGGER.setLevel("INFO")
+
+
+class Inspector(testcase.TestCase):
+    """Inspector class: xtesting wrapper around k8s_bin_versions_inspector."""
+
+    def __init__(self, **kwargs):
+        """Init the testcase."""
+        if "case_name" not in kwargs:
+            kwargs["case_name"] = "check_versions"
+        super().__init__(**kwargs)
+
+        version = os.getenv("ONAP_VERSION", "master")
+        base_url = "https://git.onap.org/integration/seccom/plain"
+
+        self.namespace = "onap"
+        # if no Recommended file found, download it
+        if pathlib.Path(RECOMMENDED_VERSIONS_FILE).is_file():
+            self.acceptable = pathlib.Path(RECOMMENDED_VERSIONS_FILE)
+        else:
+            # NOTE(review): wget.download returns a plain str, not a Path —
+            # verify_versions_acceptability only tolerates that through its
+            # AttributeError guard (returns -1). Confirm intended.
+            self.acceptable = wget.download(
+                base_url + "/recommended_versions.yaml?h=" + version,
+                out=RECOMMENDED_VERSIONS_FILE,
+            )
+        self.output_file = "/tmp/versions.json"
+        # if no waiver file found, download it
+        if pathlib.Path(WAIVER_LIST_FILE).is_file():
+            self.waiver = pathlib.Path(WAIVER_LIST_FILE)
+        else:
+            self.waiver = wget.download(
+                base_url + "/waivers/versions/versions_xfail.txt?h=" + version,
+                out=WAIVER_LIST_FILE,
+            )
+        self.result = 0
+        self.start_time = None
+        self.stop_time = None
+
+    def run(self):
+        """Execute the version Inspector."""
+        self.start_time = time.time()
+        config.load_kube_config()
+        api = client.CoreV1Api()
+
+        # NOTE(review): hard-coded namespace selector instead of using
+        # self.namespace — confirm intended.
+        field_selector = "metadata.namespace==onap"
+
+        containers = gather_containers_informations(api, field_selector, True, None, False, "istio-proxy")
+        LOGGER.info("gather_containers_informations")
+        LOGGER.info(containers)
+        LOGGER.info("---------------------------------")
+
+        generate_and_handle_output(
+            containers, "json", pathlib.Path(self.output_file), True
+        )
+        LOGGER.info("generate_and_handle_output in %s", self.output_file)
+        LOGGER.info("---------------------------------")
+
+        code = verify_versions_acceptability(containers, self.acceptable, True)
+        LOGGER.info("verify_versions_acceptability")
+        LOGGER.info(code)
+        LOGGER.info("---------------------------------")
+
+        # Generate reporting
+        test = Reporting.OnapVersionsReporting(result_file=self.output_file)
+        LOGGER.info("Prepare reporting")
+        self.result = test.generate_reporting(self.output_file)
+        LOGGER.info("Reporting generated")
+
+        self.stop_time = time.time()
+        # generate_reporting presumably returns a success percentage; 90 is
+        # the pass threshold — TODO confirm against reporting.py.
+        if self.result >= 90:
+            return testcase.TestCase.EX_OK
+        return testcase.TestCase.EX_TESTCASE_FAILED
+
+    def set_namespace(self, namespace):
+        """Set namespace."""
+        self.namespace = namespace
diff --git a/test/security/check_versions/versions/reporting.py b/test/security/check_versions/versions/reporting.py
new file mode 100644
index 000000000..9053600c2
--- /dev/null
+++ b/test/security/check_versions/versions/reporting.py
@@ -0,0 +1,265 @@
+#!/usr/bin/env python3
+
+# Copyright 2020 Orange, Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""
+Generate result page
+"""
+import logging
+import pathlib
+import json
+from dataclasses import dataclass
+import os
+import statistics
+import wget
+import yaml
+
+from packaging.version import Version
+
+from jinja2 import ( # pylint: disable=import-error
+ Environment,
+ select_autoescape,
+ PackageLoader,
+)
+
+# Logger
+LOG_LEVEL = "INFO"
+logging.basicConfig()
+LOGGER = logging.getLogger("onap-versions-status-reporting")
+LOGGER.setLevel(LOG_LEVEL)
+
+REPORTING_FILE = "/var/lib/xtesting/results/versions_reporting.html"
+# REPORTING_FILE = "/tmp/versions_reporting.html"
+RESULT_FILE = "/tmp/versions.json"
+RECOMMENDED_VERSIONS_FILE = "/tmp/recommended_versions.yaml"
+WAIVER_LIST_FILE = "/tmp/versions_xfail.txt"
+
+
+@dataclass
+class TestResult:
+ """Test results retrieved from xtesting."""
+
+ pod_name: str
+ container: str
+ image: str
+ python_version: str
+ python_status: int
+ java_version: str
+ java_status: int
+
+
+@dataclass
+class SerieResult:
+ """Series of tests."""
+
+ serie_id: str
+ success_rate: int = 0
+ min: int = 0
+ max: int = 0
+ mean: float = 0.0
+ median: float = 0.0
+ nb_occurences: int = 0
+
+
+class OnapVersionsReporting:
+ """Build html summary page."""
+
+ def __init__(self, result_file) -> None:
+ """Initialization of the report."""
+ version = os.getenv("ONAP_VERSION", "master")
+ base_url = "https://git.onap.org/integration/seccom/plain"
+ if pathlib.Path(WAIVER_LIST_FILE).is_file():
+ self._waiver_file = pathlib.Path(WAIVER_LIST_FILE)
+ else:
+ self._waiver_file = wget.download(
+ base_url + "/waivers/versions/versions_xfail.txt?h=" + version,
+ out=WAIVER_LIST_FILE,
+ )
+ if pathlib.Path(RECOMMENDED_VERSIONS_FILE).is_file():
+ self._recommended_versions_file = pathlib.Path(RECOMMENDED_VERSIONS_FILE)
+ else:
+ self._recommended_versions_file = wget.download(
+ base_url + "/recommended_versions.yaml?h=" + version,
+ out=RECOMMENDED_VERSIONS_FILE,
+ )
+
+ def get_versions_scan_results(self, result_file, waiver_list):
+ """Get all the versions from the scan."""
+ testresult = []
+ # Get the recommended version list for java and python
+ min_java_version = self.get_recommended_version(
+ RECOMMENDED_VERSIONS_FILE, "java11"
+ )
+ min_python_version = self.get_recommended_version(
+ RECOMMENDED_VERSIONS_FILE, "python3"
+ )
+
+ LOGGER.info("Min Java recommended version: %s", min_java_version)
+ LOGGER.info("Min Python recommended version: %s", min_python_version)
+
+ with open(result_file) as json_file:
+ data = json.load(json_file)
+ LOGGER.info("Number of pods: %s", len(data))
+ for component in data:
+ if component["container"] not in waiver_list:
+ testresult.append(
+ TestResult(
+ pod_name=component["pod"],
+ container=component["container"],
+ image=component["extra"]["image"],
+ python_version=component["versions"]["python"],
+ java_version=component["versions"]["java"],
+ python_status=self.get_version_status(
+ component["versions"]["python"], min_python_version[0]
+ ),
+ java_status=self.get_version_status(
+ component["versions"]["java"], min_java_version[0]
+ ),
+ )
+ )
+ LOGGER.info("Nb of pods (after waiver filtering) %s", len(testresult))
+ return testresult
+
+ @staticmethod
+ def get_version_status(versions, min_version):
+ """Based on the min version set the status of the component version."""
+ # status_code
+ # 0: only the recommended version found
+ # 1: not the recommended version, but at least the major version, alone
+ # 2: recommended version found but not alone
+ # 3: at least the major version found but not alone
+ # 4: recommended version not found
+ # we assume that versions are given according to the usual java way
+ # X.Y.Z
+ LOGGER.debug("Version = %s", versions)
+ LOGGER.debug("Min Version = %s", min_version)
+ nb_versions_found = len(versions)
+ status_code = -1
+ LOGGER.debug("Nb versions found :%s", nb_versions_found)
+ # if no version found retrieved -1
+ if nb_versions_found > 0:
+ for version in versions:
+ clean_version = Version(version.replace("_", "."))
+ min_version_ok = str(min_version)
+
+ if clean_version >= Version(min_version_ok):
+ if nb_versions_found < 2:
+ status_code = 0
+ else:
+ status_code = 2
+ elif clean_version.major >= Version(min_version_ok).major:
+ if nb_versions_found < 2:
+ status_code = 1
+ else:
+ status_code = 3
+ else:
+ status_code = 4
+ LOGGER.debug("Version status code = %s", status_code)
+ return status_code
+
+ @staticmethod
+ def get_recommended_version(recommended_versions_file, component):
+ """Retrieve data from the YAML file."""
+ with open(recommended_versions_file) as stream:
+ data = yaml.safe_load(stream)
+ try:
+ recommended_version = data[component]["recommended_versions"]
+ except KeyError:
+ recommended_version = None
+ return recommended_version
+
+ @staticmethod
+ def get_waiver_list(waiver_file_path):
+ """Get the waiver list."""
+ pods_to_be_excluded = []
+ with open(waiver_file_path) as waiver_list:
+ for line in waiver_list:
+ line = line.strip("\n")
+ line = line.strip("\t")
+ if not line.startswith("#"):
+ pods_to_be_excluded.append(line)
+ return pods_to_be_excluded
+
+ @staticmethod
+ def get_score(component_type, scan_res):
+ # Look at the java and python results
+ # 0 = recommended version
+ # 1 = acceptable version
+ nb_good_versions = 0
+ nb_results = 0
+
+ for res in scan_res:
+ if component_type == "java":
+ if res.java_status >= 0:
+ nb_results += 1
+ if res.java_status < 2:
+ nb_good_versions += 1
+ elif component_type == "python":
+ if res.python_status >= 0:
+ nb_results += 1
+ if res.python_status < 2:
+ nb_good_versions += 1
+ try:
+ return round(nb_good_versions * 100 / nb_results, 1)
+ except ZeroDivisionError:
+ LOGGER.error("Impossible to calculate the success rate")
+ return 0
+
+ def generate_reporting(self, result_file):
+ """Generate HTML reporting page."""
+ LOGGER.info("Generate versions HTML report.")
+
+ # Get the waiver list
+ waiver_list = self.get_waiver_list(self._waiver_file)
+ LOGGER.info("Waiver list: %s", waiver_list)
+
+ # Get the Versions results
+ scan_res = self.get_versions_scan_results(result_file, waiver_list)
+
+ LOGGER.info("scan_res: %s", scan_res)
+
+ # Evaluate result
+ status_res = {"java": 0, "python": 0}
+ for component_type in "java", "python":
+ status_res[component_type] = self.get_score(component_type, scan_res)
+
+ LOGGER.info("status_res: %s", status_res)
+
+ # Calculate the average score
+ numbers = [status_res[key] for key in status_res]
+ mean_ = statistics.mean(numbers)
+
+ # Create reporting page
+ jinja_env = Environment(
+ autoescape=select_autoescape(["html"]),
+ loader=PackageLoader("versions"),
+ )
+ page_info = {
+ "title": "ONAP Integration versions reporting",
+ "success_rate": status_res,
+ "mean": mean_,
+ }
+ jinja_env.get_template("versions.html.j2").stream(
+ info=page_info, data=scan_res
+ ).dump("{}".format(REPORTING_FILE))
+
+ return mean_
+
+
+if __name__ == "__main__":
+ test = OnapVersionsReporting(
+ RESULT_FILE, WAIVER_LIST_FILE, RECOMMENDED_VERSIONS_FILE
+ )
+ test.generate_reporting(RESULT_FILE)
diff --git a/test/security/check_versions/versions/templates/base.html.j2 b/test/security/check_versions/versions/templates/base.html.j2
new file mode 100644
index 000000000..025c0ad25
--- /dev/null
+++ b/test/security/check_versions/versions/templates/base.html.j2
@@ -0,0 +1,232 @@
+{% macro color(failing, total) %}
+{% if failing == 0 %}
+is-success
+{% else %}
+{% if (failing / total) <= 0.1 %}
+is-warning
+{% else %}
+is-danger
+{% endif %}
+{% endif %}
+{% endmacro %}
+
+{% macro percentage(failing, total) %}
+{{ ((total - failing) / total) | round }}
+{% endmacro %}
+
+{% macro statistic(resource_name, failing, total) %}
+{% set success = total - failing %}
+<div class="level-item has-text-centered">
+ <div>
+ <p class="heading">{{ resource_name | capitalize }}</p>
+ <p class="title">{{ success }}/{{ total }}</p>
+ <progress class="progress {{ color(failing, total) }}" value="{{ success }}" max="{{ total }}">{{ percentage(failing, total) }}</progress>
+ </div>
+ </div>
+{% endmacro %}
+
+{% macro pods_table(pods) %}
+<div id="pods" class="table-container">
+ <table class="table is-fullwidth is-striped is-hoverable">
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th>Ready</th>
+ <th>Status</th>
+ <th>Reason</th>
+ <th>Restarts</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for pod in pods %}
+ <tr>
+ <td><a href="./pod-{{ pod.name }}.html" title="{{ pod.name }}">{{ pod.k8s.metadata.name }}</a></td>
+ {% if pod.init_done %}
+ <td>{{ pod.running_containers }}/{{ (pod.containers | length) }}</td>
+ {% else %}
+ <td>Init:{{ pod.runned_init_containers }}/{{ (pod.init_containers | length) }}</td>
+ {% endif %}
+ <td>{{ pod.k8s.status.phase }}</td>
+ <td>{{ pod.k8s.status.reason }}</td>
+ {% if pod.init_done %}
+ <td>{{ pod.restart_count }}</td>
+ {% else %}
+ <td>{{ pod.init_restart_count }}</td>
+ {% endif %}
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+</div>
+{% endmacro %}
+
+{% macro key_value_description_list(title, dict) %}
+<dt><strong>{{ title | capitalize }}:</strong></dt>
+<dd>
+ {% if dict %}
+ {% for key, value in dict.items() %}
+ {% if loop.first %}
+ <dl>
+ {% endif %}
+ <dt>{{ key }}:</dt>
+ <dd>{{ value }}</dd>
+ {% if loop.last %}
+ </dl>
+ {% endif %}
+ {% endfor %}
+ {% endif %}
+</dd>
+{% endmacro %}
+
+{% macro description(k8s) %}
+<div class="container">
+ <h1 class="title is-1">Description</h1>
+ <div class="content">
+ <dl>
+ {% if k8s.spec.type %}
+ <dt><strong>Type:</strong></dt>
+ <dd>{{ k8s.spec.type }}</dd>
+ {% if (k8s.spec.type | lower) == "clusterip" %}
+ <dt><strong>Headless:</strong></dt>
+ <dd>{% if (k8s.spec.cluster_ip | lower) == "none" %}Yes{% else %}No{% endif %}</dd>
+ {% endif %}
+ {% endif %}
+ {{ key_value_description_list('Labels', k8s.metadata.labels) | indent(width=6) }}
+ {{ key_value_description_list('Annotations', k8s.metadata.annotations) | indent(width=6) }}
+ {% if k8s.spec.selector %}
+ {% if k8s.spec.selector.match_labels %}
+ {{ key_value_description_list('Selector', k8s.spec.selector.match_labels) | indent(width=6) }}
+ {% else %}
+ {{ key_value_description_list('Selector', k8s.spec.selector) | indent(width=6) }}
+ {% endif %}
+ {% endif %}
+ {% if k8s.phase %}
+ <dt><strong>Status:</strong></dt>
+ <dd>{{ k8s.phase }}</dd>
+ {% endif %}
+ {% if k8s.metadata.owner_references %}
+ <dt><strong>Controlled By:</strong></dt>
+ <dd>{{ k8s.metadata.owner_references[0].kind }}/{{ k8s.metadata.owner_references[0].name }}</dd>
+ {% endif %}
+ </dl>
+ </div>
+</div>
+{% endmacro %}
+
+{% macro pods_container(pods, parent, has_title=True) %}
+<div class="container">
+ {% if has_title %}
+ <h1 class="title is-1">Pods</h1>
+ {% endif %}
+ {% if (pods | length) > 0 %}
+ {{ pods_table(pods) | indent(width=2) }}
+ {% else %}
+ <div class="notification is-warning">{{ parent }} has no pods!</div>
+ {% endif %}
+</div>
+{% endmacro %}
+
+{% macro two_level_breadcrumb(title, name) %}
+<section class="section">
+ <div class="container">
+ <nav class="breadcrumb" aria-label="breadcrumbs">
+ <ul>
+ <li><a href="./index.html">Summary</a></li>
+ <li class="is-active"><a href="#" aria-current="page">{{ title | capitalize }} {{ name }}</a></li>
+ </ul>
+ </nav>
+ </div>
+</section>
+{% endmacro %}
+
+{% macro pod_parent_summary(title, name, failed_pods, pods) %}
+{{ summary(title, name, [{'title': 'Pod', 'failing': failed_pods, 'total': (pods | length)}]) }}
+{% endmacro %}
+
+{% macro number_ok(number, none_value, total=None) %}
+{% if number %}
+{% if total and number < total %}
+<span class="tag is-warning">{{ number }}</span>
+{% else %}
+{{ number }}
+{% endif %}
+{% else %}
+<span class="tag is-warning">{{ none_value }}</span>
+{% endif %}
+{% endmacro %}
+
+{% macro summary(title, name, statistics) %}
+<section class="hero is-light">
+ <div class="hero-body">
+ <div class="container">
+ <h1 class="title is-1">
+ {{ title | capitalize }} {{ name }} Summary
+ </h1>
+ <nav class="level">
+ {% for stat in statistics %}
+ {% if stat.total > 0 %}
+ {{ statistic(stat.title, stat.failing, stat.total) | indent(width=8) }}
+ {% endif %}
+ {% endfor %}
+ </nav>
+ </div>
+ </div>
+</section>
+{% endmacro %}
+
+<!DOCTYPE html>
+<html>
+ <head>
+ <meta charset="utf-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1">
+ <title>Tests results - {% block title %}{% endblock %}</title>
+ <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bulma@0.9.0/css/bulma.min.css">
+ <script defer src="https://use.fontawesome.com/releases/v5.3.1/js/all.js"></script>
+ {% block more_head %}{% endblock %}
+ </head>
+ <body>
+ <nav class="navbar" role="navigation" aria-label="main navigation">
+ <div class="navbar-brand">
+ <a class="navbar-item" href="https://www.onap.org">
+ <img src="https://www.onap.org/wp-content/uploads/sites/20/2017/02/logo_onap_2017.png" width="234" height="50">
+ </a>
+
+ <a role="button" class="navbar-burger burger" aria-label="menu" aria-expanded="false" data-target="navbarBasicExample">
+ <span aria-hidden="true"></span>
+ <span aria-hidden="true"></span>
+ <span aria-hidden="true"></span>
+ </a>
+ </div>
+
+ <div id="navbarBasicExample" class="navbar-menu">
+ <div class="navbar-start">
+ <a class="navbar-item">
+ Summary
+ </a>
+ </div>
+ </div>
+ </nav>
+
+ {% block content %}{% endblock %}
+
+ <footer class="footer">
+ <div class="container">
+ <div class="columns">
+ <div class="column">
+ <p class="has-text-grey-light">
+ <a href="https://bulma.io/made-with-bulma/">
+ <img src="https://bulma.io/images/made-with-bulma.png" alt="Made with Bulma" width="128" height="24">
+ </a>
+ </div>
+ <div class="column">
+ <a class="has-text-grey" href="https://gitlab.com/Orange-OpenSource/lfn/tools/kubernetes-status" style="border-bottom: 1px solid currentColor;">
+ Improve this page on Gitlab
+ </a>
+ </p>
+ </div>
+ </div>
+ </div>
+ </footer>
+ </body>
+</html>
+
diff --git a/test/security/check_versions/versions/templates/versions.html.j2 b/test/security/check_versions/versions/templates/versions.html.j2
new file mode 100644
index 000000000..4860a72da
--- /dev/null
+++ b/test/security/check_versions/versions/templates/versions.html.j2
@@ -0,0 +1,85 @@
+{% extends "base.html.j2" %}
+{% block title %}ONAPTEST Bench{% endblock %}
+
+{% block content %}
+<h1 class="title is-1">{{ info.title }}</h1>
+
+<div class="container">
+
+<article class="message">
+<div class="message-header">
+ <p>Results</p>
+</div>
+<div class="message-body">
+SECCOM recommended versions (global success rate: {{ info.mean }}):
+ <ul>
+ <li>Java: {{ info.success_rate.java }}% </li>
+ <li>Python: {{ info.success_rate.python }}%</li>
+ </ul>
+</div>
+</article>
+
+<article class="message">
+ <div class="message-header">
+ <p>Legend</p>
+ </div>
+ <div class="message-body">
+ <div class="has-background-success">SECCOM recommended version</div>
+ <div class="has-background-success-light">Not the recommended version but at least the major version</div>
+ <div class="has-background-warning-light">Ambiguous versions but at least 1 is the SECCOM recommended version</div>
+ <div class="has-background-warning">Ambiguous versions but at least 1 is the major recommended version</div>
+ <div class="has-background-danger">Wrong Versions</div>
+ </div>
+</article>
+<br>
+
+<h2 class="title is-1">JAVA versions</h2>
+
+<table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Versions</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for component in data %}
+ <tr {% if component.java_status == 4 %} class="has-background-danger" {%elif component.java_status == 0 %} class="has-background-success" {%elif component.java_status == 1 %} class="has-background-success-light" {%elif component.java_status == 2 %} class="has-background-warning-light" {%elif component.java_status == 3 %} class="has-background-warning" {% endif %}>
+
+ {% if component.java_version is defined and component.java_version|length > 0 %}
+ <td>{{ component.container }}</td>
+ <td>{{ component.java_version}}</td>
+ {% endif %}
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+</div>
+<br>
+
+<div class="container">
+<h2 class="title is-1">Python versions</h2>
+
+<table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Versions</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for component in data %}
+ <tr {% if component.python_status == 4 %} class="has-background-danger" {%elif component.python_status == 0 %} class="has-background-success" {%elif component.python_status == 1 %} class="has-background-success-light" {%elif component.python_status == 2 %} class="has-background-warning-light" {%elif component.python_status == 3 %} class="has-background-warning" {% endif %}>
+ {% if component.python_version is defined and component.python_version|length > 0 %}
+ <td>{{ component.container }}</td>
+ <td>{{ component.python_version}}</td>
+ {% endif %}
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+</div>
+
+{% endblock %}
+</div>
+</section>
diff --git a/test/security/k8s/README b/test/security/k8s/README
deleted file mode 100644
index fdb7e4c11..000000000
--- a/test/security/k8s/README
+++ /dev/null
@@ -1,56 +0,0 @@
-##############################
-K8s secure configuration check
-##############################
-
-Utility for checking if Kubernetes cluster configuration follows security recommendations.
-
-***************
-Getting started
-***************
-
-Prerequisites
-=============
-
-Build
------
-
-- make
-- go_
-
-.. _go: https://golang.org/doc/install
-
-Run
----
-
-.. note:: Below applies to Rancher-based clusters (e.g. Casablanca)
-
-- `Rancher CLI`_
-- Docker_ (required to perform selected Rancher CLI calls)
-
-.. _`Rancher CLI`: https://rancher.com/docs/rancher/v1.6/en/cli
-.. _Docker: https://docs.docker.com/install
-
-Test
-----
-
-- Ginkgo_
-
-.. _Ginkgo: https://onsi.github.io/ginkgo/#getting-ginkgo
-
-Running
-=======
-
-Calling::
-
- make run
-
-will build and run configuration check executable. It is the default target.
-
-Testing
-=======
-
-Calling::
-
- make test
-
-will run tests.
diff --git a/test/security/k8s/README.rst b/test/security/k8s/README.rst
index 100b93820..b9e2dd5d2 120000..100644
--- a/test/security/k8s/README.rst
+++ b/test/security/k8s/README.rst
@@ -1 +1,45 @@
-README \ No newline at end of file
+##############################
+K8s secure configuration check
+##############################
+
+Utility for checking if Kubernetes cluster configuration follows security recommendations.
+
+***************
+Getting started
+***************
+
+Prerequisites
+=============
+
+Build
+-----
+
+- make
+- go_
+
+.. _go: https://golang.org/doc/install
+
+Test
+----
+
+- Ginkgo_
+
+.. _Ginkgo: https://onsi.github.io/ginkgo/#getting-ginkgo
+
+Running
+=======
+
+Calling::
+
+ make run
+
+will build and run configuration check executable. It is the default target.
+
+Testing
+=======
+
+Calling::
+
+ make test
+
+will run tests.
diff --git a/test/security/k8s/src/check/check.go b/test/security/k8s/src/check/check.go
index cf412c112..91e9e5fd2 100644
--- a/test/security/k8s/src/check/check.go
+++ b/test/security/k8s/src/check/check.go
@@ -8,6 +8,8 @@ type Informer interface {
GetSchedulerParams() ([]string, error)
// GetControllerManagerParams returns controller manager parameters.
GetControllerManagerParams() ([]string, error)
+ // GetEtcdParams returns etcd parameters.
+ GetEtcdParams() ([]string, error)
}
// Command represents commands run on cluster.
@@ -20,6 +22,8 @@ const (
SchedulerProcess
// ControllerManagerProcess represents controller manager command ("kube-controller-manager").
ControllerManagerProcess
+ // EtcdProcess represents etcd command ("etcd").
+ EtcdProcess
)
func (c Command) String() string {
@@ -27,35 +31,11 @@ func (c Command) String() string {
"kube-apiserver",
"kube-scheduler",
"kube-controller-manager",
+ "etcd",
}
- if c < APIProcess || c > ControllerManagerProcess {
+ if c < APIProcess || c > EtcdProcess {
return "exit"
}
return names[c]
}
-
-// Service represents services run on Rancher-based cluster.
-type Service int
-
-const (
- // APIService represents API server service ("kubernetes/kubernetes").
- APIService Service = iota
- // SchedulerService represents scheduler service ("kubernetes/scheduler").
- SchedulerService
- // ControllerManagerService represents controller manager service ("kubernetes/controller-manager").
- ControllerManagerService
-)
-
-func (s Service) String() string {
- names := [...]string{
- "kubernetes/kubernetes",
- "kubernetes/scheduler",
- "kubernetes/controller-manager",
- }
-
- if s < APIService || s > ControllerManagerService {
- return ""
- }
- return names[s]
-}
diff --git a/test/security/k8s/src/check/cmd/check/check.go b/test/security/k8s/src/check/cmd/check/check.go
index d7176170a..42e9903e6 100644
--- a/test/security/k8s/src/check/cmd/check/check.go
+++ b/test/security/k8s/src/check/cmd/check/check.go
@@ -5,32 +5,23 @@ import (
"log"
"check"
- "check/rancher"
"check/raw"
"check/validators/master"
)
var (
- ranchercli = flag.Bool("ranchercli", false, "use rancher utility for accessing cluster nodes")
- rke = flag.Bool("rke", false, "use RKE cluster definition and ssh for accessing cluster nodes (default)")
+ rke = flag.Bool("rke", true, "use RKE cluster definition and ssh for accessing cluster nodes (default)")
)
func main() {
flag.Parse()
- if *ranchercli && *rke {
+ if !(*rke) {
log.Fatal("Not supported.")
}
- // Use default cluster access method if none was declared explicitly.
- if !(*ranchercli || *rke) {
- *rke = true
- }
-
var info check.Informer
switch {
- case *ranchercli:
- info = &rancher.Rancher{}
case *rke:
info = &raw.Raw{}
default:
@@ -54,4 +45,14 @@ func main() {
log.Fatal(err)
}
master.CheckControllerManager(controllerManagerParams)
+
+ _, err = info.GetEtcdParams()
+ if err != nil {
+ switch err {
+ case check.ErrNotImplemented:
+ log.Print(err) // Fail softly.
+ default:
+ log.Fatal(err)
+ }
+ }
}
diff --git a/test/security/k8s/src/check/errors.go b/test/security/k8s/src/check/errors.go
new file mode 100644
index 000000000..d657c1827
--- /dev/null
+++ b/test/security/k8s/src/check/errors.go
@@ -0,0 +1,10 @@
+package check
+
+import (
+ "errors"
+)
+
+var (
+ // ErrNotImplemented is returned when function is not implemented yet.
+ ErrNotImplemented = errors.New("function not implemented")
+)
diff --git a/test/security/k8s/src/check/rancher/rancher.go b/test/security/k8s/src/check/rancher/rancher.go
deleted file mode 100644
index b5e382221..000000000
--- a/test/security/k8s/src/check/rancher/rancher.go
+++ /dev/null
@@ -1,118 +0,0 @@
-// Package rancher wraps Rancher commands necessary for K8s inspection.
-package rancher
-
-import (
- "bytes"
- "fmt"
- "os/exec"
-
- "check"
-)
-
-const (
- bin = "rancher"
- paramHost = "--host"
- cmdHosts = "hosts"
- cmdHostsParams = "--quiet"
- cmdDocker = "docker"
- cmdDockerCmdPs = "ps"
- cmdDockerCmdPsParams = "--no-trunc"
- cmdDockerCmdPsFilter = "--filter"
- cmdDockerCmdPsFilterArgs = "label=io.rancher.stack_service.name="
- cmdDockerCmdPsFormat = "--format"
- cmdDockerCmdPsFormatArgs = "{{.Command}}"
-)
-
-// Rancher implements Informer interface.
-type Rancher struct {
- check.Informer
-}
-
-// GetAPIParams returns parameters of running Kubernetes API server.
-// It queries default environment set in configuration file.
-func (r *Rancher) GetAPIParams() ([]string, error) {
- return getProcessParams(check.APIProcess, check.APIService)
-}
-
-// GetSchedulerParams returns parameters of running Kubernetes scheduler.
-// It queries default environment set in configuration file.
-func (r *Rancher) GetSchedulerParams() ([]string, error) {
- return getProcessParams(check.SchedulerProcess, check.SchedulerService)
-}
-
-// GetControllerManagerParams returns parameters of running Kubernetes scheduler.
-// It queries default environment set in configuration file.
-func (r *Rancher) GetControllerManagerParams() ([]string, error) {
- return getProcessParams(check.ControllerManagerProcess, check.ControllerManagerService)
-}
-
-func getProcessParams(process check.Command, service check.Service) ([]string, error) {
- hosts, err := listHosts()
- if err != nil {
- return []string{}, err
- }
-
- for _, host := range hosts {
- cmd, err := getPsCmdOutput(host, service)
- if err != nil {
- return []string{}, err
- }
-
- cmd = trimOutput(cmd) // TODO: improve `docker ps` query format.
- if len(cmd) > 0 {
- i := bytes.Index(cmd, []byte(process.String()))
- if i == -1 {
- return []string{}, fmt.Errorf("missing %s command", process)
- }
- return btos(cmd[i+len(process.String()):]), nil
- }
- }
- return []string{}, nil
-}
-
-// listHosts lists IDs of active hosts.
-// It queries default environment set in configuration file.
-func listHosts() ([]string, error) {
- cmd := exec.Command(bin, cmdHosts, cmdHostsParams)
- out, err := cmd.Output()
- if err != nil {
- return nil, err
- }
- return btos(out), nil
-}
-
-// getPsCmdOutput returns running Kubernetes service command with its parameters.
-// It queries default environment set in configuration file.
-func getPsCmdOutput(host string, service check.Service) ([]byte, error) {
- // Following is equivalent to:
- // $ rancher --host $HOST \
- // docker ps --no-trunc \
- // --filter "label=io.rancher.stack_service.name=$SERVICE" \
- // --format "{{.Command}}"
- cmd := exec.Command(bin, paramHost, host,
- cmdDocker, cmdDockerCmdPs, cmdDockerCmdPsParams,
- cmdDockerCmdPsFilter, cmdDockerCmdPsFilterArgs+service.String(),
- cmdDockerCmdPsFormat, cmdDockerCmdPsFormatArgs)
- out, err := cmd.Output()
- if err != nil {
- return nil, err
- }
- return out, nil
-}
-
-// trimOutput removes trailing new line and brackets from output.
-func trimOutput(b []byte) []byte {
- b = bytes.TrimSpace(b)
- b = bytes.TrimPrefix(b, []byte("["))
- b = bytes.TrimSuffix(b, []byte("]"))
- return b
-}
-
-// btos converts slice of bytes to slice of strings split by white space characters.
-func btos(in []byte) []string {
- var out []string
- for _, b := range bytes.Fields(in) {
- out = append(out, string(b))
- }
- return out
-}
diff --git a/test/security/k8s/src/check/raw/raw.go b/test/security/k8s/src/check/raw/raw.go
index 555115950..91237ba82 100644
--- a/test/security/k8s/src/check/raw/raw.go
+++ b/test/security/k8s/src/check/raw/raw.go
@@ -46,6 +46,12 @@ func (r *Raw) GetControllerManagerParams() ([]string, error) {
return getProcessParams(check.ControllerManagerProcess)
}
+// GetEtcdParams returns parameters of running etcd.
+// It queries only cluster nodes with "controlplane" role.
+func (r *Raw) GetEtcdParams() ([]string, error) {
+ return getProcessParams(check.EtcdProcess)
+}
+
func getProcessParams(process check.Command) ([]string, error) {
nodes, err := config.GetNodesInfo()
if err != nil {
@@ -61,6 +67,10 @@ func getProcessParams(process check.Command) ([]string, error) {
cmd = trimOutput(cmd) // TODO: improve `docker inspect` query format.
if len(cmd) > 0 {
+ if process == check.EtcdProcess { // etcd process name is not included in its argument list.
+ return btos(cmd), nil
+ }
+
i := bytes.Index(cmd, []byte(process.String()))
if i == -1 {
return []string{}, fmt.Errorf("missing %s command", process)
diff --git a/test/security/k8s/src/check/validators/master/api/api_test.go b/test/security/k8s/src/check/validators/master/api/api_test.go
index 4ba5070a8..01fe9b1c6 100644
--- a/test/security/k8s/src/check/validators/master/api/api_test.go
+++ b/test/security/k8s/src/check/validators/master/api/api_test.go
@@ -45,39 +45,7 @@ var _ = Describe("Api", func() {
"TLS_RSA_WITH_AES_128_GCM_SHA256",
}
- // kubeApiServerCasablanca was obtained from virtual environment for testing
- // (introduced in Change-Id: I57f9f3caac0e8b391e9ed480f6bebba98e006882).
- kubeApiServerCasablanca = []string{
- "--storage-backend=etcd2",
- "--storage-media-type=application/json",
- "--service-cluster-ip-range=10.43.0.0/16",
- "--etcd-servers=https://etcd.kubernetes.rancher.internal:2379",
- "--insecure-bind-address=0.0.0.0",
- "--insecure-port=0",
- "--cloud-provider=rancher",
- "--allow-privileged=true",
- "--admission-control=NamespaceLifecycle,LimitRanger,ServiceAccount," +
- "PersistentVolumeLabel,DefaultStorageClass,DefaultTolerationSeconds,ResourceQuota",
- "--client-ca-file=/etc/kubernetes/ssl/ca.pem",
- "--tls-cert-file=/etc/kubernetes/ssl/cert.pem",
- "--tls-private-key-file=/etc/kubernetes/ssl/key.pem",
- "--kubelet-client-certificate=/etc/kubernetes/ssl/cert.pem",
- "--kubelet-client-key=/etc/kubernetes/ssl/key.pem",
- "--runtime-config=batch/v2alpha1",
- "--anonymous-auth=false",
- "--authentication-token-webhook-config-file=/etc/kubernetes/authconfig",
- "--runtime-config=authentication.k8s.io/v1beta1=true",
- "--external-hostname=kubernetes.kubernetes.rancher.internal",
- "--etcd-cafile=/etc/kubernetes/etcd/ca.pem",
- "--etcd-certfile=/etc/kubernetes/etcd/cert.pem",
- "--etcd-keyfile=/etc/kubernetes/etcd/key.pem",
- "--tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256," +
- "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305," +
- "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384," +
- "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305",
- }
-
- // kubeApiServerCasablanca was obtained from virtual environment for testing
+ // kubeApiServerDublin was obtained from virtual environment for testing
// (introduced in Change-Id: I54ada5fade3b984dedd1715f20579e3ce901faa3).
kubeApiServerDublin = []string{
"--requestheader-group-headers=X-Remote-Group",
@@ -130,7 +98,6 @@ var _ = Describe("Api", func() {
},
Entry("Is not absent on insecure cluster", []string{"--insecure-allow-any-token"}, false),
Entry("Should be absent on CIS-compliant cluster", kubeApiServerCISCompliant, true),
- Entry("Should be absent on Casablanca cluster", kubeApiServerCasablanca, true),
Entry("Should be absent on Dublin cluster", kubeApiServerDublin, true),
)
@@ -140,7 +107,6 @@ var _ = Describe("Api", func() {
},
Entry("Is not set on insecure cluster", []string{}, false),
Entry("Should be set to false on CIS-compliant cluster", kubeApiServerCISCompliant, true),
- Entry("Should be set to false on Casablanca cluster", kubeApiServerCasablanca, true),
Entry("Should be set to false on Dublin cluster", kubeApiServerDublin, true),
)
@@ -150,7 +116,6 @@ var _ = Describe("Api", func() {
},
Entry("Is not set on insecure cluster", []string{}, false),
Entry("Is explicitly enabled on insecure cluster", []string{"--profiling=true"}, false),
- Entry("Is not set on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Should be set to false on CIS-compliant cluster", kubeApiServerCISCompliant, true),
Entry("Should be set to false on Dublin cluster", kubeApiServerDublin, true),
)
@@ -161,7 +126,6 @@ var _ = Describe("Api", func() {
},
Entry("Is explicitly disabled on insecure cluster", []string{"--kubelet-https=false"}, false),
Entry("Should be absent or set to true on CIS-compliant cluster", kubeApiServerCISCompliant, true),
- Entry("Should be absent or set to true on Casablanca cluster", kubeApiServerCasablanca, true),
Entry("Should be absent or set to true on Dublin cluster", kubeApiServerDublin, true),
)
@@ -171,7 +135,6 @@ var _ = Describe("Api", func() {
},
Entry("Is not set on insecure cluster", []string{}, false),
Entry("Is explicitly enabled on insecure cluster", []string{"--repair-malformed-updates=true"}, false),
- Entry("Is not set on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Should be set to false on CIS-compliant cluster", kubeApiServerCISCompliant, true),
Entry("Should be set to false on Dublin cluster", kubeApiServerDublin, true),
)
@@ -182,7 +145,6 @@ var _ = Describe("Api", func() {
},
Entry("Is not set on insecure cluster", []string{}, false),
Entry("Is explicitly disabled on insecure cluster", []string{"--service-account-lookup=false"}, false),
- Entry("Is not set on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Should be set to true on CIS-compliant cluster", kubeApiServerCISCompliant, true),
Entry("Should be set to true on Dublin cluster", kubeApiServerDublin, true),
)
@@ -195,7 +157,6 @@ var _ = Describe("Api", func() {
},
Entry("Is not absent on insecure cluster", []string{"--basic-auth-file=/path/to/file"}, false),
Entry("Should be absent on CIS-compliant cluster", kubeApiServerCISCompliant, true),
- Entry("Should be absent on Casablanca cluster", kubeApiServerCasablanca, true),
Entry("Should be absent on Dublin cluster", kubeApiServerDublin, true),
)
@@ -205,7 +166,6 @@ var _ = Describe("Api", func() {
},
Entry("Is not absent on insecure cluster", []string{"--token-auth-file=/path/to/file"}, false),
Entry("Should be absent on CIS-compliant cluster", kubeApiServerCISCompliant, true),
- Entry("Should be absent on Casablanca cluster", kubeApiServerCasablanca, true),
Entry("Should be absent on Dublin cluster", kubeApiServerDublin, true),
)
@@ -215,7 +175,6 @@ var _ = Describe("Api", func() {
},
Entry("Is absent on insecure cluster", []string{}, false),
Entry("Is empty on insecure cluster", []string{"--audit-log-path="}, false),
- Entry("Is absent on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Is absent on Dublin cluster", kubeApiServerDublin, false),
Entry("Should be present on CIS-compliant cluster", kubeApiServerCISCompliant, true),
)
@@ -226,7 +185,6 @@ var _ = Describe("Api", func() {
},
Entry("Is absent on insecure cluster", []string{}, false),
Entry("Is empty on insecure cluster", []string{"--kubelet-certificate-authority="}, false),
- Entry("Is absent on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Is absent on Dublin cluster", kubeApiServerDublin, false),
Entry("Should be present on CIS-compliant cluster", kubeApiServerCISCompliant, true),
)
@@ -238,7 +196,6 @@ var _ = Describe("Api", func() {
Entry("Is absent on insecure cluster", []string{}, false),
Entry("Is empty on insecure cluster", []string{"--client-ca-file="}, false),
Entry("Should be present on CIS-compliant cluster", kubeApiServerCISCompliant, true),
- Entry("Should be present on Casablanca cluster", kubeApiServerCasablanca, true),
Entry("Should be present on Dublin cluster", kubeApiServerDublin, true),
)
@@ -249,7 +206,6 @@ var _ = Describe("Api", func() {
Entry("Is absent on insecure cluster", []string{}, false),
Entry("Is empty on insecure cluster", []string{"-etcd-cafile="}, false),
Entry("Should be present on CIS-compliant cluster", kubeApiServerCISCompliant, true),
- Entry("Should be present on Casablanca cluster", kubeApiServerCasablanca, true),
Entry("Should be present on Dublin cluster", kubeApiServerDublin, true),
)
@@ -259,7 +215,6 @@ var _ = Describe("Api", func() {
},
Entry("Is absent on insecure cluster", []string{}, false),
Entry("Is empty on insecure cluster", []string{"--service-account-key-file="}, false),
- Entry("Is absent on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Should be present on CIS-compliant cluster", kubeApiServerCISCompliant, true),
Entry("Should be present on Dublin cluster", kubeApiServerDublin, true),
)
@@ -271,7 +226,6 @@ var _ = Describe("Api", func() {
Entry("Is absent on insecure cluster", []string{}, false),
Entry("Is empty on insecure cluster", []string{"--kubelet-client-certificate= --kubelet-client-key="}, false),
Entry("Should be present on CIS-compliant cluster", kubeApiServerCISCompliant, true),
- Entry("Should be present on Casablanca cluster", kubeApiServerCasablanca, true),
Entry("Should be present on Dublin cluster", kubeApiServerDublin, true),
)
@@ -282,7 +236,6 @@ var _ = Describe("Api", func() {
Entry("Is absent on insecure cluster", []string{}, false),
Entry("Is empty on insecure cluster", []string{"--etcd-certfile= --etcd-keyfile="}, false),
Entry("Should be present on CIS-compliant cluster", kubeApiServerCISCompliant, true),
- Entry("Should be present on Casablanca cluster", kubeApiServerCasablanca, true),
Entry("Should be present on Dublin cluster", kubeApiServerDublin, true),
)
@@ -293,7 +246,6 @@ var _ = Describe("Api", func() {
Entry("Is absent on insecure cluster", []string{}, false),
Entry("Is empty on insecure cluster", []string{"--tls-cert-file= --tls-private-key-file="}, false),
Entry("Should be present on CIS-compliant cluster", kubeApiServerCISCompliant, true),
- Entry("Should be present on Casablanca cluster", kubeApiServerCasablanca, true),
Entry("Should be present on Dublin cluster", kubeApiServerDublin, true),
)
})
@@ -304,7 +256,6 @@ var _ = Describe("Api", func() {
Expect(IsInsecureBindAddressAbsentOrLoopback(params)).To(Equal(expected))
},
Entry("Is not absent on insecure cluster", []string{"--insecure-bind-address=1.2.3.4"}, false),
- Entry("Is not absent nor set to loopback on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Should be absent or set to loopback on CIS-compliant cluster", kubeApiServerCISCompliant, true),
Entry("Should be absent or set to loopback on Dublin cluster", kubeApiServerDublin, true),
)
@@ -316,7 +267,6 @@ var _ = Describe("Api", func() {
Entry("Is not set on insecure cluster", []string{}, false),
Entry("Is explicitly enabled on insecure cluster", []string{"--insecure-port=1234"}, false),
Entry("Should be set to 0 on CIS-compliant cluster", kubeApiServerCISCompliant, true),
- Entry("Should be set to 0 on Casablanca cluster", kubeApiServerCasablanca, true),
Entry("Should be set to 0 on Dublin cluster", kubeApiServerDublin, true),
)
@@ -326,7 +276,6 @@ var _ = Describe("Api", func() {
},
Entry("Is explicitly disabled on insecure cluster", []string{"--secure-port=0"}, false),
Entry("Should be absent or set to valid port on CIS-compliant cluster", kubeApiServerCISCompliant, true),
- Entry("Should be absent or set to valid port on Casablanca cluster", kubeApiServerCasablanca, true),
Entry("Should be absent or set to valid port on Dublin cluster", kubeApiServerDublin, true),
)
})
@@ -339,7 +288,6 @@ var _ = Describe("Api", func() {
Entry("Is absent on insecure cluster", []string{}, false),
Entry("Is empty on insecure cluster", []string{"--audit-log-maxage="}, false),
Entry("Is insufficient on insecure cluster", []string{"--audit-log-maxage=5"}, false),
- Entry("Is absent on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Is absent on Dublin cluster", kubeApiServerDublin, false),
Entry("Should be set appropriately on CIS-compliant cluster", kubeApiServerCISCompliant, true),
)
@@ -351,7 +299,6 @@ var _ = Describe("Api", func() {
Entry("Is absent on insecure cluster", []string{}, false),
Entry("Is empty on insecure cluster", []string{"--audit-log-maxbackup="}, false),
Entry("Is insufficient on insecure cluster", []string{"--audit-log-maxbackup=2"}, false),
- Entry("Is absent on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Is absent on Dublin cluster", kubeApiServerDublin, false),
Entry("Should be set appropriately on CIS-compliant cluster", kubeApiServerCISCompliant, true),
)
@@ -363,7 +310,6 @@ var _ = Describe("Api", func() {
Entry("Is absent on insecure cluster", []string{}, false),
Entry("Is empty on insecure cluster", []string{"--audit-log-maxsize="}, false),
Entry("Is insufficient on insecure cluster", []string{"--audit-log-maxsize=5"}, false),
- Entry("Is absent on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Is absent on Dublin cluster", kubeApiServerDublin, false),
Entry("Should be set appropriately on CIS-compliant cluster", kubeApiServerCISCompliant, true),
)
@@ -375,7 +321,6 @@ var _ = Describe("Api", func() {
Entry("Is empty on insecure cluster", []string{"--request-timeout="}, false),
Entry("Is too high on insecure cluster", []string{"--request-timeout=600"}, false),
Entry("Should be set only if needed on CIS-compliant cluster", kubeApiServerCISCompliant, true),
- Entry("Should be set only if needed on Casablanca cluster", kubeApiServerCasablanca, true),
Entry("Should be set only if needed on Dublin cluster", kubeApiServerDublin, true),
)
})
@@ -388,7 +333,6 @@ var _ = Describe("Api", func() {
Entry("Is not absent on insecure cluster", []string{"--enable-admission-plugins=Foo,Bar,AlwaysAdmit,Baz,Quuz"}, false),
Entry("Is not absent on insecure deprecated cluster", []string{"--admission-control=Foo,Bar,AlwaysAdmit,Baz,Quuz"}, false),
Entry("Should be absent on CIS-compliant cluster", kubeApiServerCISCompliant, true),
- Entry("Should be absent on Casablanca cluster", kubeApiServerCasablanca, true),
Entry("Should be absent on Dublin cluster", kubeApiServerDublin, true),
)
@@ -398,7 +342,6 @@ var _ = Describe("Api", func() {
},
Entry("Is not present on insecure cluster", []string{"--enable-admission-plugins=Foo,Bar"}, false),
Entry("Is not present on insecure deprecated cluster", []string{"--admission-control=Foo,Bar"}, false),
- Entry("Is not present on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Is not present on Dublin cluster", kubeApiServerDublin, false),
Entry("Should be present on CIS-compliant cluster", kubeApiServerCISCompliant, true),
)
@@ -409,7 +352,6 @@ var _ = Describe("Api", func() {
},
Entry("Is not present on insecure cluster", []string{"--enable-admission-plugins=Foo,Bar"}, false),
Entry("Is not present on insecure deprecated cluster", []string{"--admission-control=Foo,Bar"}, false),
- Entry("Is not present on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Is not present on Dublin cluster", kubeApiServerDublin, false),
Entry("Should be present on CIS-compliant cluster", kubeApiServerCISCompliant, true),
)
@@ -420,7 +362,6 @@ var _ = Describe("Api", func() {
},
Entry("Is not present on insecure cluster", []string{"--enable-admission-plugins=Foo,Bar"}, false),
Entry("Is not present on insecure deprecated cluster", []string{"--admission-control=Foo,Bar"}, false),
- Entry("Is not present on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Is not present on Dublin cluster", kubeApiServerDublin, false),
Entry("Should be present on CIS-compliant cluster", kubeApiServerCISCompliant, true),
)
@@ -431,7 +372,6 @@ var _ = Describe("Api", func() {
},
Entry("Is not present on insecure cluster", []string{"--enable-admission-plugins=Foo,Bar"}, false),
Entry("Is not present on insecure deprecated cluster", []string{"--admission-control=Foo,Bar"}, false),
- Entry("Is not present on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Is not present on Dublin cluster", kubeApiServerDublin, false),
Entry("Should be present on CIS-compliant cluster", kubeApiServerCISCompliant, true),
)
@@ -443,7 +383,6 @@ var _ = Describe("Api", func() {
Entry("Is not present on insecure cluster", []string{"--enable-admission-plugins=Foo,Bar"}, false),
Entry("Is not present on insecure deprecated cluster", []string{"--admission-control=Foo,Bar"}, false),
Entry("Should be present on CIS-compliant cluster", kubeApiServerCISCompliant, true),
- Entry("Should be present on Casablanca cluster", kubeApiServerCasablanca, true),
Entry("Should be present on Dublin cluster", kubeApiServerDublin, true),
)
@@ -453,7 +392,6 @@ var _ = Describe("Api", func() {
},
Entry("Is not present on insecure cluster", []string{"--enable-admission-plugins=Foo,Bar"}, false),
Entry("Is not present on insecure deprecated cluster", []string{"--admission-control=Foo,Bar"}, false),
- Entry("Is not present on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Should be present on CIS-compliant cluster", kubeApiServerCISCompliant, true),
Entry("Should be present on Dublin cluster", kubeApiServerDublin, true),
)
@@ -464,7 +402,6 @@ var _ = Describe("Api", func() {
},
Entry("Is not present on insecure cluster", []string{"--enable-admission-plugins=Foo,Bar"}, false),
Entry("Is not present on insecure deprecated cluster", []string{"--admission-control=Foo,Bar"}, false),
- Entry("Is not present on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Is not present on Dublin cluster", kubeApiServerDublin, false),
Entry("Should be present on CIS-compliant cluster", kubeApiServerCISCompliant, true),
)
@@ -475,7 +412,6 @@ var _ = Describe("Api", func() {
},
Entry("Is explicitly disabled on insecure cluster", []string{"--disable-admission-plugins=Foo,Bar,NamespaceLifecycle,Baz,Quuz"}, false),
Entry("Should not be disabled on CIS-compliant cluster", kubeApiServerCISCompliant, true),
- Entry("Should not be disabled on Casablanca cluster", kubeApiServerCasablanca, true),
Entry("Should not be disabled on Dublin cluster", kubeApiServerDublin, true),
)
@@ -485,7 +421,6 @@ var _ = Describe("Api", func() {
},
Entry("Is not explicitly disabled on insecure cluster", []string{}, false),
Entry("Is not absent on insecure cluster", []string{"--authorization-mode=Foo,Bar,AlwaysAllow,Baz,Quuz"}, false),
- Entry("Is not explicitly disabled on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Should be absent on CIS-compliant cluster", kubeApiServerCISCompliant, true),
Entry("Should be absent on Dublin cluster", kubeApiServerDublin, true),
)
@@ -496,7 +431,6 @@ var _ = Describe("Api", func() {
},
Entry("Is not explicitly enabled on insecure cluster", []string{}, false),
Entry("Is not present on insecure cluster", []string{"--authorization-mode=Foo,Bar"}, false),
- Entry("Is not explicitly enabled on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Should present on CIS-compliant cluster", kubeApiServerCISCompliant, true),
Entry("Should present on Dublin cluster", kubeApiServerDublin, true),
)
@@ -510,7 +444,6 @@ var _ = Describe("Api", func() {
Entry("Is absent on insecure cluster", []string{}, false),
Entry("Is empty on insecure cluster", []string{"--tls-cipher-suites="}, false),
Entry("Is incomplete on insecure cluster", []string{"--tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256"}, false),
- Entry("Is incomplete on Casablanca cluster", kubeApiServerCasablanca, false),
Entry("Is incomplete on Dublin cluster", kubeApiServerDublin, false),
Entry("Should be complete on CIS-compliant cluster", kubeApiServerCISCompliant, true),
)
diff --git a/test/security/k8s/src/check/validators/master/controllermanager/controllermanager.go b/test/security/k8s/src/check/validators/master/controllermanager/controllermanager.go
index f1dd0fe49..4629ad86f 100644
--- a/test/security/k8s/src/check/validators/master/controllermanager/controllermanager.go
+++ b/test/security/k8s/src/check/validators/master/controllermanager/controllermanager.go
@@ -25,3 +25,18 @@ func IsInsecureBindAddressAbsentOrLoopback(params []string) bool {
return boolean.IsFlagAbsent("--address=", params) ||
args.HasSingleFlagArgument("--address=", "127.0.0.1", params)
}
+
+// IsTerminatedPodGcThresholdValid validates that the terminated pod garbage collector threshold flag is set and has a non-empty argument.
+func IsTerminatedPodGcThresholdValid(params []string) bool {
+ return args.HasSingleFlagNonemptyArgument("--terminated-pod-gc-threshold", params)
+}
+
+// IsServiceAccountPrivateKeyFileSet validates that the service account private key file flag is set and has a non-empty argument.
+func IsServiceAccountPrivateKeyFileSet(params []string) bool {
+ return args.HasSingleFlagNonemptyArgument("--service-account-private-key-file", params)
+}
+
+// IsRootCertificateAuthoritySet validates that the root certificate authority file flag is set and has a non-empty argument.
+func IsRootCertificateAuthoritySet(params []string) bool {
+ return args.HasSingleFlagNonemptyArgument("--root-ca-file", params)
+}
diff --git a/test/security/k8s/src/check/validators/master/controllermanager/controllermanager_test.go b/test/security/k8s/src/check/validators/master/controllermanager/controllermanager_test.go
index 7fd8b5d53..05e3cae7e 100644
--- a/test/security/k8s/src/check/validators/master/controllermanager/controllermanager_test.go
+++ b/test/security/k8s/src/check/validators/master/controllermanager/controllermanager_test.go
@@ -16,21 +16,12 @@ var _ = Describe("Controllermanager", func() {
"--profiling=false",
"--use-service-account-credentials=true",
"--feature-gates=RotateKubeletServerCertificate=true",
+ "--terminated-pod-gc-threshold=10",
+ "--service-account-private-key-file=/etc/kubernetes/ssl/kube-service-account-token-key.pem",
+ "--root-ca-file=/etc/kubernetes/ssl/kube-ca.pem",
}
- // kubeControllerManagerCasablanca was obtained from virtual environment for testing
- // (introduced in Change-Id: I57f9f3caac0e8b391e9ed480f6bebba98e006882).
- kubeControllerManagerCasablanca = []string{
- "--kubeconfig=/etc/kubernetes/ssl/kubeconfig",
- "--address=0.0.0.0",
- "--root-ca-file=/etc/kubernetes/ssl/ca.pem",
- "--service-account-private-key-file=/etc/kubernetes/ssl/key.pem",
- "--allow-untagged-cloud",
- "--cloud-provider=rancher",
- "--horizontal-pod-autoscaler-use-rest-clients=false",
- }
-
- // kubeControllerManagerCasablanca was obtained from virtual environment for testing
+ // kubeControllerManagerDublin was obtained from virtual environment for testing
// (introduced in Change-Id: I54ada5fade3b984dedd1715f20579e3ce901faa3).
kubeControllerManagerDublin = []string{
"--kubeconfig=/etc/kubernetes/ssl/kubecfg-kube-controller-manager.yaml",
@@ -61,7 +52,6 @@ var _ = Describe("Controllermanager", func() {
},
Entry("Is not set on insecure cluster", []string{}, false),
Entry("Is explicitly enabled on insecure cluster", []string{"--profiling=true"}, false),
- Entry("Is not set on Casablanca cluster", kubeControllerManagerCasablanca, false),
Entry("Should be set to false on CIS-compliant cluster", kubeControllerManagerCISCompliant, true),
Entry("Should be set to false on Dublin cluster", kubeControllerManagerDublin, true),
)
@@ -72,24 +62,56 @@ var _ = Describe("Controllermanager", func() {
},
Entry("Is not set on insecure cluster", []string{}, false),
Entry("Is explicitly disabled on insecure cluster", []string{"--use-service-account-credentials=false"}, false),
- Entry("Is not set on Casablanca cluster", kubeControllerManagerCasablanca, false),
Entry("Should be set to true on CIS-compliant cluster", kubeControllerManagerCISCompliant, true),
Entry("Should be set to true on Dublin cluster", kubeControllerManagerDublin, true),
)
})
+ Describe("File path flags", func() {
+ DescribeTable("Service account private key",
+ func(params []string, expected bool) {
+ Expect(IsServiceAccountPrivateKeyFileSet(params)).To(Equal(expected))
+ },
+ Entry("Is absent on insecure cluster", []string{""}, false),
+ Entry("Is empty on insecure cluster", []string{"--service-account-private-key-file="}, false),
+ Entry("Should be explicitly set on CIS-compliant cluster", kubeControllerManagerCISCompliant, true),
+ Entry("Should be explicitly set on Dublin cluster", kubeControllerManagerDublin, true),
+ )
+
+ DescribeTable("Root certificate authority",
+ func(params []string, expected bool) {
+ Expect(IsRootCertificateAuthoritySet(params)).To(Equal(expected))
+ },
+ Entry("Is absent on insecure cluster", []string{""}, false),
+ Entry("Is empty on insecure cluster", []string{"--root-ca-file="}, false),
+ Entry("Should be explicitly set on CIS-compliant cluster", kubeControllerManagerCISCompliant, true),
+ Entry("Should be explicitly set on Dublin cluster", kubeControllerManagerDublin, true),
+ )
+ })
+
Describe("Address flag", func() {
DescribeTable("Bind address",
func(params []string, expected bool) {
Expect(IsInsecureBindAddressAbsentOrLoopback(params)).To(Equal(expected))
},
Entry("Is not absent on insecure cluster", []string{"--address=1.2.3.4"}, false),
- Entry("Is not absent nor set to loopback on Casablanca cluster", kubeControllerManagerCasablanca, false),
Entry("Is not absent nor set to loopback on Dublin cluster", kubeControllerManagerDublin, false),
Entry("Should be absent or set to loopback on CIS-compliant cluster", kubeControllerManagerCISCompliant, true),
)
})
+ Describe("Numeric flags", func() {
+ DescribeTable("Terminated pod garbage collector threshold",
+ func(params []string, expected bool) {
+ Expect(IsTerminatedPodGcThresholdValid(params)).To(Equal(expected))
+ },
+ Entry("Is absent on insecure cluster", []string{""}, false),
+ Entry("Is empty on insecure cluster", []string{"--terminated-pod-gc-threshold="}, false),
+ Entry("Should be explicitly set on CIS-compliant cluster", kubeControllerManagerCISCompliant, true),
+ Entry("Should be explicitly set on Dublin cluster", kubeControllerManagerDublin, true),
+ )
+ })
+
Describe("Argument list flags", func() {
DescribeTable("RotateKubeletServerCertificate",
func(params []string, expected bool) {
@@ -97,7 +119,6 @@ var _ = Describe("Controllermanager", func() {
},
Entry("Is not enabled on insecure cluster", []string{"--feature-gates=Foo=Bar,Baz=Quuz"}, false),
Entry("Is explicitly disabled on insecure cluster", []string{"--feature-gates=Foo=Bar,RotateKubeletServerCertificate=false,Baz=Quuz"}, false),
- Entry("Is not enabled on Casablanca cluster", kubeControllerManagerCasablanca, false),
Entry("Is not enabled on Dublin cluster", kubeControllerManagerDublin, false),
Entry("Should be enabled on CIS-compliant cluster", kubeControllerManagerCISCompliant, true),
)
diff --git a/test/security/k8s/src/check/validators/master/master.go b/test/security/k8s/src/check/validators/master/master.go
index 0f668f614..11c1b5052 100644
--- a/test/security/k8s/src/check/validators/master/master.go
+++ b/test/security/k8s/src/check/validators/master/master.go
@@ -70,7 +70,10 @@ func CheckScheduler(params []string) {
func CheckControllerManager(params []string) {
log.Println("==> Controller Manager:")
log.Printf("IsProfilingDisabled: %t\n", controllermanager.IsProfilingDisabled(params))
+ log.Printf("IsTerminatedPodGcThresholdValid: %t\n", controllermanager.IsTerminatedPodGcThresholdValid(params))
log.Printf("IsUseServiceAccountCredentialsEnabled: %t\n", controllermanager.IsUseServiceAccountCredentialsEnabled(params))
log.Printf("IsRotateKubeletServerCertificateIncluded: %t\n", controllermanager.IsRotateKubeletServerCertificateIncluded(params))
+ log.Printf("IsServiceAccountPrivateKeyFileSet: %t\n", controllermanager.IsServiceAccountPrivateKeyFileSet(params))
+ log.Printf("IsRootCertificateAuthoritySet: %t\n", controllermanager.IsRootCertificateAuthoritySet(params))
log.Printf("IsInsecureBindAddressAbsentOrLoopback: %t\n", controllermanager.IsInsecureBindAddressAbsentOrLoopback(params))
}
diff --git a/test/security/k8s/src/check/validators/master/scheduler/scheduler_test.go b/test/security/k8s/src/check/validators/master/scheduler/scheduler_test.go
index 4166a58d7..7fb13b820 100644
--- a/test/security/k8s/src/check/validators/master/scheduler/scheduler_test.go
+++ b/test/security/k8s/src/check/validators/master/scheduler/scheduler_test.go
@@ -16,14 +16,7 @@ var _ = Describe("Scheduler", func() {
"--profiling=false",
}
- // kubeSchedulerCasablanca was obtained from virtual environment for testing
- // (introduced in Change-Id: I57f9f3caac0e8b391e9ed480f6bebba98e006882).
- kubeSchedulerCasablanca = []string{
- "--kubeconfig=/etc/kubernetes/ssl/kubeconfig",
- "--address=0.0.0.0",
- }
-
- // kubeSchedulerCasablanca was obtained from virtual environment for testing
+ // kubeSchedulerDublin was obtained from virtual environment for testing
// (introduced in Change-Id: I54ada5fade3b984dedd1715f20579e3ce901faa3).
kubeSchedulerDublin = []string{
"--kubeconfig=/etc/kubernetes/ssl/kubecfg-kube-scheduler.yaml",
@@ -41,7 +34,6 @@ var _ = Describe("Scheduler", func() {
},
Entry("Is not set on insecure cluster", []string{}, false),
Entry("Is explicitly enabled on insecure cluster", []string{"--profiling=true"}, false),
- Entry("Is not set on Casablanca cluster", kubeSchedulerCasablanca, false),
Entry("Should be set to false on CIS-compliant cluster", kubeSchedulerCISCompliant, true),
Entry("Should be set to false on Dublin cluster", kubeSchedulerDublin, true),
)
@@ -53,7 +45,6 @@ var _ = Describe("Scheduler", func() {
Expect(IsInsecureBindAddressAbsentOrLoopback(params)).To(Equal(expected))
},
Entry("Is not absent on insecure cluster", []string{"--address=1.2.3.4"}, false),
- Entry("Is not absent nor set to loopback on Casablanca cluster", kubeSchedulerCasablanca, false),
Entry("Is not absent nor set to loopback on Dublin cluster", kubeSchedulerDublin, false),
Entry("Should be absent or set to loopback on CIS-compliant cluster", kubeSchedulerCISCompliant, true),
)
diff --git a/test/security/k8s/tools/casablanca/get_customization_scripts.sh b/test/security/k8s/tools/casablanca/get_customization_scripts.sh
deleted file mode 100755
index 028f002fc..000000000
--- a/test/security/k8s/tools/casablanca/get_customization_scripts.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-wget \
- 'https://docs.onap.org/en/casablanca/_downloads/0b365a2342af5abd655f1724b962f5b5/openstack-rancher.sh' \
- 'https://docs.onap.org/en/casablanca/_downloads/b20b581d56982e9f15a72527a358d56b/openstack-k8s-node.sh'
diff --git a/test/security/k8s/tools/casablanca/imported/openstack-k8s-node.sh b/test/security/k8s/tools/casablanca/imported/openstack-k8s-node.sh
deleted file mode 100644
index b8462aa5e..000000000
--- a/test/security/k8s/tools/casablanca/imported/openstack-k8s-node.sh
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/bin/bash
-
-DOCKER_VERSION=17.03
-KUBECTL_VERSION=1.11.2
-HELM_VERSION=2.9.1
-
-# setup root access - default login: oom/oom - comment out to restrict access too ssh key only
-sed -i 's/PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config
-sed -i 's/PasswordAuthentication.*/PasswordAuthentication yes/' /etc/ssh/sshd_config
-service sshd restart
-echo -e "oom\noom" | passwd root
-
-apt-get update
-curl https://releases.rancher.com/install-docker/$DOCKER_VERSION.sh | sh
-mkdir -p /etc/systemd/system/docker.service.d/
-cat > /etc/systemd/system/docker.service.d/docker.conf << EOF
-[Service]
-ExecStart=
-ExecStart=/usr/bin/dockerd -H fd:// --insecure-registry=nexus3.onap.org:10001
-EOF
-systemctl daemon-reload
-systemctl restart docker
-apt-mark hold docker-ce
-
-IP_ADDY=`ip address |grep ens|grep inet|awk '{print $2}'| awk -F / '{print $1}'`
-HOSTNAME=`hostname`
-
-echo "$IP_ADDY $HOSTNAME" >> /etc/hosts
-
-docker login -u docker -p docker nexus3.onap.org:10001
-
-sudo apt-get install make -y
-
-sudo curl -LO https://storage.googleapis.com/kubernetes-release/release/v$KUBECTL_VERSION/bin/linux/amd64/kubectl
-sudo chmod +x ./kubectl
-sudo mv ./kubectl /usr/local/bin/kubectl
-sudo mkdir ~/.kube
-wget http://storage.googleapis.com/kubernetes-helm/helm-v${HELM_VERSION}-linux-amd64.tar.gz
-sudo tar -zxvf helm-v${HELM_VERSION}-linux-amd64.tar.gz
-sudo mv linux-amd64/helm /usr/local/bin/helm
-
-# install nfs
-sudo apt-get install nfs-common -y
-
-
-exit 0
diff --git a/test/security/k8s/tools/casablanca/imported/openstack-rancher.sh b/test/security/k8s/tools/casablanca/imported/openstack-rancher.sh
deleted file mode 100644
index bcf542aed..000000000
--- a/test/security/k8s/tools/casablanca/imported/openstack-rancher.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/bash
-
-DOCKER_VERSION=17.03
-RANCHER_VERSION=1.6.22
-KUBECTL_VERSION=1.11.2
-HELM_VERSION=2.9.1
-
-# setup root access - default login: oom/oom - comment out to restrict access too ssh key only
-sed -i 's/PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config
-sed -i 's/PasswordAuthentication.*/PasswordAuthentication yes/' /etc/ssh/sshd_config
-service sshd restart
-echo -e "oom\noom" | passwd root
-
-apt-get update
-curl https://releases.rancher.com/install-docker/$DOCKER_VERSION.sh | sh
-mkdir -p /etc/systemd/system/docker.service.d/
-cat > /etc/systemd/system/docker.service.d/docker.conf << EOF
-[Service]
-ExecStart=
-ExecStart=/usr/bin/dockerd -H fd:// --insecure-registry=nexus3.onap.org:10001
-EOF
-systemctl daemon-reload
-systemctl restart docker
-apt-mark hold docker-ce
-
-IP_ADDY=`ip address |grep ens|grep inet|awk '{print $2}'| awk -F / '{print $1}'`
-HOSTNAME=`hostname`
-
-echo "$IP_ADDY $HOSTNAME" >> /etc/hosts
-
-docker login -u docker -p docker nexus3.onap.org:10001
-
-sudo apt-get install make -y
-
-sudo docker run -d --restart=unless-stopped -p 8080:8080 --name rancher_server rancher/server:v$RANCHER_VERSION
-sudo curl -LO https://storage.googleapis.com/kubernetes-release/release/v$KUBECTL_VERSION/bin/linux/amd64/kubectl
-sudo chmod +x ./kubectl
-sudo mv ./kubectl /usr/local/bin/kubectl
-sudo mkdir ~/.kube
-wget http://storage.googleapis.com/kubernetes-helm/helm-v${HELM_VERSION}-linux-amd64.tar.gz
-sudo tar -zxvf helm-v${HELM_VERSION}-linux-amd64.tar.gz
-sudo mv linux-amd64/helm /usr/local/bin/helm
-
-# nfs server
-sudo apt-get install nfs-kernel-server -y
-
-sudo mkdir -p /nfs_share
-sudo chown nobody:nogroup /nfs_share/
-
-
-exit 0
diff --git a/test/security/k8s/vagrant/casablanca/Vagrantfile b/test/security/k8s/vagrant/casablanca/Vagrantfile
deleted file mode 100644
index c7e6883ce..000000000
--- a/test/security/k8s/vagrant/casablanca/Vagrantfile
+++ /dev/null
@@ -1,48 +0,0 @@
-# -*- mode: ruby -*-
-# -*- coding: utf-8 -*-
-
-host_ip = "192.168.121.1"
-
-rancher_port = 8080
-
-vm_memory = 2 * 1024
-vm_cpus = 1
-vm_box = "generic/ubuntu1604"
-
-cluster = [
- { name: 'master', hostname: 'master', ip: '172.17.3.100' },
- { name: 'worker', hostname: 'worker', ip: '172.17.3.101' }
-]
-
-Vagrant.configure('2') do |config|
- cluster.each do |node|
- config.vm.define node[:name] do |config|
- config.vm.box = vm_box
- config.vm.hostname = node[:hostname]
-
- config.vm.provider :virtualbox do |v|
- v.name = node[:name]
- v.memory = vm_memory
- v.cpus = vm_cpus
- end
-
- config.vm.provider :libvirt do |v|
- v.memory = vm_memory
- v.cpus = vm_cpus
- end
-
- config.vm.network :private_network, ip: node[:ip]
- config.vm.provision :shell, inline: "echo nameserver #{host_ip} | resolvconf -a eth0.inet"
-
- if node[:name] == 'master'
- config.vm.network "forwarded_port", guest: rancher_port, host: rancher_port
- config.vm.provision :shell, path: "../../tools/casablanca/imported/openstack-rancher.sh"
- config.vm.provision :shell, path: "../../tools/casablanca/get_ranchercli.sh"
- end
-
- if node[:name] == 'worker'
- config.vm.provision :shell, path: "../../tools/casablanca/imported/openstack-k8s-node.sh"
- end
- end
- end
-end
diff --git a/test/security/k8s/vagrant/dublin/cluster.yml b/test/security/k8s/vagrant/dublin/cluster.yml
index df93a8863..e4eef11cd 100644
--- a/test/security/k8s/vagrant/dublin/cluster.yml
+++ b/test/security/k8s/vagrant/dublin/cluster.yml
@@ -32,7 +32,6 @@ network:
plugin: canal
authentication:
strategy: x509
-ssh_key_path: *ssh_key_path
ssh_agent_auth: false
authorization:
mode: rbac
diff --git a/test/security/requirements.txt b/test/security/requirements.txt
new file mode 100644
index 000000000..8683da168
--- /dev/null
+++ b/test/security/requirements.txt
@@ -0,0 +1,2 @@
+kubernetes
+colorama
diff --git a/test/security/setup.py b/test/security/setup.py
new file mode 100644
index 000000000..7e71bda03
--- /dev/null
+++ b/test/security/setup.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+# COPYRIGHT NOTICE STARTS HERE
+#
+# Copyright 2020 Samsung Electronics Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# COPYRIGHT NOTICE ENDS HERE
+
+import setuptools
+
+setuptools.setup(
+ install_requires=[
+ 'kubernetes',
+ 'colorama'
+ ]
+)
diff --git a/test/security/sslendpoints/.dockerignore b/test/security/sslendpoints/.dockerignore
new file mode 100644
index 000000000..7bc4c9048
--- /dev/null
+++ b/test/security/sslendpoints/.dockerignore
@@ -0,0 +1,5 @@
+.git*
+.dockerignore
+Dockerfile
+Makefile
+bin/
diff --git a/test/security/sslendpoints/.gitignore b/test/security/sslendpoints/.gitignore
new file mode 100644
index 000000000..035097d0f
--- /dev/null
+++ b/test/security/sslendpoints/.gitignore
@@ -0,0 +1,2 @@
+# No binaries should be committed.
+/bin/
diff --git a/test/security/sslendpoints/Dockerfile b/test/security/sslendpoints/Dockerfile
new file mode 100644
index 000000000..415101e44
--- /dev/null
+++ b/test/security/sslendpoints/Dockerfile
@@ -0,0 +1,11 @@
+FROM golang:1.13.8 AS build
+
+WORKDIR /opt/onap.local/sslendpoints
+COPY . /opt/onap.local/sslendpoints
+RUN go mod download
+RUN CGO_ENABLED=0 go build -ldflags '-extldflags "-static"'
+
+FROM scratch
+COPY --from=build /opt/onap.local/sslendpoints/sslendpoints /bin/sslendpoints
+ENTRYPOINT ["/bin/sslendpoints"]
+CMD ["--help"]
diff --git a/test/security/sslendpoints/Makefile b/test/security/sslendpoints/Makefile
new file mode 100644
index 000000000..e853dc4dd
--- /dev/null
+++ b/test/security/sslendpoints/Makefile
@@ -0,0 +1,47 @@
+BUILD_DIR = bin
+BUILD_DOCKER_IMAGE = sslendpoints-build-img
+BUILD_DOCKER_CONTAINER = sslendpoints-build
+BINARIES = sslendpoints
+BINARIES := $(addprefix ${BUILD_DIR}/, ${BINARIES})
+
+.PHONY: all
+all: docker-build
+
+.PHONY: build
+build: ${BUILD_DIR}
+ go build -o "${BINARIES}"
+
+.PHONY: clean
+clean: clean-docker-build clean-build
+
+.PHONY: test
+test:
+ go test ./...
+
+.PHONY: docker-build
+docker-build: ${BINARIES}
+ docker rm "${BUILD_DOCKER_CONTAINER}"
+
+${BINARIES}: docker-container | ${BUILD_DIR}
+ docker cp "${BUILD_DOCKER_CONTAINER}:/$@" $@
+
+.PHONY: docker-container
+docker-container: docker-build-img
+ docker create --name "${BUILD_DOCKER_CONTAINER}" "${BUILD_DOCKER_IMAGE}"
+
+.PHONY: docker-build-img
+docker-build-img:
+ docker build --tag "${BUILD_DOCKER_IMAGE}" .
+
+${BUILD_DIR}:
+ mkdir -p "${BUILD_DIR}"
+
+.PHONY: clean-docker-build
+clean-docker-build:
+ -docker rm "${BUILD_DOCKER_CONTAINER}"
+ -docker rmi "${BUILD_DOCKER_IMAGE}"
+
+.PHONY: clean-build
+clean-build:
+ -rm -f ${BINARIES}
+ -rmdir ${BUILD_DIR}
diff --git a/test/security/sslendpoints/README.rst b/test/security/sslendpoints/README.rst
new file mode 100644
index 000000000..257946d88
--- /dev/null
+++ b/test/security/sslendpoints/README.rst
@@ -0,0 +1,135 @@
+=====================
+ SSL endpoints check
+=====================
+
+Utility for checking if all of the ports exposed outside of Kubernetes cluster
+use SSL tunnels.
+
+Prerequisites
+-------------
+
+Configuration
+~~~~~~~~~~~~~
+
+``-kubeconfig``
+ Optional unless ``$HOME`` is not set. Defaults to ``$HOME/.kube/config``.
+
+``-xfail``
+ Optional list of services with corresponding NodePorts which do not use SSL
+ tunnels. These ports are known as "expected failures" and will not be
+ checked.
+
+Dependencies
+~~~~~~~~~~~~
+
+- nmap_
+
+.. _nmap: https://nmap.org/book/install.html
+
+Build (local)
+~~~~~~~~~~~~~
+
+- go_ (1.11+, tested on 1.13)
+
+.. _go: https://golang.org/doc/install
+
+Build (Docker)
+~~~~~~~~~~~~~~
+
+- Docker_ engine
+- make (optional)
+
+.. _Docker: https://docs.docker.com/install
+
+Test
+~~~~
+
+- Ginkgo_
+- GolangCI-Lint_ (optional)
+
+.. _Ginkgo: https://onsi.github.io/ginkgo/#getting-ginkgo
+.. _GolangCI-Lint: https://github.com/golangci/golangci-lint#install
+
+Building
+--------
+
+Command (local)
+~~~~~~~~~~~~~~~
+
+.. code-block:: shell
+
+ $ mkdir bin
+ $ go build -o bin/sslendpoints
+
+Additional ``bin`` directory and specifying ``go build`` output are used to
+declutter project and maintain compatibility with Docker-based process. Running
+``go build`` without parameters will create ``sslendpoints`` binary in current
+directory.
+
+Command (Docker)
+~~~~~~~~~~~~~~~~
+
+.. code-block:: shell
+
+ $ make # or commands from corresponding "make" targets
+
+
+Running
+-------
+
+Command (local)
+~~~~~~~~~~~~~~~
+
+.. code-block:: shell
+
+ $ bin/sslendpoints [-kubeconfig KUBECONFIG] [-xfail XFAIL]
+
+Command (Docker)
+~~~~~~~~~~~~~~~~
+
+.. code-block:: shell
+
+ $ docker run --rm --volume $KUBECONFIG:/.kube/config \
+ sslendpoints-build-img /bin/sslendpoints
+
+ $ docker run --rm --volume $KUBECONFIG:/opt/config \
+ sslendpoints-build-img /bin/sslendpoints -kubeconfig /opt/config
+
+ $ docker run --rm \
+ --volume $KUBECONFIG:/opt/config \
+ --volume $XFAIL:/opt/xfail \
+ sslendpoints-build-img /bin/sslendpoints \
+ -kubeconfig /opt/config
+ -xfail /opt/xfail
+
+Output
+~~~~~~
+
+.. code-block:: shell
+
+ $ ./sslendpoints -kubeconfig ~/.kube/config.onap
+ 2020/03/17 10:40:29 Host 192.168.2.10
+ 2020/03/17 10:40:29 PORT SERVICE
+ 2020/03/17 10:40:29 30203 sdnc-dgbuilder
+ 2020/03/17 10:40:29 30204 sdc-be
+ 2020/03/17 10:40:29 30207 sdc-fe
+ 2020/03/17 10:40:29 30220 aai-sparky-be
+ 2020/03/17 10:40:29 30226 message-router
+ 2020/03/17 10:40:29 30233 aai
+ 2020/03/17 10:40:29 30256 sdc-wfd-fe
+ 2020/03/17 10:40:29 30257 sdc-wfd-be
+ 2020/03/17 10:40:29 30264 sdc-dcae-fe
+ 2020/03/17 10:40:29 30266 sdc-dcae-dt
+ 2020/03/17 10:40:29 30279 aai-babel
+ 2020/03/17 10:40:29 30406 so-vnfm-adapter
+ 2020/03/17 10:40:29 There are 12 non-SSL NodePorts in the cluster
+
+
+Testing
+-------
+
+.. code-block:: shell
+
+ $ go test ./... # basic
+ $ ginkgo -r # pretty
+ $ golangci-lint run # linters
diff --git a/test/security/sslendpoints/go.mod b/test/security/sslendpoints/go.mod
new file mode 100644
index 000000000..6037ee0ae
--- /dev/null
+++ b/test/security/sslendpoints/go.mod
@@ -0,0 +1,16 @@
+module onap.local/sslendpoints
+
+go 1.13
+
+require (
+ github.com/Ullaakut/nmap v2.0.0+incompatible
+ github.com/imdario/mergo v0.3.8 // indirect
+ github.com/onsi/ginkgo v1.10.1
+ github.com/onsi/gomega v1.7.0
+ golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d // indirect
+ golang.org/x/time v0.0.0-20191024005414-555d28b269f0 // indirect
+ k8s.io/api v0.17.3
+ k8s.io/apimachinery v0.17.3
+ k8s.io/client-go v0.0.0-20190819141724-e14f31a72a77
+ k8s.io/utils v0.0.0-20200124190032-861946025e34 // indirect
+)
diff --git a/test/security/sslendpoints/go.sum b/test/security/sslendpoints/go.sum
new file mode 100644
index 000000000..2ed062aa4
--- /dev/null
+++ b/test/security/sslendpoints/go.sum
@@ -0,0 +1,168 @@
+cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
+github.com/Azure/go-autorest v11.1.2+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
+github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ=
+github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
+github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
+github.com/Ullaakut/nmap v2.0.0+incompatible h1:tNXub052dsnG8+yrgpph9nhVixIBdpRRgzvmQoc8eBA=
+github.com/Ullaakut/nmap v2.0.0+incompatible/go.mod h1:fkC066hwfcoKwlI7DS2ARTggSVtBTZYCjVH1TzuTMaQ=
+github.com/davecgh/go-spew v0.0.0-20151105211317-5215b55f46b2/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dgrijalva/jwt-go v0.0.0-20160705203006-01aeca54ebda/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
+github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM=
+github.com/elazarl/goproxy v0.0.0-20170405201442-c4fc26588b6e/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
+github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
+github.com/evanphx/json-patch v0.0.0-20190203023257-5858425f7550/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
+github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
+github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
+github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
+github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
+github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas=
+github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1/go.mod h1:+35s3my2LFTysnkMfxsJBAMHj/DoqoB9knIWoYG/Vk0=
+github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg=
+github.com/go-openapi/spec v0.0.0-20160808142527-6aced65f8501/go.mod h1:J8+jY1nAiCcj+friV/PDoE1/3eeccG9LYBs0tYvLOWc=
+github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I=
+github.com/gogo/protobuf v0.0.0-20171007142547-342cbe0a0415/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
+github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d h1:3PaI8p3seN09VjbTYC/QWlUZdZ1qS1zGjy7LH2Wt07I=
+github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
+github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/protobuf v0.0.0-20161109072736-4bd1920723d7/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs=
+github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/google/btree v0.0.0-20160524151835-7d79101e329e/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
+github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
+github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/gofuzz v0.0.0-20161122191042-44d81051d367/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI=
+github.com/google/gofuzz v0.0.0-20170612174753-24818f796faf/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI=
+github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw=
+github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d h1:7XGaL1e6bYS1yIonGp9761ExpPPV1ui0SAC59Yube9k=
+github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY=
+github.com/gophercloud/gophercloud v0.0.0-20190126172459-c818fa66e4c8/go.mod h1:3WdhXV3rUYy9p6AUW8d94kr+HS62Y4VL9mBnFxsD8q4=
+github.com/gregjones/httpcache v0.0.0-20170728041850-787624de3eb7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA=
+github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
+github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
+github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
+github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
+github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
+github.com/imdario/mergo v0.3.8 h1:CGgOkSJeqMRmt0D9XLWExdT4m4F1vd3FV3VPt+0VxkQ=
+github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
+github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
+github.com/json-iterator/go v0.0.0-20180701071628-ab8a2e0c74be/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
+github.com/json-iterator/go v1.1.8 h1:QiWkFLKq0T7mpzwOTu6BzNDbfTE8OLrYhVKYMLF46Ok=
+github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
+github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00=
+github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
+github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/reflect2 v0.0.0-20180320133207-05fbef0ca5da/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
+github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
+github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=
+github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
+github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
+github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw=
+github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
+github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
+github.com/onsi/ginkgo v1.10.1 h1:q/mM8GF/n0shIN8SaAZ0V+jnLPzen6WIVZdiwrRlMlo=
+github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
+github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=
+github.com/onsi/gomega v0.0.0-20190113212917-5533ce8a0da3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
+github.com/onsi/gomega v1.7.0 h1:XPnZz8VVBHjVsy1vzJmRwIcSwiUO+JFfrv/xGiigmME=
+github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
+github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=
+github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
+github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
+github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
+github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
+github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v0.0.0-20151208002404-e3a8ff8ce365/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
+github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+golang.org/x/crypto v0.0.0-20181025213731-e84da0312774/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 h1:VklqNMn3ovrHsnt90PveolxSbWFaJdECFbxSq0Mqo2M=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190812203447-cdfb69ac37fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20191004110552-13f9640d40b9 h1:rjwSpXsdiK0dV8/Naq3kAw9ymfAeJIyd0upUIElB+lI=
+golang.org/x/net v0.0.0-20191004110552-13f9640d40b9/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/oauth2 v0.0.0-20190402181905-9f3314589c9a/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d h1:TzXSXBo42m9gQenoE3b9BGiEpg5IG2JkU5FkPIawgtw=
+golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20170830134202-bb24a47a89ea/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456 h1:ng0gs1AKnRRuEMZoTLLlbOd+C17zUDepwGQBb/n+JVg=
+golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.1-0.20181227161524-e6919f6577db/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
+golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
+golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
+golang.org/x/time v0.0.0-20161028155119-f51c12702a4d/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.0.0-20191024005414-555d28b269f0 h1:/5xXl8Y5W96D+TtHSlonuFqGHIWVuyCkGJLwGh9JJFs=
+golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/appengine v1.5.0 h1:KxkO13IPW4Lslp2bz+KHP2E3gtFlrIGNThxkZQ3g+4c=
+google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
+gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
+gopkg.in/inf.v0 v0.9.0/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
+gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=
+gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
+gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
+gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
+gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
+gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+k8s.io/api v0.0.0-20190819141258-3544db3b9e44/go.mod h1:AOxZTnaXR/xiarlQL0JUfwQPxjmKDvVYoRp58cA7lUo=
+k8s.io/api v0.17.3 h1:XAm3PZp3wnEdzekNkcmj/9Y1zdmQYJ1I4GKSBBZ8aG0=
+k8s.io/api v0.17.3/go.mod h1:YZ0OTkuw7ipbe305fMpIdf3GLXZKRigjtZaV5gzC2J0=
+k8s.io/apimachinery v0.0.0-20190817020851-f2f3a405f61d/go.mod h1:3jediapYqJ2w1BFw7lAZPCx7scubsTfosqHkhXCWJKw=
+k8s.io/apimachinery v0.17.3 h1:f+uZV6rm4/tHE7xXgLyToprg6xWairaClGVkm2t8omg=
+k8s.io/apimachinery v0.17.3/go.mod h1:gxLnyZcGNdZTCLnq3fgzyg2A5BVCHTNDFrw8AmuJ+0g=
+k8s.io/client-go v0.0.0-20190819141724-e14f31a72a77 h1:w1BoabVnPpPqQCY3sHK4qVwa12Lk8ip1pKMR1C+qbdo=
+k8s.io/client-go v0.0.0-20190819141724-e14f31a72a77/go.mod h1:DmkJD5UDP87MVqUQ5VJ6Tj9Oen8WzXPhk3la4qpyG4g=
+k8s.io/gengo v0.0.0-20190128074634-0689ccc1d7d6/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0=
+k8s.io/klog v0.0.0-20181102134211-b9b56d5dfc92/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk=
+k8s.io/klog v0.3.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk=
+k8s.io/klog v0.3.1/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk=
+k8s.io/klog v1.0.0 h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8=
+k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I=
+k8s.io/kube-openapi v0.0.0-20190228160746-b3a7cee44a30/go.mod h1:BXM9ceUBTj2QnfH2MK1odQs778ajze1RxcmP6S8RVVc=
+k8s.io/kube-openapi v0.0.0-20191107075043-30be4d16710a/go.mod h1:1TqjTSzOxsLGIKfj0lK8EeCP7K1iUG65v09OM0/WG5E=
+k8s.io/utils v0.0.0-20190221042446-c2654d5206da/go.mod h1:8k8uAuAQ0rXslZKaEWd0c3oVhZz7sSzSiPnVZayjIX0=
+k8s.io/utils v0.0.0-20200124190032-861946025e34 h1:HjlUD6M0K3P8nRXmr2B9o4F9dUy9TCj/aEpReeyi6+k=
+k8s.io/utils v0.0.0-20200124190032-861946025e34/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew=
+sigs.k8s.io/structured-merge-diff v0.0.0-20190525122527-15d366b2352e/go.mod h1:wWxsB5ozmmv/SG7nM11ayaAW51xMvak/t1r0CSlcokI=
+sigs.k8s.io/yaml v1.1.0 h1:4A07+ZFc2wgJwo8YNlQpr1rVlgUDlxXHhPJciaPY5gs=
+sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o=
diff --git a/test/security/sslendpoints/main.go b/test/security/sslendpoints/main.go
new file mode 100644
index 000000000..ddad51c68
--- /dev/null
+++ b/test/security/sslendpoints/main.go
@@ -0,0 +1,174 @@
+package main
+
+import (
+ "encoding/csv"
+ "flag"
+ "log"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+
+ metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+ "k8s.io/client-go/kubernetes"
+ "k8s.io/client-go/tools/clientcmd"
+
+ "github.com/Ullaakut/nmap"
+
+ "onap.local/sslendpoints/ports"
+)
+
+const (
+ ipv4AddrType = "ipv4"
+
+ xfailComma = ' '
+ xfailComment = '#'
+ xfailFields = 2
+)
+
+var (
+ kubeconfig *string
+ namespace *string
+ xfailName *string
+)
+
+func main() {
+ if home := os.Getenv("HOME"); home != "" {
+ kubeconfig = flag.String("kubeconfig", filepath.Join(home, ".kube", "config"), "(optional) absolute path to the kubeconfig file")
+ } else {
+ kubeconfig = flag.String("kubeconfig", "", "absolute path to the kubeconfig file")
+ }
+ namespace = flag.String("namespace", "", "(optional) name of specific namespace to scan")
+ xfailName = flag.String("xfail", "", "(optional) absolute path to the expected failures file")
+ flag.Parse()
+
+ var listOptions metav1.ListOptions
+ if *namespace != "" {
+ listOptions = metav1.ListOptions{FieldSelector: "metadata.namespace=" + *namespace}
+ }
+
+ var xfails map[uint16]string
+ if *xfailName != "" {
+ xfailFile, err := os.Open(*xfailName)
+ if err != nil {
+ log.Printf("Unable to open expected failures file: %v", err)
+ log.Println("All non-SSL NodePorts will be reported")
+ }
+ defer xfailFile.Close()
+
+ r := csv.NewReader(xfailFile)
+ r.Comma = xfailComma
+ r.Comment = xfailComment
+ r.FieldsPerRecord = xfailFields
+
+ xfailData, err := r.ReadAll()
+ if err != nil {
+ log.Printf("Unable to read expected failures file: %v", err)
+ log.Println("All non-SSL NodePorts will be reported")
+ }
+
+ var ok bool
+ xfails, ok = ports.ConvertNodePorts(xfailData)
+ if !ok {
+ log.Println("No usable data in expected failures file")
+ log.Println("All non-SSL NodePorts will be reported")
+ }
+ }
+
+ // use the current context in kubeconfig
+ config, err := clientcmd.BuildConfigFromFlags("", *kubeconfig)
+ if err != nil {
+ log.Panicf("Unable to build cluster config: %v", err)
+ }
+
+ // create the clientset
+ clientset, err := kubernetes.NewForConfig(config)
+ if err != nil {
+ log.Panicf("Unable to build client: %v", err)
+ }
+
+ // get list of nodes to extract addresses for running scan
+ nodes, err := clientset.CoreV1().Nodes().List(metav1.ListOptions{})
+ if err != nil {
+ log.Panicf("Unable to get list of nodes: %v", err)
+ }
+
+ // filter out addresses for running scan
+ addresses, ok := ports.FilterIPAddresses(nodes)
+ if !ok {
+ log.Println("There are no IP addresses to run scan")
+ os.Exit(0)
+ }
+
+ // get list of services to extract nodeport information
+ services, err := clientset.CoreV1().Services("").List(listOptions)
+ if err != nil {
+ log.Panicf("Unable to get list of services: %v", err)
+ }
+
+ // filter out nodeports with corresponding services from service list
+ nodeports, ok := ports.FilterNodePorts(services)
+ if !ok {
+ log.Println("There are no NodePorts in the cluster")
+ os.Exit(0)
+ }
+
+ // filter out expected failures here before running the scan
+ ports.FilterXFailNodePorts(xfails, nodeports)
+
+ // extract ports for running the scan
+ var ports []string
+ for port := range nodeports {
+ ports = append(ports, strconv.Itoa(int(port)))
+ }
+
+ // run nmap on the first address found for given cluster [1] filtering out SSL-tunelled ports
+ // [1] https://kubernetes.io/docs/concepts/services-networking/service/#nodeport
+ // "Each node proxies that port (the same port number on every Node) into your Service."
+ scanner, err := nmap.NewScanner(
+ nmap.WithTargets(addresses[0]),
+ nmap.WithPorts(ports...),
+ nmap.WithServiceInfo(),
+ nmap.WithTimingTemplate(nmap.TimingAggressive),
+ nmap.WithFilterPort(func(p nmap.Port) bool {
+ if p.Service.Tunnel == "ssl" {
+ return false
+ }
+ if strings.HasPrefix(p.State.State, "closed") {
+ return false
+ }
+ if strings.HasPrefix(p.State.State, "filtered") {
+ return false
+ }
+ return true
+ }),
+ )
+ if err != nil {
+ log.Panicf("Unable to create nmap scanner: %v", err)
+ }
+
+ result, _, err := scanner.Run()
+ if err != nil {
+ log.Panicf("Scan failed: %v", err)
+ }
+
+ // scan was run on a single host
+ if len(result.Hosts) < 1 {
+ log.Panicln("No host information in scan results")
+ }
+
+ // host address in the results might be ipv4 or mac
+ for _, address := range result.Hosts[0].Addresses {
+ if address.AddrType == ipv4AddrType {
+ log.Printf("Host %s\n", address)
+ }
+ }
+ log.Printf("PORT\tSERVICE")
+ for _, port := range result.Hosts[0].Ports {
+ log.Printf("%d\t%s\n", port.ID, nodeports[port.ID])
+ }
+
+ // report non-SSL services and their number
+ log.Printf("There are %d non-SSL NodePorts in the cluster\n", len(result.Hosts[0].Ports))
+ os.Exit(len(result.Hosts[0].Ports))
+}
diff --git a/test/security/sslendpoints/ports/ports.go b/test/security/sslendpoints/ports/ports.go
new file mode 100644
index 000000000..dae7dbec7
--- /dev/null
+++ b/test/security/sslendpoints/ports/ports.go
@@ -0,0 +1,66 @@
+package ports
+
+import (
+ "log"
+ "strconv"
+
+ v1 "k8s.io/api/core/v1"
+)
+
+// ConvertNodePorts converts CSV data to NodePorts map.
+func ConvertNodePorts(data [][]string) (map[uint16]string, bool) {
+ result := make(map[uint16]string)
+ for _, record := range data {
+ port, err := strconv.Atoi(record[1])
+ if err != nil {
+ log.Printf("Unable to parse port field: %v", err)
+ continue
+ }
+ result[uint16(port)] = record[0]
+ }
+ return result, len(result) > 0
+}
+
+// FilterXFailNodePorts removes NodePorts expected to fail from map.
+func FilterXFailNodePorts(xfails, nodeports map[uint16]string) {
+ for port, xfailService := range xfails {
+ service, ok := nodeports[port]
+ if !ok {
+ continue
+ }
+ if service != xfailService {
+ continue
+ }
+ delete(nodeports, port)
+ }
+}
+
+// FilterNodePorts extracts NodePorts from ServiceList.
+func FilterNodePorts(services *v1.ServiceList) (map[uint16]string, bool) {
+ nodeports := make(map[uint16]string)
+ for _, service := range services.Items {
+ for _, port := range service.Spec.Ports {
+ if port.NodePort != 0 {
+ nodeports[uint16(port.NodePort)] = service.ObjectMeta.Name
+ }
+ }
+ }
+ return nodeports, len(nodeports) > 0
+}
+
+// FilterIPAddresses extracts IP addresses from NodeList.
+// External IP addresses take precedence over internal ones.
+func FilterIPAddresses(nodes *v1.NodeList) ([]string, bool) {
+ addresses := make([]string, 0)
+ for _, node := range nodes.Items {
+ for _, address := range node.Status.Addresses {
+ switch address.Type {
+ case "InternalIP":
+ addresses = append(addresses, address.Address)
+ case "ExternalIP":
+ addresses = append([]string{address.Address}, addresses...)
+ }
+ }
+ }
+ return addresses, len(addresses) > 0
+}
diff --git a/test/security/sslendpoints/ports/ports_suite_test.go b/test/security/sslendpoints/ports/ports_suite_test.go
new file mode 100644
index 000000000..8a6431e5e
--- /dev/null
+++ b/test/security/sslendpoints/ports/ports_suite_test.go
@@ -0,0 +1,13 @@
+package ports_test
+
+import (
+ "testing"
+
+ . "github.com/onsi/ginkgo"
+ . "github.com/onsi/gomega"
+)
+
+func TestNodeports(t *testing.T) {
+ RegisterFailHandler(Fail)
+ RunSpecs(t, "Nodeports Suite")
+}
diff --git a/test/security/sslendpoints/ports/ports_test.go b/test/security/sslendpoints/ports/ports_test.go
new file mode 100644
index 000000000..2f4f042fa
--- /dev/null
+++ b/test/security/sslendpoints/ports/ports_test.go
@@ -0,0 +1,423 @@
+package ports_test
+
+import (
+ "strconv"
+
+ . "github.com/onsi/ginkgo"
+ . "github.com/onsi/gomega"
+
+ v1 "k8s.io/api/core/v1"
+ metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+
+ . "onap.local/sslendpoints/ports"
+)
+
+var _ = Describe("Ports", func() {
+ const (
+ notNodePort = 0
+ nodePortO = 30200
+ nodePortN = 30201
+ nodePortA = 30202
+ nodePortP = 30203
+ serviceR = "serviceR"
+ serviceL = "serviceL"
+ serviceZ = "serviceZ"
+
+ notParsablePort1 = "3p1c"
+ notParsablePort2 = "0n4p"
+ notParsablePort3 = "5GxD"
+
+ externalIpControl = "1.2.3.4"
+ internalIpControl = "192.168.121.100"
+ internalIpWorker = "192.168.121.200"
+ hostnameControl = "onap-control-1"
+ hostnameWorker = "onap-worker-1"
+ )
+
+ var (
+ csvSomeUnparsable [][]string
+ csvAllUnparsable [][]string
+
+ servicesEmpty *v1.ServiceList
+ servicesSingleWithNodePort *v1.ServiceList
+ servicesSingleWithMultipleNodePorts *v1.ServiceList
+ servicesManyWithoutNodePorts *v1.ServiceList
+ servicesManyWithNodePort *v1.ServiceList
+ servicesManyWithMultipleNodePorts *v1.ServiceList
+ servicesManyMixedNodePorts *v1.ServiceList
+
+ nodesEmpty *v1.NodeList
+ nodesSingleWithIP *v1.NodeList
+ nodesSingleWithBothIPs *v1.NodeList
+ nodesManyWithHostnames *v1.NodeList
+ nodesManyWithMixedIPs *v1.NodeList
+ )
+
+ BeforeEach(func() {
+ csvSomeUnparsable = [][]string{
+ {serviceR, strconv.Itoa(nodePortO)},
+ {serviceL, strconv.Itoa(nodePortN)},
+ {serviceZ, notParsablePort1},
+ }
+ csvAllUnparsable = [][]string{
+ {serviceR, notParsablePort1},
+ {serviceL, notParsablePort2},
+ {serviceZ, notParsablePort3},
+ }
+
+ servicesEmpty = &v1.ServiceList{}
+ servicesSingleWithNodePort = &v1.ServiceList{
+ Items: []v1.Service{
+ {
+ ObjectMeta: metav1.ObjectMeta{Name: serviceR},
+ Spec: v1.ServiceSpec{
+ Ports: []v1.ServicePort{
+ {NodePort: nodePortO},
+ },
+ },
+ },
+ },
+ }
+ servicesSingleWithMultipleNodePorts = &v1.ServiceList{
+ Items: []v1.Service{
+ {
+ ObjectMeta: metav1.ObjectMeta{Name: serviceR},
+ Spec: v1.ServiceSpec{
+ Ports: []v1.ServicePort{
+ {NodePort: nodePortO},
+ {NodePort: nodePortN},
+ },
+ },
+ },
+ },
+ }
+ servicesManyWithoutNodePorts = &v1.ServiceList{
+ Items: []v1.Service{
+ {
+ Spec: v1.ServiceSpec{
+ Ports: []v1.ServicePort{
+ {NodePort: notNodePort},
+ },
+ },
+ },
+ {
+ Spec: v1.ServiceSpec{
+ Ports: []v1.ServicePort{
+ {NodePort: notNodePort},
+ },
+ },
+ },
+ },
+ }
+ servicesManyWithNodePort = &v1.ServiceList{
+ Items: []v1.Service{
+ {
+ ObjectMeta: metav1.ObjectMeta{Name: serviceR},
+ Spec: v1.ServiceSpec{
+ Ports: []v1.ServicePort{
+ {NodePort: nodePortO},
+ },
+ },
+ },
+ {
+ ObjectMeta: metav1.ObjectMeta{Name: serviceL},
+ Spec: v1.ServiceSpec{
+ Ports: []v1.ServicePort{
+ {NodePort: nodePortN},
+ },
+ },
+ },
+ },
+ }
+ servicesManyWithMultipleNodePorts = &v1.ServiceList{
+ Items: []v1.Service{
+ {
+ ObjectMeta: metav1.ObjectMeta{Name: serviceR},
+ Spec: v1.ServiceSpec{
+ Ports: []v1.ServicePort{
+ {NodePort: nodePortO},
+ {NodePort: nodePortN},
+ },
+ },
+ },
+ {
+ ObjectMeta: metav1.ObjectMeta{Name: serviceL},
+ Spec: v1.ServiceSpec{
+ Ports: []v1.ServicePort{
+ {NodePort: nodePortA},
+ {NodePort: nodePortP},
+ },
+ },
+ },
+ },
+ }
+ servicesManyMixedNodePorts = &v1.ServiceList{
+ Items: []v1.Service{
+ {
+ ObjectMeta: metav1.ObjectMeta{Name: serviceR},
+ Spec: v1.ServiceSpec{
+ Ports: []v1.ServicePort{
+ {NodePort: notNodePort},
+ },
+ },
+ },
+ {
+ ObjectMeta: metav1.ObjectMeta{Name: serviceL},
+ Spec: v1.ServiceSpec{
+ Ports: []v1.ServicePort{
+ {NodePort: nodePortO},
+ },
+ },
+ },
+ {
+ ObjectMeta: metav1.ObjectMeta{Name: serviceZ},
+ Spec: v1.ServiceSpec{
+ Ports: []v1.ServicePort{
+ {NodePort: nodePortN},
+ {NodePort: nodePortA},
+ },
+ },
+ },
+ },
+ }
+
+ nodesEmpty = &v1.NodeList{}
+ nodesSingleWithIP = &v1.NodeList{
+ Items: []v1.Node{
+ {
+ Status: v1.NodeStatus{
+ Addresses: []v1.NodeAddress{
+ {Type: "InternalIP", Address: internalIpControl},
+ {Type: "Hostname", Address: hostnameControl},
+ },
+ },
+ },
+ },
+ }
+ nodesSingleWithBothIPs = &v1.NodeList{
+ Items: []v1.Node{
+ {
+ Status: v1.NodeStatus{
+ Addresses: []v1.NodeAddress{
+ {Type: "ExternalIP", Address: externalIpControl},
+ {Type: "InternalIP", Address: internalIpControl},
+ {Type: "Hostname", Address: hostnameControl},
+ },
+ },
+ },
+ },
+ }
+ nodesManyWithHostnames = &v1.NodeList{
+ Items: []v1.Node{
+ {
+ Status: v1.NodeStatus{
+ Addresses: []v1.NodeAddress{
+ {Type: "Hostname", Address: hostnameControl},
+ },
+ },
+ },
+ {
+ Status: v1.NodeStatus{
+ Addresses: []v1.NodeAddress{
+ {Type: "Hostname", Address: hostnameWorker},
+ },
+ },
+ },
+ },
+ }
+ nodesManyWithMixedIPs = &v1.NodeList{
+ Items: []v1.Node{
+ {
+ Status: v1.NodeStatus{
+ Addresses: []v1.NodeAddress{
+ {Type: "ExternalIP", Address: externalIpControl},
+ {Type: "InternalIP", Address: internalIpControl},
+ {Type: "Hostname", Address: hostnameControl},
+ },
+ },
+ },
+ {
+ Status: v1.NodeStatus{
+ Addresses: []v1.NodeAddress{
+ {Type: "InternalIP", Address: internalIpWorker},
+ {Type: "Hostname", Address: hostnameWorker},
+ },
+ },
+ },
+ },
+ }
+ })
+
+ Describe("CSV data to NodePorts conversion", func() {
+ Context("With no data", func() {
+ It("should return an empty map", func() {
+ cnp, ok := ConvertNodePorts([][]string{})
+ Expect(ok).To(BeFalse())
+ Expect(cnp).To(BeEmpty())
+ })
+ })
+ Context("With some ports unparsable", func() {
+ It("should return only parsable records", func() {
+ expected := map[uint16]string{nodePortO: serviceR, nodePortN: serviceL}
+ cnp, ok := ConvertNodePorts(csvSomeUnparsable)
+ Expect(ok).To(BeTrue())
+ Expect(cnp).To(Equal(expected))
+ })
+ })
+ Context("With all ports unparsable", func() {
+ It("should return an empty map", func() {
+ cnp, ok := ConvertNodePorts(csvAllUnparsable)
+ Expect(ok).To(BeFalse())
+ Expect(cnp).To(BeEmpty())
+ })
+ })
+ })
+
+ Describe("NodePorts expected to fail filtering", func() {
+ Context("With no data", func() {
+ It("should leave nodeports unchanged", func() {
+ nodeports := map[uint16]string{nodePortO: serviceR}
+ expected := make(map[uint16]string)
+ for k, v := range nodeports {
+ expected[k] = v
+ }
+ FilterXFailNodePorts(map[uint16]string{}, nodeports)
+ Expect(nodeports).To(Equal(expected))
+ })
+ })
+ Context("With port absent in NodePorts", func() {
+ It("should leave nodeports unchanged", func() {
+ xfail := map[uint16]string{nodePortP: serviceZ}
+ nodeports := map[uint16]string{nodePortO: serviceR}
+ expected := make(map[uint16]string)
+ for k, v := range nodeports {
+ expected[k] = v
+ }
+ FilterXFailNodePorts(xfail, nodeports)
+ Expect(nodeports).To(Equal(expected))
+ })
+ })
+ Context("With other service than in NodePorts", func() {
+ It("should leave nodeports unchanged", func() {
+ xfail := map[uint16]string{nodePortO: serviceZ}
+ nodeports := map[uint16]string{nodePortO: serviceR}
+ expected := make(map[uint16]string)
+ for k, v := range nodeports {
+ expected[k] = v
+ }
+ FilterXFailNodePorts(xfail, nodeports)
+ Expect(nodeports).To(Equal(expected))
+ })
+ })
+ Context("With all NodePorts expected to fail", func() {
+ It("should leave no nodeports", func() {
+ xfail := map[uint16]string{nodePortO: serviceR, nodePortN: serviceL}
+ nodeports := map[uint16]string{nodePortO: serviceR, nodePortN: serviceL}
+ FilterXFailNodePorts(xfail, nodeports)
+ Expect(nodeports).To(BeEmpty())
+ })
+ })
+ })
+
+ Describe("NodePorts extraction", func() {
+ Context("With empty service list", func() {
+ It("should report no NodePorts", func() {
+ nodeports, ok := FilterNodePorts(servicesEmpty)
+ Expect(ok).To(BeFalse())
+ Expect(nodeports).To(BeEmpty())
+ })
+ })
+ Context("With service using single NodePort", func() {
+ It("should report single NodePort", func() {
+ expected := map[uint16]string{nodePortO: serviceR}
+ nodeports, ok := FilterNodePorts(servicesSingleWithNodePort)
+ Expect(ok).To(BeTrue())
+ Expect(nodeports).To(Equal(expected))
+ })
+ })
+ Context("With service using multiple NodePorts", func() {
+ It("should report all NodePorts", func() {
+ expected := map[uint16]string{nodePortO: serviceR, nodePortN: serviceR}
+ nodeports, ok := FilterNodePorts(servicesSingleWithMultipleNodePorts)
+ Expect(ok).To(BeTrue())
+ Expect(nodeports).To(Equal(expected))
+ })
+ })
+ Context("With many services using no NodePorts", func() {
+ It("should report no NodePorts", func() {
+ nodeports, ok := FilterNodePorts(servicesManyWithoutNodePorts)
+ Expect(ok).To(BeFalse())
+ Expect(nodeports).To(BeEmpty())
+ })
+ })
+ Context("With services using single NodePort", func() {
+ It("should report all NodePorts", func() {
+ expected := map[uint16]string{nodePortO: serviceR, nodePortN: serviceL}
+ nodeports, ok := FilterNodePorts(servicesManyWithNodePort)
+ Expect(ok).To(BeTrue())
+ Expect(nodeports).To(Equal(expected))
+ })
+ })
+ Context("With services using multiple NodePorts", func() {
+ It("should report all NodePorts", func() {
+ expected := map[uint16]string{
+ nodePortO: serviceR, nodePortN: serviceR,
+ nodePortA: serviceL, nodePortP: serviceL,
+ }
+ nodeports, ok := FilterNodePorts(servicesManyWithMultipleNodePorts)
+ Expect(ok).To(BeTrue())
+ Expect(nodeports).To(Equal(expected))
+ })
+ })
+ Context("With mixed services", func() {
+ It("should report all NodePorts", func() {
+ expected := map[uint16]string{
+ nodePortO: serviceL, nodePortN: serviceZ, nodePortA: serviceZ,
+ }
+ nodeports, ok := FilterNodePorts(servicesManyMixedNodePorts)
+ Expect(ok).To(BeTrue())
+ Expect(nodeports).To(Equal(expected))
+ })
+ })
+ })
+
+ Describe("IP addresses extraction", func() {
+ Context("With empty node list", func() {
+ It("should report no IP addresses", func() {
+ addresses, ok := FilterIPAddresses(nodesEmpty)
+ Expect(ok).To(BeFalse())
+ Expect(addresses).To(BeEmpty())
+ })
+ })
+ Context("With nodes using only hostnames", func() {
+ It("should report no IP addresses", func() {
+ addresses, ok := FilterIPAddresses(nodesManyWithHostnames)
+ Expect(ok).To(BeFalse())
+ Expect(addresses).To(BeEmpty())
+ })
+ })
+ Context("With node using only internal IP", func() {
+ It("should report internal IP", func() {
+ expected := []string{internalIpControl}
+ addresses, ok := FilterIPAddresses(nodesSingleWithIP)
+ Expect(ok).To(BeTrue())
+ Expect(addresses).To(Equal(expected))
+ })
+ })
+ Context("With node in the cloud", func() {
+ It("should report all IPs in correct order", func() {
+ expected := []string{externalIpControl, internalIpControl}
+ addresses, ok := FilterIPAddresses(nodesSingleWithBothIPs)
+ Expect(ok).To(BeTrue())
+ Expect(addresses).To(Equal(expected))
+ })
+ })
+ Context("With nodes in the mixed cloud", func() {
+ It("should report external IP as the first one", func() {
+ addresses, ok := FilterIPAddresses(nodesManyWithMixedIPs)
+ Expect(ok).To(BeTrue())
+ Expect(addresses[0]).To(Equal(externalIpControl))
+ })
+ })
+ })
+})
diff --git a/test/security/tox.ini b/test/security/tox.ini
new file mode 100644
index 000000000..7ebf8e4c8
--- /dev/null
+++ b/test/security/tox.ini
@@ -0,0 +1,9 @@
+[tox]
+envlist = security
+skipsdist = True
+
+[testenv]
+deps = -r{toxinidir}/requirements.txt
+
+[testenv:security]
+basepython = python3.8
diff --git a/test/vcpe/bin/setup.sh b/test/vcpe/bin/setup.sh
index 5614f1a70..30c25cb62 100755
--- a/test/vcpe/bin/setup.sh
+++ b/test/vcpe/bin/setup.sh
@@ -2,7 +2,7 @@
# COPYRIGHT NOTICE STARTS HERE
#
-# Copyright 2019 Samsung Electronics Co., Ltd.
+# Copyright 2019-2020 Samsung Electronics Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -32,15 +32,8 @@ then
yum -y install python-devel gcc;
fi
+pip_setup_path="$(dirname $0)/../"
+
curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py
python get-pip.py
-pip install -I \
- ipaddress \
- pyyaml \
- mysql-connector-python \
- progressbar2 \
- python-novaclient \
- python-openstackclient \
- python-heatclient \
- kubernetes \
- netaddr
+pip install --no-cache-dir -I ${pip_setup_path}
diff --git a/test/vcpe/cleanvGMUX.py b/test/vcpe/cleanvGMUX.py
index f5269532e..f1e91a757 100755
--- a/test/vcpe/cleanvGMUX.py
+++ b/test/vcpe/cleanvGMUX.py
@@ -4,8 +4,6 @@ import sys
import logging
import requests
import json
-from datetime import datetime
-import time
gmux_ip = ''
logging.basicConfig(level=logging.DEBUG, format='%(message)s')
@@ -32,14 +30,14 @@ def clean_gmux():
interfaces = response.get('interfaces').get('interface')
for inf in interfaces:
- name = inf.get('name')
- if name.startswith('vxlanTun10'):
+ name = inf.get('name')
+ if name.startswith('vxlanTun10'):
logger.debug('name = {0}'.format(name))
delete_interface_v3po_l2(name)
for inf in interfaces:
- name = inf.get('name')
- if name.startswith('vxlanTun10'):
+ name = inf.get('name')
+ if name.startswith('vxlanTun10'):
logger.debug('name = {0}'.format(name))
delete_interface(name)
@@ -52,7 +50,7 @@ def delete_interface(interface_name):
url = '{0}/interface/{1}'.format(base_url, interface_name)
r = requests.delete(url, headers=headers, auth=auth)
logger.debug(r)
-
+
if __name__ == '__main__':
gmux_ip = sys.argv[1]
base_url = 'http://{0}:8183/restconf/config/ietf-interfaces:interfaces'.format(gmux_ip)
diff --git a/test/vcpe/clouds.yaml.example b/test/vcpe/clouds.yaml.example
new file mode 100644
index 000000000..0b2bdc070
--- /dev/null
+++ b/test/vcpe/clouds.yaml.example
@@ -0,0 +1,23 @@
+clouds:
+ integration-release-daily:
+ auth:
+ auth_url: 'http://10.12.25.2:5000'
+ username: 'kxi'
+ user_domain_id: 'default'
+ project_domain_id: 'default'
+ tenant_id: '712b6016580e410b9abfec9ca34953ce'
+ password: 'n3JhGMGuDzD8'
+ project_domain_name: 'Integration-Release-Daily'
+ region_name: 'RegionOne'
+ identity_api_version: '3'
+ integration-sb-07:
+ auth:
+ auth_url: 'http://10.12.25.2:5000'
+ username: 'kxi'
+ user_domain_id: 'default'
+ project_domain_id: 'default'
+ tenant_id: '1e097c6713e74fd7ac8e4295e605ee1e'
+ password: 'n3JhGMGuDzD8'
+ project_domain_name: 'Integration-SB-07'
+ region_name: 'RegionOne'
+ identity_api_version: '3'
diff --git a/test/vcpe/config_sdnc_so.py b/test/vcpe/config_sdnc_so.py
index b19c93acf..6e0257720 100755
--- a/test/vcpe/config_sdnc_so.py
+++ b/test/vcpe/config_sdnc_so.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python
import logging
-from vcpecommon import *
+from vcpecommon import * # pylint: disable=W0614
import csar_parser
@@ -22,7 +22,7 @@ def insert_customer_service_to_sdnc(vcpecommon):
parser.parse_csar(csar_file)
cmds = []
- if False:
+ if False: # pylint: disable=W0125
cmds.append("INSERT INTO SERVICE_MODEL (`service_uuid`, `model_yaml`,`invariant_uuid`,`version`,`name`," \
"`description`,`type`,`category`,`ecomp_naming`,`service_instance_name_prefix`,`filename`," \
"`naming_policy`) values ('{0}', null, '{1}',null,'{2}', 'vCPEService', 'Service','Network L1-3'," \
@@ -34,7 +34,7 @@ def insert_customer_service_to_sdnc(vcpecommon):
for model in parser.vnf_models:
if 'tunnel' in model['modelCustomizationName'].lower() or 'brg' in model['modelCustomizationName'].lower():
- if False:
+ if False: # pylint: disable=W0125
cmds.append("INSERT INTO ALLOTTED_RESOURCE_MODEL (`customization_uuid`,`model_yaml`,`invariant_uuid`," \
"`uuid`,`version`,`naming_policy`,`ecomp_generated_naming`,`depending_service`,`role`,`type`," \
"`service_dependency`,`allotted_resource_type`) VALUES ('{0}',NULL,'{1}','{2}','1.0'," \
@@ -44,7 +44,7 @@ def insert_customer_service_to_sdnc(vcpecommon):
cmds.append("UPDATE ALLOTTED_RESOURCE_MODEL SET `ecomp_generated_naming`='Y' " \
"WHERE `customization_uuid`='{0}'".format(model['modelCustomizationId']))
else:
- if False:
+ if False: # pylint: disable=W0125
cmds.append("INSERT INTO VF_MODEL (`customization_uuid`,`model_yaml`,`invariant_uuid`,`uuid`,`version`," \
"`name`,`naming_policy`,`ecomp_generated_naming`,`avail_zone_max_count`,`nf_function`," \
"`nf_code`,`nf_type`,`nf_role`,`vendor`,`vendor_version`) VALUES ('{0}',NULL,'{1}','{2}'," \
@@ -54,7 +54,7 @@ def insert_customer_service_to_sdnc(vcpecommon):
cmds.append("UPDATE VF_MODEL SET `ecomp_generated_naming`='Y' " \
"WHERE `customization_uuid`='{0}'".format(model['modelCustomizationId']))
- if False:
+ if False: # pylint: disable=W0125
model = parser.vfmodule_models[0]
cmds.append("INSERT INTO VF_MODULE_MODEL (`customization_uuid`,`model_yaml`,`invariant_uuid`,`uuid`,`version`," \
"`vf_module_type`,`availability_zone_count`,`ecomp_generated_vm_assignments`) VALUES ('{0}', NULL," \
@@ -68,29 +68,38 @@ def insert_customer_service_to_sdnc(vcpecommon):
def insert_customer_service_to_so(vcpecommon):
logger = logging.getLogger(__name__)
cmds = []
- if True:
- csar_file = vcpecommon.find_file('rescust', 'csar', 'csar')
- parser = csar_parser.CsarParser()
- parser.parse_csar(csar_file)
- cmds.append("INSERT INTO service_recipe (ACTION, VERSION_STR, DESCRIPTION, ORCHESTRATION_URI, " \
- "SERVICE_PARAM_XSD, RECIPE_TIMEOUT, SERVICE_TIMEOUT_INTERIM, CREATION_TIMESTAMP, " \
- "SERVICE_MODEL_UUID) VALUES ('createInstance','1','{0}'," \
- "'/mso/async/services/CreateVcpeResCustService',NULL,181,NULL, NOW()," \
- "'{1}');".format(parser.svc_model['modelName'], parser.svc_model['modelVersionId']))
- logger.info(
- 'Please manually run the following sql command in SO catalogdb database to insert customer service recipe')
- logger.info('\n'.join(cmds))
- #vcpecommon.execute_cmds_so_db(cmds)
+ csar_file = vcpecommon.find_file('rescust', 'csar', 'csar')
+ parser = csar_parser.CsarParser()
+ parser.parse_csar(csar_file)
+ cmds.append("INSERT IGNORE INTO service_recipe (ACTION, VERSION_STR, DESCRIPTION, ORCHESTRATION_URI, " \
+ "SERVICE_PARAM_XSD, RECIPE_TIMEOUT, SERVICE_TIMEOUT_INTERIM, CREATION_TIMESTAMP, " \
+ "SERVICE_MODEL_UUID) VALUES ('createInstance','1','{0}'," \
+ "'/mso/async/services/CreateVcpeResCustService',NULL,181,NULL, NOW()," \
+ "'{1}');".format(parser.svc_model['modelName'], parser.svc_model['modelVersionId']))
+ if vcpecommon.oom_mode:
+ logger.info('Inserting vcpe customer service workflow entry into SO catalogdb')
+ vcpecommon.execute_cmds_so_db(cmds)
+ else:
+ logger.info('\n\nManually run a command from Rancher node to insert vcpe'
+ 'customer service workflow entry in SO catalogdb:\n'
+ '\nkubectl -n {0} exec {1}-mariadb-galera-mariadb-galera-0'
+ ' -- mysql -uroot -psecretpassword catalogdb -e '
+ '"'.format(vcpecommon.onap_namespace,
+ vcpecommon.onap_environment) + '\n'.join(cmds) + '"')
def insert_sdnc_ip_pool(vcpecommon):
- logger = logging.getLogger(__name__)
- logger.info('Inserting SDNC ip pool to SDNC DB')
- cmds = []
- # Get the VGWs network address
- vgw_net = '.'.join(vcpecommon.preload_network_config['mux_gw'][0].split('.')[:3])
- row_values = []
- # Prepare single INSERT statement with all IP values
- for ip in range(22,250):
- row_values.append("('', 'VGW', 'AVAILABLE','{0}.{1}')".format(vgw_net,ip))
- cmds.append("INSERT IGNORE INTO IPV4_ADDRESS_POOL VALUES" + ', '.join(row_values) + ';')
- vcpecommon.execute_cmds_mariadb(cmds)
+ if vcpecommon.oom_mode:
+ logger = logging.getLogger(__name__)
+ logger.info('Inserting SDNC ip pool to SDNC DB')
+ cmds = []
+ # Get the VGWs network address
+ vgw_net = '.'.join(vcpecommon.preload_network_config['mux_gw'][0].split('.')[:3])
+ row_values = []
+ # Prepare single INSERT statement with all IP values
+ for ip in range(22,250):
+ row_values.append("('', 'VGW', 'AVAILABLE','{0}.{1}')".format(vgw_net,ip))
+ cmds.append("INSERT IGNORE INTO IPV4_ADDRESS_POOL VALUES" + ', '.join(row_values) + ';')
+ vcpecommon.execute_cmds_mariadb(cmds)
+ else:
+ # Ip pool should have been inserted manually according to the documentation
+ pass
diff --git a/test/vcpe/csar_parser.py b/test/vcpe/csar_parser.py
index 7046070e5..da1903cd4 100755
--- a/test/vcpe/csar_parser.py
+++ b/test/vcpe/csar_parser.py
@@ -178,7 +178,7 @@ class CsarParser:
},
"""
node_dic = svc_template['topology_template']['groups']
- for node_name, v in node_dic.items():
+ for node_name, v in node_dic.items(): # pylint: disable=W0612
if v['type'].startswith('org.openecomp.groups.VfModule'):
model = {
'modelType': 'vfModule',
@@ -199,7 +199,7 @@ class CsarParser:
self.vnf_models = [] # this version only support a single VNF in the service template
self.vfmodule_models = [] # this version only support a single VF module in the service template
- svc_template = yaml.load(file(filename, 'r'))
+ svc_template = yaml.load(file(filename, 'r')) # pylint: disable=E0602
self.get_service_model_info(svc_template)
self.get_vnf_and_network_model_info(svc_template)
self.get_vfmodule_model_info(svc_template)
diff --git a/test/vcpe/get_info.py b/test/vcpe/get_info.py
index 2d52a07b7..f86cfb204 100755
--- a/test/vcpe/get_info.py
+++ b/test/vcpe/get_info.py
@@ -1,26 +1,20 @@
#!/usr/bin/env python
-import time
import logging
import json
-import mysql.connector
-import ipaddress
-import re
-import sys
-import base64
-from vcpecommon import *
-import preload
-import vcpe_custom_service
+from vcpecommon import * # pylint: disable=W0614
+import argparse
+# Run the script with [-h|--help] to get usage info
logging.basicConfig(level=logging.INFO, format='%(message)s')
-vcpecommon = VcpeCommon()
+parser = argparse.ArgumentParser(formatter_class=
+ argparse.ArgumentDefaultsHelpFormatter)
+parser.add_argument('--config',help='Configuration file path',default=None)
+args = parser.parse_args()
+
+vcpecommon = VcpeCommon(cfg_file=args.config)
nodes=['brg', 'bng', 'mux', 'dhcp']
hosts = vcpecommon.get_vm_ip(nodes)
print(json.dumps(hosts, indent=4, sort_keys=True))
-
-
-
-
-
diff --git a/test/vcpe/healthcheck-k8s.py b/test/vcpe/healthcheck-k8s.py
index ae33e25b2..2d58cc9d3 100755
--- a/test/vcpe/healthcheck-k8s.py
+++ b/test/vcpe/healthcheck-k8s.py
@@ -2,7 +2,6 @@
import argparse
import json
-import logging
from subprocess import Popen,PIPE,STDOUT,check_output,CalledProcessError
import sys
diff --git a/test/vcpe/loop.py b/test/vcpe/loop.py
index 3dc1948f8..1636e397c 100755
--- a/test/vcpe/loop.py
+++ b/test/vcpe/loop.py
@@ -2,21 +2,21 @@
import time
import logging
-import json
-import mysql.connector
-import ipaddress
-import re
-import sys
-import base64
-from vcpecommon import *
-import preload
+from vcpecommon import * # pylint: disable=W0614
import commands
import vcpe_custom_service
+import argparse
+# Run the script with [-h|--help] to get usage info
logging.basicConfig(level=logging.INFO, format='%(message)s')
-cpecommon = VcpeCommon()
+parser = argparse.ArgumentParser(formatter_class=
+ argparse.ArgumentDefaultsHelpFormatter)
+parser.add_argument('--config',help='Configuration file path',default=None)
+args = parser.parse_args()
+
+cpecommon = VcpeCommon(cfg_file=args.config)
custom = vcpe_custom_service.CustomService(cpecommon)
nodes=['mux']
diff --git a/test/vcpe/preload.py b/test/vcpe/preload.py
index f99d8dee1..62c036c83 100755
--- a/test/vcpe/preload.py
+++ b/test/vcpe/preload.py
@@ -4,7 +4,6 @@ import requests
import json
import sys
from datetime import datetime
-from vcpecommon import *
import csar_parser
import logging
import base64
@@ -27,14 +26,14 @@ class Preload:
with open(template_file) as json_input:
json_data = json.load(json_input)
stk = [json_data]
- while len(stk) > 0:
+ while stk:
data = stk.pop()
for k, v in data.items():
if type(v) is dict:
stk.append(v)
elif type(v) is list:
stk.extend(v)
- elif type(v) is str or type(v) is unicode:
+ elif type(v) is str or type(v) is unicode: # pylint: disable=E0602
if self.vcpecommon.template_variable_symbol in v:
data[k] = self.replace(v, replace_dict)
else:
@@ -103,9 +102,9 @@ class Preload:
self.logger.info('Preloading network ' + network_role)
self.logger.info('template_file:' + template_file)
if 'networkgra' in template_file:
- return self.preload(template_file, replace_dict, self.vcpecommon.sdnc_preload_network_gra_url)
+ return self.preload(template_file, replace_dict, self.vcpecommon.sdnc_preload_network_gra_url)
else:
- return self.preload(template_file, replace_dict, self.vcpecommon.sdnc_preload_network_url)
+ return self.preload(template_file, replace_dict, self.vcpecommon.sdnc_preload_network_url)
def preload(self, template_file, replace_dict, url):
self.logger.debug('tempalte_file:'+ template_file)
@@ -166,10 +165,10 @@ class Preload:
'${suffix}': name_suffix}
replace_dict.update(common_dict)
self.logger.info('Preloading VF Module ' + vfmodule_name)
- if gra_api_flag:
- return self.preload(template_file, replace_dict, self.vcpecommon.sdnc_preload_gra_url)
+ if gra_api_flag:
+ return self.preload(template_file, replace_dict, self.vcpecommon.sdnc_preload_gra_url)
else:
- return self.preload(template_file, replace_dict, self.vcpecommon.sdnc_preload_vnf_url)
+ return self.preload(template_file, replace_dict, self.vcpecommon.sdnc_preload_vnf_url)
def preload_all_networks(self, template_file, name_suffix):
common_dict = {'${' + k + '}': v for k, v in self.vcpecommon.common_preload_config.items()}
@@ -180,11 +179,90 @@ class Preload:
return None
return common_dict
+ def aai_region_query(self, req_method, json=None, verify=False):
+ """
+ Perform actual AAI API request for region
+ :param req_method: request method ({'get','put'})
+ :param json: Json payload
+ :param verify: SSL verify mode
+ :return:
+ """
+ url, headers, auth = (self.vcpecommon.aai_region_query_url,
+ self.vcpecommon.aai_headers,
+ self.vcpecommon.aai_userpass)
+ try:
+ if req_method == 'get':
+ request = requests.get(url, headers=headers, auth=auth,
+ verify=verify)
+ elif req_method == 'put':
+ request = requests.put(url, headers=headers, auth=auth,
+ verify=verify, json=json)
+ else:
+ raise requests.exceptions.RequestException
+ except requests.exceptions.RequestException as e:
+ self.logger.error("Error connecting to AAI API. Error details: " + str(e.message))
+ return False
+ try:
+ assert request.status_code == 200
+ except AssertionError:
+ self.logger.error('AAI request failed. API returned http code ' + str(request.status_code))
+ return False
+ try:
+ return request.json()
+ except ValueError as e:
+ if req_method == 'get':
+ self.logger.error('Unable to parse AAI response: ' + e.message)
+ return False
+ elif req_method == 'put':
+ return request.ok
+ else:
+ return False
+
+ def preload_aai_data(self, template_aai_region_data):
+ """
+ Update aai region data with identity-url
+ :param template_aai_region_data: path to region data template
+ :return:
+ """
+ request = self.aai_region_query('get')
+ if request:
+ # Check if identity-url already updated (for idempotency)
+ self.logger.debug("Regiond data acquired from AAI:\n" + json.dumps(request,indent=4))
+ try:
+ assert request['identity-url']
+ except KeyError:
+ pass
+ else:
+ self.logger.info('Identity-url already present in {0} data, not updating'.format(self.vcpecommon.cloud['--os-region-name']))
+ return
+
+ # Get resource_version and relationship_list from region data
+ resource_version = request['resource-version']
+ relationship_list = request['relationship-list']
+
+ replace_dict = {'${identity-url}': self.vcpecommon.cloud['--os-auth-url'],
+ '${identity_api_version}': self.vcpecommon.cloud['--os-identity-api-version'],
+ '${region_name}': self.vcpecommon.cloud['--os-region-name'],
+ '${resource_version}': resource_version
+ }
+ json_data = self.generate_json(template_aai_region_data, replace_dict)
+ json_data['relationship-list'] = relationship_list
+ self.logger.debug('Region update payload:\n' + json.dumps(json_data,indent=4))
+ else:
+ sys.exit(1)
+
+ # Update region data
+ request = self.aai_region_query('put', json_data)
+ if request:
+ self.logger.info('Successully updated identity-url in {0} '
+ 'region'.format(self.vcpecommon.cloud['--os-region-name']))
+ else:
+ sys.exit(1)
+
def test(self):
# this is for testing purpose
name_suffix = datetime.now().strftime('%Y%m%d%H%M')
- vcpecommon = VcpeCommon()
- preloader = Preload(vcpecommon)
+ preloader = Preload(self.vcpecommon)
network_dict = {'${' + k + '}': v for k, v in self.vcpecommon.common_preload_config.items()}
template_file = 'preload_templates/template.network.json'
diff --git a/test/vcpe/preload_templates/simple_neutron_heat.yaml b/test/vcpe/preload_templates/simple_neutron_heat.yaml
index f64248be8..1df0c7192 100644
--- a/test/vcpe/preload_templates/simple_neutron_heat.yaml
+++ b/test/vcpe/preload_templates/simple_neutron_heat.yaml
@@ -6,9 +6,9 @@ parameters:
description: Name of the Neutron Network
default: ONAP-NW1
shared:
- type: boolean
- description: Shared amongst tenants
- default: False
+ type: boolean
+ description: Shared amongst tenants
+ default: False
outputs:
network_id:
description: Openstack network identifier
diff --git a/test/vcpe/preload_templates/template.network.json b/test/vcpe/preload_templates/template.network.json
index 2acb9e363..e544ba73e 100644
--- a/test/vcpe/preload_templates/template.network.json
+++ b/test/vcpe/preload_templates/template.network.json
@@ -31,7 +31,7 @@
"VNF-API:dhcp-enabled": "N",
"VNF-API:gateway-address": "${subnet_gateway}"
}
- ]
+ ]
},
"VNF-API:sdnc-request-header": {
"VNF-API:svc-action": "reserve",
diff --git a/test/vcpe/preload_templates/template.networkgra.json b/test/vcpe/preload_templates/template.networkgra.json
new file mode 100644
index 000000000..45e8d0e9a
--- /dev/null
+++ b/test/vcpe/preload_templates/template.networkgra.json
@@ -0,0 +1,33 @@
+{
+ "input": {
+ "sdnc-request-header": {
+ "svc-request-id": "robotgra-1234",
+ "svc-action": "reserve"
+ },
+ "preload-network-topology-information": {
+ "network-topology-identifier-structure": {
+ "network-name": "${network_name}",
+ "network-role": "${network_role}",
+ "network-type": "${network_type}",
+ "network-technology": "neutron"
+ },
+ "subnets": [
+ {
+ "start-address": "${subnet_start_ip}",
+ "cidr-mask": "24",
+ "ip-version": "4",
+ "dhcp-enabled": "N",
+ "gateway-address": "${subnet_gateway}",
+ "subnet-name": "${network_name}"
+ }
+ ],
+ "physical-network-name": "Not Aplicable",
+ "is-provider-network": "FALSE",
+ "is-shared-network": "FALSE",
+ "is-external-network": "FALSE",
+ "vpn-bindings": [],
+ "network-policy": [],
+ "route-table-reference": []
+ }
+ }
+}
diff --git a/test/vcpe/preload_templates/template.vcpe_bng_vfmodule.json b/test/vcpe/preload_templates/template.vcpe_bng_vfmodule.json
index 2792cfde6..a0f14a3bf 100644
--- a/test/vcpe/preload_templates/template.vcpe_bng_vfmodule.json
+++ b/test/vcpe/preload_templates/template.vcpe_bng_vfmodule.json
@@ -1,172 +1,170 @@
{
- "VNF-API:input": {
- "VNF-API:request-information": {
- "VNF-API:request-id": "robot12",
- "VNF-API:notification-url": "https://so.onap.org",
- "VNF-API:order-number": "robot12",
- "VNF-API:request-sub-action": "SUPP",
- "VNF-API:request-action": "PreloadVNFRequest",
- "VNF-API:source": "VID",
- "VNF-API:order-version": "1.0"
- },
- "VNF-API:sdnc-request-header": {
- "VNF-API:svc-action": "reserve",
- "VNF-API:svc-notification-url": "https://son.onap.org",
- "VNF-API:svc-request-id": "robot12"
- },
- "VNF-API:vnf-topology-information": {
- "vnf-topology-identifier": {
- "service-type": "${service_type}",
- "vnf-type": "${vnf_type}",
- "generic-vnf-name": "${generic_vnf_name}",
- "generic-vnf-type": "${generic_vnf_type}",
- "vnf-name": "${vnf_name}"
- },
- "VNF-API:vnf-parameters": [
- {
- "vnf-parameter-name": "vcpe_image_name",
- "vnf-parameter-value": "vbng-casa-base-ubuntu-16-04"
- },
- {
- "vnf-parameter-name": "vcpe_flavor_name",
- "vnf-parameter-value": "m1.medium"
- },
- {
- "vnf-parameter-name": "public_net_id",
- "vnf-parameter-value": "${public_net_id}"
- },
- {
- "vnf-parameter-name": "brgemu_bng_private_net_id",
- "vnf-parameter-value": "${brg_bng_net}"
- },
- {
- "vnf-parameter-name": "brgemu_bng_private_subnet_id",
- "vnf-parameter-value": "${brg_bng_subnet}"
- },
- {
- "vnf-parameter-name": "bng_gmux_private_net_id",
- "vnf-parameter-value": "${bng_mux_net}"
- },
- {
- "vnf-parameter-name": "bng_gmux_private_subnet_id",
- "vnf-parameter-value": "${bng_mux_subnet}"
- },
- {
- "vnf-parameter-name": "onap_private_net_id",
- "vnf-parameter-value": "${oam_onap_net}"
- },
- {
- "vnf-parameter-name": "onap_private_subnet_id",
- "vnf-parameter-value": "${oam_onap_subnet}"
- },
- {
- "vnf-parameter-name": "onap_private_net_cidr",
- "vnf-parameter-value": "10.0.0.0/16"
- },
- {
- "vnf-parameter-name": "cpe_signal_net_id",
- "vnf-parameter-value": "${cpe_signal_net}"
- },
- {
- "vnf-parameter-name": "cpe_signal_subnet_id",
- "vnf-parameter-value": "${cpe_signal_subnet}"
- },
- {
- "vnf-parameter-name": "brgemu_bng_private_net_cidr",
- "vnf-parameter-value": "10.3.0.0/24"
- },
- {
- "vnf-parameter-name": "bng_gmux_private_net_cidr",
- "vnf-parameter-value": "10.1.0.0/24"
- },
- {
- "vnf-parameter-name": "cpe_signal_private_net_cidr",
- "vnf-parameter-value": "10.4.0.0/24"
- },
- {
- "vnf-parameter-name": "sdnc_ip_addr",
- "vnf-parameter-value": "${sdnc_oam_ip}"
- },
- {
- "vnf-parameter-name": "vbng_private_ip_0",
- "vnf-parameter-value": "10.3.0.1"
- },
- {
- "vnf-parameter-name": "vbng_private_ip_1",
- "vnf-parameter-value": "10.0.101.10"
- },
- {
- "vnf-parameter-name": "vbng_private_ip_2",
- "vnf-parameter-value": "10.4.0.3"
- },
- {
- "vnf-parameter-name": "vbng_private_ip_3",
- "vnf-parameter-value": "10.1.0.10"
- },
- {
- "vnf-parameter-name": "vbng_name_0",
- "vnf-parameter-value": "zdcpe1cpe01bng01_${suffix}"
- },
- {
- "vnf-parameter-name": "vnf_id",
- "vnf-parameter-value": "vCPE_Infrastructure_Metro_vBNG_demo_app"
- },
- {
- "vnf-parameter-name": "vf_module_id",
- "vnf-parameter-value": "vCPE_Intrastructure_Metro_vBNG"
- },
- {
- "vnf-parameter-name": "dcae_collector_ip",
- "vnf-parameter-value": "10.0.4.102"
- },
- {
- "vnf-parameter-name": "dcae_collector_port",
- "vnf-parameter-value": "8080"
- },
- {
- "vnf-parameter-name": "repo_url_blob",
- "vnf-parameter-value": "https://nexus.onap.org/content/sites/raw"
- },
- {
- "vnf-parameter-name": "repo_url_artifacts",
- "vnf-parameter-value": "https://nexus.onap.org/content/repositories/releases"
- },
- {
- "vnf-parameter-name": "demo_artifacts_version",
- "vnf-parameter-value": "1.1.1"
- },
- {
- "vnf-parameter-name": "script_version",
- "vnf-parameter-value": "1.3.0"
- },
- {
- "vnf-parameter-name": "key_name",
- "vnf-parameter-value": "vbng_key"
- },
- {
- "vnf-parameter-name": "pub_key",
- "vnf-parameter-value": "${pub_key}"
- },
- {
- "vnf-parameter-name": "cloud_env",
- "vnf-parameter-value": "openstack"
- },
- {
- "vnf-parameter-name": "vpp_source_repo_url",
- "vnf-parameter-value": "https://gerrit.fd.io/r/vpp"
- },
- {
- "vnf-parameter-name": "vpp_source_repo_branch",
- "vnf-parameter-value": "stable/1704"
- },
- {
- "vnf-parameter-name": "vpp_patch_url",
- "vnf-parameter-value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-radius-client-for-vbng/src/patches/Vpp-Integrate-FreeRADIUS-Client-for-vBNG.patch"
- }
- ],
- "VNF-API:vnf-assignments": {
- }
- }
- }
+ "VNF-API:input": {
+ "VNF-API:request-information": {
+ "VNF-API:request-id": "robot12",
+ "VNF-API:notification-url": "https://so.onap.org",
+ "VNF-API:order-number": "robot12",
+ "VNF-API:request-sub-action": "SUPP",
+ "VNF-API:request-action": "PreloadVNFRequest",
+ "VNF-API:source": "VID",
+ "VNF-API:order-version": "1.0"
+ },
+ "VNF-API:sdnc-request-header": {
+ "VNF-API:svc-action": "reserve",
+ "VNF-API:svc-notification-url": "https://son.onap.org",
+ "VNF-API:svc-request-id": "robot12"
+ },
+ "VNF-API:vnf-topology-information": {
+ "vnf-topology-identifier": {
+ "service-type": "${service_type}",
+ "vnf-type": "${vnf_type}",
+ "generic-vnf-name": "${generic_vnf_name}",
+ "generic-vnf-type": "${generic_vnf_type}",
+ "vnf-name": "${vnf_name}"
+ },
+ "VNF-API:vnf-parameters": [
+ {
+ "vnf-parameter-name": "vcpe_image_name",
+ "vnf-parameter-value": "vbng-casa-base-ubuntu-16-04"
+ },
+ {
+ "vnf-parameter-name": "vcpe_flavor_name",
+ "vnf-parameter-value": "m1.medium"
+ },
+ {
+ "vnf-parameter-name": "public_net_id",
+ "vnf-parameter-value": "${public_net_id}"
+ },
+ {
+ "vnf-parameter-name": "brgemu_bng_private_net_id",
+ "vnf-parameter-value": "${brg_bng_net}"
+ },
+ {
+ "vnf-parameter-name": "brgemu_bng_private_subnet_id",
+ "vnf-parameter-value": "${brg_bng_subnet}"
+ },
+ {
+ "vnf-parameter-name": "bng_gmux_private_net_id",
+ "vnf-parameter-value": "${bng_mux_net}"
+ },
+ {
+ "vnf-parameter-name": "bng_gmux_private_subnet_id",
+ "vnf-parameter-value": "${bng_mux_subnet}"
+ },
+ {
+ "vnf-parameter-name": "onap_private_net_id",
+ "vnf-parameter-value": "${oam_onap_net}"
+ },
+ {
+ "vnf-parameter-name": "onap_private_subnet_id",
+ "vnf-parameter-value": "${oam_onap_subnet}"
+ },
+ {
+ "vnf-parameter-name": "onap_private_net_cidr",
+ "vnf-parameter-value": "10.0.0.0/16"
+ },
+ {
+ "vnf-parameter-name": "cpe_signal_net_id",
+ "vnf-parameter-value": "${cpe_signal_net}"
+ },
+ {
+ "vnf-parameter-name": "cpe_signal_subnet_id",
+ "vnf-parameter-value": "${cpe_signal_subnet}"
+ },
+ {
+ "vnf-parameter-name": "brgemu_bng_private_net_cidr",
+ "vnf-parameter-value": "10.3.0.0/24"
+ },
+ {
+ "vnf-parameter-name": "bng_gmux_private_net_cidr",
+ "vnf-parameter-value": "10.1.0.0/24"
+ },
+ {
+ "vnf-parameter-name": "cpe_signal_private_net_cidr",
+ "vnf-parameter-value": "10.4.0.0/24"
+ },
+ {
+ "vnf-parameter-name": "sdnc_ip_addr",
+ "vnf-parameter-value": "${sdnc_oam_ip}"
+ },
+ {
+ "vnf-parameter-name": "vbng_private_ip_0",
+ "vnf-parameter-value": "10.3.0.1"
+ },
+ {
+ "vnf-parameter-name": "vbng_private_ip_1",
+ "vnf-parameter-value": "10.0.101.10"
+ },
+ {
+ "vnf-parameter-name": "vbng_private_ip_2",
+ "vnf-parameter-value": "10.4.0.3"
+ },
+ {
+ "vnf-parameter-name": "vbng_private_ip_3",
+ "vnf-parameter-value": "10.1.0.10"
+ },
+ {
+ "vnf-parameter-name": "vbng_name_0",
+ "vnf-parameter-value": "zdcpe1cpe01bng01_${suffix}"
+ },
+ {
+ "vnf-parameter-name": "vnf_id",
+ "vnf-parameter-value": "vCPE_Infrastructure_Metro_vBNG_demo_app"
+ },
+ {
+ "vnf-parameter-name": "vf_module_id",
+ "vnf-parameter-value": "vCPE_Intrastructure_Metro_vBNG"
+ },
+ {
+ "vnf-parameter-name": "dcae_collector_ip",
+ "vnf-parameter-value": "10.0.4.102"
+ },
+ {
+ "vnf-parameter-name": "dcae_collector_port",
+ "vnf-parameter-value": "8080"
+ },
+ {
+ "vnf-parameter-name": "repo_url_blob",
+ "vnf-parameter-value": "https://nexus.onap.org/content/sites/raw"
+ },
+ {
+ "vnf-parameter-name": "repo_url_artifacts",
+ "vnf-parameter-value": "https://nexus.onap.org/content/repositories/releases"
+ },
+ {
+ "vnf-parameter-name": "demo_artifacts_version",
+ "vnf-parameter-value": "1.1.1"
+ },
+ {
+ "vnf-parameter-name": "script_version",
+ "vnf-parameter-value": "1.3.0"
+ },
+ {
+ "vnf-parameter-name": "key_name",
+ "vnf-parameter-value": "vbng_key"
+ },
+ {
+ "vnf-parameter-name": "pub_key",
+ "vnf-parameter-value": "${pub_key}"
+ },
+ {
+ "vnf-parameter-name": "cloud_env",
+ "vnf-parameter-value": "openstack"
+ },
+ {
+ "vnf-parameter-name": "vpp_source_repo_url",
+ "vnf-parameter-value": "https://gerrit.fd.io/r/vpp"
+ },
+ {
+ "vnf-parameter-name": "vpp_source_repo_branch",
+ "vnf-parameter-value": "stable/1704"
+ },
+ {
+ "vnf-parameter-name": "vpp_patch_url",
+ "vnf-parameter-value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-radius-client-for-vbng/src/patches/Vpp-Integrate-FreeRADIUS-Client-for-vBNG.patch"
+ }
+ ],
+ "VNF-API:vnf-assignments": {}
+ }
+ }
}
-
diff --git a/test/vcpe/preload_templates/template.vcpe_bnggra_vfmodule.json b/test/vcpe/preload_templates/template.vcpe_bnggra_vfmodule.json
index 544d8e1b1..ee804c42f 100644
--- a/test/vcpe/preload_templates/template.vcpe_bnggra_vfmodule.json
+++ b/test/vcpe/preload_templates/template.vcpe_bnggra_vfmodule.json
@@ -1,173 +1,174 @@
{
- "GENERIC-RESOURCE-API:input": {
- "GENERIC-RESOURCE-API:request-information": {
- "GENERIC-RESOURCE-API:notification-url": "https://so.onap.org",
- "GENERIC-RESOURCE-API:order-number": "robot12",
- "GENERIC-RESOURCE-API:order-version": "1.0",
- "GENERIC-RESOURCE-API:request-action": "PreloadVfModuleRequest",
- "GENERIC-RESOURCE-API:request-id": "robot12",
- "GENERIC-RESOURCE-API:source": "VID"
- },
- "GENERIC-RESOURCE-API:sdnc-request-header": {
- "GENERIC-RESOURCE-API:svc-action": "reserve",
- "GENERIC-RESOURCE-API:svc-notification-url": "https://son.onap.org",
- "GENERIC-RESOURCE-API:svc-request-id": "robot12"
- },
- "preload-vf-module-topology-information": {
- "vf-module-topology": {
- "vf-module-parameters": {
- "param": [
- {
- "name": "vcpe_image_name",
- "value": "vbng-casa-base-ubuntu-16-04"
- },
- {
- "name": "vcpe_flavor_name",
- "value": "m1.medium"
- },
- {
- "name": "public_net_id",
- "value": "${public_net_id}"
- },
- {
- "name": "brgemu_bng_private_net_id",
- "value": "${brg_bng_net}"
- },
- {
- "name": "brgemu_bng_private_subnet_id",
- "value": "${brg_bng_subnet}"
- },
- {
- "name": "bng_gmux_private_net_id",
- "value": "${bng_mux_net}"
- },
- {
- "name": "bng_gmux_private_subnet_id",
- "value": "${bng_mux_subnet}"
- },
- {
- "name": "onap_private_net_id",
- "value": "${oam_onap_net}"
- },
- {
- "name": "onap_private_subnet_id",
- "value": "${oam_onap_subnet}"
- },
- {
- "name": "onap_private_net_cidr",
- "value": "10.0.0.0/16"
- },
- {
- "name": "cpe_signal_net_id",
- "value": "${cpe_signal_net}"
- },
- {
- "name": "cpe_signal_subnet_id",
- "value": "${cpe_signal_subnet}"
- },
- {
- "name": "brgemu_bng_private_net_cidr",
- "value": "10.3.0.0/24"
- },
- {
- "name": "bng_gmux_private_net_cidr",
- "value": "10.1.0.0/24"
- },
- {
- "name": "cpe_signal_private_net_cidr",
- "value": "10.4.0.0/24"
- },
- {
- "name": "sdnc_ip_addr",
- "value": "${sdnc_oam_ip}"
- },
- {
- "name": "vbng_private_ip_0",
- "value": "10.3.0.1"
- },
- {
- "name": "vbng_private_ip_1",
- "value": "10.0.101.10"
- },
- {
- "name": "vbng_private_ip_2",
- "value": "10.4.0.3"
- },
- {
- "name": "vbng_private_ip_3",
- "value": "10.1.0.10"
- },
- {
- "name": "vbng_name_0",
- "value": "zdcpe1cpe01bng01_${suffix}"
- },
- {
- "name": "vnf_id",
- "value": "vCPE_Infrastructure_Metro_vBNG_demo_app"
- },
- {
- "name": "vf_module_id",
- "value": "vCPE_Intrastructure_Metro_vBNG"
- },
- {
- "name": "dcae_collector_ip",
- "value": "10.0.4.102"
- },
- {
- "name": "dcae_collector_port",
- "value": "8080"
- },
- {
- "name": "repo_url_blob",
- "value": "https://nexus.onap.org/content/sites/raw"
- },
- {
- "name": "repo_url_artifacts",
- "value": "https://nexus.onap.org/content/repositories/releases"
- },
- {
- "name": "demo_artifacts_version",
- "value": "1.1.1"
- },
- {
- "name": "script_version",
- "value": "1.3.0"
- },
- {
- "name": "key_name",
- "value": "vbng_key"
- },
- {
- "name": "pub_key",
- "value": "${pub_key}"
- },
- {
- "name": "cloud_env",
- "value": "openstack"
- },
- {
- "name": "vpp_source_repo_url",
- "value": "https://gerrit.fd.io/r/vpp"
- },
- {
- "name": "vpp_source_repo_branch",
- "value": "stable/1704"
- },
- {
- "name": "vpp_patch_url",
- "value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-radius-client-for-vbng/src/patches/Vpp-Integrate-FreeRADIUS-Client-for-vBNG.patch"
- }
- ] },
- "vf-module-assignments": {},
- "vf-module-topology-identifier": {
- "vf-module-name": "${vnf_name}"
- }
- },
- "vnf-resource-assignments": {},
- "vnf-topology-identifier-structure": {
- "nf-type": "${vnf_type}",
- "vnf-id": "${vnf_name}"
- }
+ "GENERIC-RESOURCE-API:input": {
+ "GENERIC-RESOURCE-API:request-information": {
+ "GENERIC-RESOURCE-API:notification-url": "https://so.onap.org",
+ "GENERIC-RESOURCE-API:order-number": "robot12",
+ "GENERIC-RESOURCE-API:order-version": "1.0",
+ "GENERIC-RESOURCE-API:request-action": "PreloadVfModuleRequest",
+ "GENERIC-RESOURCE-API:request-id": "robot12",
+ "GENERIC-RESOURCE-API:source": "VID"
+ },
+ "GENERIC-RESOURCE-API:sdnc-request-header": {
+ "GENERIC-RESOURCE-API:svc-action": "reserve",
+ "GENERIC-RESOURCE-API:svc-notification-url": "https://son.onap.org",
+ "GENERIC-RESOURCE-API:svc-request-id": "robot12"
+ },
+ "preload-vf-module-topology-information": {
+ "vf-module-topology": {
+ "vf-module-parameters": {
+ "param": [
+ {
+ "name": "vcpe_image_name",
+ "value": "vbng-casa-base-ubuntu-16-04"
+ },
+ {
+ "name": "vcpe_flavor_name",
+ "value": "m1.medium"
+ },
+ {
+ "name": "public_net_id",
+ "value": "${public_net_id}"
+ },
+ {
+ "name": "brgemu_bng_private_net_id",
+ "value": "${brg_bng_net}"
+ },
+ {
+ "name": "brgemu_bng_private_subnet_id",
+ "value": "${brg_bng_subnet}"
+ },
+ {
+ "name": "bng_gmux_private_net_id",
+ "value": "${bng_mux_net}"
+ },
+ {
+ "name": "bng_gmux_private_subnet_id",
+ "value": "${bng_mux_subnet}"
+ },
+ {
+ "name": "onap_private_net_id",
+ "value": "${oam_onap_net}"
+ },
+ {
+ "name": "onap_private_subnet_id",
+ "value": "${oam_onap_subnet}"
+ },
+ {
+ "name": "onap_private_net_cidr",
+ "value": "10.0.0.0/16"
+ },
+ {
+ "name": "cpe_signal_net_id",
+ "value": "${cpe_signal_net}"
+ },
+ {
+ "name": "cpe_signal_subnet_id",
+ "value": "${cpe_signal_subnet}"
+ },
+ {
+ "name": "brgemu_bng_private_net_cidr",
+ "value": "10.3.0.0/24"
+ },
+ {
+ "name": "bng_gmux_private_net_cidr",
+ "value": "10.1.0.0/24"
+ },
+ {
+ "name": "cpe_signal_private_net_cidr",
+ "value": "10.4.0.0/24"
+ },
+ {
+ "name": "sdnc_ip_addr",
+ "value": "${sdnc_oam_ip}"
+ },
+ {
+ "name": "vbng_private_ip_0",
+ "value": "10.3.0.1"
+ },
+ {
+ "name": "vbng_private_ip_1",
+ "value": "10.0.101.10"
+ },
+ {
+ "name": "vbng_private_ip_2",
+ "value": "10.4.0.3"
+ },
+ {
+ "name": "vbng_private_ip_3",
+ "value": "10.1.0.10"
+ },
+ {
+ "name": "vbng_name_0",
+ "value": "zdcpe1cpe01bng01_${suffix}"
+ },
+ {
+ "name": "vnf_id",
+ "value": "vCPE_Infrastructure_Metro_vBNG_demo_app"
+ },
+ {
+ "name": "vf_module_id",
+ "value": "vCPE_Intrastructure_Metro_vBNG"
+ },
+ {
+ "name": "dcae_collector_ip",
+ "value": "10.0.4.102"
+ },
+ {
+ "name": "dcae_collector_port",
+ "value": "8080"
+ },
+ {
+ "name": "repo_url_blob",
+ "value": "https://nexus.onap.org/content/sites/raw"
+ },
+ {
+ "name": "repo_url_artifacts",
+ "value": "https://nexus.onap.org/content/repositories/releases"
+ },
+ {
+ "name": "demo_artifacts_version",
+ "value": "1.1.1"
+ },
+ {
+ "name": "script_version",
+ "value": "1.3.0"
+ },
+ {
+ "name": "key_name",
+ "value": "vbng_key"
+ },
+ {
+ "name": "pub_key",
+ "value": "${pub_key}"
+ },
+ {
+ "name": "cloud_env",
+ "value": "openstack"
+ },
+ {
+ "name": "vpp_source_repo_url",
+ "value": "https://gerrit.fd.io/r/vpp"
+ },
+ {
+ "name": "vpp_source_repo_branch",
+ "value": "stable/1704"
+ },
+ {
+ "name": "vpp_patch_url",
+ "value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-radius-client-for-vbng/src/patches/Vpp-Integrate-FreeRADIUS-Client-for-vBNG.patch"
}
+ ]
+ },
+ "vf-module-assignments": {},
+ "vf-module-topology-identifier": {
+ "vf-module-name": "${vnf_name}"
+ }
+ },
+ "vnf-resource-assignments": {},
+ "vnf-topology-identifier-structure": {
+ "nf-type": "${vnf_type}",
+ "vnf-id": "${vnf_name}"
}
+ }
+ }
}
diff --git a/test/vcpe/preload_templates/template.vcpe_brgemu_vfmodule.json b/test/vcpe/preload_templates/template.vcpe_brgemu_vfmodule.json
index 2630ee7af..cf7888860 100644
--- a/test/vcpe/preload_templates/template.vcpe_brgemu_vfmodule.json
+++ b/test/vcpe/preload_templates/template.vcpe_brgemu_vfmodule.json
@@ -1,138 +1,138 @@
{
- "VNF-API:input": {
- "VNF-API:request-information": {
- "VNF-API:notification-url": "https://so.onap.org",
- "VNF-API:order-number": "robot12",
- "VNF-API:order-version": "1.0",
- "VNF-API:request-action": "PreloadVNFRequest",
- "VNF-API:request-id": "robot12",
- "VNF-API:request-sub-action": "SUPP",
- "VNF-API:source": "VID"
- },
- "VNF-API:sdnc-request-header": {
- "VNF-API:svc-action": "reserve",
- "VNF-API:svc-notification-url": "https://son.onap.org",
- "VNF-API:svc-request-id": "robot12"
- },
- "VNF-API:vnf-topology-information": {
- "VNF-API:vnf-assignments": {},
- "VNF-API:vnf-parameters": [
- {
- "vnf-parameter-name": "vcpe_image_name",
- "vnf-parameter-value": "vbrg-casa-base-ubuntu-16-04"
- },
- {
- "vnf-parameter-name": "vcpe_flavor_name",
- "vnf-parameter-value": "m1.medium"
- },
- {
- "vnf-parameter-name": "public_net_id",
- "vnf-parameter-value": "${public_net_id}"
- },
- {
- "vnf-parameter-name": "vbrgemu_bng_private_net_id",
- "vnf-parameter-value": "${brg_bng_net}"
- },
- {
- "vnf-parameter-name": "vbrgemu_bng_private_subnet_id",
- "vnf-parameter-value": "${brg_bng_subnet}"
- },
- {
- "vnf-parameter-name": "onap_private_net_id",
- "vnf-parameter-value": "${oam_onap_net}"
- },
- {
- "vnf-parameter-name": "onap_private_subnet_id",
- "vnf-parameter-value": "${oam_onap_subnet}"
- },
- {
- "vnf-parameter-name": "onap_private_net_cidr",
- "vnf-parameter-value": "10.0.0.0/16"
- },
- {
- "vnf-parameter-name": "compile_state",
- "vnf-parameter-value": "done"
- },
- {
- "vnf-parameter-name": "vbrgemu_bng_private_net_cidr",
- "vnf-parameter-value": "10.3.0.0/24"
- },
- {
- "vnf-parameter-name": "vbrgemu_private_ip_0",
- "vnf-parameter-value": "10.3.0.21"
- },
- {
- "vnf-parameter-name": "sdnc_ip",
- "vnf-parameter-value": "${sdnc_oam_ip}"
- },
- {
- "vnf-parameter-name": "vbrgemu_name_0",
- "vnf-parameter-value": "zdcpe1cpe01brgemu01_${suffix}"
- },
- {
- "vnf-parameter-name": "vnf_id",
- "vnf-parameter-value": "vCPE_Infrastructure_BGREMU_demo_app_${suffix}"
- },
- {
- "vnf-parameter-name": "vf_module_id",
- "vnf-parameter-value": "vCPE_Customer_BRGEMU_${suffix}"
- },
- {
- "vnf-parameter-name": "repo_url_blob",
- "vnf-parameter-value": "https://nexus.onap.org/content/sites/raw"
- },
- {
- "vnf-parameter-name": "repo_url_artifacts",
- "vnf-parameter-value": "https://nexus.onap.org/content/repositories/releases"
- },
- {
- "vnf-parameter-name": "demo_artifacts_version",
- "vnf-parameter-value": "1.1.1"
- },
- {
- "vnf-parameter-name": "script_version",
- "vnf-parameter-value": "1.3.0"
- },
- {
- "vnf-parameter-name": "key_name",
- "vnf-parameter-value": "vbrgemu_key"
- },
- {
- "vnf-parameter-name": "pub_key",
- "vnf-parameter-value": "${pub_key}"
- },
- {
- "vnf-parameter-name": "cloud_env",
- "vnf-parameter-value": "openstack"
- },
- {
- "vnf-parameter-name": "vpp_source_repo_url",
- "vnf-parameter-value": "https://gerrit.fd.io/r/vpp"
- },
- {
- "vnf-parameter-name": "vpp_source_repo_branch",
- "vnf-parameter-value": "stable/1704"
- },
- {
- "vnf-parameter-name": "hc2vpp_source_repo_url",
- "vnf-parameter-value": "https://gerrit.fd.io/r/hc2vpp"
- },
- {
- "vnf-parameter-name": "hc2vpp_source_repo_branch",
- "vnf-parameter-value": "stable/1704"
- },
- {
- "vnf-parameter-name": "vpp_patch_url",
- "vnf-parameter-value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-option-82-for-vbrg/src/patches/VPP-Add-Option82-Nat-Filter-For-vBRG.patch"
- }
- ],
- "vnf-topology-identifier": {
- "generic-vnf-name": "${generic_vnf_name}",
- "generic-vnf-type": "${generic_vnf_type}",
- "service-type": "${service_type}",
- "vnf-name": "${vnf_name}",
- "vnf-type": "${vnf_type}"
- }
+ "VNF-API:input": {
+ "VNF-API:request-information": {
+ "VNF-API:notification-url": "https://so.onap.org",
+ "VNF-API:order-number": "robot12",
+ "VNF-API:order-version": "1.0",
+ "VNF-API:request-action": "PreloadVNFRequest",
+ "VNF-API:request-id": "robot12",
+ "VNF-API:request-sub-action": "SUPP",
+ "VNF-API:source": "VID"
+ },
+ "VNF-API:sdnc-request-header": {
+ "VNF-API:svc-action": "reserve",
+ "VNF-API:svc-notification-url": "https://son.onap.org",
+ "VNF-API:svc-request-id": "robot12"
+ },
+ "VNF-API:vnf-topology-information": {
+ "VNF-API:vnf-assignments": {},
+ "VNF-API:vnf-parameters": [
+ {
+ "vnf-parameter-name": "vcpe_image_name",
+ "vnf-parameter-value": "vbrg-casa-base-ubuntu-16-04"
+ },
+ {
+ "vnf-parameter-name": "vcpe_flavor_name",
+ "vnf-parameter-value": "m1.medium"
+ },
+ {
+ "vnf-parameter-name": "public_net_id",
+ "vnf-parameter-value": "${public_net_id}"
+ },
+ {
+ "vnf-parameter-name": "vbrgemu_bng_private_net_id",
+ "vnf-parameter-value": "${brg_bng_net}"
+ },
+ {
+ "vnf-parameter-name": "vbrgemu_bng_private_subnet_id",
+ "vnf-parameter-value": "${brg_bng_subnet}"
+ },
+ {
+ "vnf-parameter-name": "onap_private_net_id",
+ "vnf-parameter-value": "${oam_onap_net}"
+ },
+ {
+ "vnf-parameter-name": "onap_private_subnet_id",
+ "vnf-parameter-value": "${oam_onap_subnet}"
+ },
+ {
+ "vnf-parameter-name": "onap_private_net_cidr",
+ "vnf-parameter-value": "10.0.0.0/16"
+ },
+ {
+ "vnf-parameter-name": "compile_state",
+ "vnf-parameter-value": "done"
+ },
+ {
+ "vnf-parameter-name": "vbrgemu_bng_private_net_cidr",
+ "vnf-parameter-value": "10.3.0.0/24"
+ },
+ {
+ "vnf-parameter-name": "vbrgemu_private_ip_0",
+ "vnf-parameter-value": "10.3.0.21"
+ },
+ {
+ "vnf-parameter-name": "sdnc_ip",
+ "vnf-parameter-value": "${sdnc_oam_ip}"
+ },
+ {
+ "vnf-parameter-name": "vbrgemu_name_0",
+ "vnf-parameter-value": "zdcpe1cpe01brgemu01_${suffix}"
+ },
+ {
+ "vnf-parameter-name": "vnf_id",
+ "vnf-parameter-value": "vCPE_Infrastructure_BGREMU_demo_app_${suffix}"
+ },
+ {
+ "vnf-parameter-name": "vf_module_id",
+ "vnf-parameter-value": "vCPE_Customer_BRGEMU_${suffix}"
+ },
+ {
+ "vnf-parameter-name": "repo_url_blob",
+ "vnf-parameter-value": "https://nexus.onap.org/content/sites/raw"
+ },
+ {
+ "vnf-parameter-name": "repo_url_artifacts",
+ "vnf-parameter-value": "https://nexus.onap.org/content/repositories/releases"
+ },
+ {
+ "vnf-parameter-name": "demo_artifacts_version",
+ "vnf-parameter-value": "1.1.1"
+ },
+ {
+ "vnf-parameter-name": "script_version",
+ "vnf-parameter-value": "1.3.0"
+ },
+ {
+ "vnf-parameter-name": "key_name",
+ "vnf-parameter-value": "vbrgemu_key"
+ },
+ {
+ "vnf-parameter-name": "pub_key",
+ "vnf-parameter-value": "${pub_key}"
+ },
+ {
+ "vnf-parameter-name": "cloud_env",
+ "vnf-parameter-value": "openstack"
+ },
+ {
+ "vnf-parameter-name": "vpp_source_repo_url",
+ "vnf-parameter-value": "https://gerrit.fd.io/r/vpp"
+ },
+ {
+ "vnf-parameter-name": "vpp_source_repo_branch",
+ "vnf-parameter-value": "stable/1704"
+ },
+ {
+ "vnf-parameter-name": "hc2vpp_source_repo_url",
+ "vnf-parameter-value": "https://gerrit.fd.io/r/hc2vpp"
+ },
+ {
+ "vnf-parameter-name": "hc2vpp_source_repo_branch",
+ "vnf-parameter-value": "stable/1704"
+ },
+ {
+ "vnf-parameter-name": "vpp_patch_url",
+ "vnf-parameter-value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-option-82-for-vbrg/src/patches/VPP-Add-Option82-Nat-Filter-For-vBRG.patch"
}
+ ],
+ "vnf-topology-identifier": {
+ "generic-vnf-name": "${generic_vnf_name}",
+ "generic-vnf-type": "${generic_vnf_type}",
+ "service-type": "${service_type}",
+ "vnf-name": "${vnf_name}",
+ "vnf-type": "${vnf_type}"
+ }
}
+ }
}
diff --git a/test/vcpe/preload_templates/template.vcpe_brgemugra_vfmodule.json b/test/vcpe/preload_templates/template.vcpe_brgemugra_vfmodule.json
index 4d5b2d425..f27826138 100644
--- a/test/vcpe/preload_templates/template.vcpe_brgemugra_vfmodule.json
+++ b/test/vcpe/preload_templates/template.vcpe_brgemugra_vfmodule.json
@@ -1,141 +1,142 @@
{
- "GENERIC-RESOURCE-API:input": {
- "GENERIC-RESOURCE-API:request-information": {
- "GENERIC-RESOURCE-API:notification-url": "https://so.onap.org",
- "GENERIC-RESOURCE-API:order-number": "robot12",
- "GENERIC-RESOURCE-API:order-version": "1.0",
- "GENERIC-RESOURCE-API:request-action": "PreloadVfModuleRequest",
- "GENERIC-RESOURCE-API:request-id": "robot12",
- "GENERIC-RESOURCE-API:source": "VID"
- },
- "GENERIC-RESOURCE-API:sdnc-request-header": {
- "GENERIC-RESOURCE-API:svc-action": "reserve",
- "GENERIC-RESOURCE-API:svc-notification-url": "https://son.onap.org",
- "GENERIC-RESOURCE-API:svc-request-id": "robot12"
- },
- "preload-vf-module-topology-information": {
- "vf-module-topology": {
- "vf-module-parameters": {
- "param": [
- {
- "name": "vcpe_image_name",
- "value": "vbrg-casa-base-ubuntu-16-04"
- },
- {
- "name": "vcpe_flavor_name",
- "value": "m1.medium"
- },
- {
- "name": "public_net_id",
- "value": "${public_net_id}"
- },
- {
- "name": "vbrgemu_bng_private_net_id",
- "value": "${brg_bng_net}"
- },
- {
- "name": "vbrgemu_bng_private_subnet_id",
- "value": "${brg_bng_subnet}"
- },
- {
- "name": "onap_private_net_id",
- "value": "${oam_onap_net}"
- },
- {
- "name": "onap_private_subnet_id",
- "value": "${oam_onap_subnet}"
- },
- {
- "name": "onap_private_net_cidr",
- "value": "10.0.0.0/16"
- },
- {
- "name": "compile_state",
- "value": "done"
- },
- {
- "name": "vbrgemu_bng_private_net_cidr",
- "value": "10.3.0.0/24"
- },
- {
- "name": "vbrgemu_private_ip_0",
- "value": "10.3.0.21"
- },
- {
- "name": "sdnc_ip",
- "value": "${sdnc_oam_ip}"
- },
- {
- "name": "vbrgemu_name_0",
- "value": "zdcpe1cpe01brgemu01_${suffix}"
- },
- {
- "name": "vnf_id",
- "value": "vCPE_Infrastructure_BGREMU_demo_app_${suffix}"
- },
- {
- "name": "vf_module_id",
- "value": "vCPE_Customer_BRGEMU_${suffix}"
- },
- {
- "name": "repo_url_blob",
- "value": "https://nexus.onap.org/content/sites/raw"
- },
- {
- "name": "repo_url_artifacts",
- "value": "https://nexus.onap.org/content/repositories/releases"
- },
- {
- "name": "demo_artifacts_version",
- "value": "1.1.1"
- },
- {
- "name": "script_version",
- "value": "1.3.0"
- },
- {
- "name": "key_name",
- "value": "vbrgemu_key"
- },
- {
- "name": "pub_key",
- "value": "${pub_key}"
- },
- {
- "name": "cloud_env",
- "value": "openstack"
- },
- {
- "name": "vpp_source_repo_url",
- "value": "https://gerrit.fd.io/r/vpp"
- },
- {
- "name": "vpp_source_repo_branch",
- "value": "stable/1704"
- },
- {
- "name": "hc2vpp_source_repo_url",
- "value": "https://gerrit.fd.io/r/hc2vpp"
- },
- {
- "name": "hc2vpp_source_repo_branch",
- "value": "stable/1704"
- },
- {
- "name": "vpp_patch_url",
- "value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-option-82-for-vbrg/src/patches/VPP-Add-Option82-Nat-Filter-For-vBRG.patch"
- }
- ] },
- "vf-module-assignments": {},
- "vf-module-topology-identifier": {
- "vf-module-name": "${vnf_name}"
- }
- },
- "vnf-resource-assignments": {},
- "vnf-topology-identifier-structure": {
- "nf-type": "${vnf_type}",
- "vnf-id": "${vnf_name}"
- }
- }
+ "GENERIC-RESOURCE-API:input": {
+ "GENERIC-RESOURCE-API:request-information": {
+ "GENERIC-RESOURCE-API:notification-url": "https://so.onap.org",
+ "GENERIC-RESOURCE-API:order-number": "robot12",
+ "GENERIC-RESOURCE-API:order-version": "1.0",
+ "GENERIC-RESOURCE-API:request-action": "PreloadVfModuleRequest",
+ "GENERIC-RESOURCE-API:request-id": "robot12",
+ "GENERIC-RESOURCE-API:source": "VID"
+ },
+ "GENERIC-RESOURCE-API:sdnc-request-header": {
+ "GENERIC-RESOURCE-API:svc-action": "reserve",
+ "GENERIC-RESOURCE-API:svc-notification-url": "https://son.onap.org",
+ "GENERIC-RESOURCE-API:svc-request-id": "robot12"
+ },
+ "preload-vf-module-topology-information": {
+ "vf-module-topology": {
+ "vf-module-parameters": {
+ "param": [
+ {
+ "name": "vcpe_image_name",
+ "value": "vbrg-casa-base-ubuntu-16-04"
+ },
+ {
+ "name": "vcpe_flavor_name",
+ "value": "m1.medium"
+ },
+ {
+ "name": "public_net_id",
+ "value": "${public_net_id}"
+ },
+ {
+ "name": "vbrgemu_bng_private_net_id",
+ "value": "${brg_bng_net}"
+ },
+ {
+ "name": "vbrgemu_bng_private_subnet_id",
+ "value": "${brg_bng_subnet}"
+ },
+ {
+ "name": "onap_private_net_id",
+ "value": "${oam_onap_net}"
+ },
+ {
+ "name": "onap_private_subnet_id",
+ "value": "${oam_onap_subnet}"
+ },
+ {
+ "name": "onap_private_net_cidr",
+ "value": "10.0.0.0/16"
+ },
+ {
+ "name": "compile_state",
+ "value": "done"
+ },
+ {
+ "name": "vbrgemu_bng_private_net_cidr",
+ "value": "10.3.0.0/24"
+ },
+ {
+ "name": "vbrgemu_private_ip_0",
+ "value": "10.3.0.21"
+ },
+ {
+ "name": "sdnc_ip",
+ "value": "${sdnc_oam_ip}"
+ },
+ {
+ "name": "vbrgemu_name_0",
+ "value": "zdcpe1cpe01brgemu01_${suffix}"
+ },
+ {
+ "name": "vnf_id",
+ "value": "vCPE_Infrastructure_BGREMU_demo_app_${suffix}"
+ },
+ {
+ "name": "vf_module_id",
+ "value": "vCPE_Customer_BRGEMU_${suffix}"
+ },
+ {
+ "name": "repo_url_blob",
+ "value": "https://nexus.onap.org/content/sites/raw"
+ },
+ {
+ "name": "repo_url_artifacts",
+ "value": "https://nexus.onap.org/content/repositories/releases"
+ },
+ {
+ "name": "demo_artifacts_version",
+ "value": "1.1.1"
+ },
+ {
+ "name": "script_version",
+ "value": "1.3.0"
+ },
+ {
+ "name": "key_name",
+ "value": "vbrgemu_key"
+ },
+ {
+ "name": "pub_key",
+ "value": "${pub_key}"
+ },
+ {
+ "name": "cloud_env",
+ "value": "openstack"
+ },
+ {
+ "name": "vpp_source_repo_url",
+ "value": "https://gerrit.fd.io/r/vpp"
+ },
+ {
+ "name": "vpp_source_repo_branch",
+ "value": "stable/1704"
+ },
+ {
+ "name": "hc2vpp_source_repo_url",
+ "value": "https://gerrit.fd.io/r/hc2vpp"
+ },
+ {
+ "name": "hc2vpp_source_repo_branch",
+ "value": "stable/1704"
+ },
+ {
+ "name": "vpp_patch_url",
+ "value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-option-82-for-vbrg/src/patches/VPP-Add-Option82-Nat-Filter-For-vBRG.patch"
+ }
+ ]
+ },
+ "vf-module-assignments": {},
+ "vf-module-topology-identifier": {
+ "vf-module-name": "${vnf_name}"
+ }
+ },
+ "vnf-resource-assignments": {},
+ "vnf-topology-identifier-structure": {
+ "nf-type": "${vnf_type}",
+ "vnf-id": "${vnf_name}"
}
+ }
+ }
}
diff --git a/test/vcpe/preload_templates/template.vcpe_gmux_vfmodule.json b/test/vcpe/preload_templates/template.vcpe_gmux_vfmodule.json
index a1ebf2ad8..ad0f9184c 100644
--- a/test/vcpe/preload_templates/template.vcpe_gmux_vfmodule.json
+++ b/test/vcpe/preload_templates/template.vcpe_gmux_vfmodule.json
@@ -1,160 +1,158 @@
{
- "VNF-API:input": {
- "VNF-API:request-information": {
- "VNF-API:request-id": "robot12",
- "VNF-API:notification-url": "https://so.onap.org",
- "VNF-API:order-number": "robot12",
- "VNF-API:request-sub-action": "SUPP",
- "VNF-API:request-action": "PreloadVNFRequest",
- "VNF-API:source": "VID",
- "VNF-API:order-version": "1.0"
- },
- "VNF-API:sdnc-request-header": {
- "VNF-API:svc-action": "reserve",
- "VNF-API:svc-notification-url": "https://son.onap.org",
- "VNF-API:svc-request-id": "robot12"
- },
- "VNF-API:vnf-topology-information": {
- "vnf-topology-identifier": {
- "service-type": "${service_type}",
- "vnf-type": "${vnf_type}",
- "generic-vnf-name": "${generic_vnf_name}",
- "generic-vnf-type": "${generic_vnf_type}",
- "vnf-name": "${vnf_name}"
- },
- "VNF-API:vnf-parameters": [
- {
- "vnf-parameter-name": "vcpe_image_name",
- "vnf-parameter-value": "vgmux-casa-base-ubuntu-16-04"
- },
- {
- "vnf-parameter-name": "vcpe_flavor_name",
- "vnf-parameter-value": "m1.medium"
- },
- {
- "vnf-parameter-name": "public_net_id",
- "vnf-parameter-value": "${public_net_id}"
- },
- {
- "vnf-parameter-name": "bng_gmux_private_net_id",
- "vnf-parameter-value": "${bng_mux_net}"
- },
- {
- "vnf-parameter-name": "bng_gmux_private_subnet_id",
- "vnf-parameter-value": "${bng_mux_subnet}"
- },
- {
- "vnf-parameter-name": "mux_gw_private_net_id",
- "vnf-parameter-value": "${mux_gw_net}"
- },
- {
- "vnf-parameter-name": "mux_gw_private_subnet_id",
- "vnf-parameter-value": "${mux_gw_subnet}"
- },
- {
- "vnf-parameter-name": "onap_private_net_id",
- "vnf-parameter-value": "${oam_onap_net}"
- },
- {
- "vnf-parameter-name": "onap_private_subnet_id",
- "vnf-parameter-value": "${oam_onap_subnet}"
- },
- {
- "vnf-parameter-name": "onap_private_net_cidr",
- "vnf-parameter-value": "10.0.0.0/16"
- },
- {
- "vnf-parameter-name": "mux_gw_private_net_cidr",
- "vnf-parameter-value": "10.5.0.0/24"
- },
- {
- "vnf-parameter-name": "bng_gmux_private_net_cidr",
- "vnf-parameter-value": "10.1.0.0/24"
- },
- {
- "vnf-parameter-name": "vgmux_private_ip_0",
- "vnf-parameter-value": "10.1.0.21"
- },
- {
- "vnf-parameter-name": "vgmux_private_ip_1",
- "vnf-parameter-value": "10.0.101.21"
- },
- {
- "vnf-parameter-name": "vgmux_private_ip_2",
- "vnf-parameter-value": "10.5.0.21"
- },
- {
- "vnf-parameter-name": "vgmux_name_0",
- "vnf-parameter-value": "zdcpe1cpe01mux01_${suffix}"
- },
- {
- "vnf-parameter-name": "vnf_id",
- "vnf-parameter-value": "vCPE_Infrastructure_Metro_vGMUX_demo_app"
- },
- {
- "vnf-parameter-name": "vf_module_id",
- "vnf-parameter-value": "vCPE_Intrastructure_Metro_vGMUX"
- },
- {
- "vnf-parameter-name": "dcae_collector_ip",
- "vnf-parameter-value": "10.0.4.102"
- },
- {
- "vnf-parameter-name": "dcae_collector_port",
- "vnf-parameter-value": "8080"
- },
- {
- "vnf-parameter-name": "repo_url_blob",
- "vnf-parameter-value": "https://nexus.onap.org/content/sites/raw"
- },
- {
- "vnf-parameter-name": "repo_url_artifacts",
- "vnf-parameter-value": "https://nexus.onap.org/content/repositories/releases"
- },
- {
- "vnf-parameter-name": "demo_artifacts_version",
- "vnf-parameter-value": "1.1.1"
- },
- {
- "vnf-parameter-name": "script_version",
- "vnf-parameter-value": "1.3.0"
- },
- {
- "vnf-parameter-name": "key_name",
- "vnf-parameter-value": "vgmux_key"
- },
- {
- "vnf-parameter-name": "pub_key",
- "vnf-parameter-value": "${pub_key}"
- },
- {
- "vnf-parameter-name": "cloud_env",
- "vnf-parameter-value": "openstack"
- },
- {
- "vnf-parameter-name": "vpp_source_repo_url",
- "vnf-parameter-value": "https://gerrit.fd.io/r/vpp"
- },
- {
- "vnf-parameter-name": "vpp_source_repo_branch",
- "vnf-parameter-value": "stable/1704"
- },
- {
- "vnf-parameter-name": "vpp_patch_url",
- "vnf-parameter-value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-ves-agent-for-vgmux/src/patches/Vpp-Add-VES-agent-for-vG-MUX.patch"
- },
- {
- "vnf-parameter-name": "hc2vpp_patch_url",
- "vnf-parameter-value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-ves-agent-for-vgmux/src/patches/Hc2vpp-Add-VES-agent-for-vG-MUX.patch"
- },
- {
- "vnf-parameter-name": "libevel_patch_url",
- "vnf-parameter-value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-ves-agent-for-vgmux/src/patches/vCPE-vG-MUX-libevel-fixup.patch"
- }
- ],
- "VNF-API:vnf-assignments": {
- }
- }
- }
+ "VNF-API:input": {
+ "VNF-API:request-information": {
+ "VNF-API:request-id": "robot12",
+ "VNF-API:notification-url": "https://so.onap.org",
+ "VNF-API:order-number": "robot12",
+ "VNF-API:request-sub-action": "SUPP",
+ "VNF-API:request-action": "PreloadVNFRequest",
+ "VNF-API:source": "VID",
+ "VNF-API:order-version": "1.0"
+ },
+ "VNF-API:sdnc-request-header": {
+ "VNF-API:svc-action": "reserve",
+ "VNF-API:svc-notification-url": "https://son.onap.org",
+ "VNF-API:svc-request-id": "robot12"
+ },
+ "VNF-API:vnf-topology-information": {
+ "vnf-topology-identifier": {
+ "service-type": "${service_type}",
+ "vnf-type": "${vnf_type}",
+ "generic-vnf-name": "${generic_vnf_name}",
+ "generic-vnf-type": "${generic_vnf_type}",
+ "vnf-name": "${vnf_name}"
+ },
+ "VNF-API:vnf-parameters": [
+ {
+ "vnf-parameter-name": "vcpe_image_name",
+ "vnf-parameter-value": "vgmux-casa-base-ubuntu-16-04"
+ },
+ {
+ "vnf-parameter-name": "vcpe_flavor_name",
+ "vnf-parameter-value": "m1.medium"
+ },
+ {
+ "vnf-parameter-name": "public_net_id",
+ "vnf-parameter-value": "${public_net_id}"
+ },
+ {
+ "vnf-parameter-name": "bng_gmux_private_net_id",
+ "vnf-parameter-value": "${bng_mux_net}"
+ },
+ {
+ "vnf-parameter-name": "bng_gmux_private_subnet_id",
+ "vnf-parameter-value": "${bng_mux_subnet}"
+ },
+ {
+ "vnf-parameter-name": "mux_gw_private_net_id",
+ "vnf-parameter-value": "${mux_gw_net}"
+ },
+ {
+ "vnf-parameter-name": "mux_gw_private_subnet_id",
+ "vnf-parameter-value": "${mux_gw_subnet}"
+ },
+ {
+ "vnf-parameter-name": "onap_private_net_id",
+ "vnf-parameter-value": "${oam_onap_net}"
+ },
+ {
+ "vnf-parameter-name": "onap_private_subnet_id",
+ "vnf-parameter-value": "${oam_onap_subnet}"
+ },
+ {
+ "vnf-parameter-name": "onap_private_net_cidr",
+ "vnf-parameter-value": "10.0.0.0/16"
+ },
+ {
+ "vnf-parameter-name": "mux_gw_private_net_cidr",
+ "vnf-parameter-value": "10.5.0.0/24"
+ },
+ {
+ "vnf-parameter-name": "bng_gmux_private_net_cidr",
+ "vnf-parameter-value": "10.1.0.0/24"
+ },
+ {
+ "vnf-parameter-name": "vgmux_private_ip_0",
+ "vnf-parameter-value": "10.1.0.21"
+ },
+ {
+ "vnf-parameter-name": "vgmux_private_ip_1",
+ "vnf-parameter-value": "10.0.101.21"
+ },
+ {
+ "vnf-parameter-name": "vgmux_private_ip_2",
+ "vnf-parameter-value": "10.5.0.21"
+ },
+ {
+ "vnf-parameter-name": "vgmux_name_0",
+ "vnf-parameter-value": "zdcpe1cpe01mux01_${suffix}"
+ },
+ {
+ "vnf-parameter-name": "vnf_id",
+ "vnf-parameter-value": "vCPE_Infrastructure_Metro_vGMUX_demo_app"
+ },
+ {
+ "vnf-parameter-name": "vf_module_id",
+ "vnf-parameter-value": "vCPE_Infrastructure_Metro_vGMUX"
+ },
+ {
+ "vnf-parameter-name": "dcae_collector_ip",
+ "vnf-parameter-value": "10.0.4.102"
+ },
+ {
+ "vnf-parameter-name": "dcae_collector_port",
+ "vnf-parameter-value": "8080"
+ },
+ {
+ "vnf-parameter-name": "repo_url_blob",
+ "vnf-parameter-value": "https://nexus.onap.org/content/sites/raw"
+ },
+ {
+ "vnf-parameter-name": "repo_url_artifacts",
+ "vnf-parameter-value": "https://nexus.onap.org/content/repositories/releases"
+ },
+ {
+ "vnf-parameter-name": "demo_artifacts_version",
+ "vnf-parameter-value": "1.1.1"
+ },
+ {
+ "vnf-parameter-name": "script_version",
+ "vnf-parameter-value": "1.3.0"
+ },
+ {
+ "vnf-parameter-name": "key_name",
+ "vnf-parameter-value": "vgmux_key"
+ },
+ {
+ "vnf-parameter-name": "pub_key",
+ "vnf-parameter-value": "${pub_key}"
+ },
+ {
+ "vnf-parameter-name": "cloud_env",
+ "vnf-parameter-value": "openstack"
+ },
+ {
+ "vnf-parameter-name": "vpp_source_repo_url",
+ "vnf-parameter-value": "https://gerrit.fd.io/r/vpp"
+ },
+ {
+ "vnf-parameter-name": "vpp_source_repo_branch",
+ "vnf-parameter-value": "stable/1704"
+ },
+ {
+ "vnf-parameter-name": "vpp_patch_url",
+ "vnf-parameter-value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-ves-agent-for-vgmux/src/patches/Vpp-Add-VES-agent-for-vG-MUX.patch"
+ },
+ {
+ "vnf-parameter-name": "hc2vpp_patch_url",
+ "vnf-parameter-value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-ves-agent-for-vgmux/src/patches/Hc2vpp-Add-VES-agent-for-vG-MUX.patch"
+ },
+ {
+ "vnf-parameter-name": "libevel_patch_url",
+ "vnf-parameter-value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-ves-agent-for-vgmux/src/patches/vCPE-vG-MUX-libevel-fixup.patch"
+ }
+ ],
+ "VNF-API:vnf-assignments": {}
+ }
+ }
}
-
diff --git a/test/vcpe/preload_templates/template.vcpe_gmuxgra_vfmodule.json b/test/vcpe/preload_templates/template.vcpe_gmuxgra_vfmodule.json
index b97ac941b..09ae40440 100644
--- a/test/vcpe/preload_templates/template.vcpe_gmuxgra_vfmodule.json
+++ b/test/vcpe/preload_templates/template.vcpe_gmuxgra_vfmodule.json
@@ -1,161 +1,162 @@
{
- "GENERIC-RESOURCE-API:input": {
- "GENERIC-RESOURCE-API:request-information": {
- "GENERIC-RESOURCE-API:notification-url": "https://so.onap.org",
- "GENERIC-RESOURCE-API:order-number": "robot12",
- "GENERIC-RESOURCE-API:order-version": "1.0",
- "GENERIC-RESOURCE-API:request-action": "PreloadVfModuleRequest",
- "GENERIC-RESOURCE-API:request-id": "robot12",
- "GENERIC-RESOURCE-API:source": "VID"
- },
- "GENERIC-RESOURCE-API:sdnc-request-header": {
- "GENERIC-RESOURCE-API:svc-action": "reserve",
- "GENERIC-RESOURCE-API:svc-notification-url": "https://son.onap.org",
- "GENERIC-RESOURCE-API:svc-request-id": "robot12"
- },
- "preload-vf-module-topology-information": {
- "vf-module-topology": {
- "vf-module-parameters": {
- "param": [
- {
- "name": "vcpe_image_name",
- "value": "vgmux-casa-base-ubuntu-16-04"
- },
- {
- "name": "vcpe_flavor_name",
- "value": "m1.medium"
- },
- {
- "name": "public_net_id",
- "value": "${public_net_id}"
- },
- {
- "name": "bng_gmux_private_net_id",
- "value": "${bng_mux_net}"
- },
- {
- "name": "bng_gmux_private_subnet_id",
- "value": "${bng_mux_subnet}"
- },
- {
- "name": "mux_gw_private_net_id",
- "value": "${mux_gw_net}"
- },
- {
- "name": "mux_gw_private_subnet_id",
- "value": "${mux_gw_subnet}"
- },
- {
- "name": "onap_private_net_id",
- "value": "${oam_onap_net}"
- },
- {
- "name": "onap_private_subnet_id",
- "value": "${oam_onap_subnet}"
- },
- {
- "name": "onap_private_net_cidr",
- "value": "10.0.0.0/16"
- },
- {
- "name": "mux_gw_private_net_cidr",
- "value": "10.5.0.0/24"
- },
- {
- "name": "bng_gmux_private_net_cidr",
- "value": "10.1.0.0/24"
- },
- {
- "name": "vgmux_private_ip_0",
- "value": "10.1.0.21"
- },
- {
- "name": "vgmux_private_ip_1",
- "value": "10.0.101.21"
- },
- {
- "name": "vgmux_private_ip_2",
- "value": "10.5.0.21"
- },
- {
- "name": "vgmux_name_0",
- "value": "zdcpe1cpe01mux01_${suffix}"
- },
- {
- "name": "vnf_id",
- "value": "vCPE_Infrastructure_Metro_vGMUX_demo_app"
- },
- {
- "name": "vf_module_id",
- "value": "vCPE_Intrastructure_Metro_vGMUX"
- },
- {
- "name": "dcae_collector_ip",
- "value": "10.0.4.102"
- },
- {
- "name": "dcae_collector_port",
- "value": "8080"
- },
- {
- "name": "repo_url_blob",
- "value": "https://nexus.onap.org/content/sites/raw"
- },
- {
- "name": "repo_url_artifacts",
- "value": "https://nexus.onap.org/content/repositories/releases"
- },
- {
- "name": "demo_artifacts_version",
- "value": "1.1.1"
- },
- {
- "name": "script_version",
- "value": "1.3.0"
- },
- {
- "name": "key_name",
- "value": "vgmux_key"
- },
- {
- "name": "pub_key",
- "value": "${pub_key}"
- },
- {
- "name": "cloud_env",
- "value": "openstack"
- },
- {
- "name": "vpp_source_repo_url",
- "value": "https://gerrit.fd.io/r/vpp"
- },
- {
- "name": "vpp_source_repo_branch",
- "value": "stable/1704"
- },
- {
- "name": "vpp_patch_url",
- "value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-ves-agent-for-vgmux/src/patches/Vpp-Add-VES-agent-for-vG-MUX.patch"
- },
- {
- "name": "hc2vpp_patch_url",
- "value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-ves-agent-for-vgmux/src/patches/Hc2vpp-Add-VES-agent-for-vG-MUX.patch"
- },
- {
- "name": "libevel_patch_url",
- "value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-ves-agent-for-vgmux/src/patches/vCPE-vG-MUX-libevel-fixup.patch"
- }
- ] },
- "vf-module-assignments": {},
- "vf-module-topology-identifier": {
- "vf-module-name": "${vnf_name}"
- }
- },
- "vnf-resource-assignments": {},
- "vnf-topology-identifier-structure": {
- "nf-type": "${vnf_type}",
- "vnf-id": "${vnf_name}"
- }
+ "GENERIC-RESOURCE-API:input": {
+ "GENERIC-RESOURCE-API:request-information": {
+ "GENERIC-RESOURCE-API:notification-url": "https://so.onap.org",
+ "GENERIC-RESOURCE-API:order-number": "robot12",
+ "GENERIC-RESOURCE-API:order-version": "1.0",
+ "GENERIC-RESOURCE-API:request-action": "PreloadVfModuleRequest",
+ "GENERIC-RESOURCE-API:request-id": "robot12",
+ "GENERIC-RESOURCE-API:source": "VID"
+ },
+ "GENERIC-RESOURCE-API:sdnc-request-header": {
+ "GENERIC-RESOURCE-API:svc-action": "reserve",
+ "GENERIC-RESOURCE-API:svc-notification-url": "https://son.onap.org",
+ "GENERIC-RESOURCE-API:svc-request-id": "robot12"
+ },
+ "preload-vf-module-topology-information": {
+ "vf-module-topology": {
+ "vf-module-parameters": {
+ "param": [
+ {
+ "name": "vcpe_image_name",
+ "value": "vgmux-casa-base-ubuntu-16-04"
+ },
+ {
+ "name": "vcpe_flavor_name",
+ "value": "m1.medium"
+ },
+ {
+ "name": "public_net_id",
+ "value": "${public_net_id}"
+ },
+ {
+ "name": "bng_gmux_private_net_id",
+ "value": "${bng_mux_net}"
+ },
+ {
+ "name": "bng_gmux_private_subnet_id",
+ "value": "${bng_mux_subnet}"
+ },
+ {
+ "name": "mux_gw_private_net_id",
+ "value": "${mux_gw_net}"
+ },
+ {
+ "name": "mux_gw_private_subnet_id",
+ "value": "${mux_gw_subnet}"
+ },
+ {
+ "name": "onap_private_net_id",
+ "value": "${oam_onap_net}"
+ },
+ {
+ "name": "onap_private_subnet_id",
+ "value": "${oam_onap_subnet}"
+ },
+ {
+ "name": "onap_private_net_cidr",
+ "value": "10.0.0.0/16"
+ },
+ {
+ "name": "mux_gw_private_net_cidr",
+ "value": "10.5.0.0/24"
+ },
+ {
+ "name": "bng_gmux_private_net_cidr",
+ "value": "10.1.0.0/24"
+ },
+ {
+ "name": "vgmux_private_ip_0",
+ "value": "10.1.0.21"
+ },
+ {
+ "name": "vgmux_private_ip_1",
+ "value": "10.0.101.21"
+ },
+ {
+ "name": "vgmux_private_ip_2",
+ "value": "10.5.0.21"
+ },
+ {
+ "name": "vgmux_name_0",
+ "value": "zdcpe1cpe01mux01_${suffix}"
+ },
+ {
+ "name": "vnf_id",
+ "value": "vCPE_Infrastructure_Metro_vGMUX_demo_app"
+ },
+ {
+ "name": "vf_module_id",
+ "value": "vCPE_Infrastructure_Metro_vGMUX"
+ },
+ {
+ "name": "dcae_collector_ip",
+ "value": "10.0.4.102"
+ },
+ {
+ "name": "dcae_collector_port",
+ "value": "8080"
+ },
+ {
+ "name": "repo_url_blob",
+ "value": "https://nexus.onap.org/content/sites/raw"
+ },
+ {
+ "name": "repo_url_artifacts",
+ "value": "https://nexus.onap.org/content/repositories/releases"
+ },
+ {
+ "name": "demo_artifacts_version",
+ "value": "1.1.1"
+ },
+ {
+ "name": "script_version",
+ "value": "1.3.0"
+ },
+ {
+ "name": "key_name",
+ "value": "vgmux_key"
+ },
+ {
+ "name": "pub_key",
+ "value": "${pub_key}"
+ },
+ {
+ "name": "cloud_env",
+ "value": "openstack"
+ },
+ {
+ "name": "vpp_source_repo_url",
+ "value": "https://gerrit.fd.io/r/vpp"
+ },
+ {
+ "name": "vpp_source_repo_branch",
+ "value": "stable/1704"
+ },
+ {
+ "name": "vpp_patch_url",
+ "value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-ves-agent-for-vgmux/src/patches/Vpp-Add-VES-agent-for-vG-MUX.patch"
+ },
+ {
+ "name": "hc2vpp_patch_url",
+ "value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-ves-agent-for-vgmux/src/patches/Hc2vpp-Add-VES-agent-for-vG-MUX.patch"
+ },
+ {
+ "name": "libevel_patch_url",
+ "value": "https://git.onap.org/demo/plain/vnfs/vCPE/vpp-ves-agent-for-vgmux/src/patches/vCPE-vG-MUX-libevel-fixup.patch"
}
+ ]
+ },
+ "vf-module-assignments": {},
+ "vf-module-topology-identifier": {
+ "vf-module-name": "${vnf_name}"
+ }
+ },
+ "vnf-resource-assignments": {},
+ "vnf-topology-identifier-structure": {
+ "nf-type": "${vnf_type}",
+ "vnf-id": "${vnf_name}"
}
+ }
+ }
}
diff --git a/test/vcpe/preload_templates/template.vcpe_gwgra_vfmodule.json b/test/vcpe/preload_templates/template.vcpe_gwgra_vfmodule.json
index 48883b993..6a78370b3 100644
--- a/test/vcpe/preload_templates/template.vcpe_gwgra_vfmodule.json
+++ b/test/vcpe/preload_templates/template.vcpe_gwgra_vfmodule.json
@@ -1,133 +1,134 @@
{
- "GENERIC-RESOURCE-API:input": {
- "GENERIC-RESOURCE-API:request-information": {
- "GENERIC-RESOURCE-API:notification-url": "https://so.onap.org",
- "GENERIC-RESOURCE-API:order-number": "robot12",
- "GENERIC-RESOURCE-API:order-version": "1.0",
- "GENERIC-RESOURCE-API:request-action": "PreloadVfModuleRequest",
- "GENERIC-RESOURCE-API:request-id": "robot12",
- "GENERIC-RESOURCE-API:source": "VID"
- },
- "GENERIC-RESOURCE-API:sdnc-request-header": {
- "GENERIC-RESOURCE-API:svc-action": "reserve",
- "GENERIC-RESOURCE-API:svc-notification-url": "https://son.onap.org",
- "GENERIC-RESOURCE-API:svc-request-id": "robot12"
- },
- "preload-vf-module-topology-information": {
- "vf-module-topology": {
- "vf-module-parameters": {
- "param": [
- {
- "name": "vcpe_image_name",
- "value": "vgw-casa-base-ubuntu-16-04"
- },
- {
- "name": "vcpe_flavor_name",
- "value": "m1.medium"
- },
- {
- "name": "public_net_id",
- "value": "${public_net_id}"
- },
- {
- "name": "mux_gw_private_net_id",
- "value": "${mux_gw_net}"
- },
- {
- "name": "mux_gw_private_subnet_id",
- "value": "${mux_gw_subnet}"
- },
- {
- "name": "mux_gw_private_net_cidr",
- "value": "10.5.0.0/24"
- },
- {
- "name": "cpe_public_net_id",
- "value": "${cpe_public_net}"
- },
- {
- "name": "cpe_public_subnet_id",
- "value": "${cpe_public_subnet}"
- },
- {
- "name": "cpe_public_net_cidr",
- "value": "10.2.0.0/24"
- },
- {
- "name": "vgw_private_ip_0",
- "value": "10.5.0.92"
- },
- {
- "name": "vgw_private_ip_1",
- "value": "10.0.101.92"
- },
- {
- "name": "vgw_private_ip_2",
- "value": "10.2.0.7"
- },
- {
- "name": "vgw_name_0",
- "value": "zdcpe1cpe01gw01_${suffix}"
- },
- {
- "name": "mux_ip_addr",
- "value": "10.5.0.21"
- },
- {
- "name": "vg_vgmux_tunnel_vni",
- "value": "92"
- },
- {
- "name": "onap_private_net_id",
- "value": "${oam_onap_net}"
- },
- {
- "name": "onap_private_subnet_id",
- "value": "${oam_onap_subnet}"
- },
- {
- "name": "onap_private_net_cidr",
- "value": "10.0.0.0/16"
- },
- {
- "name": "repo_url_blob",
- "value": "https://nexus.onap.org/content/sites/raw"
- },
- {
- "name": "repo_url_artifacts",
- "value": "https://nexus.onap.org/content/repositories/releases"
- },
- {
- "name": "demo_artifacts_version",
- "value": "1.1.1"
- },
- {
- "name": "script_version",
- "value": "1.3.0"
- },
- {
- "name": "key_name",
- "value": "vgw_key"
- },
- {
- "name": "pub_key",
- "value": "${pub_key}"
- },
- {
- "name": "cloud_env",
- "value": "openstack"
- }
- ] },
- "vf-module-assignments": {},
- "vf-module-topology-identifier": {
- "vf-module-name": "VGW2BRG-${brg_mac}"
- }
- },
- "vnf-resource-assignments": {},
- "vnf-topology-identifier-structure": {
- "nf-type": "vgw",
- "vnf-id": "VGW2BRG-${brg_mac}"
- }
- }
- }
+ "GENERIC-RESOURCE-API:input": {
+ "GENERIC-RESOURCE-API:request-information": {
+ "GENERIC-RESOURCE-API:notification-url": "https://so.onap.org",
+ "GENERIC-RESOURCE-API:order-number": "robot12",
+ "GENERIC-RESOURCE-API:order-version": "1.0",
+ "GENERIC-RESOURCE-API:request-action": "PreloadVfModuleRequest",
+ "GENERIC-RESOURCE-API:request-id": "robot12",
+ "GENERIC-RESOURCE-API:source": "VID"
+ },
+ "GENERIC-RESOURCE-API:sdnc-request-header": {
+ "GENERIC-RESOURCE-API:svc-action": "reserve",
+ "GENERIC-RESOURCE-API:svc-notification-url": "https://son.onap.org",
+ "GENERIC-RESOURCE-API:svc-request-id": "robot12"
+ },
+ "preload-vf-module-topology-information": {
+ "vf-module-topology": {
+ "vf-module-parameters": {
+ "param": [
+ {
+ "name": "vcpe_image_name",
+ "value": "vgw-casa-base-ubuntu-16-04"
+ },
+ {
+ "name": "vcpe_flavor_name",
+ "value": "m1.medium"
+ },
+ {
+ "name": "public_net_id",
+ "value": "${public_net_id}"
+ },
+ {
+ "name": "mux_gw_private_net_id",
+ "value": "${mux_gw_net}"
+ },
+ {
+ "name": "mux_gw_private_subnet_id",
+ "value": "${mux_gw_subnet}"
+ },
+ {
+ "name": "mux_gw_private_net_cidr",
+ "value": "10.5.0.0/24"
+ },
+ {
+ "name": "cpe_public_net_id",
+ "value": "${cpe_public_net}"
+ },
+ {
+ "name": "cpe_public_subnet_id",
+ "value": "${cpe_public_subnet}"
+ },
+ {
+ "name": "cpe_public_net_cidr",
+ "value": "10.2.0.0/24"
+ },
+ {
+ "name": "vgw_private_ip_0",
+ "value": "10.5.0.92"
+ },
+ {
+ "name": "vgw_private_ip_1",
+ "value": "10.0.101.92"
+ },
+ {
+ "name": "vgw_private_ip_2",
+ "value": "10.2.0.7"
+ },
+ {
+ "name": "vgw_name_0",
+ "value": "zdcpe1cpe01gw01_${suffix}"
+ },
+ {
+ "name": "mux_ip_addr",
+ "value": "10.5.0.21"
+ },
+ {
+ "name": "vg_vgmux_tunnel_vni",
+ "value": "92"
+ },
+ {
+ "name": "onap_private_net_id",
+ "value": "${oam_onap_net}"
+ },
+ {
+ "name": "onap_private_subnet_id",
+ "value": "${oam_onap_subnet}"
+ },
+ {
+ "name": "onap_private_net_cidr",
+ "value": "10.0.0.0/16"
+ },
+ {
+ "name": "repo_url_blob",
+ "value": "https://nexus.onap.org/content/sites/raw"
+ },
+ {
+ "name": "repo_url_artifacts",
+ "value": "https://nexus.onap.org/content/repositories/releases"
+ },
+ {
+ "name": "demo_artifacts_version",
+ "value": "1.1.1"
+ },
+ {
+ "name": "script_version",
+ "value": "1.3.0"
+ },
+ {
+ "name": "key_name",
+ "value": "vgw_key"
+ },
+ {
+ "name": "pub_key",
+ "value": "${pub_key}"
+ },
+ {
+ "name": "cloud_env",
+ "value": "openstack"
+ }
+ ]
+ },
+ "vf-module-assignments": {},
+ "vf-module-topology-identifier": {
+ "vf-module-name": "VGW2BRG-${brg_mac}"
+ }
+ },
+ "vnf-resource-assignments": {},
+ "vnf-topology-identifier-structure": {
+ "nf-type": "vgw",
+ "vnf-id": "VGW2BRG-${brg_mac}"
+ }
+ }
+ }
}
diff --git a/test/vcpe/preload_templates/template.vcpe_infra_vfmodule.json b/test/vcpe/preload_templates/template.vcpe_infra_vfmodule.json
index 1f30e9452..3699bcd69 100644
--- a/test/vcpe/preload_templates/template.vcpe_infra_vfmodule.json
+++ b/test/vcpe/preload_templates/template.vcpe_infra_vfmodule.json
@@ -1,180 +1,178 @@
{
- "VNF-API:input": {
- "VNF-API:request-information": {
- "VNF-API:request-id": "robot12",
- "VNF-API:notification-url": "https://so.onap.org",
- "VNF-API:order-number": "robot12",
- "VNF-API:request-sub-action": "SUPP",
- "VNF-API:request-action": "PreloadVNFRequest",
- "VNF-API:source": "VID",
- "VNF-API:order-version": "1.0"
- },
- "VNF-API:sdnc-request-header": {
- "VNF-API:svc-action": "reserve",
- "VNF-API:svc-notification-url": "https://son.onap.org",
- "VNF-API:svc-request-id": "robot12"
- },
- "VNF-API:vnf-topology-information": {
- "vnf-topology-identifier": {
- "service-type": "${service_type}",
- "vnf-type": "${vnf_type}",
- "generic-vnf-name": "${generic_vnf_name}",
- "generic-vnf-type": "${generic_vnf_type}",
- "vnf-name": "${vnf_name}"
- },
- "VNF-API:vnf-parameters": [
- {
- "vnf-parameter-name": "vcpe_image_name",
- "vnf-parameter-value": "ubuntu-16-04-cloud-amd64"
- },
- {
- "vnf-parameter-name": "vcpe_flavor_name",
- "vnf-parameter-value": "m1.medium"
- },
- {
- "vnf-parameter-name": "public_net_id",
- "vnf-parameter-value": "${public_net_id}"
- },
- {
- "vnf-parameter-name": "cpe_signal_net_id",
- "vnf-parameter-value": "${cpe_signal_net}"
- },
- {
- "vnf-parameter-name": "cpe_signal_subnet_id",
- "vnf-parameter-value": "${cpe_signal_subnet}"
- },
- {
- "vnf-parameter-name": "cpe_public_net_id",
- "vnf-parameter-value": "${cpe_public_net}"
- },
- {
- "vnf-parameter-name": "cpe_public_subnet_id",
- "vnf-parameter-value": "${cpe_public_subnet}"
- },
- {
- "vnf-parameter-name": "onap_private_net_id",
- "vnf-parameter-value": "${oam_onap_net}"
- },
- {
- "vnf-parameter-name": "onap_private_subnet_id",
- "vnf-parameter-value": "${oam_onap_subnet}"
- },
- {
- "vnf-parameter-name": "onap_private_net_cidr",
- "vnf-parameter-value": "10.0.0.0/16"
- },
- {
- "vnf-parameter-name": "cpe_signal_net_cidr",
- "vnf-parameter-value": "10.4.0.0/24"
- },
- {
- "vnf-parameter-name": "cpe_public_net_cidr",
- "vnf-parameter-value": "10.2.0.0/24"
- },
- {
- "vnf-parameter-name": "vdhcp_private_ip_0",
- "vnf-parameter-value": "10.4.0.1"
- },
- {
- "vnf-parameter-name": "vdhcp_private_ip_1",
- "vnf-parameter-value": "10.0.101.1"
- },
- {
- "vnf-parameter-name": "vaaa_private_ip_0",
- "vnf-parameter-value": "10.4.0.2"
- },
- {
- "vnf-parameter-name": "vaaa_private_ip_1",
- "vnf-parameter-value": "10.0.101.2"
- },
- {
- "vnf-parameter-name": "vdns_private_ip_0",
- "vnf-parameter-value": "10.2.0.1"
- },
- {
- "vnf-parameter-name": "vdns_private_ip_1",
- "vnf-parameter-value": "10.0.101.3"
- },
- {
- "vnf-parameter-name": "vweb_private_ip_0",
- "vnf-parameter-value": "10.2.0.10"
- },
- {
- "vnf-parameter-name": "vweb_private_ip_1",
- "vnf-parameter-value": "10.0.101.40"
- },
- {
- "vnf-parameter-name": "mr_ip_addr",
- "vnf-parameter-value": "${mr_ip_addr}"
- },
- {
- "vnf-parameter-name": "mr_ip_port",
- "vnf-parameter-value": "${mr_ip_port}"
- },
- {
- "vnf-parameter-name": "vaaa_name_0",
- "vnf-parameter-value": "zdcpe1cpe01aaa01_${suffix}"
- },
- {
- "vnf-parameter-name": "vdns_name_0",
- "vnf-parameter-value": "zdcpe1cpe01dns01_${suffix}"
- },
- {
- "vnf-parameter-name": "vdhcp_name_0",
- "vnf-parameter-value": "zdcpe1cpe01dhcp01_${suffix}"
- },
- {
- "vnf-parameter-name": "vweb_name_0",
- "vnf-parameter-value": "zdcpe1cpe01web01_${suffix}"
- },
- {
- "vnf-parameter-name": "vnf_id",
- "vnf-parameter-value": "vCPE_Infrastructure_demo_app_${suffix}"
- },
- {
- "vnf-parameter-name": "vf_module_id",
- "vnf-parameter-value": "vCPE_Intrastructure_${suffix}"
- },
- {
- "vnf-parameter-name": "dcae_collector_ip",
- "vnf-parameter-value": "10.0.4.102"
- },
- {
- "vnf-parameter-name": "dcae_collector_port",
- "vnf-parameter-value": "8080"
- },
- {
- "vnf-parameter-name": "repo_url_blob",
- "vnf-parameter-value": "https://nexus.onap.org/content/sites/raw"
- },
- {
- "vnf-parameter-name": "repo_url_artifacts",
- "vnf-parameter-value": "https://nexus.onap.org/content/repositories/releases"
- },
- {
- "vnf-parameter-name": "demo_artifacts_version",
- "vnf-parameter-value": "1.3.0"
- },
- {
- "vnf-parameter-name": "install_script_version",
- "vnf-parameter-value": "1.3.0"
- },
- {
- "vnf-parameter-name": "key_name",
- "vnf-parameter-value": "vaaa_key"
- },
- {
- "vnf-parameter-name": "pub_key",
- "vnf-parameter-value": "${pub_key}"
- },
- {
- "vnf-parameter-name": "cloud_env",
- "vnf-parameter-value": "openstack"
- }
- ],
- "VNF-API:vnf-assignments": {
- }
- }
- }
+ "VNF-API:input": {
+ "VNF-API:request-information": {
+ "VNF-API:request-id": "robot12",
+ "VNF-API:notification-url": "https://so.onap.org",
+ "VNF-API:order-number": "robot12",
+ "VNF-API:request-sub-action": "SUPP",
+ "VNF-API:request-action": "PreloadVNFRequest",
+ "VNF-API:source": "VID",
+ "VNF-API:order-version": "1.0"
+ },
+ "VNF-API:sdnc-request-header": {
+ "VNF-API:svc-action": "reserve",
+ "VNF-API:svc-notification-url": "https://son.onap.org",
+ "VNF-API:svc-request-id": "robot12"
+ },
+ "VNF-API:vnf-topology-information": {
+ "vnf-topology-identifier": {
+ "service-type": "${service_type}",
+ "vnf-type": "${vnf_type}",
+ "generic-vnf-name": "${generic_vnf_name}",
+ "generic-vnf-type": "${generic_vnf_type}",
+ "vnf-name": "${vnf_name}"
+ },
+ "VNF-API:vnf-parameters": [
+ {
+ "vnf-parameter-name": "vcpe_image_name",
+ "vnf-parameter-value": "ubuntu-16-04-cloud-amd64"
+ },
+ {
+ "vnf-parameter-name": "vcpe_flavor_name",
+ "vnf-parameter-value": "m1.medium"
+ },
+ {
+ "vnf-parameter-name": "public_net_id",
+ "vnf-parameter-value": "${public_net_id}"
+ },
+ {
+ "vnf-parameter-name": "cpe_signal_net_id",
+ "vnf-parameter-value": "${cpe_signal_net}"
+ },
+ {
+ "vnf-parameter-name": "cpe_signal_subnet_id",
+ "vnf-parameter-value": "${cpe_signal_subnet}"
+ },
+ {
+ "vnf-parameter-name": "cpe_public_net_id",
+ "vnf-parameter-value": "${cpe_public_net}"
+ },
+ {
+ "vnf-parameter-name": "cpe_public_subnet_id",
+ "vnf-parameter-value": "${cpe_public_subnet}"
+ },
+ {
+ "vnf-parameter-name": "onap_private_net_id",
+ "vnf-parameter-value": "${oam_onap_net}"
+ },
+ {
+ "vnf-parameter-name": "onap_private_subnet_id",
+ "vnf-parameter-value": "${oam_onap_subnet}"
+ },
+ {
+ "vnf-parameter-name": "onap_private_net_cidr",
+ "vnf-parameter-value": "10.0.0.0/16"
+ },
+ {
+ "vnf-parameter-name": "cpe_signal_net_cidr",
+ "vnf-parameter-value": "10.4.0.0/24"
+ },
+ {
+ "vnf-parameter-name": "cpe_public_net_cidr",
+ "vnf-parameter-value": "10.2.0.0/24"
+ },
+ {
+ "vnf-parameter-name": "vdhcp_private_ip_0",
+ "vnf-parameter-value": "10.4.0.1"
+ },
+ {
+ "vnf-parameter-name": "vdhcp_private_ip_1",
+ "vnf-parameter-value": "10.0.101.1"
+ },
+ {
+ "vnf-parameter-name": "vaaa_private_ip_0",
+ "vnf-parameter-value": "10.4.0.2"
+ },
+ {
+ "vnf-parameter-name": "vaaa_private_ip_1",
+ "vnf-parameter-value": "10.0.101.2"
+ },
+ {
+ "vnf-parameter-name": "vdns_private_ip_0",
+ "vnf-parameter-value": "10.2.0.1"
+ },
+ {
+ "vnf-parameter-name": "vdns_private_ip_1",
+ "vnf-parameter-value": "10.0.101.3"
+ },
+ {
+ "vnf-parameter-name": "vweb_private_ip_0",
+ "vnf-parameter-value": "10.2.0.10"
+ },
+ {
+ "vnf-parameter-name": "vweb_private_ip_1",
+ "vnf-parameter-value": "10.0.101.40"
+ },
+ {
+ "vnf-parameter-name": "mr_ip_addr",
+ "vnf-parameter-value": "${mr_ip_addr}"
+ },
+ {
+ "vnf-parameter-name": "mr_ip_port",
+ "vnf-parameter-value": "${mr_ip_port}"
+ },
+ {
+ "vnf-parameter-name": "vaaa_name_0",
+ "vnf-parameter-value": "zdcpe1cpe01aaa01_${suffix}"
+ },
+ {
+ "vnf-parameter-name": "vdns_name_0",
+ "vnf-parameter-value": "zdcpe1cpe01dns01_${suffix}"
+ },
+ {
+ "vnf-parameter-name": "vdhcp_name_0",
+ "vnf-parameter-value": "zdcpe1cpe01dhcp01_${suffix}"
+ },
+ {
+ "vnf-parameter-name": "vweb_name_0",
+ "vnf-parameter-value": "zdcpe1cpe01web01_${suffix}"
+ },
+ {
+ "vnf-parameter-name": "vnf_id",
+ "vnf-parameter-value": "vCPE_Infrastructure_demo_app_${suffix}"
+ },
+ {
+ "vnf-parameter-name": "vf_module_id",
+ "vnf-parameter-value": "vCPE_Infrastructure_${suffix}"
+ },
+ {
+ "vnf-parameter-name": "dcae_collector_ip",
+ "vnf-parameter-value": "10.0.4.102"
+ },
+ {
+ "vnf-parameter-name": "dcae_collector_port",
+ "vnf-parameter-value": "8080"
+ },
+ {
+ "vnf-parameter-name": "repo_url_blob",
+ "vnf-parameter-value": "https://nexus.onap.org/content/sites/raw"
+ },
+ {
+ "vnf-parameter-name": "repo_url_artifacts",
+ "vnf-parameter-value": "https://nexus.onap.org/content/repositories/releases"
+ },
+ {
+ "vnf-parameter-name": "demo_artifacts_version",
+ "vnf-parameter-value": "1.3.0"
+ },
+ {
+ "vnf-parameter-name": "install_script_version",
+ "vnf-parameter-value": "1.3.0"
+ },
+ {
+ "vnf-parameter-name": "key_name",
+ "vnf-parameter-value": "vaaa_key"
+ },
+ {
+ "vnf-parameter-name": "pub_key",
+ "vnf-parameter-value": "${pub_key}"
+ },
+ {
+ "vnf-parameter-name": "cloud_env",
+ "vnf-parameter-value": "openstack"
+ }
+ ],
+ "VNF-API:vnf-assignments": {}
+ }
+ }
}
-
diff --git a/test/vcpe/preload_templates/template.vcpe_infragra_vfmodule.json b/test/vcpe/preload_templates/template.vcpe_infragra_vfmodule.json
index 0b076b448..59a657245 100644
--- a/test/vcpe/preload_templates/template.vcpe_infragra_vfmodule.json
+++ b/test/vcpe/preload_templates/template.vcpe_infragra_vfmodule.json
@@ -1,182 +1,182 @@
{
- "GENERIC-RESOURCE-API:input": {
- "GENERIC-RESOURCE-API:request-information": {
- "GENERIC-RESOURCE-API:notification-url": "https://so.onap.org",
- "GENERIC-RESOURCE-API:order-number": "robot12",
- "GENERIC-RESOURCE-API:order-version": "1.0",
- "GENERIC-RESOURCE-API:request-action": "PreloadVfModuleRequest",
- "GENERIC-RESOURCE-API:request-id": "robot12",
- "GENERIC-RESOURCE-API:source": "VID"
- },
- "GENERIC-RESOURCE-API:sdnc-request-header": {
- "GENERIC-RESOURCE-API:svc-action": "reserve",
- "GENERIC-RESOURCE-API:svc-notification-url": "https://son.onap.org",
- "GENERIC-RESOURCE-API:svc-request-id": "robot12"
- },
- "preload-vf-module-topology-information": {
- "vf-module-topology": {
- "vf-module-parameters": {
- "param": [
- {
- "name": "vcpe_image_name",
- "value": "ubuntu-16-04-cloud-amd64"
- },
- {
- "name": "vcpe_flavor_name",
- "value": "m1.medium"
- },
- {
- "name": "public_net_id",
- "value": "${public_net_id}"
- },
- {
- "name": "cpe_signal_net_id",
- "value": "${cpe_signal_net}"
- },
- {
- "name": "cpe_signal_subnet_id",
- "value": "${cpe_signal_subnet}"
- },
- {
- "name": "cpe_public_net_id",
- "value": "${cpe_public_net}"
- },
- {
- "name": "cpe_public_subnet_id",
- "value": "${cpe_public_subnet}"
- },
- {
- "name": "onap_private_net_id",
- "value": "${oam_onap_net}"
- },
- {
- "name": "onap_private_subnet_id",
- "value": "${oam_onap_subnet}"
- },
- {
- "name": "onap_private_net_cidr",
- "value": "10.0.0.0/16"
- },
- {
- "name": "cpe_signal_net_cidr",
- "value": "10.4.0.0/24"
- },
- {
- "name": "cpe_public_net_cidr",
- "value": "10.2.0.0/24"
- },
- {
- "name": "vdhcp_private_ip_0",
- "value": "10.4.0.1"
- },
- {
- "name": "vdhcp_private_ip_1",
- "value": "10.0.101.1"
- },
- {
- "name": "vaaa_private_ip_0",
- "value": "10.4.0.2"
- },
- {
- "name": "vaaa_private_ip_1",
- "value": "10.0.101.2"
- },
- {
- "name": "vdns_private_ip_0",
- "value": "10.2.0.1"
- },
- {
- "name": "vdns_private_ip_1",
- "value": "10.0.101.3"
- },
- {
- "name": "vweb_private_ip_0",
- "value": "10.2.0.10"
- },
- {
- "name": "vweb_private_ip_1",
- "value": "10.0.101.40"
- },
- {
- "name": "mr_ip_addr",
- "value": "${mr_ip_addr}"
- },
- {
- "name": "mr_ip_port",
- "value": "${mr_ip_port}"
- },
- {
- "name": "vaaa_name_0",
- "value": "zdcpe1cpe01aaa01_${suffix}"
- },
- {
- "name": "vdns_name_0",
- "value": "zdcpe1cpe01dns01_${suffix}"
- },
- {
- "name": "vdhcp_name_0",
- "value": "zdcpe1cpe01dhcp01_${suffix}"
- },
- {
- "name": "vweb_name_0",
- "value": "zdcpe1cpe01web01_${suffix}"
- },
- {
- "name": "vnf_id",
- "value": "vCPE_Infrastructure_demo_app_${suffix}"
- },
- {
- "name": "vf_module_id",
- "value": "vCPE_Intrastructure_${suffix}"
- },
- {
- "name": "dcae_collector_ip",
- "value": "10.0.4.102"
- },
- {
- "name": "dcae_collector_port",
- "value": "8080"
- },
- {
- "name": "repo_url_blob",
- "value": "https://nexus.onap.org/content/sites/raw"
- },
- {
- "name": "repo_url_artifacts",
- "value": "https://nexus.onap.org/content/repositories/releases"
- },
- {
- "name": "demo_artifacts_version",
- "value": "1.3.0"
- },
- {
- "name": "install_script_version",
- "value": "1.3.0"
- },
- {
- "name": "key_name",
- "value": "vaaa_key"
- },
- {
- "name": "pub_key",
- "value": "${pub_key}"
- },
- {
- "name": "cloud_env",
- "value": "openstack"
- }
-
- ] },
- "vf-module-assignments": {},
- "vf-module-topology-identifier": {
- "vf-module-name": "${vnf_name}"
- }
- },
- "vnf-resource-assignments": {},
- "vnf-topology-identifier-structure": {
- "nf-type": "${vnf_type}",
- "vnf-id": "${vnf_name}"
- }
- }
+ "GENERIC-RESOURCE-API:input": {
+ "GENERIC-RESOURCE-API:request-information": {
+ "GENERIC-RESOURCE-API:notification-url": "https://so.onap.org",
+ "GENERIC-RESOURCE-API:order-number": "robot12",
+ "GENERIC-RESOURCE-API:order-version": "1.0",
+ "GENERIC-RESOURCE-API:request-action": "PreloadVfModuleRequest",
+ "GENERIC-RESOURCE-API:request-id": "robot12",
+ "GENERIC-RESOURCE-API:source": "VID"
+ },
+ "GENERIC-RESOURCE-API:sdnc-request-header": {
+ "GENERIC-RESOURCE-API:svc-action": "reserve",
+ "GENERIC-RESOURCE-API:svc-notification-url": "https://son.onap.org",
+ "GENERIC-RESOURCE-API:svc-request-id": "robot12"
+ },
+ "preload-vf-module-topology-information": {
+ "vf-module-topology": {
+ "vf-module-parameters": {
+ "param": [
+ {
+ "name": "vcpe_image_name",
+ "value": "ubuntu-16-04-cloud-amd64"
+ },
+ {
+ "name": "vcpe_flavor_name",
+ "value": "m1.medium"
+ },
+ {
+ "name": "public_net_id",
+ "value": "${public_net_id}"
+ },
+ {
+ "name": "cpe_signal_net_id",
+ "value": "${cpe_signal_net}"
+ },
+ {
+ "name": "cpe_signal_subnet_id",
+ "value": "${cpe_signal_subnet}"
+ },
+ {
+ "name": "cpe_public_net_id",
+ "value": "${cpe_public_net}"
+ },
+ {
+ "name": "cpe_public_subnet_id",
+ "value": "${cpe_public_subnet}"
+ },
+ {
+ "name": "onap_private_net_id",
+ "value": "${oam_onap_net}"
+ },
+ {
+ "name": "onap_private_subnet_id",
+ "value": "${oam_onap_subnet}"
+ },
+ {
+ "name": "onap_private_net_cidr",
+ "value": "10.0.0.0/16"
+ },
+ {
+ "name": "cpe_signal_net_cidr",
+ "value": "10.4.0.0/24"
+ },
+ {
+ "name": "cpe_public_net_cidr",
+ "value": "10.2.0.0/24"
+ },
+ {
+ "name": "vdhcp_private_ip_0",
+ "value": "10.4.0.1"
+ },
+ {
+ "name": "vdhcp_private_ip_1",
+ "value": "10.0.101.1"
+ },
+ {
+ "name": "vaaa_private_ip_0",
+ "value": "10.4.0.2"
+ },
+ {
+ "name": "vaaa_private_ip_1",
+ "value": "10.0.101.2"
+ },
+ {
+ "name": "vdns_private_ip_0",
+ "value": "10.2.0.1"
+ },
+ {
+ "name": "vdns_private_ip_1",
+ "value": "10.0.101.3"
+ },
+ {
+ "name": "vweb_private_ip_0",
+ "value": "10.2.0.10"
+ },
+ {
+ "name": "vweb_private_ip_1",
+ "value": "10.0.101.40"
+ },
+ {
+ "name": "mr_ip_addr",
+ "value": "${mr_ip_addr}"
+ },
+ {
+ "name": "mr_ip_port",
+ "value": "${mr_ip_port}"
+ },
+ {
+ "name": "vaaa_name_0",
+ "value": "zdcpe1cpe01aaa01_${suffix}"
+ },
+ {
+ "name": "vdns_name_0",
+ "value": "zdcpe1cpe01dns01_${suffix}"
+ },
+ {
+ "name": "vdhcp_name_0",
+ "value": "zdcpe1cpe01dhcp01_${suffix}"
+ },
+ {
+ "name": "vweb_name_0",
+ "value": "zdcpe1cpe01web01_${suffix}"
+ },
+ {
+ "name": "vnf_id",
+ "value": "vCPE_Infrastructure_demo_app_${suffix}"
+ },
+ {
+ "name": "vf_module_id",
+ "value": "vCPE_Intrastructure_${suffix}"
+ },
+ {
+ "name": "dcae_collector_ip",
+ "value": "10.0.4.102"
+ },
+ {
+ "name": "dcae_collector_port",
+ "value": "8080"
+ },
+ {
+ "name": "repo_url_blob",
+ "value": "https://nexus.onap.org/content/sites/raw"
+ },
+ {
+ "name": "repo_url_artifacts",
+ "value": "https://nexus.onap.org/content/repositories/releases"
+ },
+ {
+ "name": "demo_artifacts_version",
+ "value": "1.3.0"
+ },
+ {
+ "name": "install_script_version",
+ "value": "1.3.0"
+ },
+ {
+ "name": "key_name",
+ "value": "vaaa_key"
+ },
+ {
+ "name": "pub_key",
+ "value": "${pub_key}"
+ },
+ {
+ "name": "cloud_env",
+ "value": "openstack"
+ }
+ ]
+ },
+ "vf-module-assignments": {},
+ "vf-module-topology-identifier": {
+ "vf-module-name": "${vnf_name}"
+ }
+ },
+ "vnf-resource-assignments": {},
+ "vnf-topology-identifier-structure": {
+ "nf-type": "${vnf_type}",
+ "vnf-id": "${vnf_name}"
}
+ }
+ }
}
diff --git a/test/vcpe/preload_templates/template.vcpe_vgw_vfmodule.json b/test/vcpe/preload_templates/template.vcpe_vgw_vfmodule.json
index bd288b4dd..4b660d14e 100644
--- a/test/vcpe/preload_templates/template.vcpe_vgw_vfmodule.json
+++ b/test/vcpe/preload_templates/template.vcpe_vgw_vfmodule.json
@@ -1,130 +1,130 @@
{
- "VNF-API:input": {
- "VNF-API:request-information": {
- "VNF-API:notification-url": "https://so.onap.org",
- "VNF-API:order-number": "robot12",
- "VNF-API:order-version": "1.0",
- "VNF-API:request-action": "PreloadVNFRequest",
- "VNF-API:request-id": "robot12",
- "VNF-API:request-sub-action": "SUPP",
- "VNF-API:source": "VID"
- },
- "VNF-API:sdnc-request-header": {
- "VNF-API:svc-action": "reserve",
- "VNF-API:svc-notification-url": "https://son.onap.org",
- "VNF-API:svc-request-id": "robot12"
- },
- "VNF-API:vnf-topology-information": {
- "VNF-API:vnf-assignments": {},
- "VNF-API:vnf-parameters": [
- {
- "vnf-parameter-name": "vcpe_image_name",
- "vnf-parameter-value": "vgw-casa-base-ubuntu-16-04"
- },
- {
- "vnf-parameter-name": "vcpe_flavor_name",
- "vnf-parameter-value": "m1.medium"
- },
- {
- "vnf-parameter-name": "public_net_id",
- "vnf-parameter-value": "${public_net_id}"
- },
- {
- "vnf-parameter-name": "mux_gw_private_net_id",
- "vnf-parameter-value": "${mux_gw_net}"
- },
- {
- "vnf-parameter-name": "mux_gw_private_subnet_id",
- "vnf-parameter-value": "${mux_gw_subnet}"
- },
- {
- "vnf-parameter-name": "mux_gw_private_net_cidr",
- "vnf-parameter-value": "10.5.0.0/24"
- },
- {
- "vnf-parameter-name": "cpe_public_net_id",
- "vnf-parameter-value": "${cpe_public_net}"
- },
- {
- "vnf-parameter-name": "cpe_public_subnet_id",
- "vnf-parameter-value": "${cpe_public_subnet}"
- },
- {
- "vnf-parameter-name": "cpe_public_net_cidr",
- "vnf-parameter-value": "10.2.0.0/24"
- },
- {
- "vnf-parameter-name": "vgw_private_ip_0",
- "vnf-parameter-value": "10.5.0.126"
- },
- {
- "vnf-parameter-name": "vgw_private_ip_1",
- "vnf-parameter-value": "10.0.101.126"
- },
- {
- "vnf-parameter-name": "vgw_private_ip_2",
- "vnf-parameter-value": "10.2.0.41"
- },
- {
- "vnf-parameter-name": "vgw_name_0",
- "vnf-parameter-value": "zdcpe1cpe01gw01_${suffix}"
- },
- {
- "vnf-parameter-name": "mux_ip_addr",
- "vnf-parameter-value": "10.5.0.21"
- },
- {
- "vnf-parameter-name": "vg_vgmux_tunnel_vni",
- "vnf-parameter-value": "126"
- },
- {
- "vnf-parameter-name": "onap_private_net_id",
- "vnf-parameter-value": "${oam_onap_net}"
- },
- {
- "vnf-parameter-name": "onap_private_subnet_id",
- "vnf-parameter-value": "${oam_onap_subnet}"
- },
- {
- "vnf-parameter-name": "onap_private_net_cidr",
- "vnf-parameter-value": "10.0.0.0/16"
- },
- {
- "vnf-parameter-name": "repo_url_blob",
- "vnf-parameter-value": "https://nexus.onap.org/content/sites/raw"
- },
- {
- "vnf-parameter-name": "repo_url_artifacts",
- "vnf-parameter-value": "https://nexus.onap.org/content/repositories/releases"
- },
- {
- "vnf-parameter-name": "demo_artifacts_version",
- "vnf-parameter-value": "1.1.1"
- },
- {
- "vnf-parameter-name": "script_version",
- "vnf-parameter-value": "1.3.0"
- },
- {
- "vnf-parameter-name": "key_name",
- "vnf-parameter-value": "vgw_key"
- },
- {
- "vnf-parameter-name": "pub_key",
- "vnf-parameter-value": "${pub_key}"
- },
- {
- "vnf-parameter-name": "cloud_env",
- "vnf-parameter-value": "openstack"
- }
- ],
- "vnf-topology-identifier": {
- "generic-vnf-name": "GENERIC-VGW-VNF-NAME0",
- "generic-vnf-type": "GENERIC-VGW-VNF-TYPE",
- "service-type": "VGW-SERVICE-TYPE",
- "vnf-name": "VGW2BRG-${brg_mac}",
- "vnf-type": "VGW-VNF-TYPE"
- }
+ "VNF-API:input": {
+ "VNF-API:request-information": {
+ "VNF-API:notification-url": "https://so.onap.org",
+ "VNF-API:order-number": "robot12",
+ "VNF-API:order-version": "1.0",
+ "VNF-API:request-action": "PreloadVNFRequest",
+ "VNF-API:request-id": "robot12",
+ "VNF-API:request-sub-action": "SUPP",
+ "VNF-API:source": "VID"
+ },
+ "VNF-API:sdnc-request-header": {
+ "VNF-API:svc-action": "reserve",
+ "VNF-API:svc-notification-url": "https://son.onap.org",
+ "VNF-API:svc-request-id": "robot12"
+ },
+ "VNF-API:vnf-topology-information": {
+ "VNF-API:vnf-assignments": {},
+ "VNF-API:vnf-parameters": [
+ {
+ "vnf-parameter-name": "vcpe_image_name",
+ "vnf-parameter-value": "vgw-casa-base-ubuntu-16-04"
+ },
+ {
+ "vnf-parameter-name": "vcpe_flavor_name",
+ "vnf-parameter-value": "m1.medium"
+ },
+ {
+ "vnf-parameter-name": "public_net_id",
+ "vnf-parameter-value": "${public_net_id}"
+ },
+ {
+ "vnf-parameter-name": "mux_gw_private_net_id",
+ "vnf-parameter-value": "${mux_gw_net}"
+ },
+ {
+ "vnf-parameter-name": "mux_gw_private_subnet_id",
+ "vnf-parameter-value": "${mux_gw_subnet}"
+ },
+ {
+ "vnf-parameter-name": "mux_gw_private_net_cidr",
+ "vnf-parameter-value": "10.5.0.0/24"
+ },
+ {
+ "vnf-parameter-name": "cpe_public_net_id",
+ "vnf-parameter-value": "${cpe_public_net}"
+ },
+ {
+ "vnf-parameter-name": "cpe_public_subnet_id",
+ "vnf-parameter-value": "${cpe_public_subnet}"
+ },
+ {
+ "vnf-parameter-name": "cpe_public_net_cidr",
+ "vnf-parameter-value": "10.2.0.0/24"
+ },
+ {
+ "vnf-parameter-name": "vgw_private_ip_0",
+ "vnf-parameter-value": "10.5.0.126"
+ },
+ {
+ "vnf-parameter-name": "vgw_private_ip_1",
+ "vnf-parameter-value": "10.0.101.126"
+ },
+ {
+ "vnf-parameter-name": "vgw_private_ip_2",
+ "vnf-parameter-value": "10.2.0.41"
+ },
+ {
+ "vnf-parameter-name": "vgw_name_0",
+ "vnf-parameter-value": "zdcpe1cpe01gw01_${suffix}"
+ },
+ {
+ "vnf-parameter-name": "mux_ip_addr",
+ "vnf-parameter-value": "10.5.0.21"
+ },
+ {
+ "vnf-parameter-name": "vg_vgmux_tunnel_vni",
+ "vnf-parameter-value": "126"
+ },
+ {
+ "vnf-parameter-name": "onap_private_net_id",
+ "vnf-parameter-value": "${oam_onap_net}"
+ },
+ {
+ "vnf-parameter-name": "onap_private_subnet_id",
+ "vnf-parameter-value": "${oam_onap_subnet}"
+ },
+ {
+ "vnf-parameter-name": "onap_private_net_cidr",
+ "vnf-parameter-value": "10.0.0.0/16"
+ },
+ {
+ "vnf-parameter-name": "repo_url_blob",
+ "vnf-parameter-value": "https://nexus.onap.org/content/sites/raw"
+ },
+ {
+ "vnf-parameter-name": "repo_url_artifacts",
+ "vnf-parameter-value": "https://nexus.onap.org/content/repositories/releases"
+ },
+ {
+ "vnf-parameter-name": "demo_artifacts_version",
+ "vnf-parameter-value": "1.1.1"
+ },
+ {
+ "vnf-parameter-name": "script_version",
+ "vnf-parameter-value": "1.3.0"
+ },
+ {
+ "vnf-parameter-name": "key_name",
+ "vnf-parameter-value": "vgw_key"
+ },
+ {
+ "vnf-parameter-name": "pub_key",
+ "vnf-parameter-value": "${pub_key}"
+ },
+ {
+ "vnf-parameter-name": "cloud_env",
+ "vnf-parameter-value": "openstack"
}
+ ],
+ "vnf-topology-identifier": {
+ "generic-vnf-name": "GENERIC-VGW-VNF-NAME0",
+ "generic-vnf-type": "GENERIC-VGW-VNF-TYPE",
+ "service-type": "VGW-SERVICE-TYPE",
+ "vnf-name": "VGW2BRG-${brg_mac}",
+ "vnf-type": "VGW-VNF-TYPE"
+ }
}
-} \ No newline at end of file
+ }
+}
diff --git a/test/vcpe/preload_templates/template_aai_region_data.json b/test/vcpe/preload_templates/template_aai_region_data.json
new file mode 100644
index 000000000..c574630f8
--- /dev/null
+++ b/test/vcpe/preload_templates/template_aai_region_data.json
@@ -0,0 +1,11 @@
+{
+ "cloud-owner": "CloudOwner",
+ "cloud-region-id": "${region_name}",
+ "cloud-type": "SharedNode",
+ "owner-defined-type": "OwnerType",
+ "cloud-region-version": "v1",
+ "identity-url": "${identity-url}/v${identity_api_version}",
+ "cloud-zone": "CloudZone",
+ "resource-version": "${resource_version}",
+ "relationship-list": ""
+}
diff --git a/test/vcpe/preload_templates/template_sniro_data.json b/test/vcpe/preload_templates/template_sniro_data.json
index c2c6421e6..952a42bac 100644
--- a/test/vcpe/preload_templates/template_sniro_data.json
+++ b/test/vcpe/preload_templates/template_sniro_data.json
@@ -1,37 +1,64 @@
{
- "solutionInfo" : {
- "placementInfo" : [
- {
- "cloudRegionId" : "RegionOne",
- "inventoryType" : "service",
- "resourceModuleName" : "${tunnelxconn_ar_name}",
- "serviceInstanceId" : "${vgmux_svc_instance_uuid}",
- "serviceResourceId" : "TUNNEL-RESOURCE-ID-REPLACE",
- "isRehome" : "False",
- "assignmentInfo": [ { "variableName": "cloudOwner", "variableValue": "CloudOwner"}, {"variableName": "vnfHostName", "variableValue": "vnfHostName" }]
- },
- {
- "cloudRegionId" : "RegionOne",
- "inventoryType" : "cloud",
- "resourceModuleName" : "${vgw_name}",
- "serviceInstanceId" : "vG-service-instance-id",
- "serviceResourceId" : "VGW-RESOURCE-ID-REPLACE",
- "isRehome" : "False",
- "assignmentInfo": [ { "variableName": "cloudOwner", "variableValue": "CloudOwner"}, {"variableName": "vnfHostName", "variableValue": "vnfHostName" }]
- },
- {
- "cloudRegionId" : "RegionOne",
- "inventoryType" : "service",
- "resourceModuleName" : "${brg_ar_name}",
- "serviceInstanceId" : "${vbrg_svc_instance_uuid}",
- "serviceResourceId" : "BRG-RESOURCE-ID-REPLACE",
- "isRehome" : "False",
- "assignmentInfo": [ { "variableName": "cloudOwner", "variableValue": "CloudOwner"}, {"variableName": "vnfHostName", "variableValue": "vnfHostName" }]
- }
- ]
- },
- "requestId" : "111-111-1111",
- "statusMessage" : "",
- "transactionId" : "111-111-1111",
- "requestState" : "complete"
+ "solutionInfo": {
+ "placementInfo": [
+ {
+ "cloudRegionId": "RegionOne",
+ "inventoryType": "service",
+ "resourceModuleName": "${tunnelxconn_ar_name}",
+ "serviceInstanceId": "${vgmux_svc_instance_uuid}",
+ "serviceResourceId": "TUNNEL-RESOURCE-ID-REPLACE",
+ "isRehome": "False",
+ "assignmentInfo": [
+ {
+ "variableName": "cloudOwner",
+ "variableValue": "CloudOwner"
+ },
+ {
+ "variableName": "vnfHostName",
+ "variableValue": "vnfHostName"
+ }
+ ]
+ },
+ {
+ "cloudRegionId": "RegionOne",
+ "inventoryType": "cloud",
+ "resourceModuleName": "${vgw_name}",
+ "serviceInstanceId": "vG-service-instance-id",
+ "serviceResourceId": "VGW-RESOURCE-ID-REPLACE",
+ "isRehome": "False",
+ "assignmentInfo": [
+ {
+ "variableName": "cloudOwner",
+ "variableValue": "CloudOwner"
+ },
+ {
+ "variableName": "vnfHostName",
+ "variableValue": "vnfHostName"
+ }
+ ]
+ },
+ {
+ "cloudRegionId": "RegionOne",
+ "inventoryType": "service",
+ "resourceModuleName": "${brg_ar_name}",
+ "serviceInstanceId": "${vbrg_svc_instance_uuid}",
+ "serviceResourceId": "BRG-RESOURCE-ID-REPLACE",
+ "isRehome": "False",
+ "assignmentInfo": [
+ {
+ "variableName": "cloudOwner",
+ "variableValue": "CloudOwner"
+ },
+ {
+ "variableName": "vnfHostName",
+ "variableValue": "vnfHostName"
+ }
+ ]
+ }
+ ]
+ },
+ "requestId": "111-111-1111",
+ "statusMessage": "",
+ "transactionId": "111-111-1111",
+ "requestState": "complete"
}
diff --git a/test/vcpe/preload_templates/template_sniro_request.json b/test/vcpe/preload_templates/template_sniro_request.json
index 6fdbb7072..33ff427f5 100644
--- a/test/vcpe/preload_templates/template_sniro_request.json
+++ b/test/vcpe/preload_templates/template_sniro_request.json
@@ -1,20 +1,20 @@
{
- "request" : {
- "urlPath" : "/sniro/api/v2/placement",
- "method" : "POST"
+ "request": {
+ "urlPath": "/sniro/api/v2/placement",
+ "method": "POST"
},
- "response" : {
- "status" : "202",
+ "response": {
+ "status": "202",
"body": "{\"requestId\": \"1111-111-11\"}"
},
- "postServeActions" : {
- "webhook" : {
- "headers" : {
- "Content-Type" : "application/json"
+ "postServeActions": {
+ "webhook": {
+ "headers": {
+ "Content-Type": "application/json"
},
- "method" : "POST",
- "base64Body" : "${base64_sniro_data}",
- "url" : " http://so-bpmn-infra.onap:8081/mso/WorkflowMessage/SNIROResponse"
+ "method": "POST",
+ "base64Body": "${base64_sniro_data}",
+ "url": " http://so-bpmn-infra.onap:8081/mso/WorkflowMessage/SNIROResponse"
}
}
}
diff --git a/test/vcpe/sdcutils.py b/test/vcpe/sdcutils.py
index a6f6a507b..470199ef8 100755
--- a/test/vcpe/sdcutils.py
+++ b/test/vcpe/sdcutils.py
@@ -1,10 +1,9 @@
#!/usr/bin/env python
-import sys
import logging
import requests
import json
-from vcpecommon import *
+from vcpecommon import * # pylint: disable=W0614
class SdcUtils:
@@ -18,7 +17,7 @@ class SdcUtils:
def download_vcpe_service_template(self):
"""
- :return:
+ :return:
"""
url = self.vcpecommon.sdc_service_list_url
@@ -84,6 +83,3 @@ class SdcUtils:
self.logger.debug('------- Creation subcategory request submitted to SDC, got response --------')
self.logger.debug('response code = %s' % resp.status_code )
self.logger.debug('---------------------------------------------------------------')
-
-
-
diff --git a/test/vcpe/setup.py b/test/vcpe/setup.py
new file mode 100644
index 000000000..7f9bbd0d6
--- /dev/null
+++ b/test/vcpe/setup.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+# COPYRIGHT NOTICE STARTS HERE
+#
+# Copyright 2020 Samsung Electronics Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# COPYRIGHT NOTICE ENDS HERE
+
+# This file is only meant to be a single source of truth for package
+# dependencies. It's consumed by bin/setup.sh and tox hence shouldn't
+# be run directly for package builds as currently vcpe scripts are not
+# provided as a python package.
+
+import setuptools
+
+# Define vCPE scripts dependencies below
+setuptools.setup(
+ install_requires=[
+ 'ipaddress',
+ 'pyyaml',
+ 'mysql-connector-python',
+ 'progressbar2',
+ 'python-novaclient',
+ 'python-openstackclient',
+ 'python-heatclient',
+ 'kubernetes',
+ 'netaddr'
+ ]
+)
diff --git a/test/vcpe/soutils.py b/test/vcpe/soutils.py
index 627d74b6a..95656312f 100755
--- a/test/vcpe/soutils.py
+++ b/test/vcpe/soutils.py
@@ -9,7 +9,7 @@ import progressbar
import time
import csar_parser
import preload
-from vcpecommon import *
+from vcpecommon import * # pylint: disable=W0614
class SoUtils:
@@ -121,7 +121,7 @@ class SoUtils:
def generate_vnf_or_network_request(self, req_type, instance_name, vnf_or_network_model, service_instance_id,
service_model):
if self.vcpecommon.gra_api_flag:
- self.testApi = 'GR_API'
+ self.testApi = 'GR_API'
req_details = {
'modelInfo': vnf_or_network_model,
'cloudConfiguration': {"lcpCloudRegionId": self.vcpecommon.os_region_name,
@@ -139,7 +139,7 @@ class SoUtils:
def generate_vfmodule_request(self, instance_name, vfmodule_model, service_instance_id,
service_model, vnf_instance_id, vnf_model):
if self.vcpecommon.gra_api_flag:
- self.testApi = 'GR_API'
+ self.testApi = 'GR_API'
req_details = {
'modelInfo': vfmodule_model,
'cloudConfiguration': {"lcpCloudRegionId": self.vcpecommon.os_region_name,
@@ -156,7 +156,7 @@ class SoUtils:
def generate_service_request(self, instance_name, model):
if self.vcpecommon.gra_api_flag:
- self.testApi = 'GR_API'
+ self.testApi = 'GR_API'
self.logger.info('testApi' + self.testApi)
@@ -183,10 +183,11 @@ class SoUtils:
req_details['owningEntity'] = {'owningEntityId': self.vcpecommon.owning_entity_id,
'owningEntityName': self.vcpecommon.owning_entity_name}
- def generate_custom_service_request(self, instance_name, model, brg_mac):
+ def generate_custom_service_request(self, instance_name, svc_model,
+ vfmodule_models, brg_mac):
brg_mac_enc = brg_mac.replace(':', '-')
req_details = {
- 'modelInfo': model,
+ 'modelInfo': svc_model,
'subscriberInfo': {'subscriberName': 'Kaneohe',
'globalSubscriberId': self.vcpecommon.global_subscriber_id},
'cloudConfiguration': {"lcpCloudRegionId": 'RegionOne', #self.vcpecommon.os_region_name,
@@ -201,7 +202,7 @@ class SoUtils:
'name': 'VfModuleNames',
'value': [
{
- 'VfModuleModelInvariantUuid': self.vcpecommon.vgw_VfModuleModelInvariantUuid,
+ 'VfModuleModelInvariantUuid': vfmodule_models[0]['modelInvariantId'],
'VfModuleName': 'VGW2BRG-{0}'.format(brg_mac_enc)
}
]
@@ -237,10 +238,11 @@ class SoUtils:
instance_name = '_'.join([self.vcpecommon.instance_name_prefix['service'],
parser.svc_model['modelName'][0:10], name_suffix])
instance_name = instance_name.lower()
- req = self.generate_custom_service_request(instance_name, parser.svc_model, brg_mac)
+ req = self.generate_custom_service_request(instance_name, parser.svc_model,
+ parser.vfmodule_models, brg_mac)
self.logger.info(json.dumps(req, indent=2, sort_keys=True))
self.logger.info('Creating custom service {0}.'.format(instance_name))
- req_id, svc_instance_id = self.submit_create_req(req, 'service')
+ req_id, svc_instance_id = self.submit_create_req(req, 'service') # pylint: disable=W0612
if not self.check_progress(req_id, 140):
return False
return True
@@ -299,14 +301,14 @@ class SoUtils:
req = self.generate_vnf_or_network_request('network', network_name, model, svc_instance_id,
parser.svc_model)
self.logger.debug(json.dumps(req, indent=2, sort_keys=True))
- req_id, net_instance_id = self.submit_create_req(req, 'network', svc_instance_id)
+ req_id, net_instance_id = self.submit_create_req(req, 'network', svc_instance_id) # pylint: disable=W0612
if not self.check_progress(req_id, eta=20):
return None
self.logger.info('Changing subnet name to ' + self.vcpecommon.network_name_to_subnet_name(network_name))
self.vcpecommon.set_network_name(network_name)
subnet_name_changed = False
- for i in range(20):
+ for i in range(20): # pylint: disable=W0612
time.sleep(3)
if self.vcpecommon.set_subnet_name(network_name):
subnet_name_changed = True
@@ -342,11 +344,11 @@ class SoUtils:
preloader = preload.Preload(self.vcpecommon)
if self.vcpecommon.gra_api_flag:
- preloader.preload_vfmodule(vnf_template_file, svc_instance_id, parser.vnf_models[0], parser.vfmodule_models[0],
- preload_dict, name_suffix, True)
+ preloader.preload_vfmodule(vnf_template_file, svc_instance_id, parser.vnf_models[0], parser.vfmodule_models[0],
+ preload_dict, name_suffix, True)
else:
- preloader.preload_vfmodule(vnf_template_file, svc_instance_id, parser.vnf_models[0], parser.vfmodule_models[0],
- preload_dict, name_suffix, False)
+ preloader.preload_vfmodule(vnf_template_file, svc_instance_id, parser.vnf_models[0], parser.vfmodule_models[0],
+ preload_dict, name_suffix, False)
# create VF Module
if len(parser.vfmodule_models) == 1:
if not vnf_instance_id or not vnf_model:
@@ -361,7 +363,7 @@ class SoUtils:
req = self.generate_vfmodule_request(vfmodule_instance_name, model, svc_instance_id, parser.svc_model,
vnf_instance_id, vnf_model)
self.logger.debug(json.dumps(req, indent=2, sort_keys=True))
- req_id, vfmodule_instance_id = self.submit_create_req(req, 'vfmodule', svc_instance_id, vnf_instance_id)
+ req_id, vfmodule_instance_id = self.submit_create_req(req, 'vfmodule', svc_instance_id, vnf_instance_id) # pylint: disable=W0612
if not self.check_progress(req_id, eta=70, interval=50):
self.logger.error('Failed to create VF Module {0}.'.format(vfmodule_instance_name))
return None
diff --git a/test/vcpe/tests/test_imports.py b/test/vcpe/tests/test_imports.py
new file mode 100644
index 000000000..63e551fa3
--- /dev/null
+++ b/test/vcpe/tests/test_imports.py
@@ -0,0 +1,17 @@
+import sys
+sys.path.append('./')
+
+# pylint: disable=W0611
+import vcpecommon
+import config_sdnc_so
+import csar_parser
+import preload
+import sdcutils
+import soutils
+import vcpe_custom_service
+
+# This will test whether all modules that vcpe scripts leverage
+# are included in setuptools configuration
+
+def test_imports():
+ pass
diff --git a/test/vcpe/tox.ini b/test/vcpe/tox.ini
new file mode 100644
index 000000000..bee3a65eb
--- /dev/null
+++ b/test/vcpe/tox.ini
@@ -0,0 +1,15 @@
+# tox (https://tox.readthedocs.io/) is a tool for running tests
+# in multiple virtualenvs. This configuration file will run the
+# test suite on all supported python versions. To use it, "pip install --no-cache-dir tox"
+# and then run "tox" from this directory.
+
+[tox]
+envlist = pytest
+
+[testenv]
+deps = pytest
+
+[testenv:pytest]
+basepython = python3.8
+commands =
+ pytest
diff --git a/test/vcpe/vcpe.py b/test/vcpe/vcpe.py
index c768aa84d..e6038e719 100755
--- a/test/vcpe/vcpe.py
+++ b/test/vcpe/vcpe.py
@@ -4,7 +4,7 @@ import logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(levelname)s %(name)s.%(funcName)s(): %(message)s')
import sys
-from vcpecommon import *
+from vcpecommon import * # pylint: disable=W0614
import sdcutils
import soutils
from datetime import datetime
@@ -13,7 +13,15 @@ import vcpe_custom_service
import csar_parser
import config_sdnc_so
import json
+import urllib3
+import argparse
+from collections import OrderedDict
+# disable InsecureRequestWarning warning in requests < 2.16.0
+urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+# disable InsecureRequestWarning warning in requests >= 2.16.0
+from requests.packages.urllib3.exceptions import InsecureRequestWarning
+requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
def config_sniro(vcpecommon, vgmux_svc_instance_uuid, vbrg_svc_instance_uuid):
logger = logging.getLogger(__name__)
@@ -60,22 +68,30 @@ def create_one_service(vcpecommon, csar_file, vnf_template_file, preload_dict, s
def deploy_brg_only():
- logger = logging.getLogger(__name__)
+ logger = logging.getLogger(__name__) # pylint: disable=W0612
- vcpecommon = VcpeCommon()
+ vcpecommon = VcpeCommon(cfg_file=args.config)
preload_dict = vcpecommon.load_preload_data()
# name_suffix = preload_dict['${brg_bng_net}'].split('_')[-1]
name_suffix = datetime.now().strftime('%Y%m%d%H%M')
# create multiple services based on the pre-determined order
svc_instance_uuid = vcpecommon.load_object(vcpecommon.svc_instance_uuid_file)
- for keyword in ['brg']:
+ for keyword in ['brgemu']:
+ keyword_vnf=keyword + "_"
+ keyword_gra=keyword + "gra_"
heatbridge = 'gmux' == keyword
csar_file = vcpecommon.find_file(keyword, 'csar', 'csar')
- vnf_template_file = vcpecommon.find_file(keyword, 'json', 'preload_templates')
- vcpecommon.increase_ip_address_or_vni_in_template(vnf_template_file, ['vbrgemu_private_ip_0'])
- svc_instance_uuid[keyword] = create_one_service(vcpecommon, csar_file, vnf_template_file, preload_dict,
- name_suffix, heatbridge)
+ vnf_template_file = vcpecommon.find_file(keyword_vnf, 'json', 'preload_templates')
+ gra_template_file = vcpecommon.find_file(keyword_gra, 'json', 'preload_templates')
+ if vcpecommon.gra_api_flag:
+ vcpecommon.increase_ip_address_or_vni_in_template(gra_template_file, ['vbrgemu_private_ip_0'])
+ svc_instance_uuid[keyword] = create_one_service(vcpecommon, csar_file, gra_template_file, preload_dict,
+ name_suffix, heatbridge)
+ else:
+ vcpecommon.increase_ip_address_or_vni_in_template(vnf_template_file, ['vbrgemu_private_ip_0'])
+ svc_instance_uuid[keyword] = create_one_service(vcpecommon, csar_file, vnf_template_file, preload_dict,
+ name_suffix, heatbridge)
if not svc_instance_uuid[keyword]:
sys.exit(1)
@@ -86,7 +102,7 @@ def deploy_brg_only():
def deploy_infra():
logger = logging.getLogger(__name__)
- vcpecommon = VcpeCommon()
+ vcpecommon = VcpeCommon(cfg_file=args.config)
# preload all VNF-API networks
network_template = vcpecommon.find_file('network.', 'json', 'preload_templates')
@@ -121,11 +137,11 @@ def deploy_infra():
vnf_template_file = vcpecommon.find_file(keyword_vnf, 'json', 'preload_templates')
gra_template_file = vcpecommon.find_file(keyword_gra, 'json', 'preload_templates')
if vcpecommon.gra_api_flag:
- svc_instance_uuid[keyword] = create_one_service(vcpecommon, csar_file, gra_template_file, preload_dict,
- name_suffix, heatbridge)
+ svc_instance_uuid[keyword] = create_one_service(vcpecommon, csar_file, gra_template_file, preload_dict,
+ name_suffix, heatbridge)
else:
- svc_instance_uuid[keyword] = create_one_service(vcpecommon, csar_file, vnf_template_file, preload_dict,
- name_suffix, heatbridge)
+ svc_instance_uuid[keyword] = create_one_service(vcpecommon, csar_file, vnf_template_file, preload_dict,
+ name_suffix, heatbridge)
if not svc_instance_uuid[keyword]:
sys.exit(1)
@@ -153,12 +169,12 @@ def deploy_infra():
def deploy_custom_service():
nodes = ['brg', 'mux']
- vcpecommon = VcpeCommon(nodes)
+ vcpecommon = VcpeCommon(nodes, cfg_file=args.config)
custom_service = vcpe_custom_service.CustomService(vcpecommon)
# clean up
host_dic = {k: vcpecommon.hosts[k] for k in nodes}
- if False:
+ if False: # pylint: disable=W0125
if not vcpecommon.delete_vxlan_interfaces(host_dic):
sys.exit(1)
custom_service.del_all_vgw_stacks(vcpecommon.vgw_name_keyword)
@@ -176,7 +192,7 @@ def deploy_custom_service():
def closed_loop(lossrate=0):
nodes = ['brg', 'mux']
logger = logging.getLogger('__name__')
- vcpecommon = VcpeCommon(nodes)
+ vcpecommon = VcpeCommon(nodes, cfg_file=args.config)
logger.info('Setting up closed loop policy')
policy_template_file = vcpecommon.find_file('operational.vcpe', 'json', 'preload_templates')
@@ -199,9 +215,9 @@ def closed_loop(lossrate=0):
def init_so_sdnc():
- logger = logging.getLogger('__name__')
- vcpecommon = VcpeCommon()
- #config_sdnc_so.insert_sdnc_ip_pool(vcpecommon)
+ logger = logging.getLogger('__name__') # pylint: disable=W0612
+ vcpecommon = VcpeCommon(cfg_file=args.config)
+ config_sdnc_so.insert_sdnc_ip_pool(vcpecommon)
config_sdnc_so.insert_customer_service_to_so(vcpecommon)
#config_sdnc_so.insert_customer_service_to_sdnc(vcpecommon)
vgw_vfmod_name_index= 0
@@ -209,13 +225,16 @@ def init_so_sdnc():
def init():
- vcpecommon = VcpeCommon()
+ vcpecommon = VcpeCommon(cfg_file=args.config)
init_sdc(vcpecommon)
download_vcpe_service_templates(vcpecommon)
+ preloader = preload.Preload(vcpecommon)
+ template_aai_region_data = vcpecommon.find_file('aai_region_data', 'json', 'preload_templates')
+ preloader.preload_aai_data(template_aai_region_data)
def init_sdc(vcpecommon):
- sdc = sdcutils.SdcUtils(vcpecommon)
+ sdc = sdcutils.SdcUtils(vcpecommon) # pylint: disable=W0612
# default SDC creates BRG - remove this in frankfurt
#sdc.create_allotted_resource_subcategory('BRG')
@@ -226,54 +245,82 @@ def download_vcpe_service_templates(vcpecommon):
def tmp_sniro():
- logger = logging.getLogger(__name__)
+ logger = logging.getLogger(__name__) # pylint: disable=W0612
- vcpecommon = VcpeCommon()
+ vcpecommon = VcpeCommon(cfg_file=args.config)
svc_instance_uuid = vcpecommon.load_object(vcpecommon.svc_instance_uuid_file)
# Setting up SNIRO
config_sniro(vcpecommon, svc_instance_uuid['gmux'], svc_instance_uuid['brgemu'])
-def test():
- vcpecommon = VcpeCommon()
+def test():
+ vcpecommon = VcpeCommon(cfg_file=args.config)
print("oom-k8s-04 public ip: %s" % (vcpecommon.get_vm_public_ip_by_nova('oom-k8s-04')))
-if __name__ == '__main__':
- print('----------------------------------------------------------------------------------------------------')
- print(' vcpe.py: Brief info about this program')
-# print(' vcpe.py sdc: Onboard VNFs, design and distribute vCPE services (under development)')
- print(' vcpe.py init: Add customer service data to SDNC and SO DBs.')
- print(' vcpe.py infra: Deploy infrastructure, including DHCP, AAA, DNS, Web Server, vBNG, vGMUX, vBRG.')
- print(' vcpe.py brg: Deploy brg only (for testing after infra succeeds).')
- print(' vcpe.py customer: Deploy customer service, including vGW and VxLANs')
- print(' vcpe.py loop: Test closed loop control (packet loss set to 22)')
- print(' vcpe.py noloss: Set vGMUX packet loss to 0')
- print('----------------------------------------------------------------------------------------------------')
+def get_arg_parser(modes):
+ """
+ Parse cmd line options and return ArgumentParser object
+ :param modes: map of supported script modes
+ :return: ArgumentParser object
+ """
+ # Build usage synopsis string
+ usage = "\n"*2
+ for k,v in modes.items():
+ usage += 'vcpe.py {0:12} {1}\n'.format(k + ':',v)
- if len(sys.argv) != 2:
- sys.exit()
+ parser = argparse.ArgumentParser(usage=usage, formatter_class=
+ argparse.ArgumentDefaultsHelpFormatter)
+ parser.add_argument('mode',metavar='MODE',
+ help='Script mode: {0}'.format('|'.join(modes.keys())),
+ choices=modes.keys())
+ parser.add_argument('--config',help='Configuration file path',default=None)
- if sys.argv[1] == 'sdc':
- print('Under development')
- elif sys.argv[1] == 'init':
- init()
- init_so_sdnc()
- elif sys.argv[1] == 'infra':
+ return parser
+
+if __name__ == '__main__':
+ # Supported modes matrix
+ # OrderedDict object has to be used to preserve desired modes
+ # order in synopsis text
+ modes = OrderedDict()
+# modes["sdc"] = "Onboard VNFs, design and distribute vCPE services (under development)"
+ modes["init"] = "Add customer service data to SDNC and SO DBs"
+ modes["infra"] = "Deploy infrastructure, including DHCP, AAA, DNS, Web Server, vBNG, vGMUX, vBRG"
+ modes["brg"] = "Deploy brg only (for testing after infra succeeds)"
+ modes["customer"] = "Deploy customer service, including vGW and VxLANs"
+ modes["loop"] = "Test closed loop control (packet loss set to 22)"
+ modes["noloss"] = "Set vGMUX packet loss to 0"
+ modes["test"] = ""
+ modes["sniro"] = "Config SNIRO homing emulator"
+
+ parser = get_arg_parser(modes)
+
+ try:
+ assert len(sys.argv) != 1
+ except AssertionError:
+ # No cmd line opts given, print help
+ parser.print_help()
+ sys.exit(1)
+ else:
+ args = parser.parse_args()
+
+ if args.mode == 'init':
+ init()
+ init_so_sdnc()
+ elif args.mode == 'infra':
#if 'y' == raw_input('Ready to deploy infrastructure? y/n: ').lower():
- deploy_infra()
- elif sys.argv[1] == 'customer':
- if 'y' == raw_input('Ready to deploy customer service? y/n: ').lower():
+ deploy_infra()
+ elif args.mode == 'customer':
+ if 'y' == raw_input('Ready to deploy customer service? y/n: ').lower(): # pylint: disable=E0602
deploy_custom_service()
- elif sys.argv[1] == 'loop':
+ elif args.mode == 'loop':
closed_loop(22)
- elif sys.argv[1] == 'noloss':
+ elif args.mode == 'noloss':
closed_loop(0)
- elif sys.argv[1] == 'brg':
+ elif args.mode == 'brg':
deploy_brg_only()
- elif sys.argv[1] == 'sniro':
+ elif args.mode == 'sniro':
tmp_sniro()
- elif sys.argv[1] == 'test':
+ elif args.mode == 'test':
test()
-
diff --git a/test/vcpe/vcpe_custom_service.py b/test/vcpe/vcpe_custom_service.py
index e2681fd11..973cd2723 100755
--- a/test/vcpe/vcpe_custom_service.py
+++ b/test/vcpe/vcpe_custom_service.py
@@ -2,8 +2,7 @@
import os
import requests
-import time
-from vcpecommon import *
+from vcpecommon import * # pylint: disable=W0614
from datetime import datetime
import soutils
import logging
@@ -68,10 +67,7 @@ class CustomService:
def create_custom_service(self, csar_file, vgw_template_file, vgw_gra_template_file, preload_dict=None):
name_suffix = datetime.now().strftime('%Y%m%d%H%M')
- if self.vcpecommon.oom_mode:
- brg_mac = str(raw_input("Enter the BRG MAC address: "))
- else:
- brg_mac = self.vcpecommon.get_brg_mac_from_sdnc()
+ brg_mac = self.vcpecommon.get_brg_mac_from_sdnc()
brg_mac_enc = brg_mac.replace(':', '-')
# get name index
self.vgw_vfmod_name_index= self.vcpecommon.load_object(self.vcpecommon.vgw_vfmod_name_index_file)
diff --git a/test/vcpe/vcpecommon.py b/test/vcpe/vcpecommon.py
index 371029e19..0e02987ff 100755
--- a/test/vcpe/vcpecommon.py
+++ b/test/vcpe/vcpecommon.py
@@ -10,98 +10,28 @@ import sys
import ipaddress
import mysql.connector
import requests
-import commands
+import subprocess
import time
+import yaml
from novaclient import client as openstackclient
+from openstack.config import loader
from kubernetes import client, config
from netaddr import IPAddress, IPNetwork
-######################################################################
-# Parts which must be updated / cross-checked during each deployment #
-# are marked as CHANGEME #
-######################################################################
-
class VcpeCommon:
- #############################################################################################
- # Set network prefix of k8s host external address; it's used for pod public IP autodetection
- # but can be overriden from user in case of autodetection failure
- external_net_addr = '10.12.0.0'
- external_net_prefix_len = 16
-
- #############################################################################################
- # set the openstack cloud access credentials here
- oom_mode = True
-
- #############################################################################################
- # set the gra_api flag
- #gra_api_flag= False
- gra_api_flag= True
-
- ###########################
- # set Openstack credentials
- # CHANGEME part
- cloud = {
- '--os-auth-url': 'http://10.12.25.2:5000',
- '--os-username': 'kxi',
- '--os-user-domain-id': 'default',
- '--os-project-domain-id': 'default',
- '--os-tenant-id': '712b6016580e410b9abfec9ca34953ce' if oom_mode else '1e097c6713e74fd7ac8e4295e605ee1e',
- '--os-region-name': 'RegionOne',
- '--os-password': 'n3JhGMGuDzD8',
- '--os-project-domain-name': 'Integration-Release-Daily' if oom_mode else 'Integration-SB-07',
- '--os-identity-api-version': '3'
- }
-
- ############################################################################
- # set oam and public network which must exist in openstack before deployment
- # CHANGEME part
- common_preload_config = {
- 'oam_onap_net': 'oam_network_exxC' if oom_mode else 'oam_onap_lAky',
- 'oam_onap_subnet': 'oam_network_exxC' if oom_mode else 'oam_onap_lAky',
- 'public_net': 'external',
- 'public_net_id': '971040b2-7059-49dc-b220-4fab50cb2ad4'
- }
-
- #############################################################################
- # Set name of Onap's k8s namespace and sdnc controller pod
- # CHANGEME part
- onap_namespace = 'onap'
- onap_environment = 'dev'
- sdnc_controller_pod = '-'.join([onap_environment, 'sdnc-sdnc-0'])
-
- template_variable_symbol = '${'
- cpe_vm_prefix = 'zdcpe'
-
- #############################################################################################
- # preloading network config
- # key=network role
- # value = [subnet_start_ip, subnet_gateway_ip]
- preload_network_config = {
- 'cpe_public': ['10.2.0.2', '10.2.0.1'],
- 'cpe_signal': ['10.4.0.2', '10.4.0.1'],
- 'brg_bng': ['10.3.0.2', '10.3.0.1'],
- 'bng_mux': ['10.1.0.10', '10.1.0.1'],
- 'mux_gw': ['10.5.0.10', '10.5.0.1']
- }
-
- dcae_ves_collector_name = 'dcae-bootstrap'
- global_subscriber_id = 'SDN-ETHERNET-INTERNET'
- project_name = 'Project-Demonstration'
- owning_entity_id = '520cc603-a3c4-4ec2-9ef4-ca70facd79c0'
- owning_entity_name = 'OE-Demonstration1'
-
- def __init__(self, extra_host_names=None):
+
+ def __init__(self, extra_host_names=None, cfg_file=None):
self.logger = logging.getLogger(__name__)
self.logger.setLevel(logging.DEBUG)
self.logger.info('Initializing configuration')
+ self.default_config = 'vcpeconfig.yaml'
- ##################################################################################################################################
- # following param must be updated e.g. from csar file (grep for VfModuleModelInvariantUuid string) before vcpe.py customer call !!
- # vgw_VfModuleModelInvariantUuid is in rescust service csar,
- # look in service-VcpesvcRescust1118-template.yml for groups vgw module metadata. TODO: read this value automatically
- # CHANGEME part
- self.vgw_VfModuleModelInvariantUuid = '26d6a718-17b2-4ba8-8691-c44343b2ecd2'
+ # Read configuration from config file
+ self._load_config(cfg_file)
+ # Load OpenStack settings
+ self._load_os_config()
+ self.sdnc_controller_pod = '-'.join([self.onap_environment, 'sdnc-sdnc-0'])
# OOM: this is the address that the brg and bng will nat for sdnc access - 10.0.0.x address of k8 host for sdnc-0 container
self.sdnc_oam_ip = self.get_pod_node_oam_ip(self.sdnc_controller_pod)
# OOM: this is a k8s host external IP, e.g. oom-k8s-01 IP
@@ -116,7 +46,7 @@ class VcpeCommon:
self.aai_query_port = '30233' if self.oom_mode else '8443'
self.sniro_port = '30288' if self.oom_mode else '8080'
- self.host_names = ['sdc', 'so', 'sdnc', 'robot', 'aai-inst1', self.dcae_ves_collector_name]
+ self.host_names = ['sdc', 'so', 'sdnc', 'robot', 'aai-inst1', self.dcae_ves_collector_name, 'mariadb-galera']
if extra_host_names:
self.host_names.extend(extra_host_names)
# get IP addresses
@@ -137,12 +67,6 @@ class VcpeCommon:
'vfmodule': 'vcpe_vfmodule'
}
self.aai_userpass = 'AAI', 'AAI'
-
- ############################################################################################################
- # following key is overriding public key from vCPE heat templates, it's important to use correct one in here
- # CHANGEME part
- self.pub_key = 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDKXDgoo3+WOqcUG8/5uUbk81+yczgwC4Y8ywTmuQqbNxlY1oQ0YxdMUqUnhitSXs5S/yRuAVOYHwGg2mCs20oAINrP+mxBI544AMIb9itPjCtgqtE2EWo6MmnFGbHB4Sx3XioE7F4VPsh7japsIwzOjbrQe+Mua1TGQ5d4nfEOQaaglXLLPFfuc7WbhbJbK6Q7rHqZfRcOwAMXgDoBqlyqKeiKwnumddo2RyNT8ljYmvB6buz7KnMinzo7qB0uktVT05FH9Rg0CTWH5norlG5qXgP2aukL0gk1ph8iAt7uYLf1ktp+LJI2gaF6L0/qli9EmVCSLr1uJ38Q8CBflhkh'
-
self.os_tenant_id = self.cloud['--os-tenant-id']
self.os_region_name = self.cloud['--os-region-name']
self.common_preload_config['pub_key'] = self.pub_key
@@ -177,7 +101,7 @@ class VcpeCommon:
self.sdnc_db_name = 'sdnctl'
self.sdnc_db_user = 'sdnctl'
self.sdnc_db_pass = 'gamma'
- self.sdnc_db_port = '32774'
+ self.sdnc_db_port = self.get_k8s_service_endpoint_info('mariadb-galera','port') if self.oom_mode else '3306'
self.sdnc_headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
self.sdnc_preload_network_url = 'https://' + self.hosts['sdnc'] + \
':' + self.sdnc_preloading_port + '/restconf/operations/VNF-API:preload-network-topology-operation'
@@ -191,6 +115,11 @@ class VcpeCommon:
'/restconf/config/GENERIC-RESOURCE-API:'
#############################################################################################
+ # MARIADB-GALERA settings
+ self.mariadb_galera_endpoint_ip = self.get_k8s_service_endpoint_info('mariadb-galera','ip')
+ self.mariadb_galera_endpoint_port = self.get_k8s_service_endpoint_info('mariadb-galera','port')
+
+ #############################################################################################
# SO urls, note: do NOT add a '/' at the end of the url
self.so_req_api_url = {'v4': 'http://' + self.hosts['so'] + ':' + self.so_nbi_port + '/onap/so/infra/serviceInstantiation/v7/serviceInstances',
'v5': 'http://' + self.hosts['so'] + ':' + self.so_nbi_port + '/onap/so/infra/serviceInstantiation/v7/serviceInstances'}
@@ -200,7 +129,8 @@ class VcpeCommon:
self.so_db_name = 'catalogdb'
self.so_db_user = 'root'
self.so_db_pass = 'secretpassword'
- self.so_db_port = '30252' if self.oom_mode else '32769'
+ self.so_db_host = self.mariadb_galera_endpoint_ip if self.oom_mode else self.hosts['so']
+ self.so_db_port = self.mariadb_galera_endpoint_port if self.oom_mode else '3306'
self.vpp_inf_url = 'http://{0}:8183/restconf/config/ietf-interfaces:interfaces'
self.vpp_api_headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
@@ -219,9 +149,71 @@ class VcpeCommon:
self.policy_pap_service_name = 'policy-pap'
#############################################################################################
- # MARIADB-GALERA settings
- self.mariadb_galera_endpoint_ip = self.get_k8s_service_endpoint_info('mariadb-galera','ip')
- self.mariadb_galera_endpoint_port = self.get_k8s_service_endpoint_info('mariadb-galera','port')
+ # AAI urls
+ self.aai_region_query_url = 'https://' + self.oom_so_sdnc_aai_ip + ':' +\
+ self.aai_query_port +\
+ '/aai/v14/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/' +\
+ self.cloud['--os-region-name']
+ self.aai_headers = {'Accept': 'application/json',
+ 'Content-Type': 'application/json',
+ 'X-FromAppId': 'postman', 'X-TransactionId': '9999'}
+
+ def _load_config(self, cfg_file):
+ """
+ Reads vcpe config file and injects settings as object's attributes
+ :param cfg_file: Configuration file path
+ """
+
+ if cfg_file is None:
+ cfg_file = self.default_config
+
+ try:
+ with open(cfg_file, 'r') as cfg:
+ cfg_yml = yaml.full_load(cfg)
+ except Exception as e:
+ self.logger.error('Error loading configuration: ' + str(e))
+ sys.exit(1)
+
+ self.logger.debug('\n' + yaml.dump(cfg_yml))
+
+ # Use setattr to load config file keys as VcpeCommon class' object
+ # attributes
+ try:
+ # Check config isn't empty
+ if cfg_yml is not None:
+ for cfg_key in cfg_yml:
+ setattr(self, cfg_key, cfg_yml[cfg_key])
+ except TypeError as e:
+ self.logger.error('Unable to parse config file: ' + str(e))
+ sys.exit(1)
+
+ def _load_os_config(self):
+ """
+ Reads cloud settings and sets them as object's 'cloud' attribute
+ """
+ # Create OpenStackConfig config instance
+ os_config = loader.OpenStackConfig()
+ # Try reading cloud settings for self.cloud_name
+ try:
+ os_cloud = os_config.cloud_config['clouds'][self.cloud_name]
+ except KeyError:
+ self.logger.error('Error fetching cloud settings for cloud "{0}"'
+ .format(self.cloud_name))
+ sys.exit(1)
+ self.logger.debug('Cloud config:\n {0}'.format(json.dumps(
+ os_cloud,indent=4)))
+
+ # Extract all OS settings keys and alter their names
+ # to conform to openstack cli client
+ self.cloud = {}
+ for k in os_cloud:
+ if isinstance(os_cloud[k],dict):
+ for sub_k in os_cloud[k]:
+ os_setting_name = '--os-' + sub_k.replace('_','-')
+ self.cloud[os_setting_name] = os_cloud[k][sub_k]
+ else:
+ os_setting_name = '--os-' + k.replace('_','-')
+ self.cloud[os_setting_name] = os_cloud[k]
def heatbridge(self, openstack_stack_name, svc_instance_uuid):
"""
@@ -230,7 +222,7 @@ class VcpeCommon:
self.logger.info('Adding vServer information to AAI for {0}'.format(openstack_stack_name))
if not self.oom_mode:
cmd = '/opt/demo.sh heatbridge {0} {1} vCPE'.format(openstack_stack_name, svc_instance_uuid)
- ret = commands.getstatusoutput("ssh -i onap_dev root@{0} '{1}'".format(self.hosts['robot'], cmd))
+ ret = subprocess.getstatusoutput("ssh -i onap_dev root@{0} '{1}'".format(self.hosts['robot'], cmd))
self.logger.debug('%s', ret)
else:
print('To add vGMUX vserver info to AAI, do the following:')
@@ -244,8 +236,16 @@ class VcpeCommon:
Check table DHCP_MAP in the SDNC DB. Find the newly instantiated BRG MAC address.
Note that there might be multiple BRGs, the most recently instantiated BRG always has the largest IP address.
"""
- cnx = mysql.connector.connect(user=self.sdnc_db_user, password=self.sdnc_db_pass, database=self.sdnc_db_name,
- host=self.hosts['sdnc'], port=self.sdnc_db_port)
+ if self.oom_mode:
+ db_host=self.mariadb_galera_endpoint_ip
+ else:
+ db_host=self.hosts['mariadb-galera']
+
+ cnx = mysql.connector.connect(user=self.sdnc_db_user,
+ password=self.sdnc_db_pass,
+ database=self.sdnc_db_name,
+ host=db_host,
+ port=self.sdnc_db_port)
cursor = cnx.cursor()
query = "SELECT * from DHCP_MAP"
cursor.execute(query)
@@ -254,7 +254,7 @@ class VcpeCommon:
mac_recent = None
host = -1
for mac, ip in cursor:
- self.logger.debug(mac + ':' + ip)
+ self.logger.debug(mac + ' - ' + ip)
this_host = int(ip.split('.')[-1])
if host < this_host:
host = this_host
@@ -262,7 +262,12 @@ class VcpeCommon:
cnx.close()
- assert mac_recent
+ try:
+ assert mac_recent
+ except AssertionError:
+ self.logger.error('Failed to obtain BRG MAC address from database')
+ sys.exit(1)
+
return mac_recent
def execute_cmds_mariadb(self, cmds):
@@ -276,7 +281,7 @@ class VcpeCommon:
def execute_cmds_so_db(self, cmds):
self.execute_cmds_db(cmds, self.so_db_user, self.so_db_pass, self.so_db_name,
- self.hosts['so'], self.so_db_port)
+ self.so_db_host, self.so_db_port)
def execute_cmds_db(self, cmds, dbuser, dbpass, dbname, host, port):
cnx = mysql.connector.connect(user=dbuser, password=dbpass, database=dbname, host=host, port=port)
@@ -369,7 +374,6 @@ class VcpeCommon:
sys.exit(1)
# Check policy already applied
- requests.packages.urllib3.disable_warnings()
policy_exists_req = requests.get(self.policy_pap_get_url.format(
p_pap_cluster_ip), auth=self.policy_userpass,
verify=False, headers=self.policy_headers)
@@ -440,7 +444,6 @@ class VcpeCommon:
self.hosts['aai-inst1'], self.aai_query_port, search_node_type, key, node_uuid)
headers = {'Content-Type': 'application/json', 'Accept': 'application/json', 'X-FromAppID': 'vCPE-Robot', 'X-TransactionId': 'get_aai_subscr'}
- requests.packages.urllib3.disable_warnings()
r = requests.get(url, headers=headers, auth=self.aai_userpass, verify=False)
response = r.json()
self.logger.debug('aai query: ' + url)
@@ -455,17 +458,17 @@ class VcpeCommon:
:param sz: a string
:return: the first IP address matching the network, e.g. 10.5.12.3
"""
- network = ipaddress.ip_network(unicode('{0}/{1}'.format(net_addr, net_addr_len)), strict=False)
+ network = ipaddress.ip_network(unicode('{0}/{1}'.format(net_addr, net_addr_len)), strict=False) # pylint: disable=E0602
ip_list = re.findall(r'[0-9]+(?:\.[0-9]+){3}', sz)
for ip in ip_list:
- this_net = ipaddress.ip_network(unicode('{0}/{1}'.format(ip, net_addr_len)), strict=False)
+ this_net = ipaddress.ip_network(unicode('{0}/{1}'.format(ip, net_addr_len)), strict=False) # pylint: disable=E0602
if this_net == network:
return str(ip)
return None
def get_pod_node_oam_ip(self, pod):
"""
- :Assuming kubectl is available and configured by default config (~/.kube/config)
+ :Assuming kubectl is available and configured by default config (~/.kube/config)
:param pod: pod name substring, e.g. 'sdnc-sdnc-0'
:return pod's cluster node oam ip (10.0.0.0/16)
"""
@@ -482,12 +485,12 @@ class VcpeCommon:
break
if ret is None:
- ret = raw_input("Enter " + self.sdnc_controller_pod + " pod cluster node OAM IP address(10.0.0.0/16): ")
+ ret = raw_input("Enter " + self.sdnc_controller_pod + " pod cluster node OAM IP address(10.0.0.0/16): ") # pylint: disable=E0602
return ret
def get_pod_node_public_ip(self, pod):
"""
- :Assuming kubectl is available and configured by default config (~/.kube/config)
+ :Assuming kubectl is available and configured by default config (~/.kube/config)
:param pod: pod name substring, e.g. 'sdnc-sdnc-0'
:return pod's cluster node public ip (i.e. 10.12.0.0/16)
"""
@@ -504,7 +507,7 @@ class VcpeCommon:
break
if ret is None:
- ret = raw_input("Enter " + self.sdnc_controller_pod + " pod cluster node public IP address(i.e. " + self.external_net_addr + "): ")
+ ret = raw_input("Enter " + self.sdnc_controller_pod + " pod cluster node public IP address(i.e. " + self.external_net_addr + "): ") # pylint: disable=E0602
return ret
def get_vm_public_ip_by_nova(self, vm):
@@ -514,10 +517,10 @@ class VcpeCommon:
:return vm public ip
"""
subnet = IPNetwork('{0}/{1}'.format(self.external_net_addr, self.external_net_prefix_len))
- nova = openstackclient.Client(2, self.cloud['--os-username'], self.cloud['--os-password'], self.cloud['--os-tenant-id'], self.cloud['--os-auth-url'])
+ nova = openstackclient.Client(2, self.cloud['--os-username'], self.cloud['--os-password'], self.cloud['--os-tenant-id'], self.cloud['--os-auth-url'])
for i in nova.servers.list():
if i.name == vm:
- for k, v in i.networks.items():
+ for k, v in i.networks.items(): # pylint: disable=W0612
for ip in v:
if IPAddress(ip) in subnet:
return ip
@@ -714,7 +717,7 @@ class VcpeCommon:
url = self.vpp_inf_url.format(ip) + '/interface/' + inf
requests.delete(url, headers=self.vpp_api_headers, auth=self.vpp_api_userpass)
- if len(self.get_vxlan_interfaces(ip)) > 0:
+ if self.get_vxlan_interfaces(ip):
self.logger.error("Error deleting VxLAN from {0}, try to restart the VM, IP is {1}.".format(host, ip))
return False
@@ -763,4 +766,3 @@ class VcpeCommon:
def load_vgmux_vnf_name(self):
return self.load_object(self.vgmux_vnf_name_file)
-
diff --git a/test/vcpe/vcpeconfig-oom_disabled.yaml b/test/vcpe/vcpeconfig-oom_disabled.yaml
new file mode 100644
index 000000000..0c2fd7ea9
--- /dev/null
+++ b/test/vcpe/vcpeconfig-oom_disabled.yaml
@@ -0,0 +1,65 @@
+#############################################################################################
+# Set network prefix of k8s host external address; it's used for pod public IP autodetection
+# but can be overridden by the user in case of autodetection failure
+external_net_addr: '10.12.0.0'
+external_net_prefix_len: 16
+
+#############################################################################################
+# set the openstack cloud access credentials here
+oom_mode: False
+
+#############################################################################################
+# set the gra_api flag
+# Mustn't be set to True until Frankfurt DGs are updated for GRA-API infrastructure
+gra_api_flag: False
+
+###########################
+# set Openstack cloud name
+cloud_name: 'integration-sb-07'
+
+############################################################################
+# set oam and public network which must exist in openstack before deployment
+common_preload_config:
+ 'oam_onap_net': 'oam_onap_lAky'
+ 'oam_onap_subnet': 'oam_onap_lAky'
+ 'public_net': 'external'
+ 'public_net_id': '971040b2-7059-49dc-b220-4fab50cb2ad4'
+
+#############################################################################
+# Set name of Onap's k8s namespace and sdnc controller pod
+onap_namespace: 'onap'
+onap_environment: 'dev'
+
+template_variable_symbol: '${'
+cpe_vm_prefix: 'zdcpe'
+
+#############################################################################################
+# preloading network config
+# key=network role
+# value = [subnet_start_ip, subnet_gateway_ip]
+preload_network_config:
+ 'cpe_public':
+ - '10.2.0.2'
+ - '10.2.0.1'
+ 'cpe_signal':
+ - '10.4.0.2'
+ - '10.4.0.1'
+ 'brg_bng':
+ - '10.3.0.2'
+ - '10.3.0.1'
+ 'bng_mux':
+ - '10.1.0.10'
+ - '10.1.0.1'
+ 'mux_gw':
+ - '10.5.0.10'
+ - '10.5.0.1'
+
+dcae_ves_collector_name: 'dcae-bootstrap'
+global_subscriber_id: 'SDN-ETHERNET-INTERNET'
+project_name: 'Project-Demonstration'
+owning_entity_id: '520cc603-a3c4-4ec2-9ef4-ca70facd79c0'
+owning_entity_name: 'OE-Demonstration1'
+
+############################################################################################################
+# following key is overriding public key from vCPE heat templates, it's important to use correct one in here
+pub_key: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDKXDgoo3+WOqcUG8/5uUbk81+yczgwC4Y8ywTmuQqbNxlY1oQ0YxdMUqUnhitSXs5S/yRuAVOYHwGg2mCs20oAINrP+mxBI544AMIb9itPjCtgqtE2EWo6MmnFGbHB4Sx3XioE7F4VPsh7japsIwzOjbrQe+Mua1TGQ5d4nfEOQaaglXLLPFfuc7WbhbJbK6Q7rHqZfRcOwAMXgDoBqlyqKeiKwnumddo2RyNT8ljYmvB6buz7KnMinzo7qB0uktVT05FH9Rg0CTWH5norlG5qXgP2aukL0gk1ph8iAt7uYLf1ktp+LJI2gaF6L0/qli9EmVCSLr1uJ38Q8CBflhkh'
diff --git a/test/vcpe/vcpeconfig.yaml b/test/vcpe/vcpeconfig.yaml
new file mode 100644
index 000000000..36af1b684
--- /dev/null
+++ b/test/vcpe/vcpeconfig.yaml
@@ -0,0 +1,65 @@
+#############################################################################################
+# Set network prefix of k8s host external address; it's used for pod public IP autodetection
+# but can be overridden by the user in case of autodetection failure
+external_net_addr: '10.12.0.0'
+external_net_prefix_len: 16
+
+#############################################################################################
+# set the openstack cloud access credentials here
+oom_mode: True
+
+#############################################################################################
+# set the gra_api flag
+# Mustn't be set to True until Frankfurt DGs are updated for GRA-API infrastructure
+gra_api_flag: False
+
+###########################
+# set Openstack cloud name
+cloud_name: 'integration-release-daily'
+
+############################################################################
+# set oam and public network which must exist in openstack before deployment
+common_preload_config:
+ 'oam_onap_net': 'oam_network_exxC'
+ 'oam_onap_subnet': 'oam_network_exxC'
+ 'public_net': 'external'
+ 'public_net_id': '971040b2-7059-49dc-b220-4fab50cb2ad4'
+
+#############################################################################
+# Set name of Onap's k8s namespace and sdnc controller pod
+onap_namespace: 'onap'
+onap_environment: 'dev'
+
+template_variable_symbol: '${'
+cpe_vm_prefix: 'zdcpe'
+
+#############################################################################################
+# preloading network config
+# key=network role
+# value = [subnet_start_ip, subnet_gateway_ip]
+preload_network_config:
+ 'cpe_public':
+ - '10.2.0.2'
+ - '10.2.0.1'
+ 'cpe_signal':
+ - '10.4.0.2'
+ - '10.4.0.1'
+ 'brg_bng':
+ - '10.3.0.2'
+ - '10.3.0.1'
+ 'bng_mux':
+ - '10.1.0.10'
+ - '10.1.0.1'
+ 'mux_gw':
+ - '10.5.0.10'
+ - '10.5.0.1'
+
+dcae_ves_collector_name: 'dcae-bootstrap'
+global_subscriber_id: 'SDN-ETHERNET-INTERNET'
+project_name: 'Project-Demonstration'
+owning_entity_id: '520cc603-a3c4-4ec2-9ef4-ca70facd79c0'
+owning_entity_name: 'OE-Demonstration1'
+
+############################################################################################################
+# following key is overriding public key from vCPE heat templates, it's important to use correct one in here
+pub_key: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDKXDgoo3+WOqcUG8/5uUbk81+yczgwC4Y8ywTmuQqbNxlY1oQ0YxdMUqUnhitSXs5S/yRuAVOYHwGg2mCs20oAINrP+mxBI544AMIb9itPjCtgqtE2EWo6MmnFGbHB4Sx3XioE7F4VPsh7japsIwzOjbrQe+Mua1TGQ5d4nfEOQaaglXLLPFfuc7WbhbJbK6Q7rHqZfRcOwAMXgDoBqlyqKeiKwnumddo2RyNT8ljYmvB6buz7KnMinzo7qB0uktVT05FH9Rg0CTWH5norlG5qXgP2aukL0gk1ph8iAt7uYLf1ktp+LJI2gaF6L0/qli9EmVCSLr1uJ38Q8CBflhkh'
diff --git a/test/vcpe_tosca/local/config/vcpe_config.json b/test/vcpe_tosca/local/config/vcpe_config.json
new file mode 100644
index 000000000..605f4e291
--- /dev/null
+++ b/test/vcpe_tosca/local/config/vcpe_config.json
@@ -0,0 +1,94 @@
+{
+ "open_cli_product": "onap-dublin",
+ "open_cli_home": "/opt/oclip",
+ "msb_url": "https://192.168.235.77:30283",
+ "aai_url": "https://192.168.235.77:30283",
+ "aai_username": "AAI",
+ "aai_password": "AAI",
+ "multicloud_url": "https://192.168.235.77:30283",
+ "complex_name": "clli_liping",
+ "street1": "street1",
+ "street2": "street2",
+ "physical_location": "phy_type",
+ "data_center_code": "code1",
+ "latitude": "32.89948",
+ "longitude": "97.045443",
+ "lata": "example-lata-val-28399",
+ "elevation": "example-elevation-val-28399",
+ "region": "northwest",
+ "state": "oregon",
+ "city": "hillsboro",
+ "postal-code": "00000",
+ "country": "USA",
+ "identity_url": "example-identity-url-val-56898",
+ "cloud_region_data": {
+ "RegionOne": {
+ "cloud-region-version": "titanium_cloud",
+ "esr-system-info-id": "1111ce1f-aa78-4ebf-8d6f-4b62773e9b01",
+ "service-url": "http://192.168.235.2:5000/v3",
+ "user-name": "vcpe_case",
+ "password": "vCPE@2019",
+ "system-type": "VIM",
+ "ssl-insecure": true,
+ "cloud-domain": "Default",
+ "default-tenant": "vcpe",
+ "cloud-type": "openstack",
+ "identity-url": "http://192.168.235.2:5000/v3",
+ "system-status": "active"
+ }
+ },
+ "cloud-owner": "vCPE009",
+ "owner-defined-type": "t1",
+ "cloud-zone": "CloudZone",
+ "service_name": "vCPE_liping",
+ "customer_name": "cust1_liping",
+ "subscriber_name": "cust1_liping",
+ "vfc-url": "https://192.168.235.77:30283",
+ "vnfs": {
+ "vgw": {
+ "path": "/csar/vgw.csar",
+ "key": "key2",
+ "value": "value2"
+ },
+ "infra": {
+ "path": "/csar/infra.csar",
+ "key": "key2",
+ "value": "value2"
+ },
+ "vbng": {
+ "path": "/csar/vbng.csar",
+ "key": "key2",
+ "value": "value2"
+ },
+ "vbrgemu": {
+ "path": "/csar/vbrgemu.csar",
+ "key": "key2",
+ "value": "value2"
+ },
+ "vgmux": {
+ "path": "/csar/vgmux.csar",
+ "key": "key2",
+ "value": "value2"
+ }
+ },
+ "ns": {
+ "key": "key1",
+ "value": "value1",
+ "path": "/csar/ns.csar",
+ "name": "vcpe11"
+ },
+ "location": "vCPE009_RegionOne",
+ "vnfm_params": {
+ "GVNFMDRIVER": {
+ "type": "gvnfmdriver",
+ "vendor": "vfc",
+ "version": "v1.0",
+ "url": "https://192.168.235.77:30283/",
+ "vim-id": "vCPE009_RegionOne",
+ "user-name": "admin",
+ "user-password": "admin",
+ "vnfm-version": "v1.0"
+ }
+ },
+ "sdc-controller-id": "2"
+}
diff --git a/test/vcpe_tosca/local/csar/infra.csar b/test/vcpe_tosca/local/csar/infra.csar
new file mode 100644
index 000000000..fe4d02829
--- /dev/null
+++ b/test/vcpe_tosca/local/csar/infra.csar
Binary files differ
diff --git a/test/vcpe_tosca/local/csar/ns.csar b/test/vcpe_tosca/local/csar/ns.csar
new file mode 100644
index 000000000..bf9fdc962
--- /dev/null
+++ b/test/vcpe_tosca/local/csar/ns.csar
Binary files differ
diff --git a/test/vcpe_tosca/local/csar/vbng.csar b/test/vcpe_tosca/local/csar/vbng.csar
new file mode 100644
index 000000000..7167cb8e9
--- /dev/null
+++ b/test/vcpe_tosca/local/csar/vbng.csar
Binary files differ
diff --git a/test/vcpe_tosca/local/csar/vbrgemu.csar b/test/vcpe_tosca/local/csar/vbrgemu.csar
new file mode 100644
index 000000000..ebe4b7ebc
--- /dev/null
+++ b/test/vcpe_tosca/local/csar/vbrgemu.csar
Binary files differ
diff --git a/test/vcpe_tosca/local/csar/vgmux.csar b/test/vcpe_tosca/local/csar/vgmux.csar
new file mode 100644
index 000000000..57bfdc150
--- /dev/null
+++ b/test/vcpe_tosca/local/csar/vgmux.csar
Binary files differ
diff --git a/test/vcpe_tosca/local/csar/vgw.csar b/test/vcpe_tosca/local/csar/vgw.csar
new file mode 100644
index 000000000..6c09798e6
--- /dev/null
+++ b/test/vcpe_tosca/local/csar/vgw.csar
Binary files differ
diff --git a/test/vcpe_tosca/local/scripts/install-alpine.sh b/test/vcpe_tosca/local/scripts/install-alpine.sh
new file mode 100755
index 000000000..09c17ce67
--- /dev/null
+++ b/test/vcpe_tosca/local/scripts/install-alpine.sh
@@ -0,0 +1,76 @@
+#!/bin/bash
+
+#*******************************************************************************
+# Copyright 2017 Huawei Technologies Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#*******************************************************************************
+
+CLI_LATEST_BINARY="https://nexus.onap.org/service/local/artifact/maven/redirect?r=releases&g=org.onap.cli&a=cli-zip&e=zip&v=LATEST"
+CLI_INSTALL_DIR=/opt/oclip
+CLI_ZIP=CLI.zip
+CLI_BIN=/usr/bin/onap
+CLI_ZIP_DIR=/opt
+export OPEN_CLI_HOME=$CLI_INSTALL_DIR
+
+#create install dir
+if [ -d $CLI_INSTALL_DIR ]
+then
+ mv $CLI_INSTALL_DIR /opt/cli_`date +"%m-%d-%y-%H-%M-%S"`
+ rm $CLI_BIN
+fi
+
+mkdir -p $CLI_INSTALL_DIR
+cd $CLI_INSTALL_DIR
+
+#Download and unzip the ONAP CLI
+apk update
+apk add wget unzip openjdk8-jre
+
+if [ ! -f $CLI_ZIP_DIR/$CLI_ZIP ]
+ then
+ wget -O $CLI_ZIP $CLI_LATEST_BINARY
+ else
+ mv $CLI_ZIP_DIR/$CLI_ZIP .
+ fi
+
+unzip $CLI_ZIP
+if [ ! -d ./data ]; then mkdir ./data; fi
+if [ ! -d ./open-cli-schema ]; then mkdir ./open-cli-schema; fi
+chmod +x ./bin/oclip.sh
+
+#Set OPEN_CLI_HOME for the oclip runtime (PATH symlinks are created below)
+export OPEN_CLI_HOME=/opt/oclip
+
+cd $OPEN_CLI_HOME
+
+if [ ! -d ./data ]; then mkdir ./data; fi
+if [ ! -d ./open-cli-schema ]; then mkdir ./open-cli-schema; fi
+
+chmod +x ./bin/oclip.sh
+chmod +x ./bin/oclip-rcli.sh
+chmod +x ./bin/oclip-grpc-server.sh
+
+#Make oclip available in path
+ln -sf $OPEN_CLI_HOME/bin/oclip.sh /usr/bin/oclip
+ln -sf $OPEN_CLI_HOME/bin/oclip.sh /usr/bin/onap
+ln -sf $OPEN_CLI_HOME/bin/oclip-rcli.sh /usr/bin/roclip
+ln -sf $OPEN_CLI_HOME/bin/oclip-grpc-server.sh /usr/bin/oclip-grpc
+
+#Print the version
+oclip -v
+
+onap -v
+
+cd -
+
diff --git a/test/vcpe_tosca/local/vcpe_tosca_test.py b/test/vcpe_tosca/local/vcpe_tosca_test.py
new file mode 100644
index 000000000..4b024c6e2
--- /dev/null
+++ b/test/vcpe_tosca/local/vcpe_tosca_test.py
@@ -0,0 +1,651 @@
+#!/usr/bin/python
+
+# Prerequisites for the machine that runs this test:
+# - Fill in the required parameters in config/vcpe_config.json
+# - Install python-pip (apt install python-pip)
+# - Install the "requests" Python package
+# - Install the ONAP CLI
+# - Ensure connectivity to ONAP; OpenStack is assumed to be already configured
+# - Adjust the configuration parameters to match the lab environment
+# - Put the VNF and NS CSAR files under the csar folder
+
+import json
+import os
+import uuid
+import requests
+import unittest
+import time
+import urllib3
+urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
+
+class VcpeToscaTest(unittest.TestCase):
+ def setUp(self):
+ file_path = os.path.dirname(os.path.abspath(__file__))
+ with open(file_path + "/config/vcpe_config.json", encoding='utf-8') as self.config_file:
+ self.config_params = self.get_parameters()
+ self.aai_header = {
+ "Accept": "application/json",
+ "Content-Type": "application/json",
+ 'X-TransactionId': "9999",
+ 'Real-Time': "true",
+ 'X-FromAppId': "jimmy-postman",
+ "Authorization": "Basic QUFJOkFBSQ=="
+ }
+ self.base_url = self.config_params["msb_url"]
+ print("Set cli command environment--beginning")
+ os.environ["OPEN_CLI_PRODUCT_IN_USE"] = self.config_params["open_cli_product"]
+ os.environ["OPEN_CLI_HOME"] = self.config_params["open_cli_home"]
+ print("Set cli command environment--successful")
+
+ self.complex_version = None
+ self.cloud_version = None
+ self.service_type_version = None
+ self.customer_version = None
+ self.tenant_id = None
+ self.subscription_version = None
+ self.esr_vnfm_version = self.esr_vnfm_id = None
+ self.ns_instance_id = None
+ self.ns_package_id = None
+ self.vnf_package_list = []
+
+ print("Create cloud complex--beginning")
+ self.create_complex()
+ print("Create cloud complex--successful")
+
+ print("Register all clouds--beginning")
+ self.register_all_clouds()
+ print("Register all clouds--successful")
+ time.sleep(30)
+
+ print("Create vCPE service")
+ self.create_service_type()
+
+ print("Create customer")
+ self.create_customer()
+
+ print("Get tenant id")
+ self.get_tenant_id()
+
+ print("Add customer and subscription")
+ self.add_customer_subscription()
+
+ print("Register vnfm")
+ self.register_vnfm()
+
+ def tearDown(self):
+ if self.ns_instance_id:
+ self.terminateNs()
+ self.deleteNs()
+
+ if self.ns_package_id:
+ self.delete_ns_package()
+
+ if self.vnf_package_list:
+ self.delete_vnf_package()
+
+ if self.esr_vnfm_id and self.esr_vnfm_version:
+ self.unregister_vnfm()
+
+ if self.subscription_version:
+ print("Remove service subscription")
+ self.remove_customer_subscription()
+
+ if self.customer_version:
+ print("Remove customer %s" % self.config_params["customer_name"])
+ self.delete_customer()
+
+ if self.service_type_version:
+ print("Remove service type %s" % self.config_params["service_name"])
+ self.delete_service_type()
+
+ if self.cloud_version:
+ print("Remove cloud %s" % self.config_params["cloud-owner"])
+ self.delete_cloud_helper()
+
+ if self.complex_version:
+ self.get_complex_resource_version()
+ print("Remove complex %s" % self.config_params["complex_name"])
+ self.delete_complex()
+
+ def get_parameters(self):
+ parameters = json.load(self.config_file)
+ return parameters
+
+ @staticmethod
+ def get_out_helper_2(in_string):
+ out_list = ((in_string.replace('|', '')).replace('+', '')).split()
+ return out_list
+
+ def create_complex(self):
+ complex_create_string = "oclip complex-create -j {} -r {} -x {} -y {} -lt {} -l {} -i {} -lo {} \
+ -S {} -la {} -g {} -w {} -z {} -k {} -o {} -q {} -m {} -u {} -p {}".format(
+ self.config_params["street2"], self.config_params["physical_location"],
+ self.config_params["complex_name"], self.config_params["data_center_code"],
+ self.config_params["latitude"], self.config_params["region"],
+ self.config_params["street1"], self.config_params["longitude"],
+ self.config_params["state"], self.config_params["lata"],
+ self.config_params["city"], self.config_params["postal-code"],
+ self.config_params["complex_name"], self.config_params["country"],
+ self.config_params["elevation"], self.config_params["identity_url"],
+ self.config_params["aai_url"], self.config_params["aai_username"],
+ self.config_params["aai_password"])
+ os.system(complex_create_string)
+
+ self.get_complex_resource_version()
+
+ def get_complex_resource_version(self):
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+ complex_url = self.base_url + "/aai/v11/cloud-infrastructure/complexes"
+ complex_list_response = requests.get(url=complex_url, headers=self.aai_header, verify=False)
+ if complex_list_response.status_code == 200:
+ for complex in (complex_list_response.json())["complex"]:
+ if complex['physical-location-id'] == self.config_params["complex_name"]:
+ self.complex_version = complex['resource-version']
+ print("Complex %s resource-version is %s."
+ % (self.config_params["complex_name"], self.complex_version))
+
+ def delete_complex(self):
+ complex_delete_string = 'oclip complex-delete -x {} -y {} -m {} -u {} -p {}'.format(
+ self.config_params["complex_name"], self.complex_version, self.config_params["aai_url"],
+ self.config_params["aai_username"], self.config_params["aai_password"])
+ os.system(complex_delete_string)
+ print("Delete complex--successful")
+ self.complex_version = None
+
+ def register_cloud_helper(self, cloud_region, values):
+ print("Create Cloud--beginning")
+ cloud_create_string = 'oclip cloud-create -e {} -b {} ' \
+ '-x {} -y {} -j {} -w {} -l {} -url {} -n {} -q {} -r {} -Q {} -i {} -g {} \
+ -z {} -k {} -c {} -m {} -u {} -p {}' \
+ .format(values.get("esr-system-info-id"), values.get("user-name"),
+ self.config_params["cloud-owner"],
+ cloud_region, values.get("password"),
+ values.get("cloud-region-version"), values.get("default-tenant"),
+ values.get("service-url"), self.config_params["complex_name"],
+ values.get("cloud-type"), self.config_params["owner-defined-type"],
+ values.get("system-type"), values.get("identity-url"),
+ self.config_params["cloud-zone"], values.get("ssl-insecure"),
+ values.get("system-status"), values.get("cloud-domain"),
+ self.config_params["aai_url"],
+ self.config_params["aai_username"],
+ self.config_params["aai_password"])
+
+ os.system(cloud_create_string)
+ print("Create Cloud--successful")
+
+ print("Associate Cloud with complex--beginning")
+ complex_associate_string = "oclip complex-associate -x {} -y {} -z {} -m {} -u {} -p {}".format(
+ self.config_params["complex_name"],
+ cloud_region, self.config_params["cloud-owner"], self.config_params["aai_url"],
+ self.config_params["aai_username"],
+ self.config_params["aai_password"])
+ os.system(complex_associate_string)
+ print("Associate Cloud with complex--successful")
+
+ print("Register Cloud with Multicloud--beginning")
+ multicloud_register_string = "oclip multicloud-register-cloud -y {} -x {} -m {}".format(
+ self.config_params["cloud-owner"], cloud_region, self.config_params["multicloud_url"])
+ os.system(multicloud_register_string)
+ print("Register Cloud with Multicloud--successful")
+
+ cloud_url = self.base_url + "/aai/v11/cloud-infrastructure/cloud-regions"
+ cloud_list_response = requests.get(url=cloud_url, headers=self.aai_header, verify=False)
+ if cloud_list_response.status_code == 200:
+ for cloud in (cloud_list_response.json())["cloud-region"]:
+ if cloud['cloud-owner'] == self.config_params["cloud-owner"]:
+ self.cloud_version = cloud['resource-version']
+ print("Cloud %s resource-version is %s."
+ % (self.config_params["cloud-owner"], self.cloud_version))
+
+ def register_all_clouds(self):
+ cloud_dictionary = self.config_params["cloud_region_data"]
+ for cloud_region, cloud_region_values in cloud_dictionary.items():
+ self.register_cloud_helper(cloud_region, cloud_region_values)
+
+ def delete_cloud_helper(self):
+ print("Multicloud-cloud-delete--beginning")
+ cloud_region = list(self.config_params["cloud_region_data"].keys())[0]
+ header = {'content-type': 'application/json', 'accept': 'application/json'}
+ multicloud_url = self.base_url + "/api/multicloud-titaniumcloud/v1/{}/{}" \
+ .format(self.config_params["cloud-owner"], cloud_region)
+ requests.delete(url=multicloud_url, headers=header, verify=False)
+ cloud_url = self.base_url + "/aai/v11/cloud-infrastructure/cloud-regions"
+ n = 60
+ while n > 0:
+ cloud_flag = False
+ cloud_list_response = requests.get(url=cloud_url, headers=self.aai_header, verify=False)
+ n = n - 1
+ if cloud_list_response.status_code == 200:
+ for cloud in (cloud_list_response.json()).get("cloud-region"):
+ if cloud['cloud-owner'] == self.config_params["cloud-owner"]:
+ cloud_flag = True
+ break
+ if not cloud_flag:
+ break
+ else:
+ time.sleep(1)
+ print("Multicloud-cloud-delete----successful")
+ self.cloud_version = None
+
+ def create_service_type(self):
+ create_string = "oclip service-type-create -x {} -y {} -m {} -u {} -p {}".format(
+ self.config_params["service_name"], self.config_params["service_name"], self.config_params["aai_url"],
+ self.config_params["aai_username"], self.config_params["aai_password"])
+ os.system(create_string)
+
+ service_tpe_list_url = self.base_url + "/aai/v11/service-design-and-creation/services"
+ service_type_list_response = requests.get(url=service_tpe_list_url, headers=self.aai_header, verify=False)
+ if service_type_list_response.status_code == 200:
+ for service in (service_type_list_response.json())["service"]:
+ if service["service-id"] == self.config_params["service_name"]:
+ self.service_type_version = service['resource-version']
+ print("Service type %s resource-version is %s."
+ % (self.config_params["service_name"], self.service_type_version))
+
+ def delete_service_type(self):
+ print("delete service type--beginning")
+ service_delete_string = 'oclip service-type-delete -x {} -y {} -m {} -u {} -p {}'.format(
+ self.config_params["service_name"], self.service_type_version, self.config_params["aai_url"],
+ self.config_params["aai_username"], self.config_params["aai_password"])
+ os.system(service_delete_string)
+ print("delete service type--successful")
+ self.service_type_version = None
+
+ def create_customer(self):
+ create_string = "oclip customer-create -x {} -y {} -m {} -u {} -p {}".format(
+ self.config_params["customer_name"],
+ self.config_params["subscriber_name"],
+ self.config_params["aai_url"],
+ self.config_params["aai_username"],
+ self.config_params["aai_password"])
+ os.system(create_string)
+
+ customer_list_url = self.base_url + "/aai/v11/business/customers"
+ customer_list_response = requests.get(url=customer_list_url, headers=self.aai_header, verify=False)
+ if customer_list_response.status_code == 200:
+ for cutsomer in (customer_list_response.json())["customer"]:
+ if cutsomer['global-customer-id'] == self.config_params["customer_name"]:
+ self.customer_version = cutsomer['resource-version']
+ print("Customer %s resource-version is %s."
+ % (self.config_params["customer_name"], self.customer_version))
+
+ def delete_customer(self):
+ print("delete customer--beginning")
+ customer_delete_string = 'oclip customer-delete -x {} -y {} -m {} -u {} -p {}'.format(
+ self.config_params["customer_name"], self.customer_version, self.config_params["aai_url"],
+ self.config_params["aai_username"], self.config_params["aai_password"])
+ os.system(customer_delete_string)
+ print("delete customer--successful")
+ self.customer_version = None
+
+ def get_tenant_id(self):
+ print("Get tenant id--beginning")
+ cloud_dictionary = self.config_params["cloud_region_data"]
+ cloud_region = list(self.config_params["cloud_region_data"].keys())[0]
+
+ tenant_list_url = self.base_url + "/aai/v11/cloud-infrastructure/cloud-regions/cloud-region/{}/{}/tenants" \
+ .format(self.config_params["cloud-owner"], cloud_region)
+
+ for cloud_region, cloud_region_values in cloud_dictionary.items():
+ tenant_name = cloud_region_values.get("default-tenant")
+ tenant_list_response = requests.get(url=tenant_list_url, headers=self.aai_header, verify=False)
+ if tenant_list_response.status_code == 200:
+ for tenant in (tenant_list_response.json())["tenant"]:
+ if tenant['tenant-name'] == tenant_name:
+ self.tenant_id = tenant['tenant-id']
+ print("Tenant id is %s ." % self.tenant_id)
+
+ def add_customer_subscription(self):
+ subscription_check = 0
+ for cloud_region, cloud_region_values in (self.config_params["cloud_region_data"]).items():
+ if subscription_check == 0:
+ subscription_string = "oclip subscription-create -x {} -c {} -z {} -e {} " \
+ "-y {} -r {} -m {} -u {} -p {}" \
+ .format(self.config_params["customer_name"],
+ self.tenant_id,
+ self.config_params["cloud-owner"],
+ self.config_params["service_name"],
+ cloud_region_values.get("default-tenant"),
+ cloud_region, self.config_params["aai_url"],
+ self.config_params["aai_username"],
+ self.config_params["aai_password"])
+ else:
+ subscription_string = "oclip subscription-cloud-add -x {} -c {} " \
+ "-z {} -e {} -y {} -r {} -m {} -u {} -p {}" \
+ .format(self.config_params["customer_name"], self.tenant_id,
+ self.config_params["cloud-owner"], self.config_params["service_name"],
+ cloud_region_values.get("default-tenant"), cloud_region,
+ self.config_params["aai_url"],
+ self.config_params["aai_username"],
+ self.config_params["aai_password"])
+ os.system(subscription_string)
+ subscription_check += 1
+
+ subscription_url = self.base_url + "/aai/v11/business/customers/customer/{}" \
+ "/service-subscriptions/service-subscription/{}" \
+ .format(self.config_params["customer_name"], self.config_params["service_name"])
+ resp = requests.get(url=subscription_url, headers=self.aai_header, verify=False)
+ if resp.status_code == 200:
+ self.subscription_version = resp.json()['resource-version']
+ print("Subscription resource-version is %s." % self.subscription_version)
+
+ def remove_customer_subscription(self):
+ print("Remove subscription--beginning")
+ subscription_delete_string = 'oclip subscription-delete -x {} -y {} -g {} -m {} -u {} -p {}'.format(
+ self.config_params["customer_name"], self.config_params["service_name"], self.subscription_version,
+ self.config_params["aai_url"],
+ self.config_params["aai_username"], self.config_params["aai_password"])
+ os.system(subscription_delete_string)
+ print("Delete subscription--successful")
+
+ def register_vnfm_helper(self, vnfm_key, values):
+ print("Create vnfm--beginning")
+ self.esr_vnfm_id = str(uuid.uuid4())
+ vnfm_create_string = 'oclip vnfm-create -b {} -c {} -e {} -v {} -g {} -x {} ' \
+ '-y {} -i {} -j {} -q {} -m {} -u {} -p {}' \
+ .format(vnfm_key, values.get("type"), values.get("vendor"),
+ values.get("version"), values.get("url"), values.get("vim-id"),
+ self.esr_vnfm_id, values.get("user-name"), values.get("user-password"),
+ values.get("vnfm-version"), self.config_params["aai_url"],
+ self.config_params["aai_username"], self.config_params["aai_password"])
+
+ os.system(vnfm_create_string)
+ print("Create vnfm--successful")
+
+ vnfm_url = self.base_url + "/aai/v11/external-system/esr-vnfm-list"
+ resp = requests.get(url=vnfm_url, headers=self.aai_header, verify=False)
+ if resp.status_code == 200:
+ for vnfm in (resp.json())["esr-vnfm"]:
+ if vnfm['vnfm-id'] == self.esr_vnfm_id:
+ self.esr_vnfm_version = vnfm['resource-version']
+ print("Vnfm %s resource-version is %s."
+ % (self.esr_vnfm_id, self.esr_vnfm_version))
+
+ def register_vnfm(self):
+ vnfm_params = self.config_params["vnfm_params"]
+ for vnfm_key, vnfm_values in vnfm_params.items():
+ self.register_vnfm_helper(vnfm_key, vnfm_values)
+
+ def unregister_vnfm(self):
+ print("Delete vnfm %s" % self.esr_vnfm_id)
+ print("Delete vnfm--beginning")
+ vnfm_delete_string = 'oclip vnfm-delete -x {} -y {} -m {} -u {} -p {}'.format(
+ self.esr_vnfm_id, self.esr_vnfm_version, self.config_params["aai_url"],
+ self.config_params["aai_username"], self.config_params["aai_password"])
+ os.system(vnfm_delete_string)
+ self.esr_vnfm_version = self.esr_vnfm_id = None
+ print("Delete vnfm--successful")
+
+ def create_ns(self):
+ ns = self.config_params["ns"]
+ data = {
+ "context": {
+ "globalCustomerId": self.config_params["customer_name"],
+ "serviceType": self.config_params["service_name"]
+ },
+ "csarId": self.ns_package_id,
+ "nsName": ns.get("name"),
+ "description": "description"
+ }
+ ns_header = {'content-type': 'application/json', 'accept': 'application/json'}
+ ns_url = self.base_url + "/api/nslcm/v1/ns"
+ ns_resp = requests.post(ns_url, data=json.dumps(data), headers=ns_header, verify=False)
+ if 201 == ns_resp.status_code:
+ ns_instance_id = ns_resp.json().get("nsInstanceId")
+ print("create ns successfully, the ns instance id is %s" % ns_instance_id)
+ return ns_instance_id
+ else:
+ raise Exception("Create ns failed.")
+
+ def instantiate_ns(self):
+ print("Instantiate ns beginning")
+ constraints = [
+ {
+ "vnfProfileId": x,
+ "locationConstraints": {
+ "vimId": self.config_params["location"]
+ }
+ } for x in self.vnfdId_list]
+ data = {
+ "additionalParamForNs": {
+ "sdnControllerId": self.config_params["sdc-controller-id"]
+ },
+ "locationConstraints": constraints
+ }
+
+ header = {'content-type': 'application/json', 'accept': 'application/json'}
+ instance_url = self.base_url + "/api/nslcm/v1/ns/" + self.ns_instance_id + "/instantiate"
+ instance_resp = requests.post(instance_url, data=json.dumps(data), headers=header, verify=False)
+ if 200 == instance_resp.status_code:
+ ns_instance_jod_id = instance_resp.json().get("jobId")
+ print("Instantiate ns successfully, the job id is %s" % ns_instance_jod_id)
+ return ns_instance_jod_id
+ else:
+ raise Exception("Instantiate ns failed.")
+
+ def create_ns_package(self):
+ print("Create ns package is beginning")
+ ns = self.config_params["ns"]
+ ns_url = self.base_url + "/api/nsd/v1/ns_descriptors"
+ ns_headers = {'content-type': 'application/json', 'accept': 'application/json'}
+ ns_data = {'userDefinedData': {ns.get("key"): ns.get("value")}}
+ ns_package_reps = requests.post(ns_url, data=json.dumps(ns_data), headers=ns_headers, verify=False)
+ if 201 == ns_package_reps.status_code:
+ print("Create ns package successful, the ns package id is %s"
+ % (ns_package_reps.json()["id"]))
+ return ns_package_reps.json()["id"]
+ else:
+ raise Exception("Create ns package failed.")
+
+ def delete_ns_package(self):
+ print("Delete ns package %s is beginning" % self.ns_package_id)
+ vnf_url = self.base_url + "/api/nsd/v1/ns_descriptors/%s" % self.ns_package_id
+ resp = requests.delete(url=vnf_url, verify=False)
+ if 204 == resp.status_code:
+ print("Delete ns package %s successfully." % self.ns_package_id)
+ self.ns_package_id = None
+ else:
+ print("Delete ns package %s failed." % self.ns_package_id)
+
+ def create_upload_vnf_package(self):
+ print("Create vnf package is beginning")
+ vnfs = self.config_params["vnfs"]
+ vnf_url = self.base_url + "/api/vnfpkgm/v1/vnf_packages"
+ header = {'content-type': 'application/json', 'accept': 'application/json'}
+ for vnf_values in vnfs.values():
+ vnf_data = {'userDefinedData': {vnf_values.get("key"): vnf_values.get("value")}}
+ vnf_package_reps = requests.post(vnf_url, data=json.dumps(vnf_data), headers=header, verify=False)
+ if 201 == vnf_package_reps.status_code:
+ print("Create vnf package successful, the vnf package id is %s"
+ % (vnf_package_reps.json()["id"]))
+ package_id = vnf_package_reps.json()["id"]
+ self.vnf_package_list.append(package_id)
+ vnf_upload_url = '{}/api/vnfpkgm/v1/vnf_packages/{}/package_content' \
+ .format(self.config_params["vfc-url"], package_id)
+ file_path = os.path.dirname(os.path.abspath(__file__))
+ csar_file = file_path + "/" + vnf_values.get("path")
+ with open(csar_file, 'rb') as vnf_file:
+ for i in range(10):
+ resp = requests.put(vnf_upload_url, files={'file': vnf_file}, verify=False)
+ if 202 == resp.status_code:
+ break
+ if 500 == resp.status_code:
+ raise Exception("Upload vnf package failed. %s" % resp.json())
+ else:
+ time.sleep(i)
+ else:
+ print("Create vnf package failed.")
+
+ def delete_vnf_package(self):
+ print("Delete vnf package is beginning")
+ for vnf_package_id in self.vnf_package_list:
+ vnf_url = self.base_url + "/api/vnfpkgm/v1/vnf_packages/%s" % vnf_package_id
+ resp = requests.delete(url=vnf_url, verify=False)
+ if 204 == resp.status_code:
+ print("Delete vnf package %s successfully." % vnf_package_id)
+ else:
+ print("Delete vnf package %s failed." % vnf_package_id)
+ self.vnf_package_list = []
+
+ def upload_ns_package(self):
+ ns = self.config_params["ns"]
+ ns_upload_url = '{}/api/nsd/v1/ns_descriptors/{}/nsd_content'.format(self.config_params["vfc-url"],
+ self.ns_package_id)
+ file_path = os.path.dirname(os.path.abspath(__file__))
+ ns_file_path = file_path + "/" + ns["path"]
+ with open(ns_file_path, 'rb') as ns_file:
+ for i in range(10):
+ resp = requests.put(ns_upload_url, files={'file': ns_file}, verify=False)
+ if 204 == resp.status_code:
+ break
+ if 500 == resp.status_code:
+ raise Exception("Upload ns package failed.")
+ else:
+ time.sleep(i)
+
+ def get_vnf_package(self):
+ vnfdid_list = []
+ for vnf_package_id in self.vnf_package_list:
+ n = 60
+ while n > 0:
+ vnf_package_url = self.base_url + '/api/vnfpkgm/v1/vnf_packages/%s' % vnf_package_id
+ vnf_resp = requests.get(vnf_package_url, verify=False)
+ n = n - 1
+ if 200 == vnf_resp.status_code:
+ vnfdId = vnf_resp.json().get("vnfdId")
+ if vnfdId is None:
+ time.sleep(1)
+ else:
+ print("vnfdId is %s" % vnfdId)
+ vnfdid_list.append(vnfdId)
+ break
+ return vnfdid_list
+
+ def getVnf(self, vnfs):
+ vnf_list = []
+ for vnf in vnfs:
+ if 'relationship-list' in vnf:
+ for relation in vnf["relationship-list"]["relationship"]:
+ if "service-instance" == relation["related-to"]:
+ if self.ns_instance_id in relation["related-link"]:
+ vnf_list.append(vnf)
+ return vnf_list
+
+ @staticmethod
+ def findVserver(vnf_list):
+ vserver_list = []
+ for vnf in vnf_list:
+ if 'relationship-list' in vnf:
+ for relation in vnf["relationship-list"]["relationship"]:
+ if "vserver" == relation["related-to"]:
+ for relationData in relation["relationship-data"]:
+ if "vserver.vserver-id" == relationData["relationship-key"]:
+ vserver_list.append(relationData["relationship-value"])
+ return vserver_list
+
+ def waitProcessFinished(self, job_id, action):
+ print("Wait for the %s ns finished." % action)
+ job_url = self.base_url + "/api/nslcm/v1/jobs/%s" % job_id
+ progress = 0
+ n = 6000
+ while n > 0:
+ job_resp = requests.get(url=job_url, verify=False)
+ n = n - 1
+ if 200 == job_resp.status_code:
+ if "responseDescriptor" in job_resp.json():
+ progress_rep = (job_resp.json())["responseDescriptor"]["progress"]
+ if 100 != progress_rep:
+ if 255 == progress_rep:
+ print("Ns %s %s failed." % (self.ns_instance_id, action))
+ raise Exception("%s ns failed." % action)
+ elif progress_rep != progress:
+ progress = progress_rep
+ print("Ns %s %s process is %s." % (self.ns_instance_id, action, progress))
+ time.sleep(0.2)
+ else:
+ print("Ns %s %s process is %s." % (self.ns_instance_id, action, progress_rep))
+ print("Ns %s %s successfully." % (self.ns_instance_id, action))
+ break
+
+ def terminateNs(self):
+ print("Terminate ns--beginning")
+ ns_url = self.base_url + "/api/nslcm/v1/ns/%s" % self.ns_instance_id
+ d = {
+ "gracefulTerminationTimeout": 600,
+ "terminationType": "FORCEFUL"
+ }
+ try:
+ res = requests.post(url=ns_url + "/terminate", data=d, verify=False)
+ if 202 == res.status_code:
+ terminate_ns_job_id = res.json()["jobId"]
+ print("Terminate job is %s" % terminate_ns_job_id)
+ else:
+ raise Exception("Instantiate ns failed.")
+ self.waitProcessFinished(terminate_ns_job_id, "terminate")
+ except Exception as e:
+ print(e.args[0])
+
+ def deleteNs(self):
+ print("Delete ns %s --beginning" % self.ns_instance_id)
+ ns_url = self.base_url + "/api/nslcm/v1/ns/%s" % self.ns_instance_id
+ res = requests.delete(ns_url, verify=False)
+ if 204 == res.status_code:
+ print("Ns %s delete successfully." % self.ns_instance_id)
+ self.ns_instance_id = None
+
+ def testNs(self):
+ print("Use csar file is uploaded by local")
+ try:
+ self.create_upload_vnf_package()
+ self.ns_package_id = self.create_ns_package()
+ print("Get vnfdId list.")
+ self.vnfdId_list = self.get_vnf_package()
+ if len(self.vnfdId_list) < 5:
+ raise Exception("Upload vnf package failed. "
+ "Please check vnf package(b1bb0ce7-1111-4fa7-95ed-4840d70a1177, "
+ "b1bb0ce7-2222-4fa7-95ed-4840d70a1177, "
+ "b1bb0ce7-3333-4fa7-95ed-4840d70a1177, "
+ "b1bb0ce7-4444-4fa7-95ed-4840d70a1177, "
+ "b1bb0ce7-5555-4fa7-95ed-4840d70a1177) "
+ "and delete them and then upload again.")
+ print("Upload ns package from csar beginning")
+ self.upload_ns_package()
+ print("Upload ns package from csar successfully")
+
+ print("Create ns beginning")
+
+ self.ns_instance_id = self.create_ns()
+ self.assertIsNotNone(self.ns_instance_id)
+ self.ns_instance_jod_id = self.instantiate_ns()
+ print("NS %s instantiate job is %s" % (self.ns_instance_id, self.ns_instance_jod_id))
+ self.assertIsNotNone(self.ns_instance_jod_id)
+ self.waitProcessFinished(self.ns_instance_jod_id, "instantiate")
+ except Exception as e:
+ print(e.args[0])
+
+ vnf_aai_url = self.base_url + "/aai/v11/network/generic-vnfs"
+ vnf_resp = requests.get(url=vnf_aai_url, headers=self.aai_header, verify=False)
+ self.assertEqual(200, vnf_resp.status_code)
+
+ vnfs = vnf_resp.json()["generic-vnf"]
+ vnf_list = self.getVnf(vnfs)
+ self.assertEqual(5, len(vnf_list))
+ print("There are %s vnfs are created." % len(vnf_list))
+ for vnf in vnf_list:
+ print("The vnf %s are created successfully." % vnf.get("vnf-id"))
+
+ vserver_list = self.findVserver(vnf_list)
+ print("The vserver %s is created successfully." % len(vserver_list))
+ self.assertEqual(8, len(vserver_list))
+
+ cloud_region_id = list(self.config_params["cloud_region_data"].keys())[0]
+
+ for vserver_id in vserver_list:
+ vserver_aai_url = self.base_url + "/aai/v11/cloud-infrastructure/cloud-regions/cloud-region" \
+ "/{}/{}/tenants/tenant/{}/vservers/vserver/{}?depth=all" \
+ .format(self.config_params["cloud-owner"], cloud_region_id, self.tenant_id, vserver_id)
+
+ vserver_resp = requests.get(url=vserver_aai_url, headers=self.aai_header, verify=False)
+ self.assertEqual(200, vserver_resp.status_code)
+ print("The vserver %s is created successfully." % vserver_id)
diff --git a/test/xtesting/robot/Dockerfile b/test/xtesting/robot/Dockerfile
deleted file mode 100644
index 5d8d26ea2..000000000
--- a/test/xtesting/robot/Dockerfile
+++ /dev/null
@@ -1,26 +0,0 @@
-FROM opnfv/xtesting
-
-ARG OPENSTACK_TAG=stable/pike
-ARG OPNFV_TAG=master
-ARG ONAP_TAG=master
-
-ENV PYTHONPATH $PYTHONPATH:/src/testing-utils/eteutils
-
-COPY thirdparty-requirements.txt thirdparty-requirements.txt
-RUN apk --no-cache add --virtual .build-deps --update \
- python-dev build-base linux-headers libffi-dev \
- openssl-dev libjpeg-turbo-dev && \
- git clone --depth 1 https://git.onap.org/testsuite -b $ONAP_TAG /var/opt/OpenECOMP_ETE && \
- git clone --depth 1 https://git.onap.org/testsuite/properties -b $ONAP_TAG /share/config && \
- git clone --depth 1 https://git.onap.org/testsuite/python-testing-utils -b $ONAP_TAG /src/testing-utils && \
- pip install \
- -chttps://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt?h=$OPENSTACK_TAG \
- -chttps://git.opnfv.org/functest/plain/upper-constraints.txt?h=$OPNFV_TAG \
- -rthirdparty-requirements.txt \
- -e /src/testing-utils && \
- rm -r thirdparty-requirements.txt /src/testing-utils/.git /share/config/.git \
- /var/opt/OpenECOMP_ETE/.git && \
- apk del .build-deps
-
-COPY testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml
-CMD ["run_tests", "-t", "all"]
diff --git a/test/xtesting/robot/README.md b/test/xtesting/robot/README.md
deleted file mode 100644
index 0805ab1dd..000000000
--- a/test/xtesting/robot/README.md
+++ /dev/null
@@ -1,93 +0,0 @@
-# Xtesting-onap-robot
-Reuse of the Xtesting framework to onboard ONAP robot tests
-It consists in 3 files:
- * Dockerfile: create your dockerfile. For Beijing, it shall be generated manually. You can use a non official version [4]
- * testcases.yaml: the list of the testcases based on robotframework tags as defined in ONAp repo [3]
- * thirdparty-requirements.txt: dependencies needed by the Dockerfile
-
-## Configuration
-
-To launch Xtesting ONAP robot you need 2 files
- * env
- * onap.properties: list of ONAP endpoints (can be found on the robot VM). Depending from where you launch the tests,
-please check that the IP addresses are reachable.
-
-As Xtesting supports both Openstack and Kubernetes, the env files shall be set accordingly.
-
-env file
-```
-INSTALLER_TYPE=heat
-DEPLOY_SCENARIO=os-nosdn-nofeature-ha
-EXTERNAL_NETWORK=ext-network
-NODE_NAME=pod4-orange-heat1
-TEST_DB_URL=hhttp://testresults.opnfv.org/onap/api/v1/results
-BUILD_TAG=jenkins-functest-kolla-baremetal-daily-amsterdam-222
-```
-
-All the values of the env file are not mandatory.
-
-### INSTALLER_TYPE
-It indicates how you deploy your ONAP solution. The possible values are heat or oom.
-
-### DEPLOY_SCENARIO
-If you do not precise DEPLOY_SCENARIO, it will be set to os-nosdn-nofeature-nohai by default, which means
-Openstack / No Additional SDN controller / No Additional feature / no HA mode
-This parameter can be useful if you manage several infrastructure and want to filter the results.
-Other possible scenario:
- * k8-nosdn-nofeature-ha (Kubernetes with no add-ons)
- * os-odl-nofeature-ha (Openstack with Opendaylight SDN controller)
-
-### EXTERNAL_NETWORK (Openstack only)
-You must precise it if it is not the first network with router:external=True
-
-### KUBERNETES_PROVIDER (Kubernetes only)
-This parameter is set to local by default
-
-### KUBE_MASTER_URL (Kubernetes only)
-You must indicate your Kubernetes Master URL.
-
-### KUBE_MASTER_IP (Kubernetes only)
-You must indicate your Kubernetes Master IP.
-
-### NODE_NAME
-The NODE_NAME is the name of the infrastructure that you declared in the Test DB. If you do not want to report the
-results to the Test Database, you do not need to precise this parameter.
-
-### TEST_DB_URL
-This parameter corresponds to the Test Database FQDN.
-If you do not want to report the results to the Test Database, you do not need to precise this parameter.
-
-You can reference either your own local database or a public Database (You must be sure that your NODE_NAME has been declared on
-this database). If so, and if you precise the flag to report the results, the test results will be automatically pushed.
-
-### BUILD_TAG
-This parameter is used to retrieve the version (amsterdam in the example) for the publication in the test database.
-If you do not publish the results, you can omit it.
-It is based on an historical regex setup for OPNFV CI/CD chains.
-
-All the parameters are detailed in Functest user guide [1].
-
-## onap.properties
-
-This file includes all the ONAP end points. It is built at ONAP installation and can be found on the ONAP Robot VM.
-
-# Launch xtesting-onap-robot
-
-You can run the test with the following command:
-
-sudo docker run --env-file <your env> -v <your onap properties>:/share/config/integration_vm_properties.py colvert22/functest-onap:latest
-
-By default it will execute all the tests corresponding to the command bash -c 'run_tests -t all'
-
-If you want to execute only a subset of the tests you may precise the test cases using -t: bash -c 'run_tests -t robot_dcae'
-
-The possible test cases are indicated in the testcases.yaml and are based on robotframework tags.
-
-If you want to push the results to the database, you can use the -r option: bash -c 'run_tests -t all -r'
-
-# References
-
-* [1] Functest User Guide: http://docs.opnfv.org/en/latest/submodules/functest/docs/testing/user/userguide/index.html
-* [2] Xtesting page: https://wiki.opnfv.org/display/functest/Xtesting
-* [3] Onap robot repo: https://git.onap.org/testsuite/
-* [4] https://hub.docker.com/r/colvert22/xtesting-onap-robot/
diff --git a/test/xtesting/robot/testcases.yaml b/test/xtesting/robot/testcases.yaml
deleted file mode 100644
index a74ce0740..000000000
--- a/test/xtesting/robot/testcases.yaml
+++ /dev/null
@@ -1,94 +0,0 @@
----
-tiers:
- -
- name: onap
- order: 1
- ci_loop: '(daily)|(weekly)'
- description: >-
- Set of basic Functional tests to validate the ONAP installation.
- testcases:
- -
- case_name: core
- project_name: functest
- criteria: 100
- blocking: true
- description: >-
- This test case verifies the API of core ONAP components
- aai, dmap, portal, sdc, sdnc, so, robot
- run:
- name: 'robotframework'
- args:
- suites:
- - /var/opt/OpenECOMP_ETE/robot/testsuites/health-check.robot
- include:
- - core
- variablefile:
- - '/share/config/integration_robot_properties.py'
- - '/share/config/vm_properties.py'
- - '/share/config/integration_preload_parameters.py'
-
- -
- case_name: small
- project_name: functest
- criteria: 100
- blocking: false
- description: >-
- This test case verifies the API of the components
- aai, dmap, portal, sdc, sdnc, so, robot,
- AAF, APPC, CLI, COnsul, ESR, Log, MSB, Multicloud, NBI, VID
- run:
- name: 'robotframework'
- args:
- suites:
- - /var/opt/OpenECOMP_ETE/robot/testsuites/health-check.robot
- include:
- - core
- - small
- variablefile:
- - '/share/config/integration_robot_properties.py'
- - '/share/config/vm_properties.py'
- - '/share/config/integration_preload_parameters.py'
-
- -
- case_name: medium
- project_name: functest
- criteria: 100
- blocking: false
- description: >-
- This test case verifies the API of the components
- aai, dmap, portal, sdc, sdnc, so, robot,
- AAF, APPC, CLI, COnsul, ESR, Log, MSB, Multicloud, NBI, VID,
- CLAMP, DCAE, OOF, POLICY, UUI, SNIRO
- run:
- name: 'robotframework'
- args:
- suites:
- - /var/opt/OpenECOMP_ETE/robot/testsuites/health-check.robot
- include:
- - core
- - small
- - medium
- variablefile:
- - '/share/config/integration_robot_properties.py'
- - '/share/config/vm_properties.py'
- - '/share/config/integration_preload_parameters.py'
- -
- case_name: full
- project_name: functest
- criteria: 100
- blocking: false
- description: >-
- This test case verifies all the healthcheck Robot tests
- based on the default robot tests
- run:
- name: 'robotframework'
- args:
- suites:
- - /var/opt/OpenECOMP_ETE/robot/testsuites/health-check.robot
- include:
- - health
- variablefile:
- - '/share/config/integration_robot_properties.py'
- - '/share/config/vm_properties.py'
- - '/share/config/integration_preload_parameters.py'
-
diff --git a/test/xtesting/robot/thirdparty-requirements.txt b/test/xtesting/robot/thirdparty-requirements.txt
deleted file mode 100644
index f85db2d41..000000000
--- a/test/xtesting/robot/thirdparty-requirements.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-selenium<=3.0.0
-requests==2.11.1
-robotframework-selenium2library==1.8.0
-robotframework-databaselibrary==0.8.1
-robotframework-extendedselenium2library==0.9.1
-robotframework-requests==0.4.5
-robotframework-sshlibrary==2.1.2
-robotframework-sudslibrary==0.8
-robotframework-ftplibrary==1.3
-robotframework-rammbock==0.4.0.1
-deepdiff==2.5.1
-dnspython==1.15.0
-robotframework-httplibrary==0.4.2
-robotframework-archivelibrary==0.3.2
-PyYAML==3.12
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 000000000..cdc65959c
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,56 @@
+[tox]
+minversion = 3.2.0
+envlist = json,yaml,py,rst,md
+skipsdist = true
+requires = pip >= 8
+
+[testenv]
+basepython = python3.8
+allowlist_externals =
+ git
+ sh
+ /bin/sh
+ bash
+ /bin/bash
+deps =
+ coala-bears
+ nodeenv
+
+[testenv:json]
+commands_pre =
+ /bin/sh -c "git --no-pager diff HEAD HEAD^ --name-only '*.json' > /tmp/.coalist_json"
+commands =
+# '\ ' at the end of the command is needed for the case where the above command returns an empty list
+# (it adds an empty file parameter to the '--files' opt)
+ /bin/bash -c "coala --non-interactive --disable-caching --no-autoapply-warn json --files $(</tmp/.coalist_json) \ "
+
+[testenv:yaml]
+commands_pre =
+ /bin/sh -c "git --no-pager diff HEAD HEAD^ --name-only '*.yaml' '*.yml' > /tmp/.coalist_yaml"
+commands =
+# '\ ' at the end of the command is needed for the case where the above command returns an empty list
+# (it adds an empty file parameter to the '--files' opt)
+ /bin/bash -c "coala --non-interactive --disable-caching --no-autoapply-warn yaml --files $(</tmp/.coalist_yaml) \ "
+
+[testenv:py]
+commands_pre =
+ /bin/sh -c "git --no-pager diff HEAD HEAD^ --name-only '*.py' > /tmp/.coalist_py"
+commands =
+ /bin/bash -c "coala --non-interactive --disable-caching --no-autoapply-warn py --files $(</tmp/.coalist_py) \ "
+
+[testenv:rst]
+deps =
+ -r{toxinidir}/docs/requirements-docs.txt
+ -chttps://raw.githubusercontent.com/openstack/requirements/stable/yoga/upper-constraints.txt
+ -chttps://git.onap.org/doc/plain/etc/upper-constraints.onap.txt
+commands =
+ sphinx-build -W -n -b html docs docs/build/html
+ sphinx-build -W -n -b linkcheck docs docs/build/linkcheck
+
+[testenv:md]
+commands_pre =
+ nodeenv -p --verbose
+ npm install --global remark-cli
+ /bin/sh -c "git --no-pager diff HEAD HEAD^ --name-only '*.md' > /tmp/.coalist_md"
+commands =
+ /bin/bash -c "coala --non-interactive --disable-caching --no-autoapply-warn md --files $(</tmp/.coalist_md) \ "