aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorAndreas Geissler <andreas-geissler@telekom.de>2022-10-19 17:49:04 +0200
committerAndreas Geissler <andreas-geissler@telekom.de>2022-10-19 17:52:13 +0200
commitd273edeccd97122de1caec74243d702a652cacb9 (patch)
tree4414e752f4a6aebc6fa29ef79b36d9b5f5571858
parented3c78887493baa5855ac5256683b68ab7b74073 (diff)
[GITLAB] Initial content for gitlab project xtesting-onap
Issue-ID: INT-2150 Signed-off-by: Andreas Geissler <andreas-geissler@telekom.de> Change-Id: I6a429e2f661474fe54b13b6513eca64f13e99b50
-rw-r--r--.gitlab-ci.yml19
-rw-r--r--.gitmodules3
-rw-r--r--LICENSE201
-rw-r--r--README.md188
-rw-r--r--ansible.cfg15
-rw-r--r--doc/artifacts/versions/guilin.json1694
-rw-r--r--doc/artifacts/versions/honolulu.json1736
-rw-r--r--doc/artifacts/versions/istanbul.json1694
-rw-r--r--doc/artifacts/versions/test_guilin_MR1.json68
-rw-r--r--doc/artifacts/versions/test_honolulu.json62
-rw-r--r--doc/artifacts/versions/test_honolulu_MR_Candidate.json50
-rw-r--r--doc/artifacts/versions/test_master.json62
-rw-r--r--doc/generate_docker_version.py437
-rw-r--r--doc/generate_gating_index.py49
-rw-r--r--doc/generate_stability_graphs.py133
-rw-r--r--doc/generate_status.py457
-rw-r--r--doc/requirements.txt7
-rw-r--r--doc/stability_results.json54
-rw-r--r--doc/template/docker-version-tmpl.html409
-rw-r--r--doc/template/index-gating-tmpl.html63
-rw-r--r--doc/template/index-infra.html46
-rw-r--r--doc/template/index-stability-tmpl.html391
-rw-r--r--doc/template/index-tmpl.html203
-rw-r--r--doc/template/k8s.html103
-rw-r--r--gitlab-ci/base.yml768
-rw-r--r--inventory/group_vars/all.yml210
-rw-r--r--legal-tern.yaml7
-rw-r--r--onap-chaos-tests.yaml7
-rw-r--r--onap-stability.yaml7
-rw-r--r--requirements.yaml11
-rw-r--r--roles/legal-tern/tasks/main.yaml90
-rw-r--r--roles/legal-tern/templates/.netrc.j23
-rw-r--r--roles/onap-chaos-tests/tasks/cassandra.yaml1
-rw-r--r--roles/onap-chaos-tests/tasks/kafka.yaml1
-rw-r--r--roles/onap-chaos-tests/tasks/main.yaml22
-rw-r--r--roles/onap-chaos-tests/tasks/node-cpu-hog.yaml86
-rw-r--r--roles/onap-chaos-tests/tasks/node-drain.yaml105
-rw-r--r--roles/onap-chaos-tests/tasks/node-memory-hog.yaml86
-rw-r--r--roles/onap-chaos-tests/tasks/pod-delete-aai.yaml103
-rw-r--r--roles/onap-chaos-tests/tasks/pod-delete-sdc.yaml76
-rw-r--r--roles/onap-chaos-tests/tasks/pod-delete-sdnc.yaml76
-rw-r--r--roles/onap-chaos-tests/tasks/pod-delete-so.yaml76
-rw-r--r--roles/onap-chaos-tests/tasks/prepare.yaml38
-rw-r--r--roles/onap-chaos-tests/tasks/reporting.yaml1
-rw-r--r--roles/onap-chaos-tests/templates/node-cpu-hog-chaos.yaml.j230
-rw-r--r--roles/onap-chaos-tests/templates/node-cpu-hog-rbac.yaml.j249
-rw-r--r--roles/onap-chaos-tests/templates/node-drain-chaos.yaml.j228
-rw-r--r--roles/onap-chaos-tests/templates/node-drain-rbac.yaml.j253
-rw-r--r--roles/onap-chaos-tests/templates/node-memory-hog-chaos.yaml.j232
-rw-r--r--roles/onap-chaos-tests/templates/node-memory-hog-rbac.yaml.j249
-rw-r--r--roles/onap-chaos-tests/templates/pod-delete-aai-chaos.yaml.j235
-rw-r--r--roles/onap-chaos-tests/templates/pod-delete-aai-rbac.yaml.j257
-rw-r--r--roles/onap-chaos-tests/templates/pod-delete-rbac.yaml.j257
-rw-r--r--roles/onap-stability-tests/tasks/main.yaml61
-rw-r--r--roles/onap-stability-tests/templates/.netrc.j23
-rw-r--r--roles/xtesting-healthcheck-k8s-job/defaults/main.yaml47
-rw-r--r--roles/xtesting-healthcheck-k8s-job/tasks/main.yaml51
-rw-r--r--roles/xtesting-healthcheck-k8s/defaults/main.yaml9
-rw-r--r--roles/xtesting-healthcheck-k8s/tasks/main.yaml126
-rw-r--r--roles/xtesting-healthcheck-k8s/templates/env-os.j217
-rw-r--r--roles/xtesting-healthcheck/defaults/main.yaml130
-rw-r--r--roles/xtesting-healthcheck/tasks/launch.yaml50
-rw-r--r--roles/xtesting-healthcheck/tasks/main.yaml5
-rw-r--r--roles/xtesting-healthcheck/tasks/prepare.yaml52
-rw-r--r--roles/xtesting-jumphost/tasks/main.yaml101
-rw-r--r--roles/xtesting-onap-security/tasks/main.yaml88
-rw-r--r--roles/xtesting-onap-security/templates/env-os.j211
-rw-r--r--roles/xtesting-onap-vnf/tasks/launch.yaml75
-rw-r--r--roles/xtesting-onap-vnf/tasks/main.yaml5
-rw-r--r--roles/xtesting-onap-vnf/tasks/prepare.yaml57
-rw-r--r--roles/xtesting-onap-vnf/tasks/prepare_cnf_test.yaml86
-rw-r--r--roles/xtesting-onap-vnf/templates/basic_vm-service-istanbul.yaml.j22
-rw-r--r--roles/xtesting-onap-vnf/templates/basic_vm-service-jakarta.yaml.j240
-rw-r--r--roles/xtesting-onap-vnf/templates/basic_vm-service-master.yaml.j240
-rw-r--r--roles/xtesting-onap-vnf/templates/basic_vm_macro-service-istanbul.yaml.j22
-rw-r--r--roles/xtesting-onap-vnf/templates/basic_vm_macro-service-jakarta.yaml.j255
-rw-r--r--roles/xtesting-onap-vnf/templates/basic_vm_macro-service-master.yaml.j255
-rw-r--r--roles/xtesting-onap-vnf/templates/clearwater-ims-service.yaml.j255
-rw-r--r--roles/xtesting-onap-vnf/templates/env-os.j214
-rw-r--r--roles/xtesting-onap-vnf/templates/settings.py.j263
-rw-r--r--roles/xtesting-pages/tasks/main.yaml69
-rw-r--r--scripts/.netrc3
-rwxr-xr-xscripts/output_summary.sh161
-rwxr-xr-xscripts/output_summary_s3.sh162
-rwxr-xr-xscripts/push_results_to_lf.sh118
-rwxr-xr-xscripts/run_chaos_tests.sh116
-rwxr-xr-xscripts/run_stability_tests.sh99
-rwxr-xr-xscripts/run_tern.sh184
-rw-r--r--xtesting-healthcheck-k8s-job.yaml8
-rw-r--r--xtesting-healthcheck-k8s.yaml8
-rw-r--r--xtesting-healthcheck.yaml7
-rw-r--r--xtesting-jumphost.yaml5
-rw-r--r--xtesting-onap-security.yaml7
-rw-r--r--xtesting-onap-vnf.yaml9
-rw-r--r--xtesting-pages.yaml8
95 files changed, 12572 insertions, 0 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100644
index 0000000..05f1e44
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,19 @@
+---
+variables:
+ RUNNER_TAG: <SET ME>
+
+.syntax_checking_tags:
+ tags:
+ # - shared
+ # - docker
+ # - innovation
+ - $RUNNER_TAG
+
+.ansible_run_tags:
+ tags:
+ # - docker
+ # - sina
+ - $RUNNER_TAG
+
+include:
+ - local: "gitlab-ci/base.yml"
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..880c9d1
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "scripts/chained-ci-tools"]
+ path = scripts/chained-ci-tools
+ url = https://gitlab.com/Orange-OpenSource/lfn/ci_cd/chained-ci-tools.git
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..5b656e9
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2019 Orange-OpenSource / lfn / onap
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..18a869b
--- /dev/null
+++ b/README.md
@@ -0,0 +1,188 @@
+# xtesting-onap
+
+leverage xtesting-onap-robot and xtesting-onap-vnf in CI/CD chains
+
+https://hub.docker.com/r/morganrol/xtesting-onap-robot/
+https://hub.docker.com/r/morganrol/xtesting-onap-vnf/
+
+This project aims to automatically test ONAP. Its config source
+is shared config files among all OPNFV installers:
+- PDF - Pod Description File: describing the hardware level of the
+ infrastructure hosting the VIM
+
+## Input
+
+ - configuration files:
+ - mandatory:
+ - vars/pdf.yml: POD Description File
+ - vars/cluster.yml: information about ONAP cluster
+ - inventory/jumphost: the ansible inventory for the jumphost
+ - vars/kube-config: the kubernetes configuration file in order to have
+ credentials to connect
+ - clouds.yml: retrieved from the controller node used to create OpenStack
+ resources when needed and verify the creation of resources through
+ openstack commands. For xtesting, assuming that it is run from the
+ controller node, it is transparent. If not, copy the clouds.yml in the
+ docker under /root/.config/openstack/clouds.yml and reference the
+ cloud with the env variable OS_TEST_CLOUD
+ - optional:
+ - vars/vaulted_ssh_credentials.yml: Ciphered private/public pair of key
+ that allows to connect to jumphost
+ - Environment variables:
+ - mandatory:
+ - PRIVATE_TOKEN: to get the artifact
+ - artifacts_src: the url to get the artifacts
+ - OR artifacts_bin: b64_encoded zipped artifacts (tbd)
+ - ANSIBLE_VAULT_PASSWORD: the vault password needed by ciphered ansible
+ vars
+ - TEST_CLOUD
+ - role: name of the cloud as defined in the clouds.yaml
+ - value type: string
+ - default: "openlab-vnfs-ci"
+ - optional:
+ - RUNNER_TAG:
+ - override the default gitlab-runner tag (ta5_tnaplab)
+ - "old" lab runner tag: tnaplab2
+ - ANSIBLE_VERBOSE:
+ - role: verbose option for ansible
+ - values: "", "-vvv"
+ - default: ""
+ - POD:
+ - role: name of the pod when we'll insert healthcheck results
+ - value type: string
+ - default: empty
+ - DEPLOYMENT:
+ - role: name of the deployment for right tagging when we'll insert
+ healthcheck results
+ - value type: string
+ - default: "oom"
+ - INFRA_DEPLOYMENT:
+ - role: name of the infra deployment for right tagging when we'll
+ insert healthcheck results
+ - value type: string
+ - default: "rancher"
+ - DEPLOYMENT_TYPE:
+ - role: type of ONAP deployment done
+ - values: "core", "small", "medium", "full"
+ - default: "core" if nothing found in vars/cluster.yml
+ - TEST_RESULT_DB_URL:
+ - role: url of test db api
+ - value type: string
+ - default: "http://testresults.opnfv.org/onap/api/v1/results"
+ - DEPLOY_SCENARIO
+ - role: name of the deployment scenario
+ - value type: string
+ - default: "onap-ftw"
+ - ONAP_NAMESPACE
+ - role: the name of the namespace on kubernetes where ONAP is
+ installed
+ - value type: string
+ - default: "onap"
+ - ONAP_VERSION
+ - role: the ONAP version deployed
+ - value type: string
+ - values: "beijing", "2.0.0-ONAP", "master"
+ - default: "master"
+ - INGRESS:
+ - role: do we want to use ingress with ONAP or not
+ - value type: boolean
+ - default: False
+ - CNF_NAMESPACE
+ - role: the name of the namespace on kubernetes used for basic_cnf test
+ installed
+ - value type: string
+ - default: "k8s"
+ - tests_list
+ - role: Define the vnf tests list
+ - value type: string
+ - values: "all", "basic_vm, freeradius_nbi, ims"
+ - default: "all"
+ - DEBUG
+ - role: enable debug logs and the creation of xtesting.debug.log
+ - value type: boolean
+ - values: True, False (case insensitive)
+ - default: False
+ - HELM3_USE_SQL
+ - role: ask to use SQL backend for helm3
+ - value type: bool
+ - default: False
+ - RANDOM_WAIT
+ - role: do we wait a random time before executing tests involving SDC.
+ This is interesting in order to avoid race conditions.
+ - value type: bool
+ - default: False
+
+## Output
+
+none
+
+## Chaos testing
+
+ Chaos testing suite using [Litmus](https://litmuschaos.io/)
+Launching specified scenarios on a specified target, to test the system's resiliency
+
+### How to launch a scenario without CI
+
+<code>ansible-playbook onap-chaos-tests.yaml --tags "prepare,\<experiment name\>" --extra-vars "[extra argument]"
+</code>
+
+### Available scenarios
+
+- **Node drain**
+ Unschedule a node then evict all the pods on it
+ - extra vars : <code>compute_chaos=\<node name\></code> default: First node in the cluster
+ - tag : node_drain
+- **Node cpu hog**
+ Exhaust cpu resources on the node
+ - extra vars : <code>compute_chaos=\<node name\></code> default: First node in the cluster
+ - tag : node_cpu_hog
+- **Node memory hog**
+ Exhaust memory resources on the node
+ - extra vars : <code>compute_chaos=\<node name\></code> default: First node in the cluster
+ - tag : node_memory_hog
+
+## Add Testresults to ONAP Integration result page
+
+Name of test-pod: \<pipeline_name\>-\<pod_owner\>-\<DEPLOYMENT\>
+
+e.g. onap-daily-dt-oom-istanbul-TNAP-oom
+
+Result page: https://logs.onap.org/onap-integration
+Instructions: https://wiki.onap.org/pages/viewpage.action?pageId=79202765
+
+```
+ag@ag-dev:~$ ssh onap-integration@testresults.opnfv.org
+[onap-integration@gce-opnfv-sandbox-fbrockners ~]$ export LANG=de_DE
+[onap-integration@gce-opnfv-sandbox-fbrockners ~]$ mongo
+MongoDB shell version: 3.2.16
+connecting to: test
+Server has startup warnings:
+2020-10-01T07:54:55.852+0000 I CONTROL [initandlisten]
+2020-10-01T07:54:55.852+0000 I CONTROL [initandlisten] ** WARNING: /sys/kernel/mm/transparent_hugepage/enabled is 'always'.
+2020-10-01T07:54:55.852+0000 I CONTROL [initandlisten] ** We suggest setting it to 'never'
+2020-10-01T07:54:55.852+0000 I CONTROL [initandlisten]
+2020-10-01T07:54:55.852+0000 I CONTROL [initandlisten] ** WARNING: /sys/kernel/mm/transparent_hugepage/defrag is 'always'.
+2020-10-01T07:54:55.852+0000 I CONTROL [initandlisten] ** We suggest setting it to 'never'
+2020-10-01T07:54:55.852+0000 I CONTROL [initandlisten]
+2020-10-01T07:54:55.852+0000 I CONTROL [initandlisten] ** WARNING: soft rlimits too low. rlimits set to 4096 processes, 64000 files. Number of processes should be at least 32000 : 0.5 times number of files.
+2020-10-01T07:54:55.852+0000 I CONTROL [initandlisten]
+singleNodeRepl:PRIMARY>
+singleNodeRepl:PRIMARY> use onap
+switched to db onap
+singleNodeRepl:PRIMARY> show collections
+deployresults
+pods
+projects
+results
+scenarios
+test
+testcases
+users
+
+singleNodeRepl:PRIMARY> db.pods.insert({"name":"onap-daily-dt-oom-istanbul-TNAP-oom","creator":"Deutsche Telekom","role":"daily","details":"contact: Andreas Geissler","creation_date":"2022-02-22 8:00:00"})
+WriteResult({ "nInserted" : 1 })
+
+```
+Check if the pod is available:
+
+http://testresults.opnfv.org/onap/api/v1/pods
diff --git a/ansible.cfg b/ansible.cfg
new file mode 100644
index 0000000..c07b87a
--- /dev/null
+++ b/ansible.cfg
@@ -0,0 +1,15 @@
+[defaults]
+host_key_checking = False
+forks = 20
+stdout_callback = yaml
+gathering = smart
+fact_caching = jsonfile
+fact_caching_connection = /tmp
+inventory_ignore_extensions = ~, .orig, .bak, .ini, .cfg, .retry, .pyc, .pyo, .creds
+callback_whitelist = timer
+interpreter_python = auto
+
+[ssh_connection]
+pipelining = True
+retries = 5
+ssh_args = -o ControlMaster=auto -o ControlPersist=60s
diff --git a/doc/artifacts/versions/guilin.json b/doc/artifacts/versions/guilin.json
new file mode 100644
index 0000000..1dc244d
--- /dev/null
+++ b/doc/artifacts/versions/guilin.json
@@ -0,0 +1,1694 @@
+[
+ {
+ "container": "a1policymanagement-update-config",
+ "component": "a1policymanagement",
+ "image": "dibi/envsubst",
+ "version": "1"
+ },
+ {
+ "container": "a1policymanagement",
+ "component": "a1policymanagement",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-oran-a1policymanagementservice",
+ "version": "1.0.1"
+ },
+ {
+ "container": "aaf-cass",
+ "component": "aaf-cass",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_cass",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-cm",
+ "component": "aaf-cm",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-fs",
+ "component": "aaf-fs",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-gui",
+ "component": "aaf-gui",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-locate",
+ "component": "aaf-locate",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-oauth",
+ "component": "aaf-oauth",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-service",
+ "component": "aaf-service",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-sms",
+ "component": "aaf-sms",
+ "image": "nexus3.onap.org:10001/onap/aaf/sms",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-preload",
+ "component": "aaf-sms",
+ "image": "nexus3.onap.org:10001/onap/aaf/sms",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-vault",
+ "component": "aaf-sms-vault",
+ "image": "docker.nexus.azure.onap.eu/library/vault",
+ "version": "1.3.3"
+ },
+ {
+ "container": "aaf-sms-vault-backend",
+ "component": "aaf-sms-vault",
+ "image": "docker.nexus.azure.onap.eu/library/consul",
+ "version": "1.7.1"
+ },
+ {
+ "container": "aaf-sshsm-distcenter",
+ "component": "aaf-sshsm-distcenter",
+ "image": "nexus3.onap.org:10001/onap/aaf/distcenter",
+ "version": "4.0.0"
+ },
+ {
+ "container": "aaf-sshsm-testca",
+ "component": "aaf-sshsm-testca",
+ "image": "nexus3.onap.org:10001/onap/aaf/testcaservice",
+ "version": "4.0.0"
+ },
+ {
+ "container": "aai",
+ "component": "aai",
+ "image": "docker.nexus.azure.onap.eu/aaionap/haproxy",
+ "version": "1.4.2"
+ },
+ {
+ "container": "aai-babel",
+ "component": "aai-babel",
+ "image": "nexus3.onap.org:10001/onap/babel",
+ "version": "1.7.1"
+ },
+ {
+ "container": "aai-data-router",
+ "component": "aai-data-router",
+ "image": "nexus3.onap.org:10001/onap/aai-data-router",
+ "version": "1.7.0"
+ },
+ {
+ "container": "aai-elasticsearch",
+ "component": "aai-elasticsearch",
+ "image": "nexus3.onap.org:10001/onap/aai-elasticsearch",
+ "version": "6.1.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-babel",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-graphadmin",
+ "component": "aai-graphadmin",
+ "image": "nexus3.onap.org:10001/onap/aai-graphadmin",
+ "version": "1.7.1"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-graphadmin",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-graphadmin-job",
+ "component": "aai-graphadmin-job",
+ "image": "nexus3.onap.org:10001/onap/aai-graphadmin",
+ "version": "1.7.1"
+ },
+ {
+ "container": "aai-modelloader",
+ "component": "aai-modelloader",
+ "image": "nexus3.onap.org:10001/onap/model-loader",
+ "version": "1.7.0"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-modelloader",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-resources",
+ "component": "aai-resources",
+ "image": "nexus3.onap.org:10001/onap/aai-resources",
+ "version": "1.7.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-resources",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-schema-service",
+ "component": "aai-schema-service",
+ "image": "nexus3.onap.org:10001/onap/aai-schema-service",
+ "version": "1.7.14"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-schema-service",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-sparky-be",
+ "component": "aai-sparky-be",
+ "image": "nexus3.onap.org:10001/onap/sparky-be",
+ "version": "1.6.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-sparky-be",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-traversal",
+ "component": "aai-traversal",
+ "image": "nexus3.onap.org:10001/onap/aai-traversal",
+ "version": "1.7.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-traversal",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-traversal-job",
+ "component": "aai-traversal-job",
+ "image": "nexus3.onap.org:10001/onap/aai-traversal",
+ "version": "1.7.2"
+ },
+ {
+ "container": "appc",
+ "component": "appc",
+ "image": "nexus3.onap.org:10001/onap/appc-image",
+ "version": "1.7.2"
+ },
+ {
+ "container": "appc-ansible-server",
+ "component": "appc-ansible-server",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-ansible-server-image",
+ "version": "0.4.4"
+ },
+ {
+ "container": "appc-cdt",
+ "component": "appc-cdt",
+ "image": "nexus3.onap.org:10001/onap/appc-cdt-image",
+ "version": "1.7.2"
+ },
+ {
+ "container": "appc-db",
+ "component": "appc-db",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "v002"
+ },
+ {
+ "container": "appc-dgbuilder",
+ "component": "appc-dgbuilder",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-dgbuilder-image",
+ "version": "1.1.1"
+ },
+ {
+ "container": "awx-web",
+ "component": "onap-awx",
+ "image": "docker.nexus.azure.onap.eu/ansible/awx_web",
+ "version": "9.0.1"
+ },
+ {
+ "container": "awx-celery",
+ "component": "onap-awx",
+ "image": "docker.nexus.azure.onap.eu/ansible/awx_task",
+ "version": "9.0.1"
+ },
+ {
+ "container": "awx-rabbit",
+ "component": "onap-awx",
+ "image": "docker.nexus.azure.onap.eu/ansible/awx_rabbitmq",
+ "version": "3.7.4"
+ },
+ {
+ "container": "awx-memcached",
+ "component": "onap-awx",
+ "image": "docker.nexus.azure.onap.eu/memcached",
+ "version": "1.5.20"
+ },
+ {
+ "container": "awx-postgres",
+ "component": "awx-postgres",
+ "image": "docker.nexus.azure.onap.eu/postgres",
+ "version": "10.4-alpine"
+ },
+ {
+ "container": "awx-mgnt",
+ "component": "awx-mgnt",
+ "image": "docker.nexus.azure.onap.eu/ansible/awx_task",
+ "version": "9.0.1"
+ },
+ {
+ "container": "cassandra",
+ "component": "cassandra",
+ "image": "docker.nexus.azure.onap.eu/cassandra",
+ "version": "3.11.4"
+ },
+ {
+ "container": "cassandra",
+ "component": "cassandra",
+ "image": "docker.nexus.azure.onap.eu/cassandra",
+ "version": "3.11.4"
+ },
+ {
+ "container": "cassandra",
+ "component": "cassandra",
+ "image": "docker.nexus.azure.onap.eu/cassandra",
+ "version": "3.11.4"
+ },
+ {
+ "container": "cds-blueprints-processor",
+ "component": "cds-blueprints-processor",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-blueprintsprocessor",
+ "version": "1.0.3"
+ },
+ {
+ "container": "cds-command-executor",
+ "component": "cds-command-executor",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-commandexecutor",
+ "version": "1.0.3"
+ },
+ {
+ "container": "cds-db",
+ "component": "cds-db",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "v002"
+ },
+ {
+ "container": "cds-py-executor",
+ "component": "cds-py-executor",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-py-executor",
+ "version": "1.0.3"
+ },
+ {
+ "container": "cds-sdc-listener",
+ "component": "cds-sdc-listener",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-sdclistener",
+ "version": "1.0.3"
+ },
+ {
+ "container": "cds-ui",
+ "component": "cds-ui",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-cds-ui-server",
+ "version": "1.0.3"
+ },
+ {
+ "container": "clamp",
+ "component": "clamp",
+ "image": "nexus3.onap.org:10001/onap/clamp",
+ "version": "5.1.4"
+ },
+ {
+ "container": "clamp-backend",
+ "component": "clamp-backend",
+ "image": "nexus3.onap.org:10001/onap/clamp-backend",
+ "version": "5.1.5"
+ },
+ {
+ "container": "clamp-dash-es",
+ "component": "clamp-dash-es",
+ "image": "nexus3.onap.org:10001/onap/clamp-dash-es",
+ "version": "5.0.4"
+ },
+ {
+ "container": "clamp-dash-kibana",
+ "component": "clamp-dash-kibana",
+ "image": "nexus3.onap.org:10001/onap/clamp-dash-kibana",
+ "version": "5.0.4"
+ },
+ {
+ "container": "clamp-dash-logstash",
+ "component": "clamp-dash-logstash",
+ "image": "nexus3.onap.org:10001/onap/clamp-dash-logstash",
+ "version": "5.0.4"
+ },
+ {
+ "container": "clamp-mariadb",
+ "component": "clamp-mariadb",
+ "image": "nexus3.onap.org:10001/onap/clamp-mariadb",
+ "version": "10.5.4"
+ },
+ {
+ "container": "cli",
+ "component": "cli",
+ "image": "nexus3.onap.org:10001/onap/cli",
+ "version": "6.0.0"
+ },
+ {
+ "container": "cmso-db",
+ "component": "cmso-db",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "v002"
+ },
+ {
+ "container": "cmso-db-config",
+ "component": "cmso-db-config",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.1.38"
+ },
+ {
+ "container": "consul",
+ "component": "consul",
+ "image": "nexus3.onap.org:10001/onap/oom/consul",
+ "version": "2.1.0"
+ },
+ {
+ "container": "consul-server",
+ "component": "consul-server",
+ "image": "nexus3.onap.org:10001/onap/oom/consul",
+ "version": "2.1.0"
+ },
+ {
+ "container": "consul-server",
+ "component": "consul-server",
+ "image": "nexus3.onap.org:10001/onap/oom/consul",
+ "version": "2.1.0"
+ },
+ {
+ "container": "dbc-pg",
+ "component": "dbc-pg-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos7-10.11-4.2.1"
+ },
+ {
+ "container": "dbc-pg",
+ "component": "dbc-pg-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos7-10.11-4.2.1"
+ },
+ {
+ "container": "dcae-bootstrap",
+ "component": "dcae-bootstrap",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.k8s-bootstrap-container",
+ "version": "2.1.8"
+ },
+ {
+ "container": "dcae-cloudify-manager",
+ "component": "dcae-cloudify-manager",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.cm-container",
+ "version": "3.3.4"
+ },
+ {
+ "container": "dcae-config-binding-service",
+ "component": "dcae-config-binding-service",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.configbinding",
+ "version": "2.5.3"
+ },
+ {
+ "container": "dcae-config-binding-service-fb-onap",
+ "component": "dcae-config-binding-service",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-config-binding-service-insecure",
+ "component": "dcae-config-binding-service",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.configbinding",
+ "version": "2.5.3"
+ },
+ {
+ "container": "dcae-config-binding-service-fb-onap-i",
+ "component": "dcae-config-binding-service",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-dashboard",
+ "component": "dcae-dashboard",
+ "image": "nexus3.onap.org:10001/onap/org.onap.ccsdk.dashboard.ccsdk-app-os",
+ "version": "1.4.0"
+ },
+ {
+ "container": "dcae-dashboard-filebeat",
+ "component": "dcae-dashboard",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-dashboard-pg",
+ "component": "dcae-dashboard-pg-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos7-10.11-4.2.1"
+ },
+ {
+ "container": "dcae-dashboard-pg",
+ "component": "dcae-dashboard-pg-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos7-10.11-4.2.1"
+ },
+ {
+ "container": "dcae-db",
+ "component": "dcae-db-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos7-10.11-4.2.1"
+ },
+ {
+ "container": "dcae-db",
+ "component": "dcae-db-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos7-10.11-4.2.1"
+ },
+ {
+ "container": "dcae-deployment-handler",
+ "component": "dcae-deployment-handler",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.deployment-handler",
+ "version": "4.4.1"
+ },
+ {
+ "container": "dcae-deployment-handler-filebeat",
+ "component": "dcae-deployment-handler",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-healthcheck",
+ "component": "dcae-healthcheck",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.healthcheck-container",
+ "version": "2.1.0"
+ },
+ {
+ "container": "dcae-hv-ves-collector",
+ "component": "dcae-hv-ves-collector",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.hv-ves.hv-collector-main",
+ "version": "1.5.0"
+ },
+ {
+ "container": "filebeat",
+ "component": "dcae-hv-ves-collector",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-inv-pg",
+ "component": "dcae-inv-pg-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos7-10.11-4.2.1"
+ },
+ {
+ "container": "dcae-inv-pg",
+ "component": "dcae-inv-pg-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos7-10.11-4.2.1"
+ },
+ {
+ "container": "dcae-inventory-api",
+ "component": "dcae-inventory-api",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.inventory-api",
+ "version": "3.5.2"
+ },
+ {
+ "container": "dcae-inventory-api-filebeat",
+ "component": "dcae-inventory-api",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-mongo",
+ "component": "dcae-mongo",
+ "image": "docker.nexus.azure.onap.eu/library/mongo",
+ "version": "4.0.8"
+ },
+ {
+ "container": "dcae-policy-handler",
+ "component": "dcae-policy-handler",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.policy-handler",
+ "version": "5.1.0"
+ },
+ {
+ "container": "dcae-policy-handler-filebeat",
+ "component": "dcae-policy-handler",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-prh",
+ "component": "dcae-prh",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.services.prh.prh-app-server",
+ "version": "1.5.4"
+ },
+ {
+ "container": "filebeat",
+ "component": "dcae-prh",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-servicechange-handler",
+ "component": "dcae-servicechange-handler",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.servicechange-handler",
+ "version": "1.4.0"
+ },
+ {
+ "container": "dcae-tcagen2",
+ "component": "dcae-tcagen2",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.analytics.tca-gen2.dcae-analytics-tca-web",
+ "version": "1.2.1"
+ },
+ {
+ "container": "filebeat",
+ "component": "dcae-tcagen2",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-ves-collector",
+ "component": "dcae-ves-collector",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.ves.vescollector",
+ "version": "1.7.9"
+ },
+ {
+ "container": "filebeat",
+ "component": "dcae-ves-collector",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcaemod-db",
+ "component": "dcaemod-db-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos7-10.11-4.2.1"
+ },
+ {
+ "container": "dcaemod-db",
+ "component": "dcaemod-db-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos7-10.11-4.2.1"
+ },
+ {
+ "container": "dcaemod-designtool",
+ "component": "dcaemod-designtool",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.designtool-web",
+ "version": "1.0.2"
+ },
+ {
+ "container": "dcaemod-distributor-api",
+ "component": "dcaemod-distributor-api",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.distributorapi",
+ "version": "1.1.0"
+ },
+ {
+ "container": "dcaemod-genprocessor",
+ "component": "dcaemod-genprocessor",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.genprocessor-job",
+ "version": "1.0.2"
+ },
+ {
+ "container": "dcaemod-genprocessor-http",
+ "component": "dcaemod-genprocessor",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.genprocessor-http",
+ "version": "1.0.2"
+ },
+ {
+ "container": "dcaemod-healthcheck",
+ "component": "dcaemod-healthcheck",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.healthcheck-container",
+ "version": "2.0.0"
+ },
+ {
+ "container": "dcaemod-nifi-registry",
+ "component": "dcaemod-nifi-registry",
+ "image": "docker.nexus.azure.onap.eu/apache/nifi-registry",
+ "version": "0.5.0"
+ },
+ {
+ "container": "dcaemod-onboarding-api",
+ "component": "dcaemod-onboarding-api",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.onboardingapi",
+ "version": "2.12.3"
+ },
+ {
+ "container": "dcaemod-runtime-api",
+ "component": "dcaemod-runtime-api",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.runtime-web",
+ "version": "1.1.1"
+ },
+ {
+ "container": "dmaap-bc",
+ "component": "dmaap-bc",
+ "image": "nexus3.onap.org:10001/onap/dmaap/dmaap-bc",
+ "version": "2.0.4"
+ },
+ {
+ "container": "dmaap-provisioning-job",
+ "component": "dmaap-bc",
+ "image": "nexus3.onap.org:10001/onap/dmaap/dbc-client",
+ "version": "1.0.9"
+ },
+ {
+ "container": "dmaap-dr-db",
+ "component": "dmaap-dr-db",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "v002"
+ },
+ {
+ "container": "dmaap-dr-node",
+ "component": "dmaap-dr-node",
+ "image": "nexus3.onap.org:10001/onap/dmaap/datarouter-node",
+ "version": "2.1.7"
+ },
+ {
+ "container": "dmaap-dr-node-filebeat-onap",
+ "component": "dmaap-dr-node",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dmaap-dr-prov",
+ "component": "dmaap-dr-prov",
+ "image": "nexus3.onap.org:10001/onap/dmaap/datarouter-prov",
+ "version": "2.1.7"
+ },
+ {
+ "container": "dmaap-dr-prov-filebeat-onap",
+ "component": "dmaap-dr-prov",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "ejbca-ejbca",
+ "component": "ejbca",
+ "image": "docker.nexus.azure.onap.eu/primekey/ejbca-ce",
+ "version": "6.15.2.5"
+ },
+ {
+ "container": "ejbca-config",
+ "component": "ejbca-config",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.1.38"
+ },
+ {
+ "container": "esr-gui",
+ "component": "esr-gui",
+ "image": "nexus3.onap.org:10001/onap/aai/esr-gui",
+ "version": "1.4.0"
+ },
+ {
+ "container": "esr-server",
+ "component": "esr-server",
+ "image": "nexus3.onap.org:10001/onap/aai/esr-server",
+ "version": "1.5.2"
+ },
+ {
+ "container": "esr-server-filebeat-onap",
+ "component": "esr-server",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "holmes-engine-mgmt",
+ "component": "holmes-engine-mgmt",
+ "image": "nexus3.onap.org:10001/onap/holmes/engine-management",
+ "version": "1.2.9"
+ },
+ {
+ "container": "holmes-pg",
+ "component": "holmes-pg-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "holmes-pg",
+ "component": "holmes-pg-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "holmes-rule-mgmt",
+ "component": "holmes-rule-mgmt",
+ "image": "nexus3.onap.org:10001/onap/holmes/rule-management",
+ "version": "1.2.6"
+ },
+ {
+ "container": "kube2msb",
+ "component": "kube2msb",
+ "image": "nexus3.onap.org:10001/onap/oom/kube2msb",
+ "version": "1.2.6"
+ },
+ {
+ "container": "mariadb-galera",
+ "component": "mariadb-galera",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "v002"
+ },
+ {
+ "container": "message-router",
+ "component": "message-router",
+ "image": "nexus3.onap.org:10001/onap/dmaap/dmaap-mr",
+ "version": "1.1.18"
+ },
+ {
+ "container": "message-router-kafka",
+ "component": "message-router-kafka",
+ "image": "nexus3.onap.org:10001/onap/dmaap/kafka111",
+ "version": "1.0.5"
+ },
+ {
+ "container": "message-router-kafka",
+ "component": "message-router-kafka",
+ "image": "nexus3.onap.org:10001/onap/dmaap/kafka111",
+ "version": "1.0.5"
+ },
+ {
+ "container": "message-router-kafka",
+ "component": "message-router-kafka",
+ "image": "nexus3.onap.org:10001/onap/dmaap/kafka111",
+ "version": "1.0.4"
+ },
+ {
+ "container": "message-router-zookeeper",
+ "component": "message-router-zookeeper",
+ "image": "nexus3.onap.org:10001/onap/dmaap/zookeeper",
+ "version": "6.0.3"
+ },
+ {
+ "container": "modeling-etsicatalog",
+ "component": "modeling-etsicatalog",
+ "image": "nexus3.onap.org:10001/onap/modeling/etsicatalog",
+ "version": "1.0.9"
+ },
+ {
+ "container": "modeling-etsicatalog-filebeat-onap",
+ "component": "modeling-etsicatalog",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "modeling-etsicatalog",
+ "component": "modeling-etsicatalog",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.1.38"
+ },
+ {
+ "container": "msb-consul",
+ "component": "msb-consul",
+ "image": "docker.nexus.azure.onap.eu/library/consul",
+ "version": "1.4.3"
+ },
+ {
+ "container": "msb-discovery",
+ "component": "msb-discovery",
+ "image": "nexus3.onap.org:10001/onap/msb/msb_discovery",
+ "version": "1.2.6"
+ },
+ {
+ "container": "msb-discovery-filebeat-onap",
+ "component": "msb-discovery",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "msb-eag",
+ "component": "msb-eag",
+ "image": "nexus3.onap.org:10001/onap/msb/msb_apigateway",
+ "version": "1.2.7"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "msb-eag",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "msb-iag",
+ "component": "msb-iag",
+ "image": "nexus3.onap.org:10001/onap/msb/msb_apigateway",
+ "version": "1.2.7"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "msb-iag",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "multicloud",
+ "component": "multicloud",
+ "image": "nexus3.onap.org:10001/onap/multicloud/framework",
+ "version": "1.6.0"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "multicloud",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "multicloud-fcaps",
+ "component": "multicloud-fcaps",
+ "image": "nexus3.onap.org:10001/onap/multicloud/openstack-fcaps",
+ "version": "1.5.5"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "multicloud-fcaps",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "rabbit-mq",
+ "component": "multicloud-fcaps",
+ "image": "nexus3.onap.org:10001/rabbitmq",
+ "version": "alpine"
+ },
+ {
+ "container": "multicloud-k8s",
+ "component": "multicloud-k8s",
+ "image": "nexus3.onap.org:10001/onap/multicloud/k8s",
+ "version": "0.7.0"
+ },
+ {
+ "container": "framework-artifactbroker",
+ "component": "multicloud-k8s",
+ "image": "nexus3.onap.org:10001/onap/multicloud/framework-artifactbroker",
+ "version": "1.6.0"
+ },
+ {
+ "container": "onap-multicloud-k8s-etcd",
+ "component": "multicloud-k8s-etcd",
+ "image": "docker.nexus.azure.onap.eu/etcd-amd64",
+ "version": "3.2.24"
+ },
+ {
+ "container": "multicloud-k8s-mongo",
+ "component": "multicloud-k8s-mongo",
+ "image": "docker.nexus.azure.onap.eu/library/mongo",
+ "version": "4.0.8"
+ },
+ {
+ "container": "multicloud-pike",
+ "component": "multicloud-pike",
+ "image": "nexus3.onap.org:10001/onap/multicloud/openstack-pike",
+ "version": "1.5.5"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "multicloud-pike",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "multicloud-starlingx",
+ "component": "multicloud-starlingx",
+ "image": "nexus3.onap.org:10001/onap/multicloud/openstack-starlingx",
+ "version": "1.5.5"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "multicloud-starlingx",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "framework-artifactbroker",
+ "component": "multicloud-starlingx",
+ "image": "nexus3.onap.org:10001/onap/multicloud/framework-artifactbroker",
+ "version": "1.6.0"
+ },
+ {
+ "container": "multicloud-vio",
+ "component": "multicloud-vio",
+ "image": "nexus3.onap.org:10001/onap/multicloud/vio",
+ "version": "1.4.1"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "multicloud-vio",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "multicloud-windriver",
+ "component": "multicloud-windriver",
+ "image": "nexus3.onap.org:10001/onap/multicloud/openstack-windriver",
+ "version": "1.5.5"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "multicloud-windriver",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "framework-artifactbroker",
+ "component": "multicloud-windriver",
+ "image": "nexus3.onap.org:10001/onap/multicloud/framework-artifactbroker",
+ "version": "1.6.0"
+ },
+ {
+ "container": "music-springboot",
+ "component": "music",
+ "image": "nexus3.onap.org:10001/onap/music/music_sb",
+ "version": "3.2.40"
+ },
+ {
+ "container": "music-cassandra",
+ "component": "music-cassandra",
+ "image": "nexus3.onap.org:10001/onap/music/cassandra_3_11",
+ "version": "3.0.24"
+ },
+ {
+ "container": "music-cassandra-update-job",
+ "component": "music-cassandra-job",
+ "image": "nexus3.onap.org:10001/onap/music/cassandra_job",
+ "version": "3.0.24"
+ },
+ {
+ "container": "nbi",
+ "component": "nbi",
+ "image": "nexus3.onap.org:10001/onap/externalapi/nbi",
+ "version": "7.0.2"
+ },
+ {
+ "container": "nbi-config",
+ "component": "nbi-config",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.1.38"
+ },
+ {
+ "container": "nbi-mongo",
+ "component": "nbi-mongo",
+ "image": "docker.nexus.azure.onap.eu/library/mongo",
+ "version": "4.0.8"
+ },
+ {
+ "container": "nengdb-init",
+ "component": "nengdb-init",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.1.38"
+ },
+ {
+ "container": "netbox-app",
+ "component": "netbox-app",
+ "image": "docker.nexus.azure.onap.eu/netboxcommunity/netbox",
+ "version": "v2.5.8"
+ },
+ {
+ "container": "netbox-app-provisioning-job",
+ "component": "netbox-app-provisioning-job",
+ "image": "docker.nexus.azure.onap.eu/curlimages/curl",
+ "version": "7.69.1"
+ },
+ {
+ "container": "netbox-app-provisioning-job",
+ "component": "netbox-app-provisioning-job",
+ "image": "docker.nexus.azure.onap.eu/curlimages/curl",
+ "version": "7.69.1"
+ },
+ {
+ "container": "netbox-app-provisioning-job",
+ "component": "netbox-app-provisioning-job",
+ "image": "docker.nexus.azure.onap.eu/curlimages/curl",
+ "version": "7.69.1"
+ },
+ {
+ "container": "netbox-nginx",
+ "component": "netbox-nginx",
+ "image": "docker.nexus.azure.onap.eu/nginx",
+ "version": "1.15-alpine"
+ },
+ {
+ "container": "netbox-postgres",
+ "component": "netbox-postgres",
+ "image": "docker.nexus.azure.onap.eu/postgres",
+ "version": "10.4-alpine"
+ },
+ {
+ "container": "network-name-gen",
+ "component": "network-name-gen",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-apps-ms-neng",
+ "version": "1.1.1"
+ },
+ {
+ "container": "oof",
+ "component": "oof",
+ "image": "nexus3.onap.org:10001/onap/optf-osdf",
+ "version": "3.0.2"
+ },
+ {
+ "container": "oof-cmso-optimizer",
+ "component": "oof-cmso-optimizer",
+ "image": "nexus3.onap.org:10001/onap/optf-cmso-optimizer",
+ "version": "2.3.0"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "oof-cmso-service",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "mso-simulator",
+ "component": "oof-cmso-service",
+ "image": "nexus3.onap.org:10001/onap/optf-cmso-robot",
+ "version": "2.3.0"
+ },
+ {
+ "container": "oof-cmso-service",
+ "component": "oof-cmso-service",
+ "image": "nexus3.onap.org:10001/onap/optf-cmso-service",
+ "version": "2.3.0"
+ },
+ {
+ "container": "oof-cmso-ticketmgt",
+ "component": "oof-cmso-ticketmgt",
+ "image": "nexus3.onap.org:10001/onap/optf-cmso-ticketmgt",
+ "version": "2.3.0"
+ },
+ {
+ "container": "oof-cmso-topology",
+ "component": "oof-cmso-topology",
+ "image": "nexus3.onap.org:10001/onap/optf-cmso-topology",
+ "version": "2.3.0"
+ },
+ {
+ "container": "oof-has-api",
+ "component": "oof-has-api",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.1.2"
+ },
+ {
+ "container": "oof-has-api-nginx",
+ "component": "oof-has-api",
+ "image": "docker.nexus.azure.onap.eu/bitnami/nginx",
+ "version": "1.18-debian-10"
+ },
+ {
+ "container": "oof-has-controller",
+ "component": "oof-has-controller",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.1.2"
+ },
+ {
+ "container": "oof-has-data",
+ "component": "oof-has-data",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.1.2"
+ },
+ {
+ "container": "oof-has-healthcheck",
+ "component": "oof-has",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.1.2"
+ },
+ {
+ "container": "oof-has-onboard",
+ "component": "oof-has",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.1.2"
+ },
+ {
+ "container": "oof-has-reservation",
+ "component": "oof-has-reservation",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.1.2"
+ },
+ {
+ "container": "oof-has-solver",
+ "component": "oof-has-solver",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.1.2"
+ },
+ {
+ "container": "oom-cert-service",
+ "component": "oom-cert-service",
+ "image": "nexus3.onap.org:10001/onap/org.onap.oom.platform.cert-service.oom-certservice-api",
+ "version": "2.1.1"
+ },
+ {
+ "container": "policy-apex-pdp",
+ "component": "policy-apex-pdp",
+ "image": "nexus3.onap.org:10001/onap/policy-apex-pdp",
+ "version": "2.4.4"
+ },
+ {
+ "container": "policy-api",
+ "component": "policy-api",
+ "image": "nexus3.onap.org:10001/onap/policy-api",
+ "version": "2.3.3"
+ },
+ {
+ "container": "policy-distribution",
+ "component": "policy-distribution",
+ "image": "nexus3.onap.org:10001/onap/policy-distribution",
+ "version": "2.4.3"
+ },
+ {
+ "container": "policy-drools-pdp",
+ "component": "policy-drools-pdp",
+ "image": "nexus3.onap.org:10001/onap/policy-pdpd-cl",
+ "version": "1.7.5"
+ },
+ {
+ "container": "policy-mariadb",
+ "component": "policy-mariadb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "v002"
+ },
+ {
+ "container": "policy-pap",
+ "component": "policy-pap",
+ "image": "nexus3.onap.org:10001/onap/policy-pap",
+ "version": "2.3.3"
+ },
+ {
+ "container": "policy-xacml-pdp",
+ "component": "policy-xacml-pdp",
+ "image": "nexus3.onap.org:10001/onap/policy-xacml-pdp",
+ "version": "2.3.3"
+ },
+ {
+ "container": "portal-app",
+ "component": "portal-app",
+ "image": "nexus3.onap.org:10001/onap/portal-app",
+ "version": "3.4.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "portal-app",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "portal-cassandra",
+ "component": "portal-cassandra",
+ "image": "docker.nexus.azure.onap.eu/bitnami/cassandra",
+ "version": "3.0.0"
+ },
+ {
+ "container": "portal-db",
+ "component": "portal-db",
+ "image": "nexus3.onap.org:10001/onap/portal-db",
+ "version": "3.4.1"
+ },
+ {
+ "container": "portal-db-job",
+ "component": "portal-db-job",
+ "image": "docker.nexus.azure.onap.eu/oomk8s/mariadb-client-init",
+ "version": "3.0.0"
+ },
+ {
+ "container": "portal-db-oom-update-job",
+ "component": "portal-db-job",
+ "image": "docker.nexus.azure.onap.eu/oomk8s/mariadb-client-init",
+ "version": "3.0.0"
+ },
+ {
+ "container": "portal-sdk",
+ "component": "portal-sdk",
+ "image": "nexus3.onap.org:10001/onap/portal-sdk",
+ "version": "3.4.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "portal-sdk",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "portal-widget",
+ "component": "portal-widget",
+ "image": "nexus3.onap.org:10001/onap/portal-wms",
+ "version": "3.4.2"
+ },
+ {
+ "container": "robot",
+ "component": "robot",
+ "image": "nexus3.onap.org:10001/onap/testsuite",
+ "version": "1.7.2"
+ },
+ {
+ "container": "sdc-be",
+ "component": "sdc-be",
+ "image": "nexus3.onap.org:10001/onap/sdc-backend-all-plugins",
+ "version": "1.7.3"
+ },
+ {
+ "container": "sdc-be-filebeat-onap",
+ "component": "sdc-be",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "sdc-be-job",
+ "component": "sdc-be-job",
+ "image": "nexus3.onap.org:10001/onap/sdc-backend-init",
+ "version": "1.7.3"
+ },
+ {
+ "container": "sdc-cs-job",
+ "component": "sdc-cs-job",
+ "image": "nexus3.onap.org:10001/onap/sdc-cassandra-init",
+ "version": "1.7.3"
+ },
+ {
+ "container": "sdc-fe",
+ "component": "sdc-fe",
+ "image": "nexus3.onap.org:10001/onap/sdc-frontend",
+ "version": "1.7.3"
+ },
+ {
+ "container": "sdc-fe-filebeat-onap",
+ "component": "sdc-fe",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "sdc-onboarding-be-job",
+ "component": "sdc-onboarding-be-job",
+ "image": "nexus3.onap.org:10001/onap/sdc-onboard-cassandra-init",
+ "version": "1.7.3"
+ },
+ {
+ "container": "sdc-onboarding-be",
+ "component": "sdc-onboarding-be",
+ "image": "nexus3.onap.org:10001/onap/sdc-onboard-backend",
+ "version": "1.7.3"
+ },
+ {
+ "container": "sdc-onboarding-be-filebeat-onap",
+ "component": "sdc-onboarding-be",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "sdc-wfd-be",
+ "component": "sdc-wfd-be",
+ "image": "nexus3.onap.org:10001/onap/sdc-workflow-backend",
+ "version": "1.7.0"
+ },
+ {
+ "container": "sdc-wfd-be-job",
+ "component": "sdc-wfd-be-job",
+ "image": "nexus3.onap.org:10001/onap/sdc-workflow-init",
+ "version": "1.7.0"
+ },
+ {
+ "container": "sdc-wfd-fe",
+ "component": "sdc-wfd-fe",
+ "image": "nexus3.onap.org:10001/onap/sdc-workflow-frontend",
+ "version": "1.7.0"
+ },
+ {
+ "container": "sdc-wfd-fe-filebeat-onap",
+ "component": "sdc-wfd-fe",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "sdnc",
+ "component": "sdnc",
+ "image": "nexus3.onap.org:10001/onap/sdnc-image",
+ "version": "2.0.5"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "sdnc",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "sdnc-ansible-server",
+ "component": "sdnc-ansible-server",
+ "image": "nexus3.onap.org:10001/onap/sdnc-ansible-server-image",
+ "version": "2.0.5"
+ },
+ {
+ "container": "sdnc",
+ "component": "sdnc-job",
+ "image": "nexus3.onap.org:10001/onap/sdnc-image",
+ "version": "2.0.5"
+ },
+ {
+ "container": "sdnc-dgbuilder",
+ "component": "sdnc-dgbuilder",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-dgbuilder-image",
+ "version": "1.0.2"
+ },
+ {
+ "container": "sdnc-dmaap-listener",
+ "component": "sdnc-dmaap-listener",
+ "image": "nexus3.onap.org:10001/onap/sdnc-dmaap-listener-image",
+ "version": "2.0.5"
+ },
+ {
+ "container": "sdnc-sdnrdb-init-job",
+ "component": "sdnc",
+ "image": "nexus3.onap.org:10001/onap/sdnc-image",
+ "version": "2.0.5"
+ },
+ {
+ "container": "sdnc-ueb-listener",
+ "component": "sdnc-ueb-listener",
+ "image": "nexus3.onap.org:10001/onap/sdnc-ueb-listener-image",
+ "version": "2.1.5"
+ },
+ {
+ "container": "sdnc-web",
+ "component": "sdnc-web",
+ "image": "nexus3.onap.org:10001/onap/sdnc-web-image",
+ "version": "2.0.5"
+ },
+ {
+ "container": "sdnrdb-nginx",
+ "component": "sdnrdb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/nginx",
+ "version": "1.16-debian-9"
+ },
+ {
+ "container": "sdnrdb-elasticsearch",
+ "component": "sdnrdb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/elasticsearch",
+ "version": "7.6.1"
+ },
+ {
+ "container": "sdnrdb-master",
+ "component": "sdnrdb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/elasticsearch",
+ "version": "7.6.1"
+ },
+ {
+ "container": "so",
+ "component": "so",
+ "image": "nexus3.onap.org:10001/onap/so/api-handler-infra",
+ "version": "1.7.11"
+ },
+ {
+ "container": "so-filebeat-onap",
+ "component": "so",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "so-admin-cockpit",
+ "component": "so-admin-cockpit",
+ "image": "nexus3.onap.org:10001/onap/so/so-admin-cockpit",
+ "version": "1.8.2"
+ },
+ {
+ "container": "so-bpmn-infra",
+ "component": "so-bpmn-infra",
+ "image": "nexus3.onap.org:10001/onap/so/bpmn-infra",
+ "version": "1.7.11"
+ },
+ {
+ "container": "so-bpmn-infra-filebeat-onap",
+ "component": "so-bpmn-infra",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "so-catalog-db-adapter",
+ "component": "so-catalog-db-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/catalog-db-adapter",
+ "version": "1.7.11"
+ },
+ {
+ "container": "so-cnf-adapter",
+ "component": "so-cnf-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/so-cnf-adapter",
+ "version": "1.7.11"
+ },
+ {
+ "container": "so-etsi-nfvo-ns-lcm",
+ "component": "so-etsi-nfvo-ns-lcm",
+ "image": "nexus3.onap.org:10001/onap/so/so-etsi-nfvo-ns-lcm",
+ "version": "1.7.11"
+ },
+ {
+ "container": "so-etsi-sol003-adapter",
+ "component": "so-etsi-sol003-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/so-etsi-sol003-adapter",
+ "version": "1.7.11"
+ },
+ {
+ "container": "so-etsi-sol005-adapter",
+ "component": "so-etsi-sol005-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/so-etsi-sol005-adapter",
+ "version": "1.7.11"
+ },
+ {
+ "container": "so-mariadb-config",
+ "component": "so-mariadb-job",
+ "image": "nexus3.onap.org:10001/mariadb",
+ "version": "10.1.38"
+ },
+ {
+ "container": "so-nssmf-adapter",
+ "component": "so-nssmf-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/so-nssmf-adapter",
+ "version": "1.7.11"
+ },
+ {
+ "container": "so-oof-adapter",
+ "component": "so-oof-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/so-oof-adapter",
+ "version": "1.7.11"
+ },
+ {
+ "container": "so-oof-adapter-filebeat-onap",
+ "component": "so-oof-adapter",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "so-openstack-adapter",
+ "component": "so-openstack-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/openstack-adapter",
+ "version": "1.7.11"
+ },
+ {
+ "container": "so-openstack-adapter-filebeat-onap",
+ "component": "so-openstack-adapter",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "so-request-db-adapter",
+ "component": "so-request-db-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/request-db-adapter",
+ "version": "1.7.11"
+ },
+ {
+ "container": "so-sdc-controller",
+ "component": "so-sdc-controller",
+ "image": "nexus3.onap.org:10001/onap/so/sdc-controller",
+ "version": "1.7.11"
+ },
+ {
+ "container": "so-sdc-controller-filebeat-onap",
+ "component": "so-sdc-controller",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "so-sdnc-adapter",
+ "component": "so-sdnc-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/sdnc-adapter",
+ "version": "1.7.11"
+ },
+ {
+ "container": "so-sdnc-adapter-filebeat-onap",
+ "component": "so-sdnc-adapter",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "uui",
+ "component": "uui",
+ "image": "nexus3.onap.org:10001/onap/usecase-ui",
+ "version": "3.0.6"
+ },
+ {
+ "container": "uui-server",
+ "component": "uui-server",
+ "image": "nexus3.onap.org:10001/onap/usecase-ui-server",
+ "version": "3.0.6"
+ },
+ {
+ "container": "vfc-generic-vnfm-driver",
+ "component": "vfc-generic-vnfm-driver",
+ "image": "nexus3.onap.org:10001/onap/vfc/gvnfmdriver",
+ "version": "1.4.0"
+ },
+ {
+ "container": "vfc-generic-vnfm-driver-filebeat-onap",
+ "component": "vfc-generic-vnfm-driver",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-huawei-vnfm-driver",
+ "component": "vfc-huawei-vnfm-driver",
+ "image": "nexus3.onap.org:10001/onap/vfc/nfvo/svnfm/huawei",
+ "version": "1.3.8"
+ },
+ {
+ "container": "vfc-huawei-vnfm-driver-filebeat-onap",
+ "component": "vfc-huawei-vnfm-driver",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-mariadb",
+ "component": "vfc-mariadb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "v002"
+ },
+ {
+ "container": "vfc-mariadb-metrics",
+ "component": "vfc-mariadb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mysqld-exporter",
+ "version": "0.12.1-debian-10-r264"
+ },
+ {
+ "container": "vfc-nslcm-filebeat-onap",
+ "component": "vfc-nslcm",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-redis",
+ "component": "vfc-redis",
+ "image": "nexus3.onap.org:10001/onap/vfc/db",
+ "version": "1.3.4"
+ },
+ {
+ "container": "vfc-vnflcm",
+ "component": "vfc-vnflcm",
+ "image": "nexus3.onap.org:10001/onap/vfc/vnflcm",
+ "version": "1.4.0"
+ },
+ {
+ "container": "vfc-vnflcm-filebeat-onap",
+ "component": "vfc-vnflcm",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-vnfmgr",
+ "component": "vfc-vnfmgr",
+ "image": "nexus3.onap.org:10001/onap/vfc/vnfmgr",
+ "version": "1.3.9"
+ },
+ {
+ "container": "vfc-vnfmgr-filebeat-onap",
+ "component": "vfc-vnfmgr",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-vnfres",
+ "component": "vfc-vnfres",
+ "image": "nexus3.onap.org:10001/onap/vfc/vnfres",
+ "version": "1.3.8"
+ },
+ {
+ "container": "vfc-vnfres-filebeat-onap",
+ "component": "vfc-vnfres",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-zte-vnfm-driver",
+ "component": "vfc-zte-vnfm-driver",
+ "image": "nexus3.onap.org:10001/onap/vfc/ztevnfmdriver",
+ "version": "1.3.8"
+ },
+ {
+ "container": "vfc-zte-vnfm-driver-filebeat-onap",
+ "component": "vfc-zte-vnfm-driver",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vid",
+ "component": "vid",
+ "image": "nexus3.onap.org:10001/onap/vid",
+ "version": "7.0.0"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "vid",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vid-mariadb-init",
+ "component": "vid-mariadb-init",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "v002"
+ },
+ {
+ "container": "vnfsdk",
+ "component": "vnfsdk",
+ "image": "nexus3.onap.org:10001/onap/vnfsdk/refrepo",
+ "version": "1.6.2"
+ },
+ {
+ "container": "vnfsdk-job",
+ "component": "vnfsdk-job",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos7-10.3-1.8.2"
+ },
+ {
+ "container": "vnfsdk-postgres",
+ "component": "vnfsdk-postgres-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos7-10.3-1.8.2"
+ },
+ {
+ "container": "vnfsdk-postgres",
+ "component": "vnfsdk-postgres-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos7-10.3-1.8.2"
+ }
+]
diff --git a/doc/artifacts/versions/honolulu.json b/doc/artifacts/versions/honolulu.json
new file mode 100644
index 0000000..a694b7f
--- /dev/null
+++ b/doc/artifacts/versions/honolulu.json
@@ -0,0 +1,1736 @@
+[
+ {
+ "container": "a1policymanagement-update-config",
+ "component": "a1policymanagement",
+ "image": "dibi/envsubst",
+ "version": "1"
+ },
+ {
+ "container": "a1policymanagement",
+ "component": "a1policymanagement",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-oran-a1policymanagementservice",
+ "version": "1.1.1"
+ },
+ {
+ "container": "aaf-cass",
+ "component": "aaf-cass",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_cass",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-cm",
+ "component": "aaf-cm",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-fs",
+ "component": "aaf-fs",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-gui",
+ "component": "aaf-gui",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-locate",
+ "component": "aaf-locate",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-oauth",
+ "component": "aaf-oauth",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-service",
+ "component": "aaf-service",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-sms",
+ "component": "aaf-sms",
+ "image": "nexus3.onap.org:10001/onap/aaf/sms",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-preload",
+ "component": "aaf-sms",
+ "image": "nexus3.onap.org:10001/onap/aaf/sms",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-vault",
+ "component": "aaf-sms-vault",
+ "image": "docker.nexus.azure.onap.eu/library/vault",
+ "version": "1.3.3"
+ },
+ {
+ "container": "aaf-sms-vault-backend",
+ "component": "aaf-sms-vault",
+ "image": "docker.nexus.azure.onap.eu/library/consul",
+ "version": "1.7.1"
+ },
+ {
+ "container": "aaf-sshsm-distcenter",
+ "component": "aaf-sshsm-distcenter",
+ "image": "nexus3.onap.org:10001/onap/aaf/distcenter",
+ "version": "4.0.0"
+ },
+ {
+ "container": "aaf-sshsm-testca",
+ "component": "aaf-sshsm-testca",
+ "image": "nexus3.onap.org:10001/onap/aaf/testcaservice",
+ "version": "4.0.0"
+ },
+ {
+ "container": "aai",
+ "component": "aai",
+ "image": "docker.nexus.azure.onap.eu/aaionap/haproxy",
+ "version": "1.4.2"
+ },
+ {
+ "container": "aai-babel",
+ "component": "aai-babel",
+ "image": "nexus3.onap.org:10001/onap/babel",
+ "version": "1.8.0"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-babel",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-graphadmin",
+ "component": "aai-graphadmin",
+ "image": "nexus3.onap.org:10001/onap/aai-graphadmin",
+ "version": "1.8.0"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-graphadmin",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-graphadmin-job",
+ "component": "aai-graphadmin-job",
+ "image": "nexus3.onap.org:10001/onap/aai-graphadmin",
+ "version": "1.8.0"
+ },
+ {
+ "container": "aai-graphadmin-job",
+ "component": "aai-graphadmin-job",
+ "image": "nexus3.onap.org:10001/onap/aai-graphadmin",
+ "version": "1.8.0"
+ },
+ {
+ "container": "aai-modelloader",
+ "component": "aai-modelloader",
+ "image": "nexus3.onap.org:10001/onap/model-loader",
+ "version": "1.8.0"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-modelloader",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-resources",
+ "component": "aai-resources",
+ "image": "nexus3.onap.org:10001/onap/aai-resources",
+ "version": "1.8.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-resources",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-schema-service",
+ "component": "aai-schema-service",
+ "image": "nexus3.onap.org:10001/onap/aai-schema-service",
+ "version": "1.8.6"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-schema-service",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-sparky-be",
+ "component": "aai-sparky-be",
+ "image": "nexus3.onap.org:10001/onap/sparky-be",
+ "version": "2.0.3"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-sparky-be",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-traversal",
+ "component": "aai-traversal",
+ "image": "nexus3.onap.org:10001/onap/aai-traversal",
+ "version": "1.8.0"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-traversal",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-traversal-job",
+ "component": "aai-traversal-job",
+ "image": "nexus3.onap.org:10001/onap/aai-traversal",
+ "version": "1.8.0"
+ },
+ {
+ "container": "appc",
+ "component": "appc",
+ "image": "nexus3.onap.org:10001/onap/appc-image",
+ "version": "1.7.2"
+ },
+ {
+ "container": "appc-ansible-server",
+ "component": "appc-ansible-server",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-ansible-server-image",
+ "version": "0.4.4"
+ },
+ {
+ "container": "appc-cdt",
+ "component": "appc-cdt",
+ "image": "nexus3.onap.org:10001/onap/appc-cdt-image",
+ "version": "1.7.2"
+ },
+ {
+ "container": "appc-db",
+ "component": "appc-db",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "10.5.8"
+ },
+ {
+ "container": "appc-db-metrics",
+ "component": "appc-db",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mysqld-exporter",
+ "version": "0.12.1-debian-10-r264"
+ },
+ {
+ "container": "appc-dgbuilder",
+ "component": "appc-dgbuilder",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-dgbuilder-image",
+ "version": "1.1.1"
+ },
+ {
+ "container": "awx-web",
+ "component": "onap-awx",
+ "image": "docker.nexus.azure.onap.eu/ansible/awx_web",
+ "version": "9.0.1"
+ },
+ {
+ "container": "awx-celery",
+ "component": "onap-awx",
+ "image": "docker.nexus.azure.onap.eu/ansible/awx_task",
+ "version": "9.0.1"
+ },
+ {
+ "container": "awx-rabbit",
+ "component": "onap-awx",
+ "image": "docker.nexus.azure.onap.eu/ansible/awx_rabbitmq",
+ "version": "3.7.4"
+ },
+ {
+ "container": "awx-memcached",
+ "component": "onap-awx",
+ "image": "docker.nexus.azure.onap.eu/memcached",
+ "version": "1.5.20"
+ },
+ {
+ "container": "awx-postgres",
+ "component": "awx-postgres",
+ "image": "docker.nexus.azure.onap.eu/postgres",
+ "version": "10.4-alpine"
+ },
+ {
+ "container": "awx-mgnt",
+ "component": "awx-mgnt",
+ "image": "docker.nexus.azure.onap.eu/ansible/awx_task",
+ "version": "9.0.1"
+ },
+ {
+ "container": "cassandra",
+ "component": "cassandra",
+ "image": "docker.nexus.azure.onap.eu/cassandra",
+ "version": "3.11.4"
+ },
+ {
+ "container": "cassandra",
+ "component": "cassandra",
+ "image": "docker.nexus.azure.onap.eu/cassandra",
+ "version": "3.11.4"
+ },
+ {
+ "container": "cassandra",
+ "component": "cassandra",
+ "image": "docker.nexus.azure.onap.eu/cassandra",
+ "version": "3.11.4"
+ },
+ {
+ "container": "cds-blueprints-processor",
+ "component": "cds-blueprints-processor",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-blueprintsprocessor",
+ "version": "1.1.2"
+ },
+ {
+ "container": "cds-command-executor",
+ "component": "cds-command-executor",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-commandexecutor",
+ "version": "1.1.2"
+ },
+ {
+ "container": "cds-db",
+ "component": "cds-db",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "10.5.8"
+ },
+ {
+ "container": "cds-db-metrics",
+ "component": "cds-db",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mysqld-exporter",
+ "version": "0.12.1-debian-10-r264"
+ },
+ {
+ "container": "cds-py-executor",
+ "component": "cds-py-executor",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-py-executor",
+ "version": "1.1.2"
+ },
+ {
+ "container": "cds-sdc-listener",
+ "component": "cds-sdc-listener",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-sdclistener",
+ "version": "1.1.2"
+ },
+ {
+ "container": "cds-ui",
+ "component": "cds-ui",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-cds-ui-server",
+ "version": "1.1.2"
+ },
+ {
+ "container": "cli",
+ "component": "cli",
+ "image": "nexus3.onap.org:10001/onap/cli",
+ "version": "6.0.1"
+ },
+ {
+ "container": "cmso-db",
+ "component": "cmso-db",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "10.5.8"
+ },
+ {
+ "container": "cmso-db-metrics",
+ "component": "cmso-db",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mysqld-exporter",
+ "version": "0.12.1-debian-10-r264"
+ },
+ {
+ "container": "cmso-db-config",
+ "component": "cmso-db-config",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.5.8"
+ },
+ {
+ "container": "consul",
+ "component": "consul",
+ "image": "nexus3.onap.org:10001/onap/oom/consul",
+ "version": "2.1.0"
+ },
+ {
+ "container": "consul-server",
+ "component": "consul-server",
+ "image": "nexus3.onap.org:10001/onap/oom/consul",
+ "version": "2.1.0"
+ },
+ {
+ "container": "consul-server",
+ "component": "consul-server",
+ "image": "nexus3.onap.org:10001/onap/oom/consul",
+ "version": "2.1.0"
+ },
+ {
+ "container": "consul-server",
+ "component": "consul-server",
+ "image": "nexus3.onap.org:10001/onap/oom/consul",
+ "version": "2.1.0"
+ },
+ {
+ "container": "cps",
+ "component": "cps",
+ "image": "nexus3.onap.org:10001/onap/cps-and-nf-proxy",
+ "version": "1.0.1"
+ },
+ {
+ "container": "cps-postgres",
+ "component": "cps-postgres-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "cps-postgres",
+ "component": "cps-postgres-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dbc-pg",
+ "component": "dbc-pg-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dbc-pg",
+ "component": "dbc-pg-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcae-bootstrap",
+ "component": "dcae-bootstrap",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.k8s-bootstrap-container",
+ "version": "3.0.4"
+ },
+ {
+ "container": "dcae-cloudify-manager",
+ "component": "dcae-cloudify-manager",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.cm-container",
+ "version": "4.4.2"
+ },
+ {
+ "container": "dcae-config-binding-service",
+ "component": "dcae-config-binding-service",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.configbinding",
+ "version": "2.5.3"
+ },
+ {
+ "container": "dcae-config-binding-service-fb-onap",
+ "component": "dcae-config-binding-service",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-config-binding-service-insecure",
+ "component": "dcae-config-binding-service",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.configbinding",
+ "version": "2.5.3"
+ },
+ {
+ "container": "dcae-config-binding-service-fb-onap-i",
+ "component": "dcae-config-binding-service",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-dashboard",
+ "component": "dcae-dashboard",
+ "image": "nexus3.onap.org:10001/onap/org.onap.ccsdk.dashboard.ccsdk-app-os",
+ "version": "1.4.0"
+ },
+ {
+ "container": "dcae-dashboard-filebeat",
+ "component": "dcae-dashboard",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-dashboard-pg",
+ "component": "dcae-dashboard-pg-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcae-dashboard-pg",
+ "component": "dcae-dashboard-pg-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcae-db",
+ "component": "dcae-db-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcae-db",
+ "component": "dcae-db-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcae-deployment-handler",
+ "component": "dcae-deployment-handler",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.deployment-handler",
+ "version": "4.4.1"
+ },
+ {
+ "container": "dcae-deployment-handler-filebeat",
+ "component": "dcae-deployment-handler",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-healthcheck",
+ "component": "dcae-healthcheck",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.healthcheck-container",
+ "version": "2.1.0"
+ },
+ {
+ "container": "dcae-hv-ves-collector",
+ "component": "dcae-hv-ves-collector",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.hv-ves.hv-collector-main",
+ "version": "1.6.0"
+ },
+ {
+ "container": "filebeat",
+ "component": "dcae-hv-ves-collector",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-inv-pg",
+ "component": "dcae-inv-pg-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcae-inv-pg",
+ "component": "dcae-inv-pg-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcae-inventory-api",
+ "component": "dcae-inventory-api",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.inventory-api",
+ "version": "3.5.2"
+ },
+ {
+ "container": "dcae-inventory-api-filebeat",
+ "component": "dcae-inventory-api",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-mongo",
+ "component": "dcae-mongo",
+ "image": "docker.nexus.azure.onap.eu/library/mongo",
+ "version": "4.0.8"
+ },
+ {
+ "container": "dcae-ms-healthcheck",
+ "component": "dcae-ms-healthcheck",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.healthcheck-container",
+ "version": "2.1.0"
+ },
+ {
+ "container": "dcae-policy-handler",
+ "component": "dcae-policy-handler",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.policy-handler",
+ "version": "5.1.2"
+ },
+ {
+ "container": "dcae-policy-handler-filebeat",
+ "component": "dcae-policy-handler",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-prh",
+ "component": "dcae-prh",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.services.prh.prh-app-server",
+ "version": "1.5.6"
+ },
+ {
+ "container": "filebeat",
+ "component": "dcae-prh",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-servicechange-handler",
+ "component": "dcae-servicechange-handler",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.servicechange-handler",
+ "version": "1.4.0"
+ },
+ {
+ "container": "dcae-tcagen2",
+ "component": "dcae-tcagen2",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.analytics.tca-gen2.dcae-analytics-tca-web",
+ "version": "1.2.1"
+ },
+ {
+ "container": "filebeat",
+ "component": "dcae-tcagen2",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-ves-collector",
+ "component": "dcae-ves-collector",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.ves.vescollector",
+ "version": "1.8.0"
+ },
+ {
+ "container": "filebeat",
+ "component": "dcae-ves-collector",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-ves-openapi-manager",
+ "component": "dcae-ves-openapi-manager",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.ves-openapi-manager",
+ "version": "1.0.1"
+ },
+ {
+ "container": "dcaemod-db",
+ "component": "dcaemod-db-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcaemod-db",
+ "component": "dcaemod-db-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcaemod-designtool",
+ "component": "dcaemod-designtool",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.designtool-web",
+ "version": "1.0.2"
+ },
+ {
+ "container": "dcaemod-distributor-api",
+ "component": "dcaemod-distributor-api",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.distributorapi",
+ "version": "1.1.0"
+ },
+ {
+ "container": "dcaemod-genprocessor",
+ "component": "dcaemod-genprocessor",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.genprocessor-job",
+ "version": "1.0.2"
+ },
+ {
+ "container": "dcaemod-genprocessor-http",
+ "component": "dcaemod-genprocessor",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.genprocessor-http",
+ "version": "1.0.2"
+ },
+ {
+ "container": "dcaemod-healthcheck",
+ "component": "dcaemod-healthcheck",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.healthcheck-container",
+ "version": "2.1.0"
+ },
+ {
+ "container": "dcaemod-nifi-registry",
+ "component": "dcaemod-nifi-registry",
+ "image": "docker.nexus.azure.onap.eu/apache/nifi-registry",
+ "version": "0.5.0"
+ },
+ {
+ "container": "dcaemod-onboarding-api",
+ "component": "dcaemod-onboarding-api",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.onboardingapi",
+ "version": "2.12.5"
+ },
+ {
+ "container": "dcaemod-runtime-api",
+ "component": "dcaemod-runtime-api",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.runtime-web",
+ "version": "1.2.3"
+ },
+ {
+ "container": "dmaap-bc",
+ "component": "dmaap-bc",
+ "image": "nexus3.onap.org:10001/onap/dmaap/dmaap-bc",
+ "version": "2.0.5"
+ },
+ {
+ "container": "dmaap-provisioning-job",
+ "component": "dmaap-bc",
+ "image": "nexus3.onap.org:10001/onap/dmaap/dbc-client",
+ "version": "1.0.9"
+ },
+ {
+ "container": "dmaap-dr-db",
+ "component": "dmaap-dr-db",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "10.5.8"
+ },
+ {
+ "container": "dmaap-dr-db-metrics",
+ "component": "dmaap-dr-db",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mysqld-exporter",
+ "version": "0.12.1-debian-10-r264"
+ },
+ {
+ "container": "dmaap-dr-node",
+ "component": "dmaap-dr-node",
+ "image": "nexus3.onap.org:10001/onap/dmaap/datarouter-node",
+ "version": "2.1.8"
+ },
+ {
+ "container": "dmaap-dr-node-filebeat-onap",
+ "component": "dmaap-dr-node",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dmaap-dr-prov",
+ "component": "dmaap-dr-prov",
+ "image": "nexus3.onap.org:10001/onap/dmaap/datarouter-prov",
+ "version": "2.1.8"
+ },
+ {
+ "container": "dmaap-dr-prov-filebeat-onap",
+ "component": "dmaap-dr-prov",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "ejbca-ejbca",
+ "component": "ejbca",
+ "image": "docker.nexus.azure.onap.eu/primekey/ejbca-ce",
+ "version": "6.15.2.5"
+ },
+ {
+ "container": "ejbca-config",
+ "component": "ejbca-config",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.5.8"
+ },
+ {
+ "container": "esr-gui",
+ "component": "esr-gui",
+ "image": "nexus3.onap.org:10001/onap/aai/esr-gui",
+ "version": "1.4.0"
+ },
+ {
+ "container": "esr-server",
+ "component": "esr-server",
+ "image": "nexus3.onap.org:10001/onap/aai/esr-server",
+ "version": "1.5.2"
+ },
+ {
+ "container": "esr-server-filebeat-onap",
+ "component": "esr-server",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "holmes-engine-mgmt",
+ "component": "holmes-engine-mgmt",
+ "image": "nexus3.onap.org:10001/onap/holmes/engine-management",
+ "version": "1.3.3"
+ },
+ {
+ "container": "holmes-pg",
+ "component": "holmes-pg-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "holmes-pg",
+ "component": "holmes-pg-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "holmes-rule-mgmt",
+ "component": "holmes-rule-mgmt",
+ "image": "nexus3.onap.org:10001/onap/holmes/rule-management",
+ "version": "1.3.3"
+ },
+ {
+ "container": "kube2msb",
+ "component": "kube2msb",
+ "image": "nexus3.onap.org:10001/onap/oom/kube2msb",
+ "version": "1.2.6"
+ },
+ {
+ "container": "mariadb-galera",
+ "component": "mariadb-galera",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "10.5.8"
+ },
+ {
+ "container": "mariadb-galera-metrics",
+ "component": "mariadb-galera",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mysqld-exporter",
+ "version": "0.12.1-debian-10-r264"
+ },
+ {
+ "container": "message-router",
+ "component": "message-router",
+ "image": "nexus3.onap.org:10001/onap/dmaap/dmaap-mr",
+ "version": "1.1.20"
+ },
+ {
+ "container": "message-router-zookeeper",
+ "component": "message-router-zookeeper",
+ "image": "nexus3.onap.org:10001/onap/dmaap/zookeeper",
+ "version": "6.0.3"
+ },
+ {
+ "container": "modeling-etsicatalog",
+ "component": "modeling-etsicatalog",
+ "image": "nexus3.onap.org:10001/onap/modeling/etsicatalog",
+ "version": "1.0.10"
+ },
+ {
+ "container": "modeling-etsicatalog-filebeat-onap",
+ "component": "modeling-etsicatalog",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "modeling-etsicatalog",
+ "component": "modeling-etsicatalog",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.5.8"
+ },
+ {
+ "container": "msb-consul",
+ "component": "msb-consul",
+ "image": "docker.nexus.azure.onap.eu/library/consul",
+ "version": "1.4.3"
+ },
+ {
+ "container": "msb-discovery",
+ "component": "msb-discovery",
+ "image": "nexus3.onap.org:10001/onap/msb/msb_discovery",
+ "version": "1.2.6"
+ },
+ {
+ "container": "msb-discovery-filebeat-onap",
+ "component": "msb-discovery",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "msb-eag",
+ "component": "msb-eag",
+ "image": "nexus3.onap.org:10001/onap/msb/msb_apigateway",
+ "version": "1.2.7"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "msb-eag",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "msb-iag",
+ "component": "msb-iag",
+ "image": "nexus3.onap.org:10001/onap/msb/msb_apigateway",
+ "version": "1.2.7"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "msb-iag",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "multicloud",
+ "component": "multicloud",
+ "image": "nexus3.onap.org:10001/onap/multicloud/framework",
+ "version": "1.6.0"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "multicloud",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "multicloud-fcaps",
+ "component": "multicloud-fcaps",
+ "image": "nexus3.onap.org:10001/onap/multicloud/openstack-fcaps",
+ "version": "1.5.5"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "multicloud-fcaps",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "rabbit-mq",
+ "component": "multicloud-fcaps",
+ "image": "nexus3.onap.org:10001/rabbitmq",
+ "version": "alpine"
+ },
+ {
+ "container": "multicloud-k8s",
+ "component": "multicloud-k8s",
+ "image": "nexus3.onap.org:10001/onap/multicloud/k8s",
+ "version": "0.8.1"
+ },
+ {
+ "container": "framework-artifactbroker",
+ "component": "multicloud-k8s",
+ "image": "nexus3.onap.org:10001/onap/multicloud/framework-artifactbroker",
+ "version": "1.6.0"
+ },
+ {
+ "container": "onap-multicloud-k8s-etcd",
+ "component": "multicloud-k8s-etcd",
+ "image": "docker.nexus.azure.onap.eu/etcd-amd64",
+ "version": "3.2.24"
+ },
+ {
+ "container": "multicloud-k8s-mongo",
+ "component": "multicloud-k8s-mongo",
+ "image": "docker.nexus.azure.onap.eu/library/mongo",
+ "version": "4.0.8"
+ },
+ {
+ "container": "multicloud-pike",
+ "component": "multicloud-pike",
+ "image": "nexus3.onap.org:10001/onap/multicloud/openstack-pike",
+ "version": "1.5.5"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "multicloud-pike",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "multicloud-starlingx",
+ "component": "multicloud-starlingx",
+ "image": "nexus3.onap.org:10001/onap/multicloud/openstack-starlingx",
+ "version": "1.5.5"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "multicloud-starlingx",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "framework-artifactbroker",
+ "component": "multicloud-starlingx",
+ "image": "nexus3.onap.org:10001/onap/multicloud/framework-artifactbroker",
+ "version": "1.6.0"
+ },
+ {
+ "container": "multicloud-vio",
+ "component": "multicloud-vio",
+ "image": "nexus3.onap.org:10001/onap/multicloud/vio",
+ "version": "1.4.1"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "multicloud-vio",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "multicloud-windriver",
+ "component": "multicloud-windriver",
+ "image": "nexus3.onap.org:10001/onap/multicloud/openstack-windriver",
+ "version": "1.5.5"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "multicloud-windriver",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "framework-artifactbroker",
+ "component": "multicloud-windriver",
+ "image": "nexus3.onap.org:10001/onap/multicloud/framework-artifactbroker",
+ "version": "1.6.0"
+ },
+ {
+ "container": "music-springboot",
+ "component": "music",
+ "image": "nexus3.onap.org:10001/onap/music/music_sb",
+ "version": "3.2.40"
+ },
+ {
+ "container": "music-cassandra",
+ "component": "music-cassandra",
+ "image": "nexus3.onap.org:10001/onap/music/cassandra_3_11",
+ "version": "3.0.24"
+ },
+ {
+ "container": "music-cassandra-update-job",
+ "component": "music-cassandra-job",
+ "image": "nexus3.onap.org:10001/onap/music/cassandra_job",
+ "version": "3.0.24"
+ },
+ {
+ "container": "nbi",
+ "component": "nbi",
+ "image": "nexus3.onap.org:10001/onap/externalapi/nbi",
+ "version": "8.0.1"
+ },
+ {
+ "container": "nbi-config",
+ "component": "nbi-config",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.5.8"
+ },
+ {
+ "container": "nbi-mongo",
+ "component": "nbi-mongo",
+ "image": "docker.nexus.azure.onap.eu/library/mongo",
+ "version": "4.0.8"
+ },
+ {
+ "container": "nengdb-init",
+ "component": "nengdb-init",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.5.8"
+ },
+ {
+ "container": "netbox-app",
+ "component": "netbox-app",
+ "image": "docker.nexus.azure.onap.eu/netboxcommunity/netbox",
+ "version": "v2.5.8"
+ },
+ {
+ "container": "netbox-app-provisioning-job",
+ "component": "netbox-app-provisioning-job",
+ "image": "docker.nexus.azure.onap.eu/curlimages/curl",
+ "version": "7.69.1"
+ },
+ {
+ "container": "netbox-nginx",
+ "component": "netbox-nginx",
+ "image": "docker.nexus.azure.onap.eu/nginx",
+ "version": "1.15-alpine"
+ },
+ {
+ "container": "netbox-postgres",
+ "component": "netbox-postgres",
+ "image": "docker.nexus.azure.onap.eu/postgres",
+ "version": "10.4-alpine"
+ },
+ {
+ "container": "network-name-gen",
+ "component": "network-name-gen",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-apps-ms-neng",
+ "version": "1.1.1"
+ },
+ {
+ "container": "oof",
+ "component": "oof",
+ "image": "nexus3.onap.org:10001/onap/optf-osdf",
+ "version": "3.0.4"
+ },
+ {
+ "container": "oof-cmso-optimizer",
+ "component": "oof-cmso-optimizer",
+ "image": "nexus3.onap.org:10001/onap/optf-cmso-optimizer",
+ "version": "2.3.3"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "oof-cmso-service",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "mso-simulator",
+ "component": "oof-cmso-service",
+ "image": "nexus3.onap.org:10001/onap/optf-cmso-robot",
+ "version": "2.3.3"
+ },
+ {
+ "container": "oof-cmso-service",
+ "component": "oof-cmso-service",
+ "image": "nexus3.onap.org:10001/onap/optf-cmso-service",
+ "version": "2.3.3"
+ },
+ {
+ "container": "oof-cmso-ticketmgt",
+ "component": "oof-cmso-ticketmgt",
+ "image": "nexus3.onap.org:10001/onap/optf-cmso-ticketmgt",
+ "version": "2.3.3"
+ },
+ {
+ "container": "oof-cmso-topology",
+ "component": "oof-cmso-topology",
+ "image": "nexus3.onap.org:10001/onap/optf-cmso-topology",
+ "version": "2.3.3"
+ },
+ {
+ "container": "oof-has-api",
+ "component": "oof-has-api",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.1.5"
+ },
+ {
+ "container": "oof-has-api-nginx",
+ "component": "oof-has-api",
+ "image": "docker.nexus.azure.onap.eu/bitnami/nginx",
+ "version": "1.18-debian-10"
+ },
+ {
+ "container": "oof-has-controller",
+ "component": "oof-has-controller",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.1.5"
+ },
+ {
+ "container": "oof-has-data",
+ "component": "oof-has-data",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.1.5"
+ },
+ {
+ "container": "oof-has-healthcheck",
+ "component": "oof-has",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.1.5"
+ },
+ {
+ "container": "oof-has-onboard",
+ "component": "oof-has",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.1.5"
+ },
+ {
+ "container": "oof-has-reservation",
+ "component": "oof-has-reservation",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.1.5"
+ },
+ {
+ "container": "oof-has-solver",
+ "component": "oof-has-solver",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.1.5"
+ },
+ {
+ "container": "oom-cert-service",
+ "component": "oom-cert-service",
+ "image": "nexus3.onap.org:10001/onap/org.onap.oom.platform.cert-service.oom-certservice-api",
+ "version": "2.3.3"
+ },
+ {
+ "container": "policy-apex-pdp",
+ "component": "policy-apex-pdp",
+ "image": "nexus3.onap.org:10001/onap/policy-apex-pdp",
+ "version": "2.5.2"
+ },
+ {
+ "container": "policy-api",
+ "component": "policy-api",
+ "image": "nexus3.onap.org:10001/onap/policy-api",
+ "version": "2.4.2"
+ },
+ {
+ "container": "policy-clamp-be",
+ "component": "policy-clamp-be",
+ "image": "nexus3.onap.org:10001/onap/policy-clamp-backend",
+ "version": "6.0.2"
+ },
+ {
+ "container": "policy-clamp-fe",
+ "component": "policy-clamp-fe",
+ "image": "nexus3.onap.org:10001/onap/policy-clamp-frontend",
+ "version": "6.0.2"
+ },
+ {
+ "container": "onap-policy-clamp-galera-config",
+ "component": "policy-clamp-be-policy-clamp-job",
+ "image": "docker.nexus.azure.onap.eu/mariadb",
+ "version": "10.5.8"
+ },
+ {
+ "container": "policy-distribution",
+ "component": "policy-distribution",
+ "image": "nexus3.onap.org:10001/onap/policy-distribution",
+ "version": "2.5.2"
+ },
+ {
+ "container": "policy-drools-pdp",
+ "component": "policy-drools-pdp",
+ "image": "nexus3.onap.org:10001/onap/policy-pdpd-cl",
+ "version": "1.8.2"
+ },
+ {
+ "container": "onap-policy-galera-config",
+ "component": "policy-job",
+ "image": "docker.nexus.azure.onap.eu/mariadb",
+ "version": "10.5.8"
+ },
+ {
+ "container": "policy-mariadb",
+ "component": "policy-mariadb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "10.5.8"
+ },
+ {
+ "container": "policy-mariadb-metrics",
+ "component": "policy-mariadb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mysqld-exporter",
+ "version": "0.12.1-debian-10-r264"
+ },
+ {
+ "container": "policy-pap",
+ "component": "policy-pap",
+ "image": "nexus3.onap.org:10001/onap/policy-pap",
+ "version": "2.4.2"
+ },
+ {
+ "container": "policy-xacml-pdp",
+ "component": "policy-xacml-pdp",
+ "image": "nexus3.onap.org:10001/onap/policy-xacml-pdp",
+ "version": "2.4.2"
+ },
+ {
+ "container": "portal-app",
+ "component": "portal-app",
+ "image": "nexus3.onap.org:10001/onap/portal-app",
+ "version": "3.4.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "portal-app",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "portal-cassandra",
+ "component": "portal-cassandra",
+ "image": "docker.nexus.azure.onap.eu/bitnami/cassandra",
+ "version": "3.11.9-debian-10-r30"
+ },
+ {
+ "container": "portal-db",
+ "component": "portal-db",
+ "image": "nexus3.onap.org:10001/onap/portal-db",
+ "version": "3.4.1"
+ },
+ {
+ "container": "portal-db-job",
+ "component": "portal-db-job",
+ "image": "docker.nexus.azure.onap.eu/oomk8s/mariadb-client-init",
+ "version": "3.0.0"
+ },
+ {
+ "container": "portal-db-oom-update-job",
+ "component": "portal-db-job",
+ "image": "docker.nexus.azure.onap.eu/oomk8s/mariadb-client-init",
+ "version": "3.0.0"
+ },
+ {
+ "container": "portal-sdk",
+ "component": "portal-sdk",
+ "image": "nexus3.onap.org:10001/onap/portal-sdk",
+ "version": "3.4.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "portal-sdk",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "portal-widget",
+ "component": "portal-widget",
+ "image": "nexus3.onap.org:10001/onap/portal-wms",
+ "version": "3.4.2"
+ },
+ {
+ "container": "robot",
+ "component": "robot",
+ "image": "nexus3.onap.org:10001/onap/testsuite",
+ "version": "1.7.3"
+ },
+ {
+ "container": "sdc-be",
+ "component": "sdc-be",
+ "image": "nexus3.onap.org:10001/onap/sdc-backend-all-plugins",
+ "version": "1.8.5"
+ },
+ {
+ "container": "sdc-be-filebeat-onap",
+ "component": "sdc-be",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "sdc-be-job",
+ "component": "sdc-be-job",
+ "image": "nexus3.onap.org:10001/onap/sdc-backend-init",
+ "version": "1.8.5"
+ },
+ {
+ "container": "sdc-cs-job",
+ "component": "sdc-cs-job",
+ "image": "nexus3.onap.org:10001/onap/sdc-cassandra-init",
+ "version": "1.8.5"
+ },
+ {
+ "container": "sdc-fe",
+ "component": "sdc-fe",
+ "image": "nexus3.onap.org:10001/onap/sdc-frontend",
+ "version": "1.8.5"
+ },
+ {
+ "container": "sdc-fe-filebeat-onap",
+ "component": "sdc-fe",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "sdc-onboarding-be-job",
+ "component": "sdc-onboarding-be-job",
+ "image": "nexus3.onap.org:10001/onap/sdc-onboard-cassandra-init",
+ "version": "1.8.5"
+ },
+ {
+ "container": "sdc-onboarding-be",
+ "component": "sdc-onboarding-be",
+ "image": "nexus3.onap.org:10001/onap/sdc-onboard-backend",
+ "version": "1.8.5"
+ },
+ {
+ "container": "sdc-onboarding-be-filebeat-onap",
+ "component": "sdc-onboarding-be",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "sdc-wfd-be",
+ "component": "sdc-wfd-be",
+ "image": "nexus3.onap.org:10001/onap/sdc-workflow-backend",
+ "version": "1.7.0"
+ },
+ {
+ "container": "sdc-wfd-be-job",
+ "component": "sdc-wfd-be-job",
+ "image": "nexus3.onap.org:10001/onap/sdc-workflow-init",
+ "version": "1.7.0"
+ },
+ {
+ "container": "sdc-wfd-fe",
+ "component": "sdc-wfd-fe",
+ "image": "nexus3.onap.org:10001/onap/sdc-workflow-frontend",
+ "version": "1.7.0"
+ },
+ {
+ "container": "sdc-wfd-fe-filebeat-onap",
+ "component": "sdc-wfd-fe",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "sdnc",
+ "component": "sdnc",
+ "image": "nexus3.onap.org:10001/onap/sdnc-image",
+ "version": "2.1.5"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "sdnc",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "sdnc-ansible-server",
+ "component": "sdnc-ansible-server",
+ "image": "nexus3.onap.org:10001/onap/sdnc-ansible-server-image",
+ "version": "2.1.5"
+ },
+ {
+ "container": "sdnc",
+ "component": "sdnc-job",
+ "image": "nexus3.onap.org:10001/onap/sdnc-image",
+ "version": "2.1.5"
+ },
+ {
+ "container": "sdnc-dgbuilder",
+ "component": "sdnc-dgbuilder",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-dgbuilder-image",
+ "version": "1.1.1"
+ },
+ {
+ "container": "sdnc-dmaap-listener",
+ "component": "sdnc-dmaap-listener",
+ "image": "nexus3.onap.org:10001/onap/sdnc-dmaap-listener-image",
+ "version": "2.1.5"
+ },
+ {
+ "container": "sdnc-sdnrdb-init-job",
+ "component": "sdnc",
+ "image": "nexus3.onap.org:10001/onap/sdnc-image",
+ "version": "2.1.5"
+ },
+ {
+ "container": "sdnc-ueb-listener",
+ "component": "sdnc-ueb-listener",
+ "image": "nexus3.onap.org:10001/onap/sdnc-ueb-listener-image",
+ "version": "2.1.5"
+ },
+ {
+ "container": "sdnc-web",
+ "component": "sdnc-web",
+ "image": "nexus3.onap.org:10001/onap/sdnc-web-image",
+ "version": "2.1.5"
+ },
+ {
+ "container": "sdnrdb-nginx",
+ "component": "sdnrdb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/nginx",
+ "version": "1.18-debian-10"
+ },
+ {
+ "container": "sdnrdb-elasticsearch",
+ "component": "sdnrdb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/elasticsearch",
+ "version": "7.9.3"
+ },
+ {
+ "container": "sdnrdb-master",
+ "component": "sdnrdb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/elasticsearch",
+ "version": "7.9.3"
+ },
+ {
+ "container": "sdnrdb-master",
+ "component": "sdnrdb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/elasticsearch",
+ "version": "7.9.3"
+ },
+ {
+ "container": "sdnrdb-master",
+ "component": "sdnrdb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/elasticsearch",
+ "version": "7.9.3"
+ },
+ {
+ "container": "so",
+ "component": "so",
+ "image": "nexus3.onap.org:10001/onap/so/api-handler-infra",
+ "version": "1.8.1"
+ },
+ {
+ "container": "so-filebeat-onap",
+ "component": "so",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "so-admin-cockpit",
+ "component": "so-admin-cockpit",
+ "image": "nexus3.onap.org:10001/onap/so/so-admin-cockpit",
+ "version": "1.8.2"
+ },
+ {
+ "container": "so-bpmn-infra",
+ "component": "so-bpmn-infra",
+ "image": "nexus3.onap.org:10001/onap/so/bpmn-infra",
+ "version": "1.8.1"
+ },
+ {
+ "container": "so-bpmn-infra-filebeat-onap",
+ "component": "so-bpmn-infra",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "so-catalog-db-adapter",
+ "component": "so-catalog-db-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/catalog-db-adapter",
+ "version": "1.8.2"
+ },
+ {
+ "container": "so-cnf-adapter",
+ "component": "so-cnf-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/so-cnf-adapter",
+ "version": "1.8.3"
+ },
+ {
+ "container": "so-etsi-nfvo-ns-lcm",
+ "component": "so-etsi-nfvo-ns-lcm",
+ "image": "nexus3.onap.org:10001/onap/so/so-etsi-nfvo-ns-lcm",
+ "version": "1.8.2"
+ },
+ {
+ "container": "so-etsi-sol003-adapter",
+ "component": "so-etsi-sol003-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/so-etsi-sol003-adapter",
+ "version": "1.8.2"
+ },
+ {
+ "container": "so-etsi-sol005-adapter",
+ "component": "so-etsi-sol005-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/so-etsi-sol005-adapter",
+ "version": "1.8.3"
+ },
+ {
+ "container": "so-mariadb-config",
+ "component": "so-mariadb-job",
+ "image": "nexus3.onap.org:10001/mariadb",
+ "version": "10.1.38"
+ },
+ {
+ "container": "so-nssmf-adapter",
+ "component": "so-nssmf-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/so-nssmf-adapter",
+ "version": "1.8.3"
+ },
+ {
+ "container": "so-oof-adapter",
+ "component": "so-oof-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/so-oof-adapter",
+ "version": "1.8.3"
+ },
+ {
+ "container": "so-oof-adapter-filebeat-onap",
+ "component": "so-oof-adapter",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "so-openstack-adapter",
+ "component": "so-openstack-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/openstack-adapter",
+ "version": "1.8.1"
+ },
+ {
+ "container": "so-openstack-adapter-filebeat-onap",
+ "component": "so-openstack-adapter",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "so-request-db-adapter",
+ "component": "so-request-db-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/request-db-adapter",
+ "version": "1.8.2"
+ },
+ {
+ "container": "so-sdc-controller",
+ "component": "so-sdc-controller",
+ "image": "nexus3.onap.org:10001/onap/so/sdc-controller",
+ "version": "1.8.1"
+ },
+ {
+ "container": "so-sdc-controller-filebeat-onap",
+ "component": "so-sdc-controller",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "so-sdnc-adapter",
+ "component": "so-sdnc-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/sdnc-adapter",
+ "version": "1.8.1"
+ },
+ {
+ "container": "so-sdnc-adapter-filebeat-onap",
+ "component": "so-sdnc-adapter",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "uui",
+ "component": "uui",
+ "image": "nexus3.onap.org:10001/onap/usecase-ui",
+ "version": "4.0.0"
+ },
+ {
+ "container": "uui-server",
+ "component": "uui-server",
+ "image": "nexus3.onap.org:10001/onap/usecase-ui-server",
+ "version": "4.0.0"
+ },
+ {
+ "container": "vfc-generic-vnfm-driver",
+ "component": "vfc-generic-vnfm-driver",
+ "image": "nexus3.onap.org:10001/onap/vfc/gvnfmdriver",
+ "version": "1.4.1"
+ },
+ {
+ "container": "vfc-generic-vnfm-driver-filebeat-onap",
+ "component": "vfc-generic-vnfm-driver",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-huawei-vnfm-driver",
+ "component": "vfc-huawei-vnfm-driver",
+ "image": "nexus3.onap.org:10001/onap/vfc/nfvo/svnfm/huawei",
+ "version": "1.3.9"
+ },
+ {
+ "container": "vfc-huawei-vnfm-driver-filebeat-onap",
+ "component": "vfc-huawei-vnfm-driver",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-mariadb",
+ "component": "vfc-mariadb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "10.5.8"
+ },
+ {
+ "container": "vfc-mariadb-metrics",
+ "component": "vfc-mariadb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mysqld-exporter",
+ "version": "0.12.1-debian-10-r264"
+ },
+ {
+ "container": "vfc-nslcm",
+ "component": "vfc-nslcm",
+ "image": "nexus3.onap.org:10001/onap/vfc/nslcm",
+ "version": "1.4.3"
+ },
+ {
+ "container": "vfc-nslcm-filebeat-onap",
+ "component": "vfc-nslcm",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-redis",
+ "component": "vfc-redis",
+ "image": "nexus3.onap.org:10001/onap/vfc/db",
+ "version": "1.3.4"
+ },
+ {
+ "container": "vfc-vnflcm",
+ "component": "vfc-vnflcm",
+ "image": "nexus3.onap.org:10001/onap/vfc/vnflcm",
+ "version": "1.4.1"
+ },
+ {
+ "container": "vfc-vnflcm-filebeat-onap",
+ "component": "vfc-vnflcm",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-vnfmgr",
+ "component": "vfc-vnfmgr",
+ "image": "nexus3.onap.org:10001/onap/vfc/vnfmgr",
+ "version": "1.4.0"
+ },
+ {
+ "container": "vfc-vnfmgr-filebeat-onap",
+ "component": "vfc-vnfmgr",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-vnfres",
+ "component": "vfc-vnfres",
+ "image": "nexus3.onap.org:10001/onap/vfc/vnfres",
+ "version": "1.3.9"
+ },
+ {
+ "container": "vfc-vnfres-filebeat-onap",
+ "component": "vfc-vnfres",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-zte-vnfm-driver",
+ "component": "vfc-zte-vnfm-driver",
+ "image": "nexus3.onap.org:10001/onap/vfc/ztevnfmdriver",
+ "version": "1.4.0"
+ },
+ {
+ "container": "vfc-zte-vnfm-driver-filebeat-onap",
+ "component": "vfc-zte-vnfm-driver",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vid",
+ "component": "vid",
+ "image": "nexus3.onap.org:10001/onap/vid",
+ "version": "8.0.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "vid",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vid-mariadb-init",
+ "component": "vid-mariadb-init",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.5.8"
+ },
+ {
+ "container": "vnfsdk",
+ "component": "vnfsdk",
+ "image": "nexus3.onap.org:10001/onap/vnfsdk/refrepo",
+ "version": "1.6.3"
+ },
+ {
+ "container": "vnfsdk-job",
+ "component": "vnfsdk-job",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "vnfsdk-postgres",
+ "component": "vnfsdk-postgres-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "vnfsdk-postgres",
+ "component": "vnfsdk-postgres-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ }
+]
diff --git a/doc/artifacts/versions/istanbul.json b/doc/artifacts/versions/istanbul.json
new file mode 100644
index 0000000..c184fbf
--- /dev/null
+++ b/doc/artifacts/versions/istanbul.json
@@ -0,0 +1,1694 @@
+[
+ {
+ "container": "a1policymanagement-update-config",
+ "component": "a1policymanagement",
+ "image": "dibi/envsubst",
+ "version": "1"
+ },
+ {
+ "container": "a1policymanagement",
+ "component": "a1policymanagement",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-oran-a1policymanagementservice",
+ "version": "1.2.3"
+ },
+ {
+ "container": "aaf-cass",
+ "component": "aaf-cass",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_cass",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-cm",
+ "component": "aaf-cm",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-fs",
+ "component": "aaf-fs",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-gui",
+ "component": "aaf-gui",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-locate",
+ "component": "aaf-locate",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-oauth",
+ "component": "aaf-oauth",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-service",
+ "component": "aaf-service",
+ "image": "nexus3.onap.org:10001/onap/aaf/aaf_core",
+ "version": "2.1.23"
+ },
+ {
+ "container": "aaf-sms",
+ "component": "aaf-sms",
+ "image": "nexus3.onap.org:10001/onap/aaf/sms",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-preload",
+ "component": "aaf-sms",
+ "image": "nexus3.onap.org:10001/onap/aaf/sms",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-vault",
+ "component": "aaf-sms-vault",
+ "image": "docker.nexus.azure.onap.eu/library/vault",
+ "version": "1.3.3"
+ },
+ {
+ "container": "aaf-sms-vault-backend",
+ "component": "aaf-sms-vault",
+ "image": "docker.nexus.azure.onap.eu/library/consul",
+ "version": "1.7.1"
+ },
+ {
+ "container": "aai",
+ "component": "aai",
+ "image": "docker.nexus.azure.onap.eu/aaionap/haproxy",
+ "version": "1.4.2"
+ },
+ {
+ "container": "aai-babel",
+ "component": "aai-babel",
+ "image": "nexus3.onap.org:10001/onap/babel",
+ "version": "1.9.1"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-babel",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-graphadmin",
+ "component": "aai-graphadmin",
+ "image": "nexus3.onap.org:10001/onap/aai-graphadmin",
+ "version": "1.9.1"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-graphadmin",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-graphadmin-job",
+ "component": "aai-graphadmin-job",
+ "image": "nexus3.onap.org:10001/onap/aai-graphadmin",
+ "version": "1.9.1"
+ },
+ {
+ "container": "aai-modelloader",
+ "component": "aai-modelloader",
+ "image": "nexus3.onap.org:10001/onap/model-loader",
+ "version": "1.9.1"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-modelloader",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-resources",
+ "component": "aai-resources",
+ "image": "nexus3.onap.org:10001/onap/aai-resources",
+ "version": "1.9.1"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-resources",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-schema-service",
+ "component": "aai-schema-service",
+ "image": "nexus3.onap.org:10001/onap/aai-schema-service",
+ "version": "1.9.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-schema-service",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-sparky-be",
+ "component": "aai-sparky-be",
+ "image": "nexus3.onap.org:10001/onap/sparky-be",
+ "version": "2.0.3"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-sparky-be",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-traversal",
+ "component": "aai-traversal",
+ "image": "nexus3.onap.org:10001/onap/aai-traversal",
+ "version": "1.9.1"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-traversal",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "aai-traversal-job",
+ "component": "aai-traversal-job",
+ "image": "nexus3.onap.org:10001/onap/aai-traversal",
+ "version": "1.9.1"
+ },
+ {
+ "container": "awx-web",
+ "component": "onap-awx",
+ "image": "docker.nexus.azure.onap.eu/ansible/awx_web",
+ "version": "9.0.1"
+ },
+ {
+ "container": "awx-celery",
+ "component": "onap-awx",
+ "image": "docker.nexus.azure.onap.eu/ansible/awx_task",
+ "version": "9.0.1"
+ },
+ {
+ "container": "awx-rabbit",
+ "component": "onap-awx",
+ "image": "docker.nexus.azure.onap.eu/ansible/awx_rabbitmq",
+ "version": "3.7.4"
+ },
+ {
+ "container": "awx-memcached",
+ "component": "onap-awx",
+ "image": "docker.nexus.azure.onap.eu/memcached",
+ "version": "1.5.20"
+ },
+ {
+ "container": "awx-mgnt",
+ "component": "awx-mgnt",
+ "image": "docker.nexus.azure.onap.eu/ansible/awx_task",
+ "version": "9.0.1"
+ },
+ {
+ "container": "awx-postgres",
+ "component": "awx-postgres",
+ "image": "docker.nexus.azure.onap.eu/postgres",
+ "version": "10.4-alpine"
+ },
+ {
+ "container": "cassandra",
+ "component": "cassandra",
+ "image": "docker.nexus.azure.onap.eu/cassandra",
+ "version": "3.11.4"
+ },
+ {
+ "container": "cassandra",
+ "component": "cassandra",
+ "image": "docker.nexus.azure.onap.eu/cassandra",
+ "version": "3.11.4"
+ },
+ {
+ "container": "cassandra",
+ "component": "cassandra",
+ "image": "docker.nexus.azure.onap.eu/cassandra",
+ "version": "3.11.4"
+ },
+ {
+ "container": "cds-blueprints-processor",
+ "component": "cds-blueprints-processor",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-blueprintsprocessor",
+ "version": "1.2.1"
+ },
+ {
+ "container": "cds-command-executor",
+ "component": "cds-command-executor",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-commandexecutor",
+ "version": "1.2.1"
+ },
+ {
+ "container": "cds-db",
+ "component": "cds-db",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "10.5.8"
+ },
+ {
+ "container": "cds-db-metrics",
+ "component": "cds-db",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mysqld-exporter",
+ "version": "0.12.1-debian-10-r264"
+ },
+ {
+ "container": "cds-py-executor",
+ "component": "cds-py-executor",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-py-executor",
+ "version": "1.2.1"
+ },
+ {
+ "container": "cds-sdc-listener",
+ "component": "cds-sdc-listener",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-sdclistener",
+ "version": "1.2.1"
+ },
+ {
+ "container": "cds-ui",
+ "component": "cds-ui",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-cds-ui-server",
+ "version": "1.2.1"
+ },
+ {
+ "container": "chartmuseum",
+ "component": "chartmuseum",
+ "image": "ghcr.io/helm/chartmuseum",
+ "version": "v0.13.1"
+ },
+ {
+ "container": "cli",
+ "component": "cli",
+ "image": "nexus3.onap.org:10001/onap/cli",
+ "version": "6.0.1"
+ },
+ {
+ "container": "provider",
+ "component": "onap-cmpv2-cert-provider-9c6b4f65f-2nbvq",
+ "image": "nexus3.onap.org:10001/onap/org.onap.oom.platform.cert-service.oom-certservice-k8s-external-provider",
+ "version": "2.4.0"
+ },
+ {
+ "container": "consul",
+ "component": "consul",
+ "image": "nexus3.onap.org:10001/onap/oom/consul",
+ "version": "2.1.0"
+ },
+ {
+ "container": "consul-server",
+ "component": "consul-server",
+ "image": "nexus3.onap.org:10001/onap/oom/consul",
+ "version": "2.1.0"
+ },
+ {
+ "container": "consul-server",
+ "component": "consul-server",
+ "image": "nexus3.onap.org:10001/onap/oom/consul",
+ "version": "2.1.0"
+ },
+ {
+ "container": "consul-server",
+ "component": "consul-server",
+ "image": "nexus3.onap.org:10001/onap/oom/consul",
+ "version": "2.1.0"
+ },
+ {
+ "container": "cps-core",
+ "component": "cps-core",
+ "image": "nexus3.onap.org:10001/onap/cps-and-ncmp",
+ "version": "2.0.1"
+ },
+ {
+ "container": "cps-postgres-init-update-config",
+ "component": "cps-postgres-init",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "cps-postgres-init-update-config",
+ "component": "cps-postgres-init",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "cps-temporal",
+ "component": "cps-temporal",
+ "image": "nexus3.onap.org:10001/onap/cps-temporal",
+ "version": "1.0.0"
+ },
+ {
+ "container": "cps-temporal-db",
+ "component": "cps-temporal-db",
+ "image": "docker.nexus.azure.onap.eu/timescale/timescaledb",
+ "version": "2.1.1-pg13"
+ },
+ {
+ "container": "dbc-pg",
+ "component": "dbc-pg-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dbc-pg",
+ "component": "dbc-pg-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcae-bootstrap",
+ "component": "dcae-bootstrap",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.k8s-bootstrap-container",
+ "version": "3.3.5"
+ },
+ {
+ "container": "dcae-cloudify-manager",
+ "component": "dcae-cloudify-manager",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.cm-container",
+ "version": "4.6.1"
+ },
+ {
+ "container": "dcae-config-binding-service",
+ "component": "dcae-config-binding-service",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.configbinding",
+ "version": "2.5.4"
+ },
+ {
+ "container": "dcae-config-binding-service-fb-onap",
+ "component": "dcae-config-binding-service",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-config-binding-service-insecure",
+ "component": "dcae-config-binding-service",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.configbinding",
+ "version": "2.5.4"
+ },
+ {
+ "container": "dcae-config-binding-service-fb-onap-i",
+ "component": "dcae-config-binding-service",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-dashboard",
+ "component": "dcae-dashboard",
+ "image": "nexus3.onap.org:10001/onap/org.onap.ccsdk.dashboard.ccsdk-app-os",
+ "version": "1.4.4"
+ },
+ {
+ "container": "dcae-dashboard-filebeat",
+ "component": "dcae-dashboard",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-dashboard-pg",
+ "component": "dcae-dashboard-pg-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcae-dashboard-pg",
+ "component": "dcae-dashboard-pg-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcae-db",
+ "component": "dcae-db-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcae-db",
+ "component": "dcae-db-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcae-deployment-handler",
+ "component": "dcae-deployment-handler",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.deployment-handler",
+ "version": "4.4.1"
+ },
+ {
+ "container": "dcae-deployment-handler-filebeat",
+ "component": "dcae-deployment-handler",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-healthcheck",
+ "component": "dcae-healthcheck",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.healthcheck-container",
+ "version": "2.2.0"
+ },
+ {
+ "container": "dcae-hv-ves-collector",
+ "component": "dcae-hv-ves-collector",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.hv-ves.hv-collector-main",
+ "version": "1.9.1"
+ },
+ {
+ "container": "filebeat",
+ "component": "dcae-hv-ves-collector",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-inv-pg",
+ "component": "dcae-inv-pg-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcae-inv-pg",
+ "component": "dcae-inv-pg-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcae-inventory-api",
+ "component": "dcae-inventory-api",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.inventory-api",
+ "version": "3.5.2"
+ },
+ {
+ "container": "dcae-inventory-api-filebeat",
+ "component": "dcae-inventory-api",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-mongo",
+ "component": "dcae-mongo",
+ "image": "docker.nexus.azure.onap.eu/library/mongo",
+ "version": "4.0.8"
+ },
+ {
+ "container": "dcae-ms-healthcheck",
+ "component": "dcae-ms-healthcheck",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.healthcheck-container",
+ "version": "2.2.0"
+ },
+ {
+ "container": "dcae-policy-handler",
+ "component": "dcae-policy-handler",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.policy-handler",
+ "version": "5.1.3"
+ },
+ {
+ "container": "dcae-policy-handler-filebeat",
+ "component": "dcae-policy-handler",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-prh",
+ "component": "dcae-prh",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.services.prh.prh-app-server",
+ "version": "1.7.1"
+ },
+ {
+ "container": "filebeat",
+ "component": "dcae-prh",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-servicechange-handler",
+ "component": "dcae-servicechange-handler",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.servicechange-handler",
+ "version": "1.4.0"
+ },
+ {
+ "container": "dcae-tcagen2",
+ "component": "dcae-tcagen2",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.analytics.tca-gen2.dcae-analytics-tca-web",
+ "version": "1.3.1"
+ },
+ {
+ "container": "filebeat",
+ "component": "dcae-tcagen2",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-ves-collector",
+ "component": "dcae-ves-collector",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.ves.vescollector",
+ "version": "1.10.1"
+ },
+ {
+ "container": "filebeat",
+ "component": "dcae-ves-collector",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dcae-ves-openapi-manager",
+ "component": "dcae-ves-openapi-manager",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.ves-openapi-manager",
+ "version": "1.0.1"
+ },
+ {
+ "container": "dcaemod-db",
+ "component": "dcaemod-db-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcaemod-db",
+ "component": "dcaemod-db-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "dcaemod-designtool",
+ "component": "dcaemod-designtool",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.designtool-web",
+ "version": "1.0.2"
+ },
+ {
+ "container": "dcaemod-distributor-api",
+ "component": "dcaemod-distributor-api",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.distributorapi",
+ "version": "1.1.0"
+ },
+ {
+ "container": "dcaemod-genprocessor",
+ "component": "dcaemod-genprocessor",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.genprocessor-job",
+ "version": "1.0.2"
+ },
+ {
+ "container": "dcaemod-genprocessor-http",
+ "component": "dcaemod-genprocessor",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.genprocessor-http",
+ "version": "1.0.2"
+ },
+ {
+ "container": "dcaemod-healthcheck",
+ "component": "dcaemod-healthcheck",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.healthcheck-container",
+ "version": "2.2.0"
+ },
+ {
+ "container": "dcaemod-nifi-registry",
+ "component": "dcaemod-nifi-registry",
+ "image": "docker.nexus.azure.onap.eu/apache/nifi-registry",
+ "version": "0.5.0"
+ },
+ {
+ "container": "dcaemod-onboarding-api",
+ "component": "dcaemod-onboarding-api",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.onboardingapi",
+ "version": "2.12.5"
+ },
+ {
+ "container": "dcaemod-runtime-api",
+ "component": "dcaemod-runtime-api",
+ "image": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.mod.runtime-web",
+ "version": "1.2.3"
+ },
+ {
+ "container": "dmaap-bc",
+ "component": "dmaap-bc",
+ "image": "nexus3.onap.org:10001/onap/dmaap/dmaap-bc",
+ "version": "2.0.8"
+ },
+ {
+ "container": "dmaap-provisioning-job",
+ "component": "dmaap-bc",
+ "image": "nexus3.onap.org:10001/onap/dmaap/dbc-client",
+ "version": "1.0.9"
+ },
+ {
+ "container": "dmaap-provisioning-job",
+ "component": "dmaap-bc",
+ "image": "nexus3.onap.org:10001/onap/dmaap/dbc-client",
+ "version": "1.0.9"
+ },
+ {
+ "container": "dmaap-provisioning-job",
+ "component": "dmaap-bc",
+ "image": "nexus3.onap.org:10001/onap/dmaap/dbc-client",
+ "version": "1.0.9"
+ },
+ {
+ "container": "dmaap-dr-mariadb-init",
+ "component": "dmaap-dr-mariadb-init",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.5.8"
+ },
+ {
+ "container": "dmaap-dr-node",
+ "component": "dmaap-dr-node",
+ "image": "nexus3.onap.org:10001/onap/dmaap/datarouter-node",
+ "version": "2.1.9"
+ },
+ {
+ "container": "dmaap-dr-node-filebeat-onap",
+ "component": "dmaap-dr-node",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "dmaap-dr-prov",
+ "component": "dmaap-dr-prov",
+ "image": "nexus3.onap.org:10001/onap/dmaap/datarouter-prov",
+ "version": "2.1.9"
+ },
+ {
+ "container": "dmaap-dr-prov-filebeat-onap",
+ "component": "dmaap-dr-prov",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "ejbca-ejbca",
+ "component": "ejbca",
+ "image": "docker.nexus.azure.onap.eu/primekey/ejbca-ce",
+ "version": "7.4.3.2"
+ },
+ {
+ "container": "ejbca-config",
+ "component": "ejbca-config",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.5.8"
+ },
+ {
+ "container": "holmes-engine-mgmt",
+ "component": "holmes-engine-mgmt",
+ "image": "nexus3.onap.org:10001/onap/holmes/engine-management",
+ "version": "9.0.0"
+ },
+ {
+ "container": "holmes-pg",
+ "component": "holmes-pg-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "holmes-pg",
+ "component": "holmes-pg-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "holmes-rule-mgmt",
+ "component": "holmes-rule-mgmt",
+ "image": "nexus3.onap.org:10001/onap/holmes/rule-management",
+ "version": "9.0.1"
+ },
+ {
+ "container": "kube2msb",
+ "component": "kube2msb",
+ "image": "nexus3.onap.org:10001/onap/oom/kube2msb",
+ "version": "1.2.6"
+ },
+ {
+ "container": "mariadb-galera",
+ "component": "mariadb-galera",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "10.5.8"
+ },
+ {
+ "container": "mariadb-galera-metrics",
+ "component": "mariadb-galera",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mysqld-exporter",
+ "version": "0.12.1-debian-10-r264"
+ },
+ {
+ "container": "mariadb-galera",
+ "component": "mariadb-galera",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "10.5.8"
+ },
+ {
+ "container": "mariadb-galera-metrics",
+ "component": "mariadb-galera",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mysqld-exporter",
+ "version": "0.12.1-debian-10-r264"
+ },
+ {
+ "container": "mariadb-galera",
+ "component": "mariadb-galera",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "10.5.8"
+ },
+ {
+ "container": "mariadb-galera-metrics",
+ "component": "mariadb-galera",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mysqld-exporter",
+ "version": "0.12.1-debian-10-r264"
+ },
+ {
+ "container": "message-router",
+ "component": "message-router",
+ "image": "nexus3.onap.org:10001/onap/dmaap/dmaap-mr",
+ "version": "1.3.0"
+ },
+ {
+ "container": "message-router-kafka",
+ "component": "message-router-kafka",
+ "image": "nexus3.onap.org:10001/onap/dmaap/kafka111",
+ "version": "1.1.1"
+ },
+ {
+ "container": "message-router-kafka",
+ "component": "message-router-kafka",
+ "image": "nexus3.onap.org:10001/onap/dmaap/kafka111",
+ "version": "1.1.1"
+ },
+ {
+ "container": "message-router-kafka",
+ "component": "message-router-kafka",
+ "image": "nexus3.onap.org:10001/onap/dmaap/kafka111",
+ "version": "1.1.1"
+ },
+ {
+ "container": "message-router-zookeeper",
+ "component": "message-router-zookeeper",
+ "image": "nexus3.onap.org:10001/onap/dmaap/zookeeper",
+ "version": "6.1.0"
+ },
+ {
+ "container": "message-router-zookeeper",
+ "component": "message-router-zookeeper",
+ "image": "nexus3.onap.org:10001/onap/dmaap/zookeeper",
+ "version": "6.1.0"
+ },
+ {
+ "container": "message-router-zookeeper",
+ "component": "message-router-zookeeper",
+ "image": "nexus3.onap.org:10001/onap/dmaap/zookeeper",
+ "version": "6.1.0"
+ },
+ {
+ "container": "modeling-etsicatalog",
+ "component": "modeling-etsicatalog",
+ "image": "nexus3.onap.org:10001/onap/modeling/etsicatalog",
+ "version": "1.0.11"
+ },
+ {
+ "container": "modeling-etsicatalog-filebeat-onap",
+ "component": "modeling-etsicatalog",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "modeling-etsicatalog",
+ "component": "modeling-etsicatalog",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.5.8"
+ },
+ {
+ "container": "msb-consul",
+ "component": "msb-consul",
+ "image": "docker.nexus.azure.onap.eu/library/consul",
+ "version": "1.4.3"
+ },
+ {
+ "container": "msb-discovery",
+ "component": "msb-discovery",
+ "image": "nexus3.onap.org:10001/onap/msb/msb_discovery",
+ "version": "1.3.0"
+ },
+ {
+ "container": "msb-discovery-filebeat-onap",
+ "component": "msb-discovery",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "msb-eag",
+ "component": "msb-eag",
+ "image": "nexus3.onap.org:10001/onap/msb/msb_apigateway",
+ "version": "1.3.1"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "msb-eag",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "msb-iag",
+ "component": "msb-iag",
+ "image": "nexus3.onap.org:10001/onap/msb/msb_apigateway",
+ "version": "1.3.1"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "msb-iag",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "multicloud",
+ "component": "multicloud",
+ "image": "nexus3.onap.org:10001/onap/multicloud/framework",
+ "version": "1.7.1"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "multicloud",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "multicloud-fcaps",
+ "component": "multicloud-fcaps",
+ "image": "nexus3.onap.org:10001/onap/multicloud/openstack-fcaps",
+ "version": "1.5.6"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "multicloud-fcaps",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "rabbit-mq",
+ "component": "multicloud-fcaps",
+ "image": "nexus3.onap.org:10001/rabbitmq",
+ "version": "alpine"
+ },
+ {
+ "container": "multicloud-k8s",
+ "component": "multicloud-k8s",
+ "image": "nexus3.onap.org:10001/onap/multicloud/k8s",
+ "version": "0.9.3"
+ },
+ {
+ "container": "framework-artifactbroker",
+ "component": "multicloud-k8s",
+ "image": "nexus3.onap.org:10001/onap/multicloud/framework-artifactbroker",
+ "version": "1.7.1"
+ },
+ {
+ "container": "multicloud-k8s-etcd",
+ "component": "multicloud-k8s-etcd",
+ "image": "docker.nexus.azure.onap.eu/etcd-amd64",
+ "version": "3.2.24"
+ },
+ {
+ "container": "multicloud-k8s-mongo",
+ "component": "multicloud-k8s-mongo",
+ "image": "docker.nexus.azure.onap.eu/library/mongo",
+ "version": "4.0.8"
+ },
+ {
+ "container": "multicloud-pike",
+ "component": "multicloud-pike",
+ "image": "nexus3.onap.org:10001/onap/multicloud/openstack-pike",
+ "version": "1.5.6"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "multicloud-pike",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "nbi",
+ "component": "nbi",
+ "image": "nexus3.onap.org:10001/onap/externalapi/nbi",
+ "version": "8.0.1"
+ },
+ {
+ "container": "nbi-config",
+ "component": "nbi-config",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.5.8"
+ },
+ {
+ "container": "nbi-mongo",
+ "component": "nbi-mongo",
+ "image": "docker.nexus.azure.onap.eu/library/mongo",
+ "version": "4.0.8"
+ },
+ {
+ "container": "ncmp-dmi-plugin",
+ "component": "ncmp-dmi-plugin",
+ "image": "nexus3.onap.org:10001/onap/ncmp-dmi-plugin",
+ "version": "1.0.0"
+ },
+ {
+ "container": "nengdb-init",
+ "component": "nengdb-init",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.5.8"
+ },
+ {
+ "container": "netbox-app",
+ "component": "netbox-app",
+ "image": "docker.nexus.azure.onap.eu/netboxcommunity/netbox",
+ "version": "v2.5.8"
+ },
+ {
+ "container": "netbox-app-provisioning-job",
+ "component": "netbox-app-provisioning-job",
+ "image": "docker.nexus.azure.onap.eu/curlimages/curl",
+ "version": "7.69.1"
+ },
+ {
+ "container": "netbox-app-provisioning-job",
+ "component": "netbox-app-provisioning-job",
+ "image": "docker.nexus.azure.onap.eu/curlimages/curl",
+ "version": "7.69.1"
+ },
+ {
+ "container": "netbox-nginx",
+ "component": "netbox-nginx",
+ "image": "docker.nexus.azure.onap.eu/nginx",
+ "version": "1.15-alpine"
+ },
+ {
+ "container": "netbox-postgres",
+ "component": "netbox-postgres",
+ "image": "docker.nexus.azure.onap.eu/postgres",
+ "version": "10.4-alpine"
+ },
+ {
+ "container": "network-name-gen",
+ "component": "network-name-gen",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-apps-ms-neng",
+ "version": "1.2.1"
+ },
+ {
+ "container": "oof",
+ "component": "oof",
+ "image": "nexus3.onap.org:10001/onap/optf-osdf",
+ "version": "3.0.6"
+ },
+ {
+ "container": "oof-has-api",
+ "component": "oof-has-api",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.2.1"
+ },
+ {
+ "container": "oof-has-api-nginx",
+ "component": "oof-has-api",
+ "image": "docker.nexus.azure.onap.eu/bitnami/nginx",
+ "version": "1.18-debian-10"
+ },
+ {
+ "container": "oof-has-controller",
+ "component": "oof-has-controller",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.2.1"
+ },
+ {
+ "container": "oof-has-data",
+ "component": "oof-has-data",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.2.1"
+ },
+ {
+ "container": "oof-has-etcd",
+ "component": "oof-has-etcd",
+ "image": "docker.nexus.azure.onap.eu/etcd-amd64",
+ "version": "3.2.24"
+ },
+ {
+ "container": "oof-has-etcd",
+ "component": "oof-has-etcd",
+ "image": "docker.nexus.azure.onap.eu/etcd-amd64",
+ "version": "3.2.24"
+ },
+ {
+ "container": "oof-has-etcd",
+ "component": "oof-has-etcd",
+ "image": "docker.nexus.azure.onap.eu/etcd-amd64",
+ "version": "3.2.24"
+ },
+ {
+ "container": "oof-has-etcd-config",
+ "component": "oof-has-etcd-config",
+ "image": "docker.nexus.azure.onap.eu/bitnami/etcd",
+ "version": "3.3.15"
+ },
+ {
+ "container": "oof-has-reservation",
+ "component": "oof-has-reservation",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.2.1"
+ },
+ {
+ "container": "oof-has-solver",
+ "component": "oof-has-solver",
+ "image": "nexus3.onap.org:10001/onap/optf-has",
+ "version": "2.2.1"
+ },
+ {
+ "container": "oom-cert-service",
+ "component": "oom-cert-service",
+ "image": "nexus3.onap.org:10001/onap/org.onap.oom.platform.cert-service.oom-certservice-api",
+ "version": "2.4.0"
+ },
+ {
+ "container": "policy-apex-pdp",
+ "component": "policy-apex-pdp",
+ "image": "nexus3.onap.org:10001/onap/policy-apex-pdp",
+ "version": "2.6.1"
+ },
+ {
+ "container": "policy-api",
+ "component": "policy-api",
+ "image": "nexus3.onap.org:10001/onap/policy-api",
+ "version": "2.5.1"
+ },
+ {
+ "container": "policy-clamp-be",
+ "component": "policy-clamp-be",
+ "image": "nexus3.onap.org:10001/onap/policy-clamp-backend",
+ "version": "6.1.3"
+ },
+ {
+ "container": "policy-clamp-cl-http-ppnt",
+ "component": "policy-clamp-cl-http-ppnt",
+ "image": "nexus3.onap.org:10001/onap/policy-clamp-cl-http-ppnt",
+ "version": "6.1.3"
+ },
+ {
+ "container": "policy-clamp-cl-k8s-ppnt",
+ "component": "policy-clamp-cl-k8s-ppnt",
+ "image": "nexus3.onap.org:10001/onap/policy-clamp-cl-k8s-ppnt",
+ "version": "6.1.3"
+ },
+ {
+ "container": "policy-clamp-cl-pf-ppnt",
+ "component": "policy-clamp-cl-pf-ppnt",
+ "image": "nexus3.onap.org:10001/onap/policy-clamp-cl-pf-ppnt",
+ "version": "6.1.3"
+ },
+ {
+ "container": "policy-clamp-cl-runtime",
+ "component": "policy-clamp-cl-runtime",
+ "image": "nexus3.onap.org:10001/onap/policy-clamp-cl-runtime",
+ "version": "6.1.3"
+ },
+ {
+ "container": "policy-clamp-fe",
+ "component": "policy-clamp-fe",
+ "image": "nexus3.onap.org:10001/onap/policy-clamp-frontend",
+ "version": "6.1.3"
+ },
+ {
+ "container": "onap-policy-clamp-galera-config",
+ "component": "policy-clamp-be-policy-clamp-job",
+ "image": "docker.nexus.azure.onap.eu/mariadb",
+ "version": "10.5.8"
+ },
+ {
+ "container": "policy-distribution",
+ "component": "policy-distribution",
+ "image": "nexus3.onap.org:10001/onap/policy-distribution",
+ "version": "2.6.1"
+ },
+ {
+ "container": "policy-drools-pdp",
+ "component": "policy-drools-pdp",
+ "image": "nexus3.onap.org:10001/onap/policy-pdpd-cl",
+ "version": "1.9.1"
+ },
+ {
+ "container": "onap-policy-galera-db-migrator",
+ "component": "policy-job",
+ "image": "nexus3.onap.org:10001/onap/policy-db-migrator",
+ "version": "2.3.1"
+ },
+ {
+ "container": "policy-gui",
+ "component": "policy-gui",
+ "image": "nexus3.onap.org:10001/onap/policy-gui",
+ "version": "2.1.1"
+ },
+ {
+ "container": "policy-mariadb",
+ "component": "policy-mariadb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb-galera",
+ "version": "10.5.8"
+ },
+ {
+ "container": "policy-mariadb-metrics",
+ "component": "policy-mariadb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mysqld-exporter",
+ "version": "0.12.1-debian-10-r264"
+ },
+ {
+ "container": "policy-pap",
+ "component": "policy-pap",
+ "image": "nexus3.onap.org:10001/onap/policy-pap",
+ "version": "2.5.1"
+ },
+ {
+ "container": "policy-xacml-pdp",
+ "component": "policy-xacml-pdp",
+ "image": "nexus3.onap.org:10001/onap/policy-xacml-pdp",
+ "version": "2.5.1"
+ },
+ {
+ "container": "portal-app",
+ "component": "portal-app",
+ "image": "nexus3.onap.org:10001/onap/portal-app",
+ "version": "3.4.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "portal-app",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "portal-cassandra",
+ "component": "portal-cassandra",
+ "image": "docker.nexus.azure.onap.eu/bitnami/cassandra",
+ "version": "3.11.9-debian-10-r30"
+ },
+ {
+ "container": "portal-db",
+ "component": "portal-db",
+ "image": "nexus3.onap.org:10001/onap/portal-db",
+ "version": "3.4.1"
+ },
+ {
+ "container": "portal-db-job",
+ "component": "portal-db-job",
+ "image": "docker.nexus.azure.onap.eu/oomk8s/mariadb-client-init",
+ "version": "3.0.0"
+ },
+ {
+ "container": "portal-db-oom-update-job",
+ "component": "portal-db-job",
+ "image": "docker.nexus.azure.onap.eu/oomk8s/mariadb-client-init",
+ "version": "3.0.0"
+ },
+ {
+ "container": "portal-sdk",
+ "component": "portal-sdk",
+ "image": "nexus3.onap.org:10001/onap/portal-sdk",
+ "version": "3.4.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "portal-sdk",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "portal-widget",
+ "component": "portal-widget",
+ "image": "nexus3.onap.org:10001/onap/portal-wms",
+ "version": "3.4.2"
+ },
+ {
+ "container": "postgres",
+ "component": "postgres-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "postgres",
+ "component": "postgres-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "robot",
+ "component": "robot",
+ "image": "nexus3.onap.org:10001/onap/testsuite",
+ "version": "1.8.0"
+ },
+ {
+ "container": "sdc-be-job",
+ "component": "sdc-be-job",
+ "image": "nexus3.onap.org:10001/onap/sdc-backend-init",
+ "version": "1.9.5"
+ },
+ {
+ "container": "sdc-be",
+ "component": "sdc-be",
+ "image": "nexus3.onap.org:10001/onap/sdc-backend-all-plugins",
+ "version": "1.9.5"
+ },
+ {
+ "container": "sdc-be-filebeat-onap",
+ "component": "sdc-be",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "sdc-cs-job",
+ "component": "sdc-cs-job",
+ "image": "nexus3.onap.org:10001/onap/sdc-cassandra-init",
+ "version": "1.9.5"
+ },
+ {
+ "container": "sdc-fe",
+ "component": "sdc-fe",
+ "image": "nexus3.onap.org:10001/onap/sdc-frontend",
+ "version": "1.9.5"
+ },
+ {
+ "container": "sdc-fe-filebeat-onap",
+ "component": "sdc-fe",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "sdc-helm-validator",
+ "component": "sdc-helm-validator",
+ "image": "nexus3.onap.org:10001/onap/org.onap.sdc.sdc-helm-validator",
+ "version": "1.2.2"
+ },
+ {
+ "container": "sdc-onboarding-be",
+ "component": "sdc-onboarding-be",
+ "image": "nexus3.onap.org:10001/onap/sdc-onboard-backend",
+ "version": "1.9.5"
+ },
+ {
+ "container": "sdc-onboarding-be-filebeat-onap",
+ "component": "sdc-onboarding-be",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "sdc-onboarding-be-job",
+ "component": "sdc-onboarding-be-job",
+ "image": "nexus3.onap.org:10001/onap/sdc-onboard-cassandra-init",
+ "version": "1.9.5"
+ },
+ {
+ "container": "sdc-wfd-be",
+ "component": "sdc-wfd-be",
+ "image": "nexus3.onap.org:10001/onap/sdc-workflow-backend",
+ "version": "1.7.0"
+ },
+ {
+ "container": "sdc-wfd-be-job",
+ "component": "sdc-wfd-be-job",
+ "image": "nexus3.onap.org:10001/onap/sdc-workflow-init",
+ "version": "1.7.0"
+ },
+ {
+ "container": "sdc-wfd-fe",
+ "component": "sdc-wfd-fe",
+ "image": "nexus3.onap.org:10001/onap/sdc-workflow-frontend",
+ "version": "1.7.0"
+ },
+ {
+ "container": "sdc-wfd-fe-filebeat-onap",
+ "component": "sdc-wfd-fe",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "sdnc",
+ "component": "sdnc",
+ "image": "nexus3.onap.org:10001/onap/sdnc-image",
+ "version": "2.2.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "sdnc",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "sdnc-ansible-server",
+ "component": "sdnc-ansible-server",
+ "image": "nexus3.onap.org:10001/onap/sdnc-ansible-server-image",
+ "version": "2.2.2"
+ },
+ {
+ "container": "sdnc",
+ "component": "sdnc-job",
+ "image": "nexus3.onap.org:10001/onap/sdnc-image",
+ "version": "2.2.2"
+ },
+ {
+ "container": "sdnc-dgbuilder",
+ "component": "sdnc-dgbuilder",
+ "image": "nexus3.onap.org:10001/onap/ccsdk-dgbuilder-image",
+ "version": "1.2.2"
+ },
+ {
+ "container": "sdnc-dmaap-listener",
+ "component": "sdnc-dmaap-listener",
+ "image": "nexus3.onap.org:10001/onap/sdnc-dmaap-listener-image",
+ "version": "2.2.2"
+ },
+ {
+ "container": "sdnc-sdnrdb-init-job",
+ "component": "sdnc",
+ "image": "nexus3.onap.org:10001/onap/sdnc-image",
+ "version": "2.2.2"
+ },
+ {
+ "container": "sdnc-ueb-listener",
+ "component": "sdnc-ueb-listener",
+ "image": "nexus3.onap.org:10001/onap/sdnc-ueb-listener-image",
+ "version": "2.2.2"
+ },
+ {
+ "container": "sdnc-web",
+ "component": "sdnc-web",
+ "image": "nexus3.onap.org:10001/onap/sdnc-web-image",
+ "version": "2.2.2"
+ },
+ {
+ "container": "sdnrdb-nginx",
+ "component": "sdnrdb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/nginx",
+ "version": "1.18-debian-10"
+ },
+ {
+ "container": "sdnrdb-elasticsearch",
+ "component": "sdnrdb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/elasticsearch",
+ "version": "7.9.3"
+ },
+ {
+ "container": "sdnrdb-master",
+ "component": "sdnrdb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/elasticsearch",
+ "version": "7.9.3"
+ },
+ {
+ "container": "sdnrdb-master",
+ "component": "sdnrdb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/elasticsearch",
+ "version": "7.9.3"
+ },
+ {
+ "container": "sdnrdb-master",
+ "component": "sdnrdb",
+ "image": "docker.nexus.azure.onap.eu/bitnami/elasticsearch",
+ "version": "7.9.3"
+ },
+ {
+ "container": "so",
+ "component": "so",
+ "image": "nexus3.onap.org:10001/onap/so/api-handler-infra",
+ "version": "1.9.2"
+ },
+ {
+ "container": "so-filebeat-onap",
+ "component": "so",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "so-admin-cockpit",
+ "component": "so-admin-cockpit",
+ "image": "nexus3.onap.org:10001/onap/so/so-admin-cockpit",
+ "version": "1.8.2"
+ },
+ {
+ "container": "so-bpmn-infra",
+ "component": "so-bpmn-infra",
+ "image": "nexus3.onap.org:10001/onap/so/bpmn-infra",
+ "version": "1.9.2"
+ },
+ {
+ "container": "so-bpmn-infra-filebeat-onap",
+ "component": "so-bpmn-infra",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "so-catalog-db-adapter",
+ "component": "so-catalog-db-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/catalog-db-adapter",
+ "version": "1.9.2"
+ },
+ {
+ "container": "so-cnf-adapter",
+ "component": "so-cnf-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/so-cnf-adapter",
+ "version": "1.9.2"
+ },
+ {
+ "container": "so-etsi-nfvo-ns-lcm",
+ "component": "so-etsi-nfvo-ns-lcm",
+ "image": "nexus3.onap.org:10001/onap/so/so-etsi-nfvo-ns-lcm",
+ "version": "1.8.2"
+ },
+ {
+ "container": "so-etsi-sol003-adapter",
+ "component": "so-etsi-sol003-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/so-etsi-sol003-adapter",
+ "version": "1.8.2"
+ },
+ {
+ "container": "so-etsi-sol005-adapter",
+ "component": "so-etsi-sol005-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/so-etsi-sol005-adapter",
+ "version": "1.8.3"
+ },
+ {
+ "container": "so-mariadb-config",
+ "component": "so-mariadb-job",
+ "image": "nexus3.onap.org:10001/mariadb",
+ "version": "10.1.38"
+ },
+ {
+ "container": "so-nssmf-adapter",
+ "component": "so-nssmf-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/so-nssmf-adapter",
+ "version": "1.9.1"
+ },
+ {
+ "container": "so-oof-adapter",
+ "component": "so-oof-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/so-oof-adapter",
+ "version": "1.8.3"
+ },
+ {
+ "container": "so-oof-adapter-filebeat-onap",
+ "component": "so-oof-adapter",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "so-openstack-adapter",
+ "component": "so-openstack-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/openstack-adapter",
+ "version": "1.9.2"
+ },
+ {
+ "container": "so-openstack-adapter-filebeat-onap",
+ "component": "so-openstack-adapter",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "so-request-db-adapter",
+ "component": "so-request-db-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/request-db-adapter",
+ "version": "1.9.2"
+ },
+ {
+ "container": "so-sdc-controller",
+ "component": "so-sdc-controller",
+ "image": "nexus3.onap.org:10001/onap/so/sdc-controller",
+ "version": "1.9.2"
+ },
+ {
+ "container": "so-sdc-controller-filebeat-onap",
+ "component": "so-sdc-controller",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "so-sdnc-adapter",
+ "component": "so-sdnc-adapter",
+ "image": "nexus3.onap.org:10001/onap/so/sdnc-adapter",
+ "version": "1.9.2"
+ },
+ {
+ "container": "so-sdnc-adapter-filebeat-onap",
+ "component": "so-sdnc-adapter",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "create-tls-secret",
+ "component": "so-tls-cert",
+ "image": "docker.nexus.azure.onap.eu/bitnami/kubectl",
+ "version": "1.19"
+ },
+ {
+ "container": "uui",
+ "component": "uui",
+ "image": "nexus3.onap.org:10001/onap/usecase-ui",
+ "version": "4.0.5"
+ },
+ {
+ "container": "uui-server",
+ "component": "uui-server",
+ "image": "nexus3.onap.org:10001/onap/usecase-ui-server",
+ "version": "4.0.5"
+ },
+ {
+ "container": "vfc-generic-vnfm-driver",
+ "component": "vfc-generic-vnfm-driver",
+ "image": "nexus3.onap.org:10001/onap/vfc/gvnfmdriver",
+ "version": "1.4.3"
+ },
+ {
+ "container": "vfc-generic-vnfm-driver-filebeat-onap",
+ "component": "vfc-generic-vnfm-driver",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-huawei-vnfm-driver",
+ "component": "vfc-huawei-vnfm-driver",
+ "image": "nexus3.onap.org:10001/onap/vfc/nfvo/svnfm/huawei",
+ "version": "1.3.9"
+ },
+ {
+ "container": "vfc-huawei-vnfm-driver-filebeat-onap",
+ "component": "vfc-huawei-vnfm-driver",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-nslcm",
+ "component": "vfc-nslcm",
+ "image": "nexus3.onap.org:10001/onap/vfc/nslcm",
+ "version": "1.4.4"
+ },
+ {
+ "container": "vfc-nslcm-filebeat-onap",
+ "component": "vfc-nslcm",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-redis",
+ "component": "vfc-redis",
+ "image": "nexus3.onap.org:10001/onap/vfc/db",
+ "version": "1.3.5"
+ },
+ {
+ "container": "vfc-vnflcm",
+ "component": "vfc-vnflcm",
+ "image": "nexus3.onap.org:10001/onap/vfc/vnflcm",
+ "version": "1.4.2"
+ },
+ {
+ "container": "vfc-vnflcm-filebeat-onap",
+ "component": "vfc-vnflcm",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-vnfmgr",
+ "component": "vfc-vnfmgr",
+ "image": "nexus3.onap.org:10001/onap/vfc/vnfmgr",
+ "version": "1.4.1"
+ },
+ {
+ "container": "vfc-vnfmgr-filebeat-onap",
+ "component": "vfc-vnfmgr",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-vnfres",
+ "component": "vfc-vnfres",
+ "image": "nexus3.onap.org:10001/onap/vfc/vnfres",
+ "version": "1.4.0"
+ },
+ {
+ "container": "vfc-vnfres-filebeat-onap",
+ "component": "vfc-vnfres",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vfc-zte-vnfm-driver",
+ "component": "vfc-zte-vnfm-driver",
+ "image": "nexus3.onap.org:10001/onap/vfc/ztevnfmdriver",
+ "version": "1.4.1"
+ },
+ {
+ "container": "vfc-zte-vnfm-driver-filebeat-onap",
+ "component": "vfc-zte-vnfm-driver",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vid",
+ "component": "vid",
+ "image": "nexus3.onap.org:10001/onap/vid",
+ "version": "8.0.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "vid",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "vid-mariadb-init",
+ "component": "vid-mariadb-init",
+ "image": "docker.nexus.azure.onap.eu/bitnami/mariadb",
+ "version": "10.5.8"
+ },
+ {
+ "container": "vnfsdk",
+ "component": "vnfsdk",
+ "image": "nexus3.onap.org:10001/onap/vnfsdk/refrepo",
+ "version": "1.6.3"
+ },
+ {
+ "container": "vnfsdk-job",
+ "component": "vnfsdk-job",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "vnfsdk-postgres",
+ "component": "vnfsdk-postgres-primary",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "vnfsdk-postgres",
+ "component": "vnfsdk-postgres-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ }
+ ] \ No newline at end of file
diff --git a/doc/artifacts/versions/test_guilin_MR1.json b/doc/artifacts/versions/test_guilin_MR1.json
new file mode 100644
index 0000000..45c4e64
--- /dev/null
+++ b/doc/artifacts/versions/test_guilin_MR1.json
@@ -0,0 +1,68 @@
+[
+ {
+ "container": "a1policymanagement-update-config",
+ "component": "a1policymanagement",
+ "image": "dibi/envsubst",
+ "version": "1"
+ },
+ {
+ "container": "aaf-sms-preload",
+ "component": "aaf-sms",
+ "image": "nexus3.onap.org:10001/onap/aaf/sms",
+ "version": "4.0.1"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "appc-cdt",
+ "component": "appc-cdt",
+ "image": "nexus3.onap.org:10001/onap/appc-cdt-image",
+ "version": "1.7.1"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-modelloader",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-resources",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "msb-discovery",
+ "component": "msb-discovery",
+ "image": "nexus3.onap.org:10001/onap/msb/msb_discovery",
+ "version": "1.2.6"
+ },
+ {
+ "container": "oof-has-api-nginx",
+ "component": "oof-has-api",
+ "image": "docker.nexus.azure.onap.eu/bitnami/nginx",
+ "version": "1.18-debian-10"
+ },
+ {
+ "container": "policy-clamp-fe",
+ "component": "policy-clamp-fe",
+ "image": "nexus3.onap.org:10001/onap/policy-clamp-frontend",
+ "version": "6.0.2"
+ },
+ {
+ "container": "vnfsdk-postgres",
+ "component": "vnfsdk-postgres-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ }
+]
diff --git a/doc/artifacts/versions/test_honolulu.json b/doc/artifacts/versions/test_honolulu.json
new file mode 100644
index 0000000..35f7174
--- /dev/null
+++ b/doc/artifacts/versions/test_honolulu.json
@@ -0,0 +1,62 @@
+[
+ {
+ "container": "a1policymanagement-update-config",
+ "component": "a1policymanagement",
+ "image": "dibi/envsubst",
+ "version": "1"
+ },
+ {
+ "container": "aaf-sms-preload",
+ "component": "aaf-sms",
+ "image": "nexus3.onap.org:10001/onap/aaf/sms",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "appc-cdt",
+ "component": "appc-cdt",
+ "image": "nexus3.onap.org:10001/onap/appc-cdt-image",
+ "version": "1.7.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-modelloader",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-resources",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "oof-has-api-nginx",
+ "component": "oof-has-api",
+ "image": "docker.nexus.azure.onap.eu/bitnami/nginx",
+ "version": "1.18-debian-10"
+ },
+ {
+ "container": "policy-clamp-fe",
+ "component": "policy-clamp-fe",
+ "image": "nexus3.onap.org:10001/onap/policy-clamp-frontend",
+ "version": "6.0.2"
+ },
+ {
+ "container": "vnfsdk-postgres",
+ "component": "vnfsdk-postgres-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ }
+]
diff --git a/doc/artifacts/versions/test_honolulu_MR_Candidate.json b/doc/artifacts/versions/test_honolulu_MR_Candidate.json
new file mode 100644
index 0000000..f15f159
--- /dev/null
+++ b/doc/artifacts/versions/test_honolulu_MR_Candidate.json
@@ -0,0 +1,50 @@
+[
+ {
+ "container": "a1policymanagement-update-config",
+ "component": "a1policymanagement",
+ "image": "dibi/envsubst",
+ "version": "1"
+ },
+ {
+ "container": "aaf-sms-preload",
+ "component": "aaf-sms",
+ "image": "nexus3.onap.org:10001/onap/aaf/sms",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-modelloader",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-resources",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "oof-has-api-nginx",
+ "component": "oof-has-api",
+ "image": "docker.nexus.azure.onap.eu/bitnami/nginx",
+ "version": "1.19"
+ },
+ {
+ "container": "vnfsdk-postgres",
+ "component": "vnfsdk-postgres-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ }
+]
diff --git a/doc/artifacts/versions/test_master.json b/doc/artifacts/versions/test_master.json
new file mode 100644
index 0000000..4e6c799
--- /dev/null
+++ b/doc/artifacts/versions/test_master.json
@@ -0,0 +1,62 @@
+[
+ {
+ "container": "a1policymanagement-update-config",
+ "component": "a1policymanagement",
+ "image": "dibi/envsubst",
+ "version": "1"
+ },
+ {
+ "container": "aaf-sms-preload",
+ "component": "aaf-sms",
+ "image": "nexus3.onap.org:10001/onap/aaf/sms",
+ "version": "4.0.5"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "aaf-sms-quorumclient",
+ "component": "aaf-sms-quorumclient",
+ "image": "nexus3.onap.org:10001/onap/aaf/smsquorumclient",
+ "version": "4.0.2"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-modelloader",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "filebeat-onap",
+ "component": "aai-resources",
+ "image": "docker.nexus.azure.onap.eu/beats/filebeat",
+ "version": "5.5.0"
+ },
+ {
+ "container": "cps-postgres",
+ "component": "cps-postgres-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ },
+ {
+ "container": "policy-clamp-fe",
+ "component": "policy-clamp-fe",
+ "image": "nexus3.onap.org:10001/onap/policy-clamp-frontend",
+ "version": "6.0.3"
+ },
+ {
+ "container": "vnfsdk-postgres",
+ "component": "vnfsdk-postgres-replica",
+ "image": "docker.nexus.azure.onap.eu/crunchydata/crunchy-postgres",
+ "version": "centos8-13.2-4.6.1"
+ }
+]
diff --git a/doc/generate_docker_version.py b/doc/generate_docker_version.py
new file mode 100644
index 0000000..fa13229
--- /dev/null
+++ b/doc/generate_docker_version.py
@@ -0,0 +1,437 @@
+#!/usr/bin/python
+#
+# This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+'''Docker version comparison generator.'''
+
+import argparse
+import logging
+import json
+import re
+from collections import Counter
+from dataclasses import dataclass
+import os
+import requests
+from bs4 import BeautifulSoup # sudo apt-get install python3-bs4 if pip doesn't work
+from deepdiff import DeepDiff
+from jinja2 import Environment, FileSystemLoader, select_autoescape
+
+PROXY = {}
+# PROXY = {'http': 'socks5h://127.0.0.1:8080',
+# 'https': 'socks5h://127.0.0.1:8080'}
+
+
+BASE_URL = "https://logs.onap.org/onap-integration/daily/"
+ONAP_VERSION = 'master'
+component_versions = {}
+versions_to_be_compared_with_master = ['jakarta', 'istanbul', 'honolulu_MR1', 'honolulu', 'guilin_MR1']
+VERSION_URL = []
+END_URL = "infrastructure-healthcheck/k8s/kubernetes-status/onap_versions.json"
+
+# first date where file is present for each version
+ONAP_RELEASE_DATE = {
+ "jakarta": "onap-daily-dt-oom-jakarta/2022-07/19_03-53/",
+ "istanbul": "onap_daily_pod4_istanbul/2021-11/15_03-55/",
+ "honolulu": "onap_daily_pod4_honolulu/2021-05/28_01-00/",
+ "honolulu_MR1": "onap_daily_pod4_honolulu/2021-08/12_20-15/",
+ "guilin_MR1": "onap_daily_pod4_guilin/2021-05/27_03-23/"
+}
+
+# Logger
+logging.basicConfig()
+LOGGER = logging.getLogger("Docker-Version-Status")
+LOGGER.setLevel("INFO")
+
+# Arg
+PARSER = argparse.ArgumentParser()
+PARSER.add_argument('-t', '--test', action='store_true' )
+# PARSER.add_argument('-t', '--test', default=True, type=bool, help='Execute with test dataset')
+ARGS = PARSER.parse_args()
+
+DATA_VERSIONS = []
+
+@dataclass
+class ComponentVersion:
+ """A component version."""
+ component: str
+ container: str
+ image: str
+ current_version: str
+ other_version: {}
+ status: int
+
+# ***********************************************************************
+# functions
+# ***********************************************************************
+def get_months(url):
+ """Load and parse list of months"""
+ months = []
+ response_months = requests.get(url, proxies=PROXY)
+ soup = BeautifulSoup(response_months.text, "lxml")
+
+ for link in soup.find_all('a'):
+ pattern = bool(re.match("[0-9]{4}-[0-9]{2}", link.contents[0]))
+ if pattern:
+ months.append(link.contents[0])
+ return months
+
+def get_days(url):
+ """Load and parse list of days"""
+ days = []
+ response_days = requests.get(url, proxies=PROXY)
+ soup = BeautifulSoup(response_days.text, "lxml")
+
+ for link in soup.find_all('a'):
+ pattern = bool(
+ re.match("[0-9]{2}_[0-9]{2}-[0-9]{2}", link.contents[0]))
+ if pattern:
+ days.append(link.contents[0])
+ return days
+
+def get_diff_index(change):
+ """Search Index of the Diff table to get the container related to the detected diff."""
+ local_index = re.findall(r"\[([0-9]+)\]", change)
+ return int(local_index[0])
+
+def is_it_a_simple_version_change(change, delta_values_changed):
+    """Detect if it is a simple version change or a substitution."""
+ # retrieve the change index
+ change_index = get_diff_index(change)
+    # count the number of occurrences
+    # if 1 => simple version change
+ return str(delta_values_changed).count("root[" + str(change_index) + "]") < 2
+
+def get_component_status(container, delta_versions, version_master, other_version):
+ """Get component status."""
+ LOGGER.debug(container)
+ container_status = "unchanged"
+ # Look in values_changed
+ for change_type in ['values_changed', 'iterable_item_added', 'iterable_item_removed']:
+ # We test the change type as they may not occur, or only 1 type is possible
+ if change_type in delta_versions:
+ # Dive into the change found
+ for change in delta_versions[change_type]:
+ if change_type == 'values_changed':
+ # 2 cases
+ # - a simple version change for the same component (only version)
+ # in this case only the other_version shall be changed
+ # - a substitution (version, container, image,...)
+ # in this case a new component must be added and another removed
+ # it may be a little bit misleading
+                    # as it could correspond to an offset (not really a new component)
+ if is_it_a_simple_version_change(change, delta_versions[change_type]):
+ # simple replacement case
+ ## change_index = int(change[5:-12])
+ change_index = get_diff_index(change)
+ if container == other_version[change_index]['container']:
+ LOGGER.debug("Component version change: %s",
+ other_version[change_index]['container'])
+ container_status = "version_changed"
+ else:
+ if 'container' in change:
+ ## change_index = int(change[5:-14])
+ change_index = get_diff_index(change)
+ # substitution case
+ for substitution_change in delta_versions['values_changed']:
+ if str(change_index) in substitution_change:
+ LOGGER.debug("From %s to %s",
+ delta_versions['values_changed'][substitution_change]['old_value'],
+ delta_versions['values_changed'][substitution_change]['new_value'])
+ if container == delta_versions['values_changed'][substitution_change]['old_value']:
+ container_status = "component_removed"
+ if container == delta_versions['values_changed'][substitution_change]['new_value']:
+ container_status = "component_added"
+ elif change_type == 'iterable_item_added':
+ ## change_index = int(change[5:-1])
+ change_index = get_diff_index(change)
+ LOGGER.debug(
+ "New component added: %s", version_master[change_index]['container'])
+ if container == version_master[change_index]['container']:
+ container_status = "component_added"
+ elif change_type == 'iterable_item_removed':
+ ## change_index = int(change[5:-1])
+ change_index = get_diff_index(change)
+ LOGGER.debug(
+ "Component removed: %s", other_version[change_index]['container'])
+ if container == other_version[change_index]['container']:
+ container_status = "component_removed"
+ return container_status
+
+def get_old_version_component(container, component_versions):
+ """Retrieve the version of an old container."""
+ container_version = "unknown"
+ for component in component_versions:
+ if component['container'] == container:
+ return component['version']
+ return container_version
+
+def get_removed_container_list(delta_versions, component_versions, master_container):
+ """Retrieve the removed containers."""
+ removed_container_list = []
+ # Look in the removed list
+ try:
+ for change in delta_versions['iterable_item_removed']:
+ removed_container_list.append(component_versions[get_diff_index(change)])
+ except KeyError:
+ LOGGER.info("No Item in the removed section, look at the possible substitution")
+
+    # Consider the substitution case
+ for delta in delta_versions['values_changed']:
+ if "container" in delta:
+ index_container = get_diff_index(delta)
+ removed_container_list.append(component_versions[index_container])
+ return removed_container_list
+
+def get_data_version_container_index(container, dataset):
+ """Get the index of a container from a dataset."""
+ for data in dataset:
+ if data.container == container:
+ return dataset.index(data)
+ return -1
+
+def get_json_master_components():
+ """Retrieve the json master description from LF backend."""
+ local_url = BASE_URL + "onap_daily_pod4_master/"
+ months_with_results = get_months(local_url)
+ LOGGER.debug("months_with_results: %s", months_with_results)
+ month = months_with_results[-1]
+
+ for day in get_days(local_url + month):
+ response_day = requests.get(local_url + month + day + END_URL, proxies=PROXY)
+ if response_day.status_code == 404:
+ LOGGER.debug("%s : does not exist", local_url + month + day + END_URL)
+ else:
+ version_infos = {"month": month,
+ "day": day,
+ "file": local_url + month + day + END_URL}
+ VERSION_URL.append(version_infos)
+
+ # load latest version json from LFN backend
+ version_file = VERSION_URL[-1]["file"]
+ LOGGER.debug(version_file)
+ # retrieve the file
+ response_latest = requests.get(version_file, proxies=PROXY)
+ return json.loads(response_latest.text)
+
+def get_json_version_components(version):
+ """Retrieve the versions of the component from LF Backend."""
+ local_url = BASE_URL + ONAP_RELEASE_DATE[version] + END_URL
+ local_response = requests.get(local_url, proxies=PROXY)
+ return json.loads(local_response.text)
+
+def compare_func(x, y, level=None):
+ try:
+ res = x["container"] == y["container"] and \
+ x["component"] == y["component"]
+ return res
+ except Exception:
+ raise CannotCompare()
+
+
+LOGGER.info("*********************************************************************")
+LOGGER.info("*********************************************************************")
+LOGGER.info("******************* Retrieve Raw Data ***************")
+LOGGER.info("*********************************************************************")
+LOGGER.info("*********************************************************************")
+MONTHS = ['2021-08/']
+LOGGER.info("Retrieve the Raw Data.")
+
+# Retrieve the last available version file
+# - we first list the url of all the daily results on the given version
+# - we check that the onap_versions.json exist
+# - we take the last existing one VERSION_URL[-1]
+# - we download this particular file
+
+
+# Master is the ref
+if ARGS.test:
+ with open('./artifacts/versions/test_master.json') as json_file:
+ LATEST_VERSION = json.load(json_file)
+else:
+ LATEST_VERSION = get_json_master_components()
+CLEAN_LATEST_VERSION = [k for j, k in enumerate(
+ LATEST_VERSION) if k not in LATEST_VERSION[j + 1:]]
+SORTED_MASTER = sorted(CLEAN_LATEST_VERSION, key=lambda k: k['container'])
+LOGGER.info("Versions of the latest run ================> %s", SORTED_MASTER)
+
+for version in versions_to_be_compared_with_master:
+ # Retrieve versions of official release (release or maintenance release)
+ LOCAL_PATH = "./artifacts/versions/test_" + version + ".json"
+ if ARGS.test:
+ with open(LOCAL_PATH) as json_file:
+ local_json_file = json.load(json_file)
+ else:
+ local_json_file = get_json_version_components(version)
+
+ # remove duplicates
+ local_clean = [k for j, k in enumerate(
+ local_json_file) if k not in local_json_file[j + 1:]]
+ # sort
+ component_versions[version] = sorted(
+ local_clean, key=lambda k: k['container'])
+ LOGGER.debug("Versions of the release run %s", version)
+ LOGGER.debug("================> %s", component_versions[version])
+
+# # Post processing on the loaded versions
+# # we got
+# # - the last version
+# # - the Honolulu versions
+# # - the Guilin MR versions
+# # we need first to cleanup (avoid duplicate)
+# # then we need to build the object for the jinja template
+
+LOGGER.info("*********************************************************************")
+LOGGER.info("*********************************************************************")
+LOGGER.info("******************* Process the Data ***************")
+LOGGER.info("*********************************************************************")
+LOGGER.info("*********************************************************************")
+
+NB_ITERATION_DATASET = 0
+# we compare Master with a bunch of official release or maintenance releases
+for version in versions_to_be_compared_with_master:
+ LOGGER.info("----------------------------------------------------")
+ LOGGER.info("----------------------------------------------------")
+ LOGGER.info("-----Comparison Master versus %s --------", version)
+ LOGGER.info("----------------------------------------------------")
+ LOGGER.info("----------------------------------------------------")
+
+    # we use DeepDiff to get the difference between the 2 json lists
+ delta_versions = DeepDiff(
+ component_versions[version],
+ SORTED_MASTER,
+ ignore_order=True,
+ iterable_compare_func=compare_func,
+ cutoff_distance_for_pairs=0.1,
+ cutoff_intersection_for_pairs=1.0,
+ get_deep_distance=True)
+ LOGGER.info("Delta between Master and %s", version)
+ LOGGER.info("Delta = %s", delta_versions)
+
+ # Manage containers found in Master
+ for master_component in SORTED_MASTER:
+ # test if container mentioned in delta
+ # possible cases
+ # * values_changed:
+ # - simple version change for the same component
+ # - substitution
+ # * iterable_item_removed (remove in master)
+ # * iterable_item_added (added in master)
+ # create ComponentVersion accordingly
+ INDEX_STATUS = 0
+ other_version = {}
+ CONTAINER_STATUS = get_component_status(
+ master_component['container'],
+ delta_versions, SORTED_MASTER,
+ component_versions[version])
+ LOGGER.info("Container status: %s", CONTAINER_STATUS)
+
+ if CONTAINER_STATUS == 'version_changed':
+ other_version[version] = get_old_version_component(
+ master_component['container'], component_versions[version])
+ INDEX_STATUS = 1
+ elif CONTAINER_STATUS == 'component_added':
+ # no other version
+ INDEX_STATUS = 2
+ else:
+ other_version[version] = master_component['version']
+ # need post processing see oldest version to see
+ # if there is no changes since more than 2 release or not
+ # temp index = -1
+ # post processing shall allow to say if
+ # index = 0: no changes since last release
+ # index = 3: no changes since more than 2 release
+
+ # As we are testing several releases, check if the object already exists
+ # If so complete it, do not create a new one..
+ if NB_ITERATION_DATASET < 1:
+ LOGGER.info("Creation of the dataset for %s", master_component['container'])
+ version_object = ComponentVersion(
+ component=master_component['component'],
+ container=master_component['container'],
+ image=master_component['image'],
+ current_version=master_component['version'],
+ other_version=other_version,
+ status=INDEX_STATUS)
+ DATA_VERSIONS.append(version_object)
+ else:
+ # we already compared master with a release
+ # the DATASET is initiated
+ # we start the second comparison
+
+ # First consider the changes between master and the new release
+ LOGGER.info("Update of the dataset for %s", master_component['container'])
+ index_data = get_data_version_container_index(
+ master_component['container'], DATA_VERSIONS)
+
+ local_other_version = DATA_VERSIONS[index_data].other_version
+ if get_old_version_component(
+ master_component['container'],
+ component_versions[version]) != "unknown":
+ local_other_version[version] = get_old_version_component(
+ master_component['container'], component_versions[version])
+ DATA_VERSIONS[index_data].other_version = local_other_version
+
+ # postprocessing to detect if the version of the container
+ # did not change for at least 2 releases
+ count_versions = Counter(local_other_version.values())
+ for count_version in count_versions.values():
+ if (count_version > 1 and
+ INDEX_STATUS < 1):
+ DATA_VERSIONS[index_data].status = 3
+ NB_ITERATION_DATASET += 1
+
+ # manage removed pods
+ for removed_container in get_removed_container_list(
+ delta_versions, component_versions[version], master_component):
+
+ index_data = get_data_version_container_index(
+ removed_container['container'], DATA_VERSIONS)
+ LOGGER.info("Removed container index data in DATASET: %s", index_data)
+ # "new" removed container, add it to DATASET
+ if index_data < 0 or index_data is None:
+ version_object = ComponentVersion(
+ component=removed_container['component'],
+ container=removed_container['container'],
+ image=removed_container['image'],
+ current_version="",
+ other_version={version: removed_container['version']},
+ status=4)
+ DATA_VERSIONS.append(version_object)
+ else:
+ # removed container already seen as removed in another old release
+ # just amend the DATASET to indicate the old versions
+ LOGGER.info(
+ "Update of the dataset for a removed container in master %s",
+ removed_container['container'])
+ index_data = get_data_version_container_index(
+ removed_container['container'], DATA_VERSIONS)
+ local_other_version = DATA_VERSIONS[index_data].other_version
+ local_other_version[version] = get_old_version_component(
+ removed_container['container'], component_versions[version])
+ DATA_VERSIONS[index_data].other_version = local_other_version
+
+# Exclude filebeat dockers
+CLEAN_DATA_VERSIONS = []
+for data in DATA_VERSIONS:
+    # an xfail list could be used here
+ if "filebeat" not in data.image:
+ CLEAN_DATA_VERSIONS.append(data)
+
+#LOGGER.info(str(CLEAN_DATA_VERSIONS))
+
+LOGGER.info("*********************************************************************")
+LOGGER.info("*********************************************************************")
+LOGGER.info("****************** Generate Reporting ***************")
+LOGGER.info("*********************************************************************")
+LOGGER.info("*********************************************************************")
+
+jinja_env = Environment(
+ autoescape=select_autoescape(['html']),
+ loader=FileSystemLoader('./template'))
+jinja_env.get_template('docker-version-tmpl.html').stream(
+ data=CLEAN_DATA_VERSIONS).dump(
+ '{}'.format("index-versions.html"))
diff --git a/doc/generate_gating_index.py b/doc/generate_gating_index.py
new file mode 100644
index 0000000..904844b
--- /dev/null
+++ b/doc/generate_gating_index.py
@@ -0,0 +1,49 @@
+#!/usr/bin/python
+#
+# This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+""" Module to generate Functest reporting for gitlab pages """
+
+import argparse
+import datetime
+import logging
+
+import jinja2
+
+# Logger
+logging.basicConfig()
+LOGGER = logging.getLogger("Gating-Index")
+LOGGER.setLevel("INFO")
+LOGGER.setLevel("DEBUG")
+
+LOGGER.info("generate Xtesting gating index page")
+
+REPORTINGDATE = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
+
+PARSER = argparse.ArgumentParser()
+PARSER.add_argument('-l', '--list', help='patchset list')
+ARGS = PARSER.parse_args()
+
+PATCHSET_LIST = []
+
+# we expect an argument as follows: 12345-1,45678-99,65432-42
+if ARGS.list is not None:
+ PATCHSET_LIST = ARGS.list.split(",")
+
+TEMPLATELOADER = jinja2.FileSystemLoader(".")
+TEMPLATEENV = jinja2.Environment(
+ loader=TEMPLATELOADER, autoescape=True)
+TEMPLATE_FILE = ("./template/index-gating-tmpl.html")
+TEMPLATE = TEMPLATEENV.get_template(TEMPLATE_FILE)
+OUTPUT_TEXT = TEMPLATE.render(
+ patchsets=PATCHSET_LIST,
+ date=REPORTINGDATE)
+
+FILENAME = "./index-gating.html"
+
+with open(FILENAME, "w+") as fh:
+ fh.write(OUTPUT_TEXT)
diff --git a/doc/generate_stability_graphs.py b/doc/generate_stability_graphs.py
new file mode 100644
index 0000000..3b0f433
--- /dev/null
+++ b/doc/generate_stability_graphs.py
@@ -0,0 +1,133 @@
+#!/usr/bin/python
+#
+# This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+""" Module to generate Functest reporting for gitlab pages """
+
+import argparse
+import datetime
+import logging
+import json
+import os
+import re
+import requests
+import lxml
+from bs4 import BeautifulSoup # sudo apt-get install python3-bs4 if pip doesn't work
+from anytree import Node, RenderTree
+import jinja2
+
+PROXY = {}
+# PROXY = {'http': 'socks5h://127.0.0.1:8080',
+# 'https': 'socks5h://127.0.0.1:8080'}
+
+MONTHS = []
+DAYS = []
+BASE_URL ="https://logs.onap.org/onap-integration/daily/"
+END_URL = "daily-scores.json"
+
+
+PARSER = argparse.ArgumentParser()
+PARSER.add_argument('-v', '--onap_version', help='onap version',default='master')
+
+ARGS = PARSER.parse_args()
+
+ci_version = "onap_daily_pod4_" + ARGS.onap_version + "/"
+
+# Logger
+logging.basicConfig()
+LOGGER = logging.getLogger("CI Timeview")
+LOGGER.setLevel("INFO")
+
+def get_months(url):
+ """load and parse list of months"""
+ local_months = []
+ response_months = requests.get(url, proxies=PROXY)
+ soup = BeautifulSoup(response_months.text ,"lxml")
+
+ for link in soup.find_all('a'):
+ pattern = bool(re.match("[0-9]{4}-[0-9]{2}", link.contents[0]))
+ if pattern:
+ local_months.append(link.contents[0])
+ LOGGER.debug(local_months)
+ return local_months
+
+def get_days(url):
+ """load and parse list of days"""
+ local_days = []
+ response_days = requests.get(url, proxies=PROXY)
+ soup = BeautifulSoup(response_days.text ,"lxml")
+
+ for link in soup.find_all('a'):
+ pattern = bool(re.match("[0-9]{2}_[0-9]{2}-[0-9]{2}", link.contents[0]))
+ if pattern:
+ local_days.append(link.contents[0])
+ LOGGER.debug(local_days)
+ return local_days
+
+def get_results_of_a_day(url, month, day):
+ """ get the daily scores for a day"""
+ daily_score = {}
+ response_day = requests.get(url + month + day + END_URL, proxies=PROXY)
+ if response_day.status_code != 200:
+ LOGGER.debug(url + month + day + END_URL + " : does not exist")
+ else:
+ parsed_month = re.match("[0-9]{4}-[0-9]{2}", month).group()
+ parsed_day = re.match("[0-9]{2}", day).group()
+ daily_score = { "date": parsed_month + "-" + parsed_day }
+ json_res = json.loads(response_day.content)
+ for res in json_res:
+ # ugly workaround as one of the key contains a space
+ # which is painful for processing
+ daily_score.update({ res['tier'].replace(" ","-"): res['score'] })
+ return daily_score
+
+# ------------------------------------------------------------------------------
+LOGGER.info("---------------------------------------")
+LOGGER.info("Look for results for %s", ci_version)
+url = BASE_URL + ci_version
+months = get_months(url)
+# NOTE the -3: slice has been set to exclude old results for which we do not
+# have the json. Once applicable, the number shall be -6 to consider the last 6 months
+filtered_months = months[-3:]
+
+LOGGER.info(filtered_months)
+data_scores = []
+for month in filtered_months:
+ for day in get_days(url + "/" + month):
+ if get_results_of_a_day(url, month, day) != {}:
+ data_scores.append(
+ get_results_of_a_day(url, month, day))
+
+# check if local results daily-scores.json can be found
+# if a result already exists for this day do nothing, else add it
+if os.path.isfile('./daily-scores.json'):
+ my_day = datetime.datetime.today()
+ local_day = (str(my_day.year) + "-" + str(my_day.month) + "-" +
+ str(my_day.day))
+ LOGGER.info("Local results found")
+ with open('./daily-scores.json') as json_file:
+ local_res = json.load(json_file)
+ daily_score = {'date': local_day}
+ for res in local_res:
+ daily_score[res['tier'].replace(" ","-")] = res['score']
+ data_scores.append(daily_score)
+
+LOGGER.info("---------------------------------------")
+LOGGER.info("Generate the page %s", ci_version)
+
+TEMPLATELOADER = jinja2.FileSystemLoader(".")
+TEMPLATEENV = jinja2.Environment(
+ loader=TEMPLATELOADER, autoescape=True)
+TEMPLATE_FILE = ("./template/index-stability-tmpl.html")
+TEMPLATE = TEMPLATEENV.get_template(TEMPLATE_FILE)
+OUTPUT_TEXT = TEMPLATE.render(
+ data=data_scores,
+ lab_owner=ci_version[:-1],
+ lab_version=ARGS.onap_version)
+FILENAME = "./index-stability.html"
+with open(FILENAME, "w+") as fh:
+ fh.write(OUTPUT_TEXT)
diff --git a/doc/generate_status.py b/doc/generate_status.py
new file mode 100644
index 0000000..e749e64
--- /dev/null
+++ b/doc/generate_status.py
@@ -0,0 +1,457 @@
+#!/usr/bin/python
+#
+# This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+""" Module to generate Functest reporting for gitlab pages """
+
+import argparse
+import datetime
+import logging
+import os
+
+import jinja2
+import json
+import requests
+
+from prettytable import PrettyTable
+
+# Logger
+logging.basicConfig()
+LOGGER = logging.getLogger("Xtesting-ONAP-Status")
+LOGGER.setLevel("INFO")
+# LOGGER.setLevel("DEBUG")
+
+PROXY = {}
+# PROXY = {'http': 'socks5h://127.0.0.1:8080',
+# 'https': 'socks5h://127.0.0.1:8080'}
+
+# Initialization
+URL_PRIVATE_BASE = "http://onap.api.testresults.opnfv.fr/api/v1/results"
+URL_BASE = "http://testresults.opnfv.org/onap/api/v1/results"
+URL_BASE_PODS = "http://testresults.opnfv.org/onap/api/v1/pods"
+REPORTINGDATE = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
+
+# init just connection_check to get the list of scenarios
+# as all the scenarios run connection_check
+# the following tests are the default daily tests
+INFRA_HEALTHCHECK = {'name': 'infrastructure-healthcheck',
+ 'tests': {'onap-k8s', 'onap-helm',
+ 'onap-k8s-teardown'}}
+# 'onap-k8s-teardown','internal_check_certs'}}
+HEALTHCHECK = {'name': 'healthcheck',
+ 'tests': {'core', 'full',
+ 'healthdist', 'postinstall',
+ 'hv-ves', 'ves-collector',
+ 'basic_onboard', 'dcaemod',
+ 'cps-healthcheck', 'cps-dmi-plugin-healthcheck',
+ 'cps-temporal-healthcheck'}}
+# SMOKE_USECASES = {'name': 'smoke usecases',
+# 'tests': {'basic_vm', 'freeradius_nbi', 'clearwater_ims',
+# 'pnf-registrate', '5gbulkpm', 'hv-ves'}}
+SMOKE_USECASES = {'name': 'smoke usecases',
+ 'tests': {'basic_vm', 'basic_network', 'basic_cnf', 'cmpv2',
+ 'pnf-registrate', '5gbulkpm', 'basic_clamp',
+ 'basic_vm_macro', 'pnf_macro', 'cds_resource_resolution',
+ 'basic_cnf_macro'}}
+
+SECURITY_USECASES = {'name': 'security',
+ 'tests': {'root_pods', 'unlimitted_pods',
+ 'nonssl_endpoints', 'nodeport_check_certs',
+ 'kube_hunter'}}
+
+TIERS = [INFRA_HEALTHCHECK, HEALTHCHECK, SMOKE_USECASES, SECURITY_USECASES]
+TRENDS = [INFRA_HEALTHCHECK, HEALTHCHECK, SMOKE_USECASES, SECURITY_USECASES]
+
+# list of tests with dedicated reporting page to be referenced
+RESULT_URLS_LEGACY = {
+ 'core': './xtesting-healthcheck/core/core/report.html',
+ 'small': './xtesting-healthcheck/small/small/report.html',
+ 'medium': './xtesting-healthcheck/medium/medium/report.html',
+ 'full': './xtesting-healthcheck/full/full/report.html',
+ 'postinstall': './xtesting-healthcheck/postinstall/postinstall/report.html',
+ 'healthdist': './xtesting-healthcheck/healthdist/healthdist/report.html',
+ 'onap-k8s': './infrastructure-healthcheck/k8s/kubernetes-status/index.html',
+ 'onap-k8s-teardown': './infrastructure-healthcheck/k8s-teardown/kubernetes-status/index.html',
+ 'onap-helm': './infrastructure-healthcheck/k8s/onap-helm/helm.html',
+ 'nodeport_check_certs': './infrastructure-healthcheck/k8s/nodeport_check_certs/certificates.html',
+ 'internal_check_certs': './infrastructure-healthcheck/internal_check_certs/internal_check_certs/certificates.html',
+ 'basic_vm': './smoke-usecases/basic_vm/basic_vm/reporting.html',
+ 'basic_vm_macro': './smoke-usecases/basic_vm_macro/basic_vm_macro/reporting.html',
+ 'basic_network': './smoke-usecases/basic_network/basic_network/reporting.html',
+ 'basic_cnf': './smoke-usecases/basic_cnf/basic_cnf/reporting.html',
+ 'basic_cds': './smoke-usecases/basic_cds/basic_cds/reporting.html',
+ 'basic_onboard': './smoke-usecases/basic_onboard/basic_onboard/reporting.html',
+ 'basic_clamp': './smoke-usecases/basic_clamp/basic_clamp/reporting.html',
+ 'pnf_macro': './smoke-usecases/pnf_macro/pnf_macro/reporting.html',
+ 'pnf-registrate': './xtesting-smoke-usecases-robot/pnf-registrate/pnf-registrate/report.html',
+ '5gbulkpm': './xtesting-smoke-usecases-robot/5gbulkpm/5gbulkpm/report.html',
+ 'hv-ves': './xtesting-smoke-usecases-robot/hv-ves/hv-ves/report.html',
+ 'cmpv2': './xtesting-smoke-usecases-robot/cmpv2/cmpv2/report.html',
+ 'dcaemod': './xtesting-smoke-usecases-robot/dcaemod/dcaemod/report.html',
+ 'ves-collector': './xtesting-smoke-usecases-robot/ves-collector/ves-collector/report.html',
+ 'root_pods': './security/root_pods/root_pods/root_pods.log',
+ 'unlimitted_pods': './security/unlimitted_pods/unlimitted_pods/unlimitted_pods.log',
+ 'cis_kubernetes': './security/cis_kubernetes/cis_kubernetes/cis_kubernetes.log',
+ 'nonssl_endpoints': './security/nonssl_endpoints/nonssl_endpoints/nonssl_endpoints.log',
+ 'jdpw_ports': './security/jdpw_ports/jdpw_ports/jdpw_ports.log',
+ 'kube_hunter': './security/kube_hunter/kube_hunter/kube_hunter.log',
+ 'versions': './security/versions/versions.html',
+ 'cps-healthcheck': './xtesting-healthcheck/cps-healthcheck/cps-healthcheck/report.html',
+ 'cds_resource_resolution': './smoke-usecases/cds_resource_resolution/cds_resource_resolution/reporting.html',
+ 'basic_cnf_macro':'./smoke-usecases/basic_cnf_macro/basic_cnf_macro/reporting.html',
+ 'cps-dmi-plugin-healthcheck':'./xtesting-healthcheck/cps-dmi-plugin-healthcheck/cps-dmi-plugin-healthcheck/report.html',
+ 'cps-temporal-healthcheck':'./xtesting-healthcheck/cps-temporal-healthcheck/cps-temporal-healthcheck/report.html'
+ }
+
+# list of tests with dedicated reporting page to be referenced
+RESULT_URLS_S3 = {
+ 'core': './core/core/report.html',
+ 'full': './full/full/report.html',
+ 'postinstall': './postinstall/postinstall/report.html',
+ 'healthdist': './healthdist/healthdist/report.html',
+ 'onap-k8s': './k8s/k8s/kubernetes-status/index.html',
+ 'onap-k8s-teardown': './k8s-teardown/k8s-teardown/kubernetes-status/index.html',
+ 'onap-helm': './k8s/k8s/onap-helm/helm.html',
+ 'nodeport_check_certs': './k8s/k8s/nodeport_check_certs/certificates.html',
+ 'internal_check_certs': './infrastructure-healthcheck/internal_check_certs/internal_check_certs/certificates.html',
+ 'basic_vm': './basic_vm/basic_vm/reporting.html',
+ 'basic_vm_macro': './basic_vm_macro/basic_vm_macro/reporting.html',
+ 'basic_network': './basic_network/basic_network/reporting.html',
+ 'basic_cnf': './basic_cnf/basic_cnf/reporting.html',
+ 'basic_cds': './basic_cds/basic_cds/reporting.html',
+ 'basic_onboard': './basic_onboard/basic_onboard/reporting.html',
+ 'basic_clamp': './basic_clamp/basic_clamp/reporting.html',
+ 'pnf_macro': './pnf_macro/pnf_macro/reporting.html',
+ 'pnf-registrate': './pnf-registrate/pnf-registrate/report.html',
+ '5gbulkpm': './5gbulkpm/5gbulkpm/report.html',
+ 'hv-ves': './hv-ves/hv-ves/report.html',
+ 'cmpv2': './cmpv2/cmpv2/report.html',
+ 'dcaemod': './dcaemod/dcaemod/report.html',
+ 'ves-collector': './ves-collector/ves-collector/report.html',
+ 'root_pods': './root_pods/root_pods/root_pods.log',
+ 'unlimitted_pods': './unlimitted_pods/unlimitted_pods/unlimitted_pods.log',
+ 'cis_kubernetes': './cis_kubernetes/cis_kubernetes/cis_kubernetes.log',
+ 'nonssl_endpoints': './nonssl_endpoints/nonssl_endpoints/nonssl_endpoints.log',
+ 'jdpw_ports': './jdpw_ports/jdpw_ports/jdpw_ports.log',
+ 'kube_hunter': './kube_hunter/kube_hunter/kube_hunter.log',
+ 'versions': './security/versions/versions.html',
+ 'cps-healthcheck': './cps-healthcheck/cps-healthcheck/report.html',
+ 'cds_resource_resolution': './cds_resource_resolution/cds_resource_resolution/reporting.html',
+ 'basic_cnf_macro':'./basic_cnf_macro/basic_cnf_macro/reporting.html',
+ 'cps-dmi-plugin-healthcheck':'./cps-dmi-plugin-healthcheck/cps-dmi-plugin-healthcheck/report.html',
+ 'cps-temporal-healthcheck':'./cps-temporal-healthcheck/cps-temporal-healthcheck/report.html'
+ }
+
+def get_lab_owner(pod_name):
+ url = (URL_BASE_PODS + "?name=" + pod_name)
+ response = requests.get(url, proxies=PROXY)
+ response_json = response.json()
+ try:
+ lab_owner = response_json['pods'][0]['creator']
+ except KeyError:
+ lab_owner = "unknown"
+ except IndexError:
+ lab_owner = "unknown"
+ return lab_owner
+
+# Retrieve the Functest configuration to detect which tests are relevant
+# according to the pod, scenario
+PERIOD = 1
+
+LOGGER.info("generate Xtesting reporting page")
+
+PARSER = argparse.ArgumentParser()
+PARSER.add_argument('-p', '--pod', help='Pod name')
+PARSER.add_argument('-d', '--db', help='Test DB URL')
+PARSER.add_argument('-t', '--build_tag', help='Build_tag')
+PARSER.add_argument('-m', '--mode', help='result retrieval mode', choices=['legacy', 's3'], default='legacy')
+ARGS = PARSER.parse_args()
+
+PODS = ['onap_xtesting_openlab-OPNFV-oom',
+ 'onap_oom_gating_pod4_1-ONAP-oom',
+ 'onap_oom_gating_pod4_2-ONAP-oom',
+ 'onap_oom_gating_pod4_3-ONAP-oom',
+ 'onap_oom_gating_pod4_4-ONAP-oom',
+ 'onap_oom_gating_azure_1-OPNFV-oom',
+ 'onap_oom_gating_azure_2-OPNFV-oom',
+ 'onap_oom_gating_azure_3-OPNFV-oom',
+ 'onap_oom_gating_azure_4-OPNFV-oom',
+ 'onap_daily_pod4_master-ONAP-oom',
+ 'onap_daily_pod4_istanbul-ONAP-oom',
+ 'onap_daily_pod4_jakarta-ONAP-oom']
+
+if ARGS.pod is not None:
+ PODS = [ARGS.pod]
+
+    # adapt tests according to the type of tests: daily/weekly/gating
+ if "weekly" in ARGS.pod:
+ # Complete the list with weekly tests
+ SECURITY_USECASES['tests'].add('versions')
+ SECURITY_USECASES['tests'].add('jdpw_ports')
+ INFRA_HEALTHCHECK['tests'].add('internal_check_certs')
+ PERIOD = 7
+ if "gating" in ARGS.pod:
+ SECURITY_USECASES['tests'].remove('kube_hunter')
+
+    # adapt tests according to the version: guilin / honolulu / master
+ if "guilin" in ARGS.pod:
+ HEALTHCHECK['tests'].remove('dcaemod')
+ HEALTHCHECK['tests'].remove('cps-healthcheck')
+ HEALTHCHECK['tests'].remove('cps-dmi-plugin-healthcheck')
+ HEALTHCHECK['tests'].remove('cps-temporal-healthcheck')
+ SMOKE_USECASES['tests'].remove('basic_clamp')
+ SMOKE_USECASES['tests'].remove('cds_resource_resolution')
+ SMOKE_USECASES['tests'].remove('basic_cnf_macro')
+ if "honolulu" in ARGS.pod:
+ HEALTHCHECK['tests'].remove('cps-healthcheck')
+ HEALTHCHECK['tests'].remove('cps-dmi-plugin-healthcheck')
+ HEALTHCHECK['tests'].remove('cps-temporal-healthcheck')
+ SMOKE_USECASES['tests'].remove('cds_resource_resolution')
+ SMOKE_USECASES['tests'].remove('basic_cnf_macro')
+ if "istanbul" in ARGS.pod:
+ HEALTHCHECK['tests'].remove('cps-dmi-plugin-healthcheck')
+ HEALTHCHECK['tests'].remove('cps-temporal-healthcheck')
+ SMOKE_USECASES['tests'].remove('cds_resource_resolution')
+ SMOKE_USECASES['tests'].remove('basic_cnf_macro')
+ # Exclude Cloudify based use cases in Master (after istanbul)
+ # TO BE updated as it is possible to perform gating on old versions
+ # We should exclude cases according to the version not the pod name
+ if "master" in ARGS.pod or "gating" in ARGS.pod or "jakarta" in ARGS.pod:
+ SMOKE_USECASES['tests'].remove('basic_clamp')
+ SMOKE_USECASES['tests'].remove('cmpv2')
+
+RESULT_URLS = RESULT_URLS_LEGACY
+LOGGER.info("init core result_url: %s", RESULT_URLS['core'])
+if ARGS.mode == "s3":
+ LOGGER.info("use s3 mode for file retrieval")
+ LOGGER.info("intended core result_url: %s", RESULT_URLS_S3['core'])
+ RESULT_URLS = RESULT_URLS_S3
+ LOGGER.info("s3 core result_url: %s", RESULT_URLS['core'])
+
+LOGGER.info("final core result_url: %s", RESULT_URLS['core'])
+LOGGER.info("List of PODS: %s", PODS)
+for pod in PODS:
+ LOGGER.info("POD: %s", pod)
+
+ # Get the version
+ lab_version = "unknown"
+ lab_owner = get_lab_owner(pod)
+ LOGGER.info("Lab owner: %s", lab_owner)
+
+ TREND_LINE = ""
+ SCORE = 0
+
+ # Trend
+ # *****
+ # calculation of the TREND
+ SCORE_TREND = 0
+ if ARGS.db is not None:
+ URL_BASE = str([ARGS.db][0])
+ LOGGER.info("Database: %s", URL_BASE)
+
+ for tier_trend in TRENDS:
+ tier_results = []
+ nb_tests = 0
+ nb_pass = 0
+ nb_fail = 0
+ score = 0
+
+ for test in tier_trend['tests']:
+ project = 'integration'
+ # Security tests affected to security project
+ if tier_trend['name'] == 'security':
+ project = 'security'
+ url = (URL_BASE + "?project_name=" + project + "&case=" + test +
+ "&pod_name=" + pod + "&last=5")
+ response = requests.get(url, proxies=PROXY)
+ response_json = response.json()
+ # Note the 'u' must be used in python 2.7
+ # str(response_json).count("criteria': 'uFAIL")
+ # it shall be removed if using python3
+ nb_fail = nb_fail + str(response_json).count("criteria': 'FAIL")
+ nb_pass = nb_pass + str(response_json).count("criteria': 'PASS")
+ try:
+ score_trend = round(100 * nb_pass / (nb_pass + nb_fail))
+ except ZeroDivisionError:
+ score_trend = 0
+ LOGGER.debug("Score Trend %s: %s", tier_trend, score_trend)
+ tier_trend['score'] = score_trend
+
+ # calculation of the overall SCORE for TREND
+ NB_TIERS = 0
+ for tier_trend in TRENDS:
+ NB_TIERS += 1
+ SCORE_TREND = SCORE_TREND + tier_trend['score']
+ SCORE_TREND = round(SCORE_TREND / NB_TIERS)
+
+ LOGGER.info("Score Trend: %s", str(SCORE_TREND))
+
+ # calculation of the overall SCORE
+ for tier in TIERS:
+ tier_results = []
+ nb_tests = 0
+ nb_pass = 0
+ score = 0
+ for test in tier['tests']:
+ # for Gating we consider the build_tag to retrieve the results
+ # For daily runs, we do not. A build_tag is created based on
+ # gitlab CI id and is different for each CI stage
+ param_build_tag = ""
+ if "gating" in pod and ARGS.build_tag is not None:
+ param_build_tag = "&build_tag=" + str([ARGS.build_tag][0])
+ project = 'integration'
+ # Security tests affected to security project
+ if tier['name'] == 'security':
+ project = 'security'
+
+ # onap-k8s and onap-k8s-teardown are the same test
+ # BUT
+ # onap-k8s is executed after the installation (fresh installation)
+ # onap-k8s-teardown after the tests
+ # in case of tests executed in onap namespace, a test may trigger
+ # an error status even it was OK at the end of the installation
+            # a special ugly processing is then needed to avoid false negatives
+ search_test = test
+ if test == "onap-k8s-teardown":
+ search_test = "onap-k8s"
+
+ nb_test_max = 5
+
+ url = (URL_BASE + "?project_name=" + project +
+ "&case=" + search_test +
+ "&period=" + str(PERIOD) +
+ "&pod_name=" + pod + "&last=" + str(nb_test_max) +
+ param_build_tag)
+ LOGGER.debug("url: %s", url)
+ response = requests.get(url, proxies=PROXY)
+ response_json = response.json()
+ response_url = ""
+
+ if test in RESULT_URLS:
+ response_url = RESULT_URLS[test]
+ LOGGER.debug("response_json: %s", response_json)
+ req_result = ""
+
+ nb_results_found = len(response_json['results'])
+
+ try:
+ if test == "onap-k8s":
+ # We run that test twice (it's failing due to nodeport checks)
+ # so to get the latest result of onap-k8s test (running on startup)
+ # we need to get the 3rd result
+ req_result = response_json['results'][2]['criteria']
+ else:
+ req_result = response_json['results'][0]['criteria']
+
+ if lab_version == "unknown":
+ lab_version = response_json['results'][0]['version']
+
+ except IndexError:
+ req_result = None
+
+ result = {'name': test,
+ 'result': req_result,
+ 'url': response_url}
+ LOGGER.debug("result: %s", result)
+
+ nb_tests += 1
+ if req_result == "PASS":
+ nb_pass += 1
+ LOGGER.debug("nb_pass: %s", nb_pass)
+ LOGGER.debug("nb_tests: %s", nb_tests)
+ score = round(100 * nb_pass / nb_tests)
+ LOGGER.debug("score: %s", score)
+ tier_results.append(result)
+
+ tier['score'] = score
+ tier['results'] = tier_results
+
+ # calculation of the overall SCORE
+ NB_TIERS = 0
+ for tier in TIERS:
+ NB_TIERS += 1
+ LOGGER.debug("Score %s", tier)
+ SCORE = SCORE + tier['score']
+ SCORE = round(SCORE / NB_TIERS)
+ LOGGER.info("Score: %s", str(SCORE))
+
+ # calculation of the evolution score versus trend
+ if SCORE > 1.05*SCORE_TREND:
+ # increasing
+ TREND_LINE = "long arrow alternate up icon"
+ LOGGER.info("Better status")
+ elif SCORE < 0.95*SCORE_TREND:
+ # decreasing
+ TREND_LINE = "long arrow alternate down icon"
+ LOGGER.info("Worst status")
+ else:
+ # stable
+ TREND_LINE = "long arrow alternate right icon"
+ LOGGER.info("stable status")
+
+ TEMPLATELOADER = jinja2.FileSystemLoader(".")
+ TEMPLATEENV = jinja2.Environment(
+ loader=TEMPLATELOADER, autoescape=True)
+ TEMPLATE_FILE = ("./template/index-tmpl.html")
+ TEMPLATE = TEMPLATEENV.get_template(TEMPLATE_FILE)
+ OUTPUT_TEXT = TEMPLATE.render(
+ tiers=TIERS,
+ pod=pod,
+ period=PERIOD,
+ date=REPORTINGDATE,
+ score=SCORE,
+ trend=TREND_LINE,
+ lab_version=lab_version,
+ lab_owner=lab_owner)
+
+ FILENAME = "./index.html"
+
+ with open(FILENAME, "w+") as fh:
+ fh.write(OUTPUT_TEXT)
+
+ # Generate txt reporting with my pretty Table
+ vote=2
+ score_daily = []
+ dashboard_table = PrettyTable()
+ dashboard_table.field_names = ["Test Name", "Category", "Status"]
+ dashboard_table._max_width = {"Test Name" : 30, "Category": 40,"Status" : 10}
+ #print(TIERS)
+ for tier in TIERS:
+ tier_score = {'tier': tier['name'],
+ 'score': tier['score']}
+ score_daily.append(tier_score)
+ for test in tier['results']:
+ if tier['name'] == "infrastructure-healthcheck":
+ if test['name'] == "onap-k8s" or test['name'] == "onap-helm":
+ if test['result'] == "FAIL" or test['result'] == None:
+ vote-=2
+ if tier['name'] == "healthcheck" or tier['name'] == "smoke usecases":
+ if test['result'] == "FAIL" or test['result'] == None:
+ vote-=1
+ dashboard_table.add_row([test['name'],tier['name'], test['result']])
+ if vote < -2:
+ vote = -2
+
+ LOGGER.info(dashboard_table)
+ LOGGER.info("If I could, I would vote " + str(vote))
+ with open("./daily-status.txt", "w") as write_file:
+ write_file.write(str(dashboard_table))
+ write_file.write("\n")
+ write_file.write("**********************\n")
+ write_file.write("* Automated vote: "+ str(vote) +"\n")
+ write_file.write("**********************\n")
+ write_file.close()
+ # Gating vote
+ # Infra HC onap-helm and onap-K8S MUST be OK
+ # HC > 90 only 1 error OK in Full if not critical component
+ # Smoke 1
+
+    # Generate health json to build a time view of the daily dashboard_table
+ # create a json file for version tracking
+
+ with open("./daily-scores.json", "w") as write_file:
+ json.dump(score_daily, write_file)
diff --git a/doc/requirements.txt b/doc/requirements.txt
new file mode 100644
index 0000000..f6c5d07
--- /dev/null
+++ b/doc/requirements.txt
@@ -0,0 +1,7 @@
+jinja2
+requests
+prettytable
+anytree
+deepdiff
+lxml
+bs4
diff --git a/doc/stability_results.json b/doc/stability_results.json
new file mode 100644
index 0000000..4bd6f17
--- /dev/null
+++ b/doc/stability_results.json
@@ -0,0 +1,54 @@
+{
+ "daily":
+ [
+ {
+ "onap_daily_pod4_honolulu":
+ [
+ {
+ "date": "2021-04-06",
+ "infra": 25,
+ "healthcheck":88,
+ "smoke":42,
+ "security": 50
+
+ },
+ {
+ "date": "2021-04-09",
+ "infra": 25,
+ "healthcheck":77,
+ "smoke":14,
+ "security": 50
+
+ },
+ {
+ "date": "2021-04-10",
+ "infra": 25,
+ "healthcheck":88,
+ "smoke":75,
+ "security": 50
+
+ },
+ {
+ "date": "2021-04-11",
+ "infra": 25,
+ "healthcheck":100,
+ "smoke":37,
+ "security": 50
+
+ },
+ {
+ "date": "2021-04-12",
+ "infra": 25,
+ "healthcheck":88,
+ "smoke":37,
+ "security": 50
+
+ }
+
+
+ ]
+ }
+ ]
+
+
+} \ No newline at end of file
diff --git a/doc/template/docker-version-tmpl.html b/doc/template/docker-version-tmpl.html
new file mode 100644
index 0000000..e783f9f
--- /dev/null
+++ b/doc/template/docker-version-tmpl.html
@@ -0,0 +1,409 @@
+<!DOCTYPE html>
+<html>
+<meta charset="UTF-8">
+
+<head>
+ <title>Docker version</title>
+ <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.2.13/semantic.min.css">
+
+</head>
+
+<body>
+ <div class='ui inverted vertical masthead center aligned segment'>
+
+ <div class="ui text container">
+ <h1 class="ui inverted header">
+ Docker version evolution
+ </h1>
+ </div>
+
+ </div>
+
+ <div class='ui vertical stripe segment'>
+ <div class='ui grid container'>
+ <div class="row">
+ <div class="column">
+ <h1 class="ui header">
+ <i class="warehouse icon"></i>
+
+ </h1>
+ <div class="sub header">
+
+ <!-- <div class="ui olive statistic">
+
+ <div class="value">20%</div>
+ <div class="label">last check</div>
+ </div> -->
+ <!-- <div class="ui olive statistic">
+ <div class="value">
+ <i class=""></i>
+ </div>
+ <div class="label">trend</div>
+ </div> -->
+ <div class="ui tiny grey statistic">
+ <div class="value"></div>
+ <div class="label">ONAP Version</div>
+ </div>
+ <div class="ui tiny grey statistic">
+ <div class="value"></div>
+ <div class="label">Lab owner</div>
+ </div>
+ </div>
+ <div class="ui segment">
+ <div class="ui two column very relaxed grid middle aligned">
+ <div class="column">
+ <div class="ui list">
+ <div class="item">
+ <i class=" green big square full icon"></i>
+ <div style="vertical-align: middle" class="content">
+ New version
+ </div>
+ </div>
+ <div class="item">
+ <i class=" yellow big square full icon"></i>
+ <div style="vertical-align: middle" class="content">
+ No version change since last official release
+ </div>
+ </div>
+ <div class="item">
+ <i class=" blue big square full icon"></i>
+ <div style="vertical-align: middle" class="content">
+ New component
+ </div>
+ </div>
+ <div class="item">
+ <i class=" grey big square full icon"></i>
+ <div style="vertical-align: middle" class="content">
+ No version change since at least two official releases
+ </div>
+ </div>
+ <div class="item">
+ <i class=" black big square outline icon"></i>
+ <div style="vertical-align: middle" class="content">
+ Component deleted
+ </div>
+ </div>
+ </div>
+ </div>
+ <div class="center aligned column">
+
+ <div class="row" style="margin-bottom: 15px;">
+ <button id="green" class="ui green button">.</button>
+ <button id="yellow" class="ui yellow button">.</button>
+ <button id="blue" class="ui blue button">.</button>
+ <button id="grey" class="ui grey button">.</button>
+ <button id="black" class="ui black basic button">.</button>
+ <button id="all" class="ui basic button"> all </button>
+ </div>
+ <div class="row">
+ <div class="ui input focus">
+ <input type="text" id="search_input" onkeyup="search()" placeholder="Search...">
+ </div>
+ </div>
+ </div>
+ </div>
+ <div class="ui vertical divider">
+ Filter
+ </div>
+ </div>
+ <table id="version_table" class="ui celled table">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Container</th>
+ <th>Image</th>
+ <th>Current version</th>
+ </tr>
+ </thead>
+ <tbody>
+
+
+ </tbody>
+ </table>
+ </div>
+ </div>
+ </div>
+ </div>
+ <script>
+
+ /*
+    get ONAP versions
+ add as many columns as versions
+ fill the table by adding rows
+
+ */
+ let DATA = [{% for data_item in data %}
+ {
+ "component": "{{ data_item.component }}",
+ "container": "{{ data_item.container }}",
+ "image": "{{ data_item.image }}",
+ "current_version": "{{ data_item.current_version }}",
+ "other_version": {{ data_item.other_version | safe }},
+ "status": "{{ data_item.status }}"
+ },
+ {% endfor %}]
+ // [
+ // {
+ // "component": "aaf-cass",
+ // "container": "aaf-cass",
+ // "image": "nexus3.onap.org:10001/onap/aaf/aaf_cass",
+ // "current_version": "2.1.23",
+ // "other_version": { "honolulu": "2.1.18" },
+ // "status" : 1
+ // },
+ // {
+ // "component": "aai",
+ // "container": " aai",
+ // "image": "docker.nexus.azure.onap.eu/aaionap/haproxy",
+ // "current_version": "1.4.2",
+ // "other_version": { "honolulu": "1.4.2" },
+ // "status" : 0
+ // },
+ // {
+ // "component": "cassandra",
+ // "container": "cassandra",
+ // "image": "docker.nexus.azure.onap.eu/cassandra",
+ // "current_version": "1.0",
+ // "other_version": {},
+ // "status" : 2
+ // },
+ // {
+ // "component": "sdnc",
+ // "container": "sdnc",
+ // "image": "",
+ // "current_version": "",
+ // "other_version": { "honolulu": "2.2" },
+ // "status" : 3
+ // },
+ // {
+ // "component": "onap-awx",
+ // "container": "awx-web",
+ // "image": "docker.nexus.azure.onap.eu/ansible/awx_web",
+ // "current_version": "9.0.1",
+ // "other_version": {
+ // "honolulu": "2.2",
+ // "guilin": "8.2.3",
+ // },
+ // "status" : ""
+ // }
+
+ // ]
+
+
+ const TABLE = document.getElementById("version_table");
+
+ function is_empty(object) {
+ return object && Object.keys(object).length === 0 && object.constructor === Object
+ }
+
+ function add_column_to_table(column_title) {
+ //create a th for title
+ let table = document.querySelector(" #version_table>thead>tr");
+ let new_column = document.createElement("th");
+ let new_title = document.createTextNode(column_title);
+
+ table.appendChild(new_column);
+ new_column.appendChild(new_title);
+
+ //create matching td
+ /* let columns = document.getElementsByTagName("th");
+ let last_column = columns[colums.length - 1];*/
+ }
+
+ let COMPONENTS = [];
+ let CONTAINERS = [];
+ let IMAGES = [];
+ let C_VERSIONS = []; //current versions
+ let O_VERSIONS = []; //other versions
+
+ DATA.forEach(element => {
+ //sort data
+ COMPONENTS.push(element.component);
+ CONTAINERS.push(element.container);
+ IMAGES.push(element.image);
+ C_VERSIONS.push(element.current_version);
+ O_VERSIONS.push(element.other_version)
+
+
+ });
+
+ let columns_to_add = [];
+ O_VERSIONS.forEach(element => {
+
+ if (!(is_empty(element))) {
+
+ // if there are other version
+
+ // console.log(Object.keys(element).length)
+
+ for (let i = 0; i < Object.keys(element).length; i++) {
+
+ columns_to_add.push(Object.keys(element)[i])
+
+ }
+
+
+ }
+
+ })
+ versions = [...new Set(columns_to_add)]; // turn the array into a set and thus remove duplicates
+ //console.log(versions)
+
+ //construct table
+ versions.forEach(element => {
+ add_column_to_table(element)
+ })
+
+ const columns = document.getElementsByTagName("th")
+
+
+ DATA.forEach(element => {
+ //create a row for each element
+ let new_row = TABLE.insertRow(-1);
+ new_row.style.color = "white"
+ switch (element.status) {
+ case "0":
+ new_row.style.backgroundColor = "#FBBD08";
+ break;
+ case "1":
+ new_row.style.backgroundColor = "#21BA45";
+ break;
+ case "2":
+ new_row.style.backgroundColor = "#2185D0";
+ break;
+ case "3":
+ new_row.style.backgroundColor = "grey";
+ break;
+ case "4":
+ new_row.style.color = "black"
+ break;
+ }
+ for (let i = 0; i <= columns.length - 1; i++) {
+
+ // for each row create 4 base cells
+ let cell = new_row.insertCell(-1);
+
+ cell.setAttribute("class", "cell" + i);
+ if (cell.className == "cell0") {
+ let fill = document.createTextNode(element.component)
+ cell.append(fill)
+ } else if (cell.className == "cell1") {
+ let fill = document.createTextNode(element.container)
+ cell.append(fill)
+ } else if (cell.className == "cell2") {
+ let fill = document.createTextNode(element.image)
+ cell.append(fill)
+ } else if (cell.className == "cell" + 3) {
+ let fill = document.createTextNode(element.current_version)
+ cell.append(fill)
+ }
+
+ // add as many cells as versions after the base cells
+ for (let j = 3 + versions.length; j > 3; j--) {
+ let version_item = j - (columns.length - versions.length)
+ if (cell.className == "cell" + j) {
+
+ console.log(version_item)
+ let txt = element.other_version[versions[version_item]];
+ let fill = document.createTextNode(txt)
+ cell.append(fill)
+ version_item++;
+ }
+
+
+ }
+
+ }
+
+ })
+ function filter(color) {
+ console.log(color + ": cliqué")
+ let table = document.getElementById("version_table");
+ let tr = table.getElementsByTagName("tr");
+ for (let i = 1; i < tr.length; i++) {
+ tr[i].style.display = "none";
+ switch (color) {
+ case "green":
+ if (tr[i].style.backgroundColor == "rgb(33, 186, 69)") {
+ tr[i].style.display = "";
+ }
+ break;
+
+ case "yellow":
+
+ if (tr[i].style.backgroundColor == "rgb(251, 189, 8)") {
+ tr[i].style.display = "";
+ }
+ break;
+
+ case "blue":
+
+ if (tr[i].style.backgroundColor == "rgb(33, 133, 208)") {
+ tr[i].style.display = "";
+ }
+ break;
+
+ case "grey":
+
+ if (tr[i].style.backgroundColor == "grey") {
+ tr[i].style.display = "";
+ }
+ break;
+
+ case "black":
+
+ if (tr[i].style.backgroundColor == "") {
+ tr[i].style.display = "";
+ }
+ break;
+
+ case "all":
+ tr[i].style.display = "";
+
+ break;
+ }
+
+ }
+ }
+ let green_button = document.getElementById("green")
+ let yellow_button = document.getElementById("yellow")
+ let blue_button = document.getElementById("blue")
+ let grey_button = document.getElementById("grey")
+ let black_button = document.getElementById("black")
+ let all_button = document.getElementById("all")
+
+
+
+ green_button.addEventListener("click", function () { filter("green") })
+ yellow_button.addEventListener("click", function () { filter("yellow") })
+ blue_button.addEventListener("click", function () { filter("blue") })
+ grey_button.addEventListener("click", function () { filter("grey") })
+ black_button.addEventListener("click", function () { filter("black") })
+ all_button.addEventListener("click", function () { filter("all") })
+
+ function search() {
+
+ var input, filter, table, tr, td, i, txtValue;
+ input = document.getElementById("search_input");
+ filter = input.value.toUpperCase();
+ table = document.getElementById("version_table");
+ tr = table.getElementsByTagName("tr");
+
+ // Loop through all table rows, and hide those who don't match the search query
+ for (i = 0; i < tr.length; i++) {
+ td = tr[i].getElementsByTagName("td")[0];
+ if (td) {
+ txtValue = td.textContent || td.innerText;
+ if (txtValue.toUpperCase().indexOf(filter) > -1) {
+ tr[i].style.display = "";
+ } else {
+ tr[i].style.display = "none";
+ }
+ }
+ }
+ }
+
+ </script>
+</body>
+
+</html> \ No newline at end of file
diff --git a/doc/template/index-gating-tmpl.html b/doc/template/index-gating-tmpl.html
new file mode 100644
index 0000000..0b0f650
--- /dev/null
+++ b/doc/template/index-gating-tmpl.html
@@ -0,0 +1,63 @@
+<!DOCTYPE html>
+<html>
+ <meta charset="UTF-8">
+ <head>
+ <title>Xtesting ONAP Results</title>
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/accordion.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/header.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/icon.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/card.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/container.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/step.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/statistic.min.css">
+ <script
+ src="https://code.jquery.com/jquery-3.1.1.min.js"
+ integrity="sha256-hVVnYaiADRTO2PzUGmuLJr8BLUSjGIZsDYGmIJLv2b8="
+ crossorigin="anonymous"></script>
+ <script src="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/accordion.min.js"></script>
+ <script>$(function(){
+ $('.ui.accordion').accordion();});
+ </script>
+ </head>
+
+ <body>
+ <div class='ui inverted vertical masthead center aligned segment'>
+
+ <div class="ui text container">
+ <i class="large gitlab icon"></i>
+ <h1 class="ui inverted header">
+ OOM Gating: list of patchset
+ </h1>
+ <div class="meta">{{ date }}</div>
+ </div>
+ </div>
+
+ <div class="ui equal width center aligned padded grid">
+ <div class="row">
+ <div class="white column"></div>
+ </div>
+ </div>
+
+ <div class='ui vertical stripe segment'>
+ <div class='ui middle aligned stackable grid container'>
+ <div class="row">
+ <div class="column">
+ <h1 class="ui header">
+ <div class="sub header">
+
+ <div class="ui list center aligned segment">
+ {% for patch in patchsets -%}
+ <div class="item center aligned segment">
+ <div class="content">
+ <a class="link"><a href={{ patch }}/index.html><h2>{{ patch }}</h2></a></a>
+ </div>
+ </div>
+ {%- endfor %}
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </body>
+</html>
diff --git a/doc/template/index-infra.html b/doc/template/index-infra.html
new file mode 100644
index 0000000..a613ac9
--- /dev/null
+++ b/doc/template/index-infra.html
@@ -0,0 +1,46 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <meta charset="UTF-8">
+ <title>Xtesting ONAP Results</title>
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/accordion.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/header.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/icon.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/card.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/container.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/step.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/statistic.min.css">
+ <script
+ src="https://code.jquery.com/jquery-3.1.1.min.js"
+ integrity="sha256-hVVnYaiADRTO2PzUGmuLJr8BLUSjGIZsDYGmIJLv2b8="
+ crossorigin="anonymous"></script>
+ <script src="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/accordion.min.js"></script>
+ <script>$(function(){
+ $('.ui.accordion').accordion();});
+ </script>
+ </head>
+
+<body>
+ <div class='ui inverted vertical masthead center aligned segment'>
+
+ <div class="ui text container">
+ <h1 class="ui inverted header">
+ ONAP Xtesting Results
+ </h1>
+ </div>
+ </div>
+ <center>
+ <div class="ui main text container">
+ <h2 class="ui icon header">
+ <i class="gitlab icon"></i>
+ <div class="content">
+ <a tabindex="0" class="big ui orange basic button" href="./onap_xtesting_openlab/index.html">Openlab</a>
+ <a tabindex="0" class="big ui blue basic button" href="./onap_master_daily_pod4/index.html">Daily Master</a>
+ <a tabindex="0" class="big ui green basic button" href="./onap_casablanca_daily_pod4/index.html">Daily Casablanca</a>
+ </div>
+ </h2>
+ </div>
+ </center>
+</body>
+
+</html>
diff --git a/doc/template/index-stability-tmpl.html b/doc/template/index-stability-tmpl.html
new file mode 100644
index 0000000..073ff26
--- /dev/null
+++ b/doc/template/index-stability-tmpl.html
@@ -0,0 +1,391 @@
+<!DOCTYPE html>
+<html>
+
+<head>
+ <meta charset="UTF-8">
+ <title>Xtesting ONAP Results</title>
+ <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.2.13/semantic.min.css">
+ <style>
+ canvas{
+ margin-bottom: 30px;
+ }
+ </style>
+</head>
+
+<body>
+ <div class='ui inverted vertical masthead center aligned segment'>
+
+ <div class="ui text container">
+ <h1 class="ui inverted header">
+ Stability testing results evolution
+ </h1>
+ </div>
+
+ </div>
+
+ <div class='ui vertical stripe segment'>
+ <div class='ui grid container'>
+ <div class="row">
+ <div class="column">
+ <h1 class="ui header">
+ <i class="warehouse icon"></i>
+
+ </h1>
+ <div class="sub header">
+
+ <!-- <div class="ui olive statistic">
+
+ <div class="value">20%</div>
+ <div class="label">last check</div>
+ </div> -->
+ <!-- <div class="ui olive statistic">
+ <div class="value">
+ <i class="{{ trend }}"></i>
+ </div>
+ <div class="label">trend</div>
+ </div> -->
+ <div class="ui tiny grey statistic">
+ <div class="value">{{ lab_version }}</div>
+ <div class="label">ONAP Version</div>
+ </div>
+ <div class="ui tiny grey statistic">
+ <div class="value">{{ lab_owner }}</div>
+ <div class="label">Lab owner</div>
+ </div>
+ </div>
+
+ </div>
+ </div>
+
+ <div class="ui vertical segment">
+ <div class="ui middle aligned stackable grid container">
+ <div class="row">
+ <div class="eight wide column">
+ <h3 class="ui header center aligned">Infra test results</h3>
+ <div class=" column" style=" height:300px">
+ <canvas id="canvas_infra"></canvas>
+ </div>
+ </div>
+ <div class="eight wide column">
+ <h3 class="ui header center aligned">Healthcheck test results</h3>
+ <div class=" column" style=" height:300px">
+ <canvas id="canvas_healthcheck"></canvas>
+ </div>
+ </div>
+ <div class="eight wide column">
+ <h3 class="ui header center aligned">Smoke test results</h3>
+ <div class=" column" style=" height:300px">
+ <canvas id="canvas_smoke"></canvas>
+ </div>
+ </div>
+ <div class="eight wide column">
+ <h3 class="ui header center aligned">Security test results</h3>
+ <div class=" column" style=" height:300px">
+ <canvas id="canvas_security"></canvas>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+
+
+ <script src="https://cdnjs.cloudflare.com/ajax/libs/Chart.js/3.3.2/chart.min.js"></script>
+ <script>
+
+ let DATA = {{ data |safe }}
+ let TEST_NAMES= ["infrastructure-healthcheck", "healthcheck","smoke-usecases", "security"]
+
+ function get_dates(data) {
+ let dates = []
+ for (const date of data){
+ dates.push(date["date"])
+ }
+ // data.forEach(element => {
+ // dates.push(element.date)
+ // });
+ return dates
+ }
+
+ function get_tests_results(data,test_name){
+ let results = []
+
+ for (const item of data){
+ console.log(item[test_name])
+ results.push(item[test_name])
+
+ }
+
+ return results
+ }
+
+ function trendlineEq(X, Y) {
+ // X : all x coordinates
+ // Y: all y coordinate
+ var x_sum = 0
+ var y_sum = 0
+
+ var N = 0;
+ var A = 0;
+ var B = 0;
+ var C = 0;
+ var D = 0;
+ var E = 0;
+ var F = 0;
+ var m = 0;
+ var b = 0;
+
+ if (X.length == Y.length) {
+
+ var N = X.length
+ console.log("N : " + N)
+
+ } else {
+
+ return console.log(" X and Y must be of the same size")
+ }
+
+ for (let i = 0; i <= N - 1; i++) {
+
+ A = A + (X[i] * Y[i])
+
+
+
+ x_sum = x_sum + X[i]
+ y_sum = y_sum + Y[i]
+
+ C = C + Math.pow(X[i], 2)
+
+ }
+ A = N * A;
+ console.log("A : " + A)
+
+ B = x_sum * y_sum
+ console.log("B : " + B)
+
+ C = N * C
+ console.log("C : " + C)
+
+ D = Math.pow(x_sum, 2)
+ console.log("D : " + D)
+
+ m = (A - B) / (C - D)
+ console.log("m : " + m)
+
+ E = y_sum
+ console.log("E : " + E)
+
+ F = m * x_sum
+ console.log("F : " + F)
+
+ b = (E - F) / N
+ console.log("b : " + b)
+
+ return [m, b]
+ }
+
+ function get_trend_line_data(X, params) {
+ let trend_line_data = []
+ for (let i = 0; i <= X.length - 1; i++) {
+ let y_param = (params[0] * X[i]) + params[1]
+ let coordinates = {
+ x: X[i],
+ y: y_param
+ }
+ trend_line_data.push(coordinates)
+ }
+ return trend_line_data
+ }
+
+ var x_coord = 0
+ var X_COORDS = []
+
+ DATA.forEach(element => {
+ X_COORDS.push(x_coord);
+ x_coord++
+
+ });
+
+ function generate_target_array(data, target) {
+ let target_array = []
+ for (const item of data){
+ target_array.push(target)
+ }
+ // data.forEach(element => {
+ // target_array.push(target)
+ // });
+ return target_array
+ }
+
+ const results_infra = get_tests_results(DATA,TEST_NAMES[0])
+ const results_healthcheck = get_tests_results(DATA,TEST_NAMES[1])
+ const results_smoke = get_tests_results(DATA,TEST_NAMES[2])
+ const results_security = get_tests_results(DATA,TEST_NAMES[3])
+
+ const target_infra = generate_target_array(DATA, 75)
+ const target_healthcheck = generate_target_array(DATA, 100)
+ const target_smoke = generate_target_array(DATA, 100)
+ const target_security = generate_target_array(DATA, 100)
+
+ const params_infra = trendlineEq(X_COORDS, results_infra)
+ const params_healthcheck = trendlineEq(X_COORDS, results_healthcheck)
+ const params_smoke = trendlineEq(X_COORDS, results_smoke)
+ const params_security = trendlineEq(X_COORDS, results_security)
+
+ const trend_line_infra = get_trend_line_data(X_COORDS, params_infra)
+ const trend_line_healthcheck = get_trend_line_data(X_COORDS, params_healthcheck)
+ const trend_line_smoke = get_trend_line_data(X_COORDS, params_smoke)
+ const trend_line_security = get_trend_line_data(X_COORDS, params_security)
+
+ const labels = get_dates(DATA)
+
+ const data_infra = {
+ labels: labels,
+ datasets: [{
+ label: 'Infra results',
+ borderColor: 'rgb(255, 99, 132)',
+ data: results_infra
+ }, {
+ type: 'line',
+ label: 'Target',
+ borderColor: 'rgb(99, 255, 132)',
+ data: target_infra,
+ }, {
+ type: 'line',
+ label: 'trend',
+ borderColor: 'rgb(66, 191, 245)',
+ data: trend_line_infra,
+ }
+ ]
+
+ };
+
+ const data_healthcheck = {
+ labels: labels,
+ datasets: [{
+ label: 'healthcheck results',
+ borderColor: 'rgb(255, 99, 132)',
+ data: results_healthcheck
+ }, {
+ type: 'line',
+ label: 'Target',
+ borderColor: 'rgb(99, 255, 132)',
+ data: target_healthcheck,
+ },{
+ type: 'line',
+ label: 'trend',
+ borderColor: 'rgb(66, 191, 245)',
+ data: trend_line_healthcheck,
+ }
+ ]
+ };
+
+ const data_smoke = {
+ labels: labels,
+ datasets: [{
+ label: 'smoke results',
+ borderColor: 'rgb(255, 99, 132)',
+ data: results_smoke
+ }, {
+ type: 'line',
+ label: 'Target',
+ borderColor: 'rgb(99, 255, 132)',
+ data: target_smoke,
+ },{
+ type: 'line',
+ label: 'trend',
+ borderColor: 'rgb(66, 191, 245)',
+ data: trend_line_smoke,
+ }
+ ]
+ };
+
+ const data_security = {
+ labels: labels,
+ datasets: [{
+ label: 'security results',
+ borderColor: 'rgb(255, 99, 132)',
+ data: results_security
+ }, {
+ type: 'line',
+ label: 'Target',
+ borderColor: 'rgb(99, 255, 132)',
+ data: target_security,
+ },{
+ type: 'line',
+ label: 'trend',
+ borderColor: 'rgb(66, 191, 245)',
+ data: trend_line_security,
+ }
+ ]
+ };
+
+ var options = {
+ responsive: true,
+ maintainAspectRatio: false,
+ scales: {
+ y: {
+ title: {
+ display: true,
+ text: 'Value'
+ },
+ min: 0,
+ max: 100,
+ ticks: {
+ stepSize: 10
+ }
+ }
+ }
+ }
+
+ const config_infra = {
+ type: 'line',
+ data: data_infra,
+ options: options,
+ };
+ const config_healthcheck = {
+ type: 'line',
+ data: data_healthcheck,
+ options: options
+ };
+ const config_smoke = {
+ type: 'line',
+ data: data_smoke,
+ options: options
+ };
+ const config_security = {
+ type: 'line',
+ data: data_security,
+ options: options
+ };
+
+ //charts rendering
+
+
+ var chart_infra = new Chart(
+ document.getElementById('canvas_infra'),
+ config_infra
+ );
+
+
+ var chart_healthcheck = new Chart(
+ document.getElementById('canvas_healthcheck'),
+ config_healthcheck
+ );
+
+ var chart_smoke = new Chart(
+ document.getElementById('canvas_smoke'),
+ config_smoke
+ );
+
+ var chart_security = new Chart(
+ document.getElementById('canvas_security'),
+ config_security
+ );
+
+</script>
+</body>
+
+</html>
diff --git a/doc/template/index-tmpl.html b/doc/template/index-tmpl.html
new file mode 100644
index 0000000..b8918c7
--- /dev/null
+++ b/doc/template/index-tmpl.html
@@ -0,0 +1,203 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <meta charset="UTF-8">
+ <title>Xtesting ONAP Results</title>
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/accordion.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/header.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/icon.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/card.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/container.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/step.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/statistic.min.css">
+ <script
+ src="https://code.jquery.com/jquery-3.1.1.min.js"
+ integrity="sha256-hVVnYaiADRTO2PzUGmuLJr8BLUSjGIZsDYGmIJLv2b8="
+ crossorigin="anonymous"></script>
+ <script src="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/accordion.min.js"></script>
+ <script>$(function(){
+ $('.ui.accordion').accordion();});
+ </script>
+ </head>
+
+ <body>
+
+ <div class='ui vertical stripe segment'>
+ <div class='ui middle aligned stackable grid container'>
+ <div class="row">
+ <div class="column">
+ <h1 class="ui header">
+ <i class="warehouse icon"></i>
+ <div class="content">ONAP Xtesting results</div>
+ <div class="sub header">
+ {% if score > 99 -%}
+ <div class="ui olive statistic">
+ {%- elif score > 75 -%}
+ <div class="ui orange statistic">
+ {%- else -%}
+ <div class="ui red statistic">
+ {%- endif %}
+ <div class="value">{{ score }}%</div>
+ <div class="label">last check</div>
+ </div>
+ <div class="ui olive statistic">
+ <div class="value">
+ <i class="{{ trend }}"></i>
+ </div>
+ <div class="label">trend</div>
+ </div>
+ <div class="ui tiny grey statistic">
+ <div class="value">{{ lab_version }}</div>
+ <div class="label">ONAP Version</div>
+ </div>
+ <div class="ui tiny grey statistic">
+ <div class="value">{{ lab_owner }}</div>
+ <div class="label">Lab owner</div>
+ </div>
+
+ {% if "daily" in pod -%}
+ <div class="ui tiny grey statistic">
+ <a href="./index-versions.html" target="_blank">
+ <i class="tasks icon"></i>
+ <div class="label">Docker Versions</div>
+ </a>
+ </div>
+ <div class="ui tiny grey statistic">
+ <a href="./index-stability.html" target="_blank">
+ <i class="chart line icon"></i>
+ <div class="label">Stability charts</div>
+ </a>
+ </div>
+
+ {%- endif %}
+
+ </div>
+ </div>
+ </h1>
+ <div class="ui cards">
+ {% for tier in tiers -%}
+ <div class="ui card">
+ <div class="content">
+ <div class="header">{{ tier.name }}</div>
+ <div class="meta">{{ date }}</div>
+ <div class="center aligned description">
+ {% if tier.score > 99 -%}
+ <div class="ui green statistic">
+ {%- elif tier.score > 75 -%}
+ <div class="ui orange statistic">
+ {%- else -%}
+ <div class="ui red statistic">
+ {%- endif %}
+ <div class="value">{{ tier.score }}%</div>
+ <div class="label">last check</div>
+ </div>
+ <div class="ui fluid accordion">
+ <div class="title">
+ <div class="dropdown icon"></div>
+ Full results
+ </div>
+ <div class="content">
+ <div class="ui mini vertical steps">
+ {% for result in tier.results -%}
+ <div class="step">
+ {% if 'PASS' == result.result -%}
+ <i class="inverted green check icon"></i>
+ {%- else -%}
+ <i class="inverted red x icon"></i>
+ {%- endif %}
+ <div class="content">
+ {% if result.url != '' -%}
+ <div class="center aligned header"><a href="{{result.url}}">{{result.name}}</a></div>
+ {%- else -%}
+ <div class="center aligned header">{{result.name}}</div>
+ {%- endif %}
+ </div>
+ </div>
+ {%- endfor %}
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ {%- endfor %}
+ {% if "weekly" in pod -%}
+ <div class="ui card">
+ <div class="content">
+ <div class="header">stability</div>
+ <div class="meta">Note: results may be uploaded after several days, be patient..</div>
+ <div class="step">
+ <br>
+ <div class="ui fluid accordion">
+ <div class="title">
+ <div class="dropdown icon"></div>
+ Stability results
+ </div>
+ <div class="content">
+ <div class="ui mini vertical steps">
+ <div class="step">
+ <div class="center aligned header"><a href="./stability/results_instantiation_10_24h/onaptests_bench.html">10 // basic_vm - 24h</a></div>
+ </div>
+ <div class="step">
+ <div class="center aligned header"><a href="./stability/results_sdc_5_24h/onaptests_bench.html">5 // basic_onboard - 24h</a></div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ <div class="ui card">
+ <div class="content">
+ <div class="header">resiliency</div>
+ <div class="meta">Note: results may be uploaded after several days, be patient..</div>
+ <div class="step">
+ <br>
+ <div class="ui fluid accordion">
+ <div class="title">
+ <div class="dropdown icon"></div>
+ Resiliency results
+ </div>
+ <div class="content">
+ <div class="ui mini vertical steps">
+ <div class="step">
+ <div class="center aligned header"><a href="./resiliency/reporting_chaos.html">Resiliency page</a></div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ <div class="ui card">
+ <div class="content">
+ <div class="header">info only</div>
+ <div class="meta">Note: results may be uploaded a day after the page is created, be patient..</div>
+ <div class="step">
+ <br>
+ <div class="ui fluid accordion">
+ <div class="title">
+ <div class="dropdown icon"></div>
+ Results
+ </div>
+ <div class="content">
+ <div class="ui mini vertical steps">
+ <div class="step">
+ <div class="center aligned header"><a href="./legal/tern/index.html">tern - Docker SBoMs</a></div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ {%- endif %}
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </body>
+</html>
diff --git a/doc/template/k8s.html b/doc/template/k8s.html
new file mode 100644
index 0000000..130553f
--- /dev/null
+++ b/doc/template/k8s.html
@@ -0,0 +1,103 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <meta charset="UTF-8">
+ <title>Xtesting ONAP Results</title>
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/accordion.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/header.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/icon.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/card.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/container.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/step.min.css">
+ <link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/statistic.min.css">
+ <script
+ src="https://code.jquery.com/jquery-3.1.1.min.js"
+ integrity="sha256-hVVnYaiADRTO2PzUGmuLJr8BLUSjGIZsDYGmIJLv2b8="
+ crossorigin="anonymous"></script>
+ <script src="https://cdnjs.cloudflare.com/ajax/libs/semantic-ui/2.4.1/components/accordion.min.js"></script>
+ <script>$(function(){
+ $('.ui.accordion').accordion();});
+ </script>
+ </head>
+
+<body>
+ <div class='ui inverted vertical masthead center aligned segment'>
+ <div class="ui text container">
+ <h1 class="ui inverted header">
+ Xtesting ONAP Results for Kubernetes Deployment
+ </h1>
+ </div>
+ </div>
+
+ <div class='ui vertical stripe segment'>
+ <div class='ui middle aligned stackable grid container'>
+ <div class="row">
+ <div class="column">
+ <h2 class="ui header">
+ Summary
+ </h2>
+ <div class="ui statistics">
+ <div class="statistic">
+ <div class="value">
+ 38M
+ </div>
+ <div class="label">
+ Duration
+ </div>
+ </div>
+ <div class="statistic">
+ <div class="value">
+ 108
+ </div>
+ <div class="label">
+ Total Pods Scheduled
+ </div>
+ </div>
+ <div class="statistic">
+ <div class="value">
+ 3
+ </div>
+ <div class="label">
+ Pods Failing
+ </div>
+ </div>
+ </div>
+
+ <h2 class="ui header">
+ Failing pods
+ </h2>
+ <div class="ui list">
+
+ <div class="item">
+ <i class="large inverted middle aligned red x icon"></i>
+ <div class="content">
+ <div class="header">Pod Name</div>
+ <div class="description">Fail Reason</div>
+ </div>
+ </div>
+ <div class="item">
+ <i class="large inverted middle aligned red x icon"></i>
+ <div class="content">
+ <div class="header">Pod Name</div>
+ <div class="description">Fail Reason</div>
+ </div>
+ </div>
+ <div class="item">
+ <i class="large inverted middle aligned red x icon"></i>
+ <div class="content">
+ <div class="header">Pod Name</div>
+ <div class="description">Fail Reason</div>
+ </div>
+ </div>
+ </div>
+ <h2 class="ui header">
+ full logs
+ </h2>
+ <a href="">Access to entire logs</a>
+ </div>
+ </div>
+ </div>
+ </div>
+</body>
+
+</html>
diff --git a/gitlab-ci/base.yml b/gitlab-ci/base.yml
new file mode 100644
index 0000000..3380aff
--- /dev/null
+++ b/gitlab-ci/base.yml
@@ -0,0 +1,768 @@
+---
+stages:
+ - lint
+ - prepare
+ - infrastructure-healthcheck
+ - onap-security
+ - healthcheck-1
+ - healthcheck-2
+ - smoke-usecases-1
+ - smoke-usecases-2
+ - infrastructure-healthcheck-teardown
+ - info-only
+ - onap-stability
+ - onap-resiliency
+ - deploy
+ - downstream
+
+variables:
+ GIT_SUBMODULE_STRATEGY: recursive
+ ANSIBLE_DOCKER_IMAGE: registry.gitlab.com/orange-opensource/lfn/ci_cd/docker_ansible
+ ANSIBLE_DOCKER_TAG: "2.10.6"
+ CHAINED_CI_INIT: scripts/chained-ci-tools/chained-ci-init.sh
+ use_jumphost: "True"
+
+.syntax_checking: &syntax_docker
+ extends: .syntax_checking_tags
+ stage: lint
+ rules:
+ - if: '$CI_PIPELINE_SOURCE == "trigger"'
+ when: never
+ - if: '$CI_PIPELINE_SOURCE == "schedule"'
+ when: never
+ - when: on_success
+
+yaml_linting:
+ image: docker.nexus.azure.onap.eu/sdesbure/yamllint:latest
+ script:
+ - "yamllint \
+ .gitlab-ci.yml"
+ <<: *syntax_docker
+
+ansible_linting:
+ image: docker.nexus.azure.onap.eu/sdesbure/ansible-lint:latest
+ script:
+ - "ansible-lint -x ANSIBLE0010 \
+ roles"
+ <<: *syntax_docker
+
+.runner_tags: &runner_tags
+ image: ${ANSIBLE_DOCKER_IMAGE}:${ANSIBLE_DOCKER_TAG}
+ retry: 1
+ extends: .ansible_run_tags
+
+.security_rules: &security_rules
+ rules:
+ - if: '$PROJECT == "oom" && $CI_PIPELINE_SOURCE == "trigger"'
+ when: always
+ - if: '$PROJECT == null && $CI_PIPELINE_SOURCE == "trigger"'
+ when: always
+ - if: '$PROJECT == "" && $CI_PIPELINE_SOURCE == "trigger"'
+ when: always
+
+.fulldeploy_rules: &fulldeploy_rules
+ rules:
+ - if: '$DEPLOYMENT_TYPE == "full" && $CI_PIPELINE_SOURCE == "trigger"'
+ when: always
+
+.trigger_rules: &trigger_rules
+ rules:
+ - if: '$CI_PIPELINE_SOURCE == "trigger"'
+ when: always
+
+.weekly_rules: &weekly_rules
+ rules:
+ - if: '$pod =~ /^onap_weekly.*/'
+ when: always
+
+.onap_non_master_rules: &onap_non_master_rules
+ rules:
+ - if: '$ONAP_VERSION != "master" && $ONAP_VERSION != "jakarta" && $CI_PIPELINE_SOURCE == "trigger"'
+ when: always
+
+##
+# Generic Jobs
+##
+.get_artifact: &get_artifact
+ before_script:
+ - chmod 700 .
+ - . ./${CHAINED_CI_INIT} -a -i inventory/infra
+ - mkdir -p ./results/${run_type}
+ - ansible-galaxy install -r requirements.yaml
+ after_script:
+ - ./scripts/chained-ci-tools/clean.sh
+
+.prepare: &prepare
+ script:
+ - rm -Rf ./results && mkdir -p ./results
+ - ansible-playbook ${ansible_verbose} ${VAULT_OPT}
+ -i inventory/infra xtesting-jumphost.yaml
+ <<: *get_artifact
+ <<: *runner_tags
+
+.run_healthcheck: &run_healthcheck
+ script:
+ - ansible-playbook ${ansible_verbose} ${VAULT_OPT}
+ -i inventory/infra xtesting-healthcheck.yaml
+ --extra-vars "run_type=${run_type} run_tiers=${run_tiers} run_timeout=${run_timeout}"
+ <<: *get_artifact
+ timeout: 15 minutes
+
+.manage_artifacts: &manage_artifacts
+ artifacts:
+ paths:
+ - results/${run_tiers}/${run_type}/
+ when: always
+
+.run_infrastructure_healthcheck: &run_infrastructure_healthcheck
+ script:
+ - sleep 120
+ - ansible-playbook ${ansible_verbose} ${VAULT_OPT}
+ -i inventory/infra xtesting-healthcheck-k8s.yaml
+ --extra-vars "run_type=${run_type} run_tiers=${run_tiers} run_timeout=${run_timeout}"
+ timeout: 30 minutes
+
+.run_infrastructure_healthcheck_job: &run_infrastructure_healthcheck_job
+ script:
+ - ansible-playbook ${ansible_verbose} ${VAULT_OPT}
+ -i inventory/infra xtesting-healthcheck-k8s-job.yaml
+ --extra-vars "run_type=${run_type} run_tiers=${run_tiers} run_timeout=${run_timeout}"
+ timeout: 360 minutes
+
+.run_onap_vnf: &run_onap_vnf
+ script:
+ - ansible-playbook ${ansible_verbose} ${VAULT_OPT}
+ -i inventory/infra xtesting-onap-vnf.yaml
+ --extra-vars "run_type=${run_type} before_launch_wait_time=${before_launch_wait_time} run_tiers=${run_tiers} vnf_settings=${vnf_settings} run_timeout=${run_timeout}"
+ <<: *get_artifact
+ timeout: 90 minutes
+
+.run_smoke_usecase_robot: &run_smoke_usecase_robot
+ script:
+ - ansible-playbook ${ansible_verbose} ${VAULT_OPT}
+ -i inventory/infra xtesting-healthcheck.yaml
+ --extra-vars "run_type=${run_type} run_tiers=${run_tiers} run_timeout=${run_timeout}"
+ <<: *get_artifact
+ timeout: 30 minutes
+
+.run_onap_security: &run_onap_security
+ script:
+ - ansible-playbook ${ansible_verbose} ${VAULT_OPT}
+ -i inventory/infra xtesting-onap-security.yaml
+ --extra-vars "run_type=${run_type} run_tiers=${run_tiers} run_timeout=${run_timeout}"
+ timeout: 360 minutes
+
+.run_legal_tern: &run_legal_tern
+ script:
+ - ansible-playbook ${ansible_verbose} ${VAULT_OPT}
+ -i inventory/infra legal-tern.yaml
+ --extra-vars "run_type=${run_type} run_tiers=${run_tiers} lf_results_backup=${LF_RESULTS_BACKUP}
+ ci_pipeline_created_at=${CI_PIPELINE_CREATED_AT} "
+
+.run_onap_stability: &run_onap_stability
+ script:
+ - ansible-playbook ${ansible_verbose} ${VAULT_OPT}
+ -i inventory/infra onap-stability.yaml
+ --extra-vars "run_type=${run_type} run_tiers=${run_tiers} lf_results_backup=${LF_RESULTS_BACKUP}
+ ci_pipeline_created_at=${CI_PIPELINE_CREATED_AT} "
+
+.run_onap_resiliency: &run_onap_resiliency
+ script:
+ - ansible-playbook ${ansible_verbose} ${VAULT_OPT}
+ -i inventory/infra onap-chaos-tests.yaml
+ --tags "prepare"
+
+.infrastructure_healthcheck: &infrastructure_healthcheck
+ variables:
+ run_tiers: infrastructure-healthcheck
+ run_type: k8s
+ run_timeout: 1000
+ stage: infrastructure-healthcheck
+ allow_failure: true
+ <<: *get_artifact
+ <<: *run_infrastructure_healthcheck
+ <<: *runner_tags
+ <<: *manage_artifacts
+
+.infrastructure_healthcheck_teardown: &infrastructure_healthcheck_teardown
+ variables:
+ run_tiers: infrastructure-healthcheck
+ run_type: k8s-teardown
+ run_timeout: 700
+ stage: infrastructure-healthcheck-teardown
+ allow_failure: true
+ <<: *get_artifact
+ <<: *run_infrastructure_healthcheck
+ <<: *runner_tags
+ <<: *manage_artifacts
+
+.infrastructure_healthcheck_internal_check_certs: &infrastructure_healthcheck_internal_check_certs
+ variables:
+ run_tiers: infrastructure-healthcheck
+ run_type: internal_check_certs
+ run_timeout: 700
+ stage: infrastructure-healthcheck
+ allow_failure: true
+ <<: *get_artifact
+ <<: *run_infrastructure_healthcheck_job
+ <<: *runner_tags
+ <<: *manage_artifacts
+
+.core: &core
+ variables:
+ run_tiers: xtesting-healthcheck
+ run_type: core
+ run_timeout: 240
+ allow_failure: true
+ stage: healthcheck-1
+ <<: *get_artifact
+ <<: *run_healthcheck
+ <<: *runner_tags
+ <<: *manage_artifacts
+
+.full: &full
+ variables:
+ run_tiers: xtesting-healthcheck
+ run_type: full
+ run_timeout: 600
+ stage: healthcheck-1
+ allow_failure: true
+ <<: *get_artifact
+ <<: *run_healthcheck
+ <<: *runner_tags
+ <<: *manage_artifacts
+
+.healthdist: &healthdist
+ variables:
+ run_tiers: xtesting-healthcheck
+ run_type: healthdist
+ run_timeout: 600
+ stage: healthcheck-2
+ <<: *get_artifact
+ <<: *run_healthcheck
+ <<: *runner_tags
+ <<: *manage_artifacts
+
+.postinstall: &postinstall
+ variables:
+ run_tiers: xtesting-healthcheck
+ run_type: postinstall
+ run_timeout: 600
+ stage: healthcheck-1
+ <<: *get_artifact
+ <<: *runner_tags
+ <<: *run_healthcheck
+ <<: *manage_artifacts
+
+.cps_healthcheck: &cps_healthcheck
+ variables:
+ run_tiers: xtesting-healthcheck
+ run_type: cps-healthcheck
+ run_timeout: 300
+ stage: healthcheck-1
+ <<: *get_artifact
+ <<: *runner_tags
+ <<: *run_healthcheck
+ <<: *manage_artifacts
+
+.cps_temporal_healthcheck: &cps_temporal_healthcheck
+ variables:
+ run_tiers: xtesting-healthcheck
+ run_type: cps-temporal-healthcheck
+ run_timeout: 300
+ stage: healthcheck-1
+ <<: *get_artifact
+ <<: *runner_tags
+ <<: *run_healthcheck
+ <<: *manage_artifacts
+
+.cps_dmi_plugin_healthcheck: &cps_dmi_plugin_healthcheck
+ variables:
+ run_tiers: xtesting-healthcheck
+ run_type: cps-dmi-plugin-healthcheck
+ run_timeout: 300
+ stage: healthcheck-1
+ <<: *get_artifact
+ <<: *runner_tags
+ <<: *run_healthcheck
+ <<: *manage_artifacts
+
+.vnf_basic_vm: &vnf_basic_vm
+ variables:
+ run_tiers: smoke-usecases
+ run_type: basic_vm
+ vnf_settings: onaptests.configuration.basic_vm_settings
+ run_timeout: 2000
+ before_launch_wait_time: 120
+ stage: smoke-usecases-1
+ <<: *get_artifact
+ <<: *run_onap_vnf
+ <<: *runner_tags
+ <<: *manage_artifacts
+
+.vnf_basic_vm_macro: &vnf_basic_vm_macro
+ variables:
+ run_tiers: smoke-usecases
+ run_type: basic_vm_macro
+ vnf_settings: onaptests.configuration.basic_vm_macro_settings
+ run_timeout: 2000
+ before_launch_wait_time: 120
+ stage: smoke-usecases-2
+ <<: *get_artifact
+ <<: *run_onap_vnf
+ <<: *runner_tags
+ <<: *manage_artifacts
+
+.basic_network: &basic_network
+ variables:
+ run_tiers: smoke-usecases
+ run_type: basic_network
+ vnf_settings: onaptests.configuration.basic_network_nomulticloud_settings
+ run_timeout: 1500
+ before_launch_wait_time: 0
+ stage: smoke-usecases-1
+ <<: *get_artifact
+ <<: *run_onap_vnf
+ <<: *runner_tags
+ <<: *manage_artifacts
+
+.basic_cnf: &basic_cnf
+ variables:
+ run_tiers: smoke-usecases
+ run_type: basic_cnf
+ vnf_settings: onaptests.configuration.basic_cnf_yaml_settings
+ run_timeout: 2000
+ before_launch_wait_time: 360
+ stage: smoke-usecases-1
+ <<: *get_artifact
+ <<: *run_onap_vnf
+ <<: *runner_tags
+ <<: *manage_artifacts
+
+# .basic_cds: &basic_cds
+# variables:
+# run_tiers: smoke-usecases
+# run_type: basic_cds
+# vnf_settings: onaptests.configuration.cba_enrichment_settings
+# run_timeout: 1000
+# before_launch_wait_time: 30
+# stage: healthcheck
+# <<: *get_artifact
+# <<: *runner_tags
+# <<: *run_onap_vnf
+# <<: *manage_artifacts
+
+.basic_onboard: &basic_onboard
+ variables:
+ run_tiers: smoke-usecases
+ run_type: basic_onboard
+ vnf_settings: onaptests.configuration.basic_onboard_settings
+ run_timeout: 1200
+ before_launch_wait_time: 120
+ stage: healthcheck-2
+ <<: *get_artifact
+ <<: *runner_tags
+ <<: *run_onap_vnf
+ <<: *manage_artifacts
+
+
+.basic_clamp: &basic_clamp
+ variables:
+ run_tiers: smoke-usecases
+ run_type: basic_clamp
+ vnf_settings: onaptests.configuration.basic_clamp_settings
+ run_timeout: 1200
+ before_launch_wait_time: 120
+ stage: smoke-usecases-2
+ <<: *get_artifact
+ <<: *runner_tags
+ <<: *run_onap_vnf
+ <<: *manage_artifacts
+
+.pnf_macro: &pnf_macro
+ variables:
+ run_tiers: smoke-usecases
+ run_type: pnf_macro
+ vnf_settings: onaptests.configuration.pnf_macro_settings
+ run_timeout: 2400
+ before_launch_wait_time: 180
+ stage: smoke-usecases-2
+ <<: *get_artifact
+ <<: *runner_tags
+ <<: *run_onap_vnf
+ <<: *manage_artifacts
+
+.cds_resource_resolution: &cds_resource_resolution
+ variables:
+ run_tiers: smoke-usecases
+ run_type: cds_resource_resolution
+ vnf_settings: onaptests.configuration.cds_resource_resolution_settings
+ run_timeout: 1000
+ before_launch_wait_time: 180
+ stage: smoke-usecases-1
+ <<: *get_artifact
+ <<: *runner_tags
+ <<: *run_onap_vnf
+ <<: *manage_artifacts
+
+.basic_cnf_macro: &basic_cnf_macro
+ variables:
+ run_tiers: smoke-usecases
+ run_type: basic_cnf_macro
+ vnf_settings: onaptests.configuration.basic_cnf_macro_settings
+ run_timeout: 1000
+ before_launch_wait_time: 180
+ stage: smoke-usecases-2
+ <<: *get_artifact
+ <<: *runner_tags
+ <<: *run_onap_vnf
+ <<: *manage_artifacts
+
+.pnf_registrate: &pnf_registrate
+ variables:
+ run_tiers: xtesting-smoke-usecases-robot
+ run_type: pnf-registrate
+ run_timeout: 1000
+ stage: smoke-usecases-1
+ <<: *get_artifact
+ <<: *runner_tags
+ <<: *run_smoke_usecase_robot
+ <<: *manage_artifacts
+
+.5gbulkpm: &5gbulkpm
+ variables:
+ run_tiers: xtesting-smoke-usecases-robot
+ run_type: 5gbulkpm
+ run_timeout: 1200
+ stage: smoke-usecases-2
+ <<: *get_artifact
+ <<: *runner_tags
+ <<: *run_smoke_usecase_robot
+ <<: *manage_artifacts
+
+.hvves: &hvves
+ variables:
+ run_tiers: xtesting-smoke-usecases-robot
+ run_type: hv-ves
+ run_timeout: 120
+ stage: healthcheck-2
+ <<: *get_artifact
+ <<: *runner_tags
+ <<: *run_smoke_usecase_robot
+ <<: *manage_artifacts
+
+.vescollector: &vescollector
+ variables:
+ run_tiers: xtesting-smoke-usecases-robot
+ run_type: ves-collector
+ run_timeout: 1000
+ stage: healthcheck-2
+ <<: *get_artifact
+ <<: *runner_tags
+ <<: *run_smoke_usecase_robot
+ <<: *manage_artifacts
+
+.cmpv2: &cmpv2
+ variables:
+ run_tiers: xtesting-smoke-usecases-robot
+ run_type: cmpv2
+ run_timeout: 1000
+ stage: smoke-usecases-1
+ <<: *get_artifact
+ <<: *runner_tags
+ <<: *run_smoke_usecase_robot
+ <<: *manage_artifacts
+
+.dcaemod: &dcaemod
+ variables:
+ run_tiers: xtesting-smoke-usecases-robot
+ run_type: dcaemod
+ run_timeout: 1000
+ stage: smoke-usecases-2
+ <<: *get_artifact
+ <<: *runner_tags
+ <<: *run_smoke_usecase_robot
+ <<: *manage_artifacts
+
+.security_root_pods: &security_root_pods
+ variables:
+ run_tiers: security
+ run_type: root_pods
+ run_timeout: 700
+ stage: onap-security
+ <<: *get_artifact
+ <<: *run_onap_security
+ <<: *runner_tags
+ <<: *manage_artifacts
+
+.security_unlimitted_pods: &security_unlimitted_pods
+ variables:
+ run_tiers: security
+ run_type: unlimitted_pods
+ run_timeout: 1000
+ stage: onap-security
+ <<: *get_artifact
+ <<: *run_onap_security
+ <<: *runner_tags
+ <<: *manage_artifacts
+
+# .security_cis_kubernetes: &security_cis_kubernetes
+# variables:
+# run_tiers: security
+# run_type: cis_kubernetes
+# run_timeout: 700
+# stage: onap-security
+# <<: *get_artifact
+# <<: *run_onap_security
+# <<: *runner_tags
+# <<: *manage_artifacts
+
+.security_jdpw_ports: &security_jdpw_ports
+ variables:
+ run_tiers: security
+ run_type: jdpw_ports
+ run_timeout: 700
+ stage: onap-security
+ <<: *get_artifact
+ <<: *run_onap_security
+ <<: *runner_tags
+ <<: *manage_artifacts
+
+.security_kube_hunter: &security_kube_hunter
+ variables:
+ run_tiers: security
+ run_type: kube_hunter
+ run_timeout: 700
+ stage: onap-security
+ <<: *get_artifact
+ <<: *run_onap_security
+ <<: *runner_tags
+ <<: *manage_artifacts
+
+.security_nonssl_endpoints: &security_nonssl_endpoints
+ variables:
+ run_tiers: security
+ run_type: nonssl_endpoints
+ run_timeout: 700
+ stage: onap-security
+ <<: *get_artifact
+ <<: *run_onap_security
+ <<: *runner_tags
+ <<: *manage_artifacts
+
+.security_versions: &security_versions
+ variables:
+ run_tiers: security
+ run_type: versions
+ run_timeout: 3600
+ stage: onap-security
+ <<: *get_artifact
+ <<: *run_onap_security
+ <<: *runner_tags
+ <<: *manage_artifacts
+
+.legal_tern: &legal_tern
+ variables:
+ run_tiers: legal
+ run_type: tern
+ stage: info-only
+ <<: *get_artifact
+ <<: *run_legal_tern
+ <<: *runner_tags
+
+.onap_stability: &onap_stability
+ variables:
+ run_tiers: stability
+ run_type: bench
+ stage: onap-stability
+ <<: *get_artifact
+ <<: *run_onap_stability
+ <<: *runner_tags
+
+.onap_resiliency: &onap_resiliency
+ variables:
+ run_tiers: resiliency
+ run_type: bench
+ stage: onap-resiliency
+ <<: *get_artifact
+ <<: *run_onap_resiliency
+ <<: *runner_tags
+
+# triggered PODs
+prepare:
+ <<: *prepare
+ <<: *trigger_rules
+ stage: prepare
+
+infrastructure_healthcheck:
+ <<: *infrastructure_healthcheck
+ <<: *trigger_rules
+
+core:
+ <<: *core
+ <<: *trigger_rules
+
+full:
+ <<: *full
+ <<: *fulldeploy_rules
+
+healthdist:
+ <<: *healthdist
+ <<: *fulldeploy_rules
+
+postinstall:
+ <<: *postinstall
+ <<: *fulldeploy_rules
+
+cps_healthcheck:
+ <<: *cps_healthcheck
+ <<: *fulldeploy_rules
+
+cps_temporal_healthcheck:
+ <<: *cps_temporal_healthcheck
+ <<: *fulldeploy_rules
+
+cps_dmi_plugin_healthcheck:
+ <<: *cps_dmi_plugin_healthcheck
+ <<: *fulldeploy_rules
+
+vnf_basic_vm:
+ <<: *vnf_basic_vm
+ <<: *trigger_rules
+
+vnf_basic_vm_macro:
+ <<: *vnf_basic_vm_macro
+ <<: *trigger_rules
+
+basic_network:
+ <<: *basic_network
+ <<: *trigger_rules
+
+basic_cnf:
+ <<: *basic_cnf
+ <<: *trigger_rules
+
+# basic_cds:
+# <<: *basic_cds
+# <<: *trigger_rules
+
+basic_onboard:
+ <<: *basic_onboard
+ <<: *trigger_rules
+
+basic_clamp:
+ <<: *basic_clamp
+ <<: *trigger_rules
+ <<: *onap_non_master_rules
+
+pnf_macro:
+ <<: *pnf_macro
+ <<: *trigger_rules
+
+cds_resource_resolution:
+ <<: *cds_resource_resolution
+ <<: *trigger_rules
+
+basic_cnf_macro:
+ <<: *basic_cnf_macro
+ <<: *trigger_rules
+
+pnf_registrate:
+ <<: *pnf_registrate
+ <<: *trigger_rules
+
+5gbulkpm:
+ <<: *5gbulkpm
+ <<: *trigger_rules
+
+vescollector:
+ <<: *vescollector
+ <<: *trigger_rules
+
+hvves:
+ <<: *hvves
+ <<: *trigger_rules
+
+cmpv2:
+ <<: *cmpv2
+ <<: *trigger_rules
+
+dcaemod:
+ <<: *dcaemod
+ <<: *trigger_rules
+
+security_root_pods:
+ <<: *security_root_pods
+ <<: *security_rules
+
+security_unlimitted_pods:
+ <<: *security_unlimitted_pods
+ <<: *security_rules
+
+# security_cis_kubernetes:
+# <<: *security_cis_kubernetes
+# <<: *security_rules
+
+security_jdpw_ports:
+ <<: *security_jdpw_ports
+ <<: *security_rules
+ <<: *weekly_rules
+
+security_kube_hunter:
+ <<: *security_kube_hunter
+ <<: *security_rules
+
+security_nonssl_endpoints:
+ <<: *security_nonssl_endpoints
+ <<: *security_rules
+
+infrastructure_healthcheck_teardown:
+ <<: *infrastructure_healthcheck_teardown
+ <<: *trigger_rules
+
+infrastructure_healthcheck_internal_check_certs:
+ <<: *infrastructure_healthcheck_internal_check_certs
+ <<: *trigger_rules
+ <<: *weekly_rules
+
+security_versions:
+ <<: *security_versions
+ <<: *security_rules
+ <<: *weekly_rules
+
+legal_tern:
+ <<: *legal_tern
+ <<: *weekly_rules
+
+onap_stability:
+ <<: *onap_stability
+ <<: *weekly_rules
+
+onap_resiliency:
+ <<: *onap_resiliency
+ <<: *weekly_rules
+
+pages:
+ stage: deploy
+ <<: *get_artifact
+ <<: *runner_tags
+ script:
+ - if [ -z "$GERRIT_REVIEW" ]; then TARGET_DIR=$pod-$CI_JOB_ID-$(date -d${CI_PIPELINE_CREATED_AT} +'%m-%d-%Y_%H-%M'); else TARGET_DIR=$GERRIT_REVIEW-$GERRIT_PATCHSET;fi
+ - mkdir -p public/$TARGET_DIR
+ - patch_list=$(echo $(find . -regextype posix-extended -regex '^.*[0-9]{5}-[0-9]*') | sed -e "s/ /,/g" -e "s/\.\///g")
+ - ansible-playbook ${ansible_verbose} ${VAULT_OPT}
+ -i inventory/infra ./xtesting-pages.yaml
+ --extra-vars "patch_list=$patch_list"
+ - rsync -avzh --ignore-errors ./doc/ public
+ - mv public/index* public/$TARGET_DIR
+ - mv public/daily-s* public/$TARGET_DIR
+ - rsync -avzh --ignore-errors ./results/ public/$TARGET_DIR
+ - if [ -n "$LF_RESULTS_BACKUP" ];then scripts/push_results_to_lf.sh public/$TARGET_DIR ;fi
+ - if [ -z "${S3_ENDPOINT_URL}" ]; then scripts/output_summary.sh $TARGET_DIR; fi
+ artifacts:
+ paths:
+ - public/
+ expire_in: 1 month
+ rules:
+ - if: '$CI_PIPELINE_SOURCE == "trigger"'
+ when: always
diff --git a/inventory/group_vars/all.yml b/inventory/group_vars/all.yml
new file mode 100644
index 0000000..5a97b59
--- /dev/null
+++ b/inventory/group_vars/all.yml
@@ -0,0 +1,210 @@
+---
+### General ###
+deployment_name: "{{ lookup('env','DEPLOYMENT')| default('oom', true) }}"
+pod: "{{ lookup('env','pod')| default('none', true) }}"
+pod_type: "{{ lookup('env','INFRA_DEPLOYMENT')| default('rancher', true) }}"
+organization: "{{ details.pod_owner | default('OPNFV') }}"
+node_name: "{{ pod }}-{{ organization }}-{{ deployment_name }}"
+build_tag: "{{ ci_name }}-functest-{{ pod_type }}-\
+ {{ pod_deployment }}-{{ deploy_frequency }}-\
+ {{ onap_version }}-{{ id_review }}-{{ id_patchset }}"
+scenario: "{{ lookup('env','DEPLOY_SCENARIO') |
+ default('onap-ftw', true) }}"
+project: "{{ lookup('env','PROJECT') | default('oom', true) }}"
+pod_deployment: baremetal
+ci_name: gitlab_ci
+deploy_scenario: onap-nofeature-noha
+pipe_id: "{{ lookup('env','CI_PIPELINE_ID') | default('64', true) }}"
+id_review: "{{ lookup('env','GERRIT_REVIEW') | default(pipe_id, true) }}"
+id_patchset: "{{ lookup('env','GERRIT_PATCHSET') |
+ default('onap', true) }}"
+
+test_result_url: "{{ lookup('env','TEST_RESULT_DB_URL')|
+ default('http://testresults.opnfv.org/onap/api/v1/results', true) }}"
+
+onap_version: "{{ lookup('env','ONAP_VERSION') |
+ default('master', true) }}"
+
+onap_namespace: "{{ lookup('env','ONAP_NAMESPACE') |
+ default('onap', true) }}"
+
+os_test_cloud: "{{ lookup('env','OS_TEST_CLOUD') |
+ default( openstack_tenant_name, true) }}"
+
+deploy_frequency: "{{ lookup('env','FREQUENCY') | default('daily', true) }}"
+
+experimental: "{{ ((lookup('env','EXPERIMENTAL') | default('', true )) != '') |
+ ternary('-experimental', '') }}"
+
+date_unique: "{{ ansible_date_time.year ~ '-' ~ ansible_date_time.month ~ '-' ~
+ansible_date_time.day ~ '-' ~ pipe_id }}"
+
+helmv3_use_sql: "{{ lookup('env','HELM3_USE_SQL') |
+ default(False, true) }}"
+
+random_wait: "{{ lookup('env','RANDOM_WAIT') |
+ default(False, true) }}"
+
+# robot_configmap:
+# "{{ (onap_version == 'elalto') | ternary('robot-robot-eteshare-configmap', 'robot-eteshare-configmap') }}"
+
+robot_configmap: robot-eteshare-configmap
+
+deployment_type: "{{ lookup('env','DEPLOYMENT_TYPE') |
+ default('core', true) }}"
+
+use_ingress: "{{ lookup('env', 'INGRESS') | default(False, true) }}"
+
+global_local_path: "/tmp/xtesting"
+exec_local_path: "{{ global_local_path }}/{{ run_tiers }}/{{ run_type }}"
+
+k8s_job__dir_path: "{{ ansible_user_dir }}/oom/{{ onap_version }}"
+res_local_path: "/dockerdata-nfs/{{ onap_namespace }}/integration"
+helm2_deploy_logs_path: "{{
+ ansible_user_dir }}/.helm/plugins/deploy/cache/onap/logs"
+helm3_deploy_logs_path: "{{
+ ansible_user_dir }}/.local/share/helm/plugins/deploy/cache/onap/logs"
+
+# Python and system packages
+pip_packages:
+ - docker
+ - openshift==0.11.2
+
+pip_packages_pages:
+ - requests
+ - Jinja2
+ - pysocks
+ - prettytable
+ - anytree
+ - lxml
+ - beautifulsoup4
+ - deepdiff
+
+apt_packages:
+ - apt-transport-https
+ - ca-certificates
+ - curl
+ - software-properties-common
+ - nfs-common
+ - python3-pip
+
+#docker_registry: registry.gitlab.com/orange-opensource/lfn/onap/integration/xtesting
+docker_registry: nexus3.onap.org:10003/onap
+
+# Robot healthcheck
+testing_container: "{{ docker_registry }}/{{ run_tiers }}"
+
+testing_container_tag: "{{ onap_version }}"
+
+job_file: healthcheck.yaml
+
+# VNF
+openstack_public_net_id: "{{ lookup('env','EXT_NET') |
+ default('admin', true) }}"
+
+is_pythonsdk_tests_gating: "{{ (project == 'testsuite/pythonsdk-tests') | ternary(true, false) }}"
+docker_base_name: xtesting
+docker_vnf_name: "{{ docker_base_name }}-{{ onap_namespace }}-vnf-{{ run_type }}"
+docker_vnf_image: "{{ is_pythonsdk_tests_gating | ternary('registry.gitlab.com/orange-opensource/lfn/onap/build-integration', docker_registry + '/xtesting-smoke-usecases-pythonsdk') }}"
+#docker_vnf_version: "{{ is_pythonsdk_tests_gating | ternary(id_review + '-' + id_patchset, 'master') }}"
+docker_vnf_version: "{{ is_pythonsdk_tests_gating | ternary(id_review + '-' + id_patchset, testing_container_tag) }}"
+
+clouds_path: .config/openstack/clouds.yaml
+kube_conf_file: config
+kube_conf_path: ".kube/{{ kube_conf_file }}"
+
+onaptests_path: /usr/lib/python3.8/site-packages/onaptests
+#onaptests_path: /src/onaptests/src/onaptests
+
+volumes:
+ - "{{ exec_local_path }}/env:/var/lib/xtesting/conf/env_file"
+ - "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}:/var/lib/xtesting/results"
+ - "{{ exec_local_path }}/settings.py:{{ onaptests_path }}/configuration/settings.py"
+ - "{{ ansible_user_dir }}/{{ clouds_path }}:/root/{{ clouds_path }}"
+ - "{{ ansible_user_dir }}/{{ kube_conf_path }}:{{ onaptests_path }}/templates/artifacts/{{ kube_conf_file }}"
+ - "{{ global_local_path }}/{{ kube_conf_file }}:/root/{{ kube_conf_path }}"
+ - "{{ exec_local_path }}/basic_vm-service.yaml:{{ onaptests_path }}/templates/vnf-services/basic_vm-service.yaml"
+ - "{{ exec_local_path }}/basic_vm_macro-service.yaml:{{ onaptests_path }}/templates/vnf-services/basic_vm_macro-service.yaml"
+
+k8sRegionID: k8sregion
+datacenter_id: cruguil
+cnf_namespace: "{{ lookup('env','CNF_NAMESPACE') |
+ default('k8s', true) }}"
+
+tests_list: "{{ lookup('env','tests_list')| default('all', true) }}"
+
+postgres_namespace: helm
+postgres_svc: postgres
+minio:
+ fqdn: minio.minio
+
+# To be removed => use directly the k8s alias
+etc_hosts: >
+ {
+ "{{ use_ingress | ternary('portal-ui.simpledemo.onap.org', 'portal.api.simpledemo.onap.org') }}": "{{ oom_cluster_ip }}",
+ "{{ use_ingress | ternary('vid-ui.simpledemo.onap.org', 'vid.api.simpledemo.onap.org') }}": "{{ oom_cluster_ip }}",
+ "{{ use_ingress | ternary('sdc-fe-ui.simpledemo.onap.org', 'sdc.api.fe.simpledemo.onap.org') }}": "{{ oom_cluster_ip }}",
+ "{{ use_ingress | ternary('sdc-be-api.simpledemo.onap.org', 'sdc.api.be.simpledemo.onap.org') }}": "{{ oom_cluster_ip }}",
+ "{{ use_ingress | ternary('aai-api.simpledemo.onap.org', 'aai.api.simpledemo.onap.org') }}": "{{ oom_cluster_ip }}",
+ "{{ use_ingress | ternary('aai-api.simpledemo.onap.org', 'aai.api.simpledemo.onap.org') }}": "{{ oom_cluster_ip }}",
+ "{{ use_ingress | ternary('aai-sparkybe-api.simpledemo.onap.org', 'aai.api.sparky.simpledemo.onap.org') }}": "{{ oom_cluster_ip }}",
+ "{{ use_ingress | ternary('so-api.simpledemo.onap.org', 'so.api.simpledemo.onap.org') }}": "{{ oom_cluster_ip }}",
+ "{{ use_ingress | ternary('appc-api.simpledemo.onap.org', 'appc.api.simpledemo.onap.org') }}": "{{ oom_cluster_ip }}",
+ "{{ use_ingress | ternary('sdnc-api.simpledemo.onap.org', 'sdnc.api.simpledemo.onap.org') }}": "{{ oom_cluster_ip }}",
+ "{{ use_ingress | ternary('nbi-api.simpledemo.onap.org', 'nbi.api.simpledemo.onap.org') }}": "{{ oom_cluster_ip }}",
+ "{{ use_ingress | ternary('msb-iag-ui.simpledemo.onap.org', 'msb.api.simpledemo.onap.org') }}": "{{ oom_cluster_ip }}",
+ "{{ use_ingress | ternary('dmaap-mr-api.simpledemo.onap.org', 'mr.api.simpledemo.onap.org') }}": "{{ oom_cluster_ip }}",
+ "{{ use_ingress | ternary('policy-ui.simpledemo.onap.org', 'clamp.api.simpledemo.onap.org') }}": "{{ oom_cluster_ip }}",
+ "{{ use_ingress | ternary('cds-blueprintsprocessor-api.simpledemo.onap.org', 'cds-blueprintsprocessor.api.simpledemo.onap.org') }}": "{{ oom_cluster_ip }}",
+ "{{ use_ingress | ternary('dcae-ves-collector-api.simpledemo.onap.org', 'ves-collector.api.simpledemo.onap.org') }}": "{{ oom_cluster_ip }}",
+ "{{ postgres_svc }}.{{ postgres_namespace }}": "{{ oom_cluster_ip }}",
+ "{{ minio.fqdn }}": "{{ oom_cluster_ip }}",
+ }
+
+dcae_collector_ip: 10.43.207.101
+
+# k8s healthcheck
+docker_healthcheck_k8s_name: "{{ docker_base_name }}-{{ onap_namespace }}-healthcheck-k8s"
+docker_health_k8s_image: "{{ docker_registry }}/xtesting-infra-healthcheck"
+docker_health_k8s_version: "{{ onap_version }}"
+
+volumes_security:
+ - "{{ ansible_user_dir }}/{{ kube_conf_path }}:/root/{{ kube_conf_path }}"
+ - "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}:/var/lib/xtesting/results"
+
+volumes_healthcheck_k8s: "{{ volumes_security +
+ [helm_deploy_logs_path ~ ':/onap_helm_logs'] }}"
+
+# onap-security
+docker_onap_security_name: "{{ docker_base_name }}-{{ onap_namespace }}-security-{{ run_type }}"
+docker_onap_security_image: "{{ docker_registry }}/xtesting-security"
+docker_onap_security_version: "{{ onap_version }}"
+# ONAP IP used for nodeport cert verification
+onap_ip: "{{ oom_cluster_ip }}"
+
+# Doc & pages
+doc_path: doc
+doc_dir_target: /tmp
+
+so_url: http://so.api.simpledemo.onap.org
+
+
+is_a_gating: "{{ (lookup('env','GERRIT_REVIEW') | default('', true )) != '' }}"
+gating_identifier: "{{ id_review ~ '-' ~ id_patchset ~ '-' ~ pipe_id }}"
+scheduled_identifier: "{{ pod ~ '/' ~ date_unique }}"
+
+# S3 Integration
+s3_endpoint: "{{ lookup('env','S3_ENDPOINT_URL') | default('', true) }}"
+s3_internal_url: "{{ lookup('env','S3_INTERNAL_ENDPOINT_URL') | default(s3_endpoint, true) }}"
+s3_http_url_endpoint: "{{ lookup('env','S3_HTTP_DST_URL') | default('', true) }}"
+use_s3: "{{ s3_endpoint is defined and s3_endpoint != '' }}"
+test_type: "{{ (is_a_gating | bool |
+ ternary('gating', deploy_frequency)) ~ experimental }}"
+identifier: "{{ is_a_gating | bool |
+ ternary(gating_identifier, scheduled_identifier) }}"
+s3_raw_dst: "onap/{{ test_type }}/{{ identifier }}/{{ run_type }}"
+s3_dst: "s3://{{ s3_raw_dst }}"
+s3_access_key: "{{ lookup('env','S3_ACCESS_KEY') }}"
+s3_secret_key: "{{ lookup('env','S3_SECRET_KEY') }}"
+
+lf_it_nexus_pwd: "{{ lookup('env','LF_RESULTS_BACKUP') | default('', true) }}"
diff --git a/legal-tern.yaml b/legal-tern.yaml
new file mode 100644
index 0000000..a96d42c
--- /dev/null
+++ b/legal-tern.yaml
@@ -0,0 +1,7 @@
+---
+- hosts: kube-master
+ vars_files:
+ - inventory/group_vars/all.yml
+ run_once: "yes"
+ roles:
+ - legal-tern
diff --git a/onap-chaos-tests.yaml b/onap-chaos-tests.yaml
new file mode 100644
index 0000000..28fe5a4
--- /dev/null
+++ b/onap-chaos-tests.yaml
@@ -0,0 +1,7 @@
+---
+- hosts: kube-master
+ vars_files:
+ - inventory/group_vars/all.yml
+ run_once: "yes"
+ roles:
+ - onap-chaos-tests
diff --git a/onap-stability.yaml b/onap-stability.yaml
new file mode 100644
index 0000000..0ff6fff
--- /dev/null
+++ b/onap-stability.yaml
@@ -0,0 +1,7 @@
+---
+- hosts: kube-master
+ vars_files:
+ - inventory/group_vars/all.yml
+ run_once: "yes"
+ roles:
+ - onap-stability-tests
diff --git a/requirements.yaml b/requirements.yaml
new file mode 100644
index 0000000..4fed125
--- /dev/null
+++ b/requirements.yaml
@@ -0,0 +1,11 @@
+---
+collections:
+ - name: ansible.posix
+ source: https://galaxy.ansible.com
+ version: 1.1.1
+ - name: community.general
+ source: https://galaxy.ansible.com
+ version: 1.3.0
+ - name: community.kubernetes
+ source: https://galaxy.ansible.com
+ version: 1.0.0 \ No newline at end of file
diff --git a/roles/legal-tern/tasks/main.yaml b/roles/legal-tern/tasks/main.yaml
new file mode 100644
index 0000000..534672f
--- /dev/null
+++ b/roles/legal-tern/tasks/main.yaml
@@ -0,0 +1,90 @@
+---
+- name: "Install tern prerequisites"
+ become: yes
+ ansible.builtin.apt:
+ name:
+ - attr
+ - fuse-overlayfs
+ - python3-venv
+ - jq
+ state: latest
+
+- name: "Install pip dependencies"
+ become: yes
+ ansible.builtin.pip:
+ name:
+ - wheel
+ - lftools
+ state: latest
+
+- name: "Set variables for tern run"
+ ansible.builtin.set_fact:
+ tern_output: "/tmp/tern/archives/{{ run_tiers }}/{{ run_type }}"
+ tern_archives: "/tmp/tern"
+ tern_k8_namespace: "onap"
+
+- name: "Delete directory with/for results"
+ become: yes
+ ansible.builtin.file:
+ path: "{{ tern_output }}"
+ state: absent
+
+- name: "Copy tern script"
+ ansible.builtin.copy:
+ src: scripts/run_tern.sh
+ dest: "{{ ansible_user_dir }}/run_tern.sh"
+ mode: '500'
+
+- name: "Copy netrc for lftool"
+ ansible.builtin.template:
+ src: .netrc.j2
+ dest: "{{ ansible_user_dir }}/.netrc"
+ mode: 0600
+
+- name: "Create directory for results"
+ become: yes
+ ansible.builtin.file:
+ path: "{{ tern_output }}"
+ state: directory
+ mode: '0700'
+ recurse: yes
+ owner: "{{ ansible_user }}"
+
+- name: "Add fuse group"
+ become: yes
+ ansible.builtin.group:
+ name: "fuse"
+ state: present
+ system: yes
+
+- name: "Add user to fuse group"
+ become: yes
+ ansible.builtin.user:
+ append: yes
+ groups: fuse
+ user: "{{ ansible_user }}"
+
+- name: "Launch tern analysis & push artifacts"
+ become: no
+ ansible.builtin.shell:
+ cmd: "{{ ansible_user_dir }}/run_tern.sh > {{ tern_output }}/run_tern.log"
+ chdir: "{{ tern_output }}"
+ environment:
+ LF_RESULTS_BACKUP: '{{ lf_results_backup }}'
+ POD: '{{ pod }}'
+ CI_PIPELINE_CREATED_AT: '{{ ci_pipeline_created_at }}'
+ TERN_LOCATION: '{{ ansible_user_dir }}'
+ K8NAMESPACE: '{{ tern_k8_namespace }}'
+ ARCHIVES_LOCATION: '{{ tern_archives }}'
+ async: 259200 # 60*60*24*3 = 3 days
+ poll: 0 # don't wait for it
+ register: tern_analysis
+
+- name: "Check if tern analysis is running"
+ become: no
+ async_status:
+ jid: "{{ tern_analysis.ansible_job_id }}"
+ register: tern_result
+ until: tern_result.started
+ retries: 10
+ delay: 10
diff --git a/roles/legal-tern/templates/.netrc.j2 b/roles/legal-tern/templates/.netrc.j2
new file mode 100644
index 0000000..e4c22e3
--- /dev/null
+++ b/roles/legal-tern/templates/.netrc.j2
@@ -0,0 +1,3 @@
+machine nexus.onap.org
+login onap-integration
+password {{ lf_it_nexus_pwd }}
diff --git a/roles/onap-chaos-tests/tasks/cassandra.yaml b/roles/onap-chaos-tests/tasks/cassandra.yaml
new file mode 100644
index 0000000..ed97d53
--- /dev/null
+++ b/roles/onap-chaos-tests/tasks/cassandra.yaml
@@ -0,0 +1 @@
+---
diff --git a/roles/onap-chaos-tests/tasks/kafka.yaml b/roles/onap-chaos-tests/tasks/kafka.yaml
new file mode 100644
index 0000000..ed97d53
--- /dev/null
+++ b/roles/onap-chaos-tests/tasks/kafka.yaml
@@ -0,0 +1 @@
+---
diff --git a/roles/onap-chaos-tests/tasks/main.yaml b/roles/onap-chaos-tests/tasks/main.yaml
new file mode 100644
index 0000000..2fc48d2
--- /dev/null
+++ b/roles/onap-chaos-tests/tasks/main.yaml
@@ -0,0 +1,22 @@
+---
+# - name: Check the chaos target is defined
+# ansible.builtin.fail:
+# msg: "You must specify a chaos target (node_drain,...)"
+# when: experiment_name is not defined
+# tags: init_check
+
+- import_tasks: prepare.yaml
+ tags: prepare
+
+- import_tasks: node-drain.yaml
+ tags: node-drain
+
+- import_tasks: node-cpu-hog.yaml
+ tags: node-cpu-hog
+
+- import_tasks: node-memory-hog.yaml
+ tags: node-memory-hog
+
+- import_tasks: pod-delete-aai.yaml
+ tags: aai
+
diff --git a/roles/onap-chaos-tests/tasks/node-cpu-hog.yaml b/roles/onap-chaos-tests/tasks/node-cpu-hog.yaml
new file mode 100644
index 0000000..c70f339
--- /dev/null
+++ b/roles/onap-chaos-tests/tasks/node-cpu-hog.yaml
@@ -0,0 +1,86 @@
+---
+- name: create directory for cpu hog scenario
+ ansible.builtin.file:
+ path: /tmp/resiliency/node-cpu-hog
+ state: directory
+ mode: '0755'
+ tags: prepare
+
+- name: Get compute node list
+ community.kubernetes.k8s_info:
+ kind: Node
+ label_selectors: "! node-role.kubernetes.io/master"
+ register: kubernetes_computer_node_list
+ tags: prepare
+
+- name: Set Fact first compute node Internal IP
+ ansible.builtin.set_fact:
+ first_node_ip: "{{ item.address }}"
+ when: "'Hostname' in item.type"
+ with_items:
+ "{{ (kubernetes_computer_node_list.resources | first).status.addresses }}"
+ tags: prepare
+
+- name: Set Compute for the node cpu hog
+ ansible.builtin.set_fact:
+ compute_chaos: "{{ first_node_ip }}"
+ when: compute_chaos is not defined
+ tags: prepare
+
+- name: Prepare rbac file for node cpu hog experiment
+ ansible.builtin.template:
+ src: node-cpu-hog-rbac.yaml.j2
+ dest: /tmp/resiliency/node-cpu-hog/node-cpu-hog-rbac.yaml
+ mode: 0600
+ tags: prepare
+
+- name: Prepare chaos file for node cpu hog experiment
+ ansible.builtin.template:
+ src: node-cpu-hog-chaos.yaml.j2
+ dest: /tmp/resiliency/node-cpu-hog/node-cpu-hog-chaos.yaml
+ mode: 0600
+ tags: prepare
+
+- name: Apply node cpu hog rbac
+ community.kubernetes.k8s:
+ state: present
+ src: /tmp/resiliency/node-cpu-hog/node-cpu-hog-rbac.yaml
+
+- name: Apply node cpu hog experiment
+ community.kubernetes.k8s:
+ state: present
+ src: /tmp/resiliency/node-cpu-hog/node-cpu-hog-chaos.yaml
+
+- name: wait for the end of chaos
+ run_once: true
+ community.kubernetes.k8s_info:
+ kind: ChaosEngine
+ api_version: litmuschaos.io/v1alpha1
+ name: onap-chaos-cpu
+ namespace: "{{ onap_namespace }}"
+ register: chaosengine_status
+ changed_when:
+ &chaos_test chaosengine_status is defined and
+ chaosengine_status.resources[0].status.engineStatus == "completed"
+ until: *chaos_test
+ retries: 5
+ delay: 120
+
+- name: get results
+ run_once: true
+ community.kubernetes.k8s_info:
+ kind: ChaosResult
+ api_version: litmuschaos.io/v1alpha1
+ name: onap-chaos-cpu-node-cpu-hog
+ namespace: "{{ onap_namespace }}"
+ register: chaosresult_drain
+ changed_when:
+ &chaos_result chaosresult_drain is defined and
+ (chaosresult_drain.resources[0].status.experimentStatus.verdict == "Pass" or
+ chaosresult_drain.resources[0].status.experimentStatus.verdict == "Fail" )
+ until: *chaos_result
+ retries: 5
+ delay: 20
+- name: Print the chaos result verdict
+ ansible.builtin.debug:
+ msg: " The test is {{ chaosresult_drain.resources[0].status.experimentStatus.verdict}}-ed"
diff --git a/roles/onap-chaos-tests/tasks/node-drain.yaml b/roles/onap-chaos-tests/tasks/node-drain.yaml
new file mode 100644
index 0000000..84d53c4
--- /dev/null
+++ b/roles/onap-chaos-tests/tasks/node-drain.yaml
@@ -0,0 +1,105 @@
+---
+- name: create directory for drain scenario
+ ansible.builtin.file:
+ path: /tmp/resiliency/node-drain
+ state: directory
+ mode: '0755'
+ tags: prepare
+
+- name: Get compute node list
+ community.kubernetes.k8s_info:
+ kind: Node
+ label_selectors: "! node-role.kubernetes.io/master"
+ register: kubernetes_computer_node_list
+ tags: prepare
+
+- name: Set Fact first compute node Internal IP
+ ansible.builtin.set_fact:
+ first_node_ip: "{{ item.address }}"
+ when: "'Hostname' in item.type"
+ with_items:
+ "{{ (kubernetes_computer_node_list.resources | first).status.addresses }}"
+ tags: prepare
+
+- name: Set Compute for the drain chaos
+ ansible.builtin.set_fact:
+ compute_chaos: "{{ first_node_ip }}"
+ when: compute_chaos is not defined
+ tags: prepare
+
+- name: Prepare rbac file for drain experiment
+ ansible.builtin.template:
+ src: node-drain-rbac.yaml.j2
+ dest: /tmp/resiliency/node-drain/node-drain-rbac.yaml
+ mode: 0600
+ tags: prepare
+
+- name: Prepare chaos file for drain experiment
+ ansible.builtin.template:
+ src: node-drain-chaos.yaml.j2
+ dest: /tmp/resiliency/node-drain/node-drain-chaos.yaml
+ mode: 0600
+ tags: prepare
+
+- name: Apply drain rbac
+ community.kubernetes.k8s:
+ state: present
+ src: /tmp/resiliency/node-drain/node-drain-rbac.yaml
+ tags: apply
+
+- name: Cordon the Chosen node
+ ansible.builtin.shell: "kubectl cordon {{ compute_chaos }}"
+
+- name: Apply chaos drain experiment
+ community.kubernetes.k8s:
+ state: present
+ src: /tmp/resiliency/node-drain/node-drain-chaos.yaml
+ tags: apply
+
+- name: wait for the end of chaos
+ run_once: true
+ community.kubernetes.k8s_info:
+ kind: ChaosEngine
+ api_version: litmuschaos.io/v1alpha1
+ name: onap-chaos-drain
+ namespace: "{{ onap_namespace }}"
+ register: chaosengine_status
+ changed_when:
+ &chaos_test chaosengine_status is defined and
+ chaosengine_status.resources[0].status.engineStatus == "completed"
+ until: *chaos_test
+ retries: 5
+ delay: 120
+ tags: wait
+# - name: Print the chaos engine object
+# ansible.builtin.debug:
+# msg: "{{chaosengine_status.resources[0].status.engineStatus }}"
+
+
+- name: get results
+ run_once: true
+ community.kubernetes.k8s_info:
+ kind: ChaosResult
+ api_version: litmuschaos.io/v1alpha1
+ name: onap-chaos-drain-node-drain
+ namespace: "{{ onap_namespace }}"
+ register: chaosresult_drain
+ changed_when:
+ &chaos_result chaosresult_drain is defined and
+ (chaosresult_drain.resources[0].status.experimentStatus.verdict == "Pass" or
+ chaosresult_drain.resources[0].status.experimentStatus.verdict == "Fail" )
+ until: *chaos_result
+ retries: 5
+ delay: 10
+ tags: wait
+
+- name: Print the chaos result object
+ ansible.builtin.debug:
+ msg: "{{ chaosresult_drain.resources[0].status.experimentStatus.verdict}}"
+
+- name: Print the chaos result verdict
+ ansible.builtin.debug:
+ msg: " The test is {{ chaosresult_drain.resources[0].status.experimentStatus.verdict}}-ed"
+
+- name: Uncordon the Chosen node
+ ansible.builtin.shell: "kubectl uncordon {{ compute_chaos }}"
diff --git a/roles/onap-chaos-tests/tasks/node-memory-hog.yaml b/roles/onap-chaos-tests/tasks/node-memory-hog.yaml
new file mode 100644
index 0000000..82ad014
--- /dev/null
+++ b/roles/onap-chaos-tests/tasks/node-memory-hog.yaml
@@ -0,0 +1,86 @@
+---
+- name: create directory for memory hog scenario
+ ansible.builtin.file:
+ path: /tmp/resiliency/node-memory-hog
+ state: directory
+ mode: '0755'
+ tags: prepare
+
+- name: Get compute node list
+ community.kubernetes.k8s_info:
+ kind: Node
+ label_selectors: "! node-role.kubernetes.io/master"
+ register: kubernetes_computer_node_list
+ tags: prepare
+
+- name: Set Fact first compute node Internal IP
+ ansible.builtin.set_fact:
+ first_node_ip: "{{ item.address }}"
+ when: "'Hostname' in item.type"
+ with_items:
+ "{{ (kubernetes_computer_node_list.resources | first).status.addresses }}"
+ tags: prepare
+
+- name: Set Compute for the node memory hog
+ ansible.builtin.set_fact:
+ compute_chaos: "{{ first_node_ip }}"
+ when: compute_chaos is not defined
+ tags: prepare
+
+- name: Prepare rbac file for node memory hog experiment
+ ansible.builtin.template:
+ src: node-memory-hog-rbac.yaml.j2
+ dest: /tmp/resiliency/node-memory-hog/node-memory-hog-rbac.yaml
+ mode: 0600
+ tags: prepare
+
+- name: Prepare chaos file for node memory hog experiment
+ ansible.builtin.template:
+ src: node-memory-hog-chaos.yaml.j2
+ dest: /tmp/resiliency/node-memory-hog/node-memory-hog-chaos.yaml
+ mode: 0600
+ tags: prepare
+
+- name: Apply node memory hog rbac
+ community.kubernetes.k8s:
+ state: present
+ src: /tmp/resiliency/node-memory-hog/node-memory-hog-rbac.yaml
+
+- name: Apply node memory hog experiment
+ community.kubernetes.k8s:
+ state: present
+ src: /tmp/resiliency/node-memory-hog/node-memory-hog-chaos.yaml
+
+- name: wait for the end of chaos
+ run_once: true
+ community.kubernetes.k8s_info:
+ kind: ChaosEngine
+ api_version: litmuschaos.io/v1alpha1
+ name: onap-chaos-memory
+ namespace: "{{ onap_namespace }}"
+ register: chaosengine_status
+ changed_when:
+ &chaos_test chaosengine_status is defined and
+ chaosengine_status.resources[0].status.engineStatus == "completed"
+ until: *chaos_test
+ retries: 5
+ delay: 120
+
+- name: get results
+ run_once: true
+ community.kubernetes.k8s_info:
+ kind: ChaosResult
+ api_version: litmuschaos.io/v1alpha1
+ name: onap-chaos-memory-node-memory-hog
+ namespace: "{{ onap_namespace }}"
+ register: chaosresult_drain
+ changed_when:
+ &chaos_result chaosresult_drain is defined and
+ (chaosresult_drain.resources[0].status.experimentStatus.verdict == "Pass" or
+ chaosresult_drain.resources[0].status.experimentStatus.verdict == "Fail" )
+ until: *chaos_result
+ retries: 5
+ delay: 20
+- name: Print the chaos result verdict
+ ansible.builtin.debug:
+ msg: " The test is {{ chaosresult_drain.resources[0].status.experimentStatus.verdict}}-ed"
diff --git a/roles/onap-chaos-tests/tasks/pod-delete-aai.yaml b/roles/onap-chaos-tests/tasks/pod-delete-aai.yaml
new file mode 100644
index 0000000..87fbb17
--- /dev/null
+++ b/roles/onap-chaos-tests/tasks/pod-delete-aai.yaml
@@ -0,0 +1,103 @@
+---
+- name: create directory for pod delete aai scenario
+ ansible.builtin.file:
+ path: /tmp/resiliency/pod-delete-aai
+ state: directory
+ mode: '0755'
+ tags: prepare
+
+- name: Get deployments name
+ ansible.builtin.shell: kubectl get deployments -n onap | grep aai |awk '{print $1}'
+ register: deployments
+ tags: prepare
+
+- name: print grep output
+ ansible.builtin.debug:
+ msg: "{{deployments.stdout_lines}}"
+
+
+- name: add labels to deployments
+ community.kubernetes.k8s:
+ state: present
+ definition:
+ apiVersion: apps/v1
+ kind: Deployment
+ metadata:
+ name: "{{ item }}"
+ namespace: "{{ onap_namespace }}"
+ labels:
+ component: onap-aai
+ spec:
+ template:
+ metadata:
+ label:
+ component: onap-aai
+ loop: "{{deployments.stdout_lines}}"
+ tags: prepare
+
+- name: get AAI pod name
+ ansible.builtin.shell: kubectl get pods -n onap | grep aai |awk '{print $1}'
+ register: pod_list
+ tags: prepare
+
+- name: print pod list
+ ansible.builtin.debug:
+ msg: "{{pod_list.stdout_lines | join(', ') }}"
+
+- name: Prepare rbac file for pod delete aai experiment
+ ansible.builtin.template:
+ src: pod-delete-rbac.yaml.j2
+ dest: /tmp/resiliency/pod-delete-aai/pod-delete-aai-rbac.yaml
+ mode: 0600
+ tags: prepare
+
+- name: Prepare chaos file for pod delete aai experiment
+ ansible.builtin.template:
+ src: pod-delete-aai-chaos.yaml.j2
+ dest: /tmp/resiliency/pod-delete-aai/pod-delete-aai-chaos.yaml
+ mode: 0600
+ tags: prepare
+
+- name: Apply pod delete aai rbac
+ community.kubernetes.k8s:
+ state: present
+ src: /tmp/resiliency/pod-delete-aai/pod-delete-aai-rbac.yaml
+
+- name: Apply pod delete aai experiment
+ community.kubernetes.k8s:
+ state: present
+ src: /tmp/resiliency/pod-delete-aai/pod-delete-aai-chaos.yaml
+
+- name: wait for the end of chaos
+ run_once: true
+ community.kubernetes.k8s_info:
+ kind: ChaosEngine
+ api_version: litmuschaos.io/v1alpha1
+ name: aai-chaos
+ namespace: "{{ onap_namespace }}"
+ register: chaosengine_status
+ changed_when:
+ &chaos_test chaosengine_status is defined and
+ chaosengine_status.resources[0].status.engineStatus == "completed"
+ until: *chaos_test
+ retries: 5
+ delay: 120
+
+- name: get results
+ run_once: true
+ community.kubernetes.k8s_info:
+ kind: ChaosResult
+ api_version: litmuschaos.io/v1alpha1
+ name: aai-chaos-pod-delete
+ namespace: "{{ onap_namespace }}"
+ register: chaosresult_aai
+ changed_when:
+ &chaos_result chaosresult_aai is defined and
+ (chaosresult_aai.resources[0].status.experimentStatus.verdict == "Pass" or
+ chaosresult_aai.resources[0].status.experimentStatus.verdict == "Fail" )
+ until: *chaos_result
+ retries: 5
+ delay: 20
+- name: Print the chaos result verdict
+ ansible.builtin.debug:
+ msg: " The test is {{ chaosresult_aai.resources[0].status.experimentStatus.verdict}}-ed"
diff --git a/roles/onap-chaos-tests/tasks/pod-delete-sdc.yaml b/roles/onap-chaos-tests/tasks/pod-delete-sdc.yaml
new file mode 100644
index 0000000..d46fdd1
--- /dev/null
+++ b/roles/onap-chaos-tests/tasks/pod-delete-sdc.yaml
@@ -0,0 +1,76 @@
+---
+- name: Get compute node list
+ community.kubernetes.k8s_info:
+ kind: Node
+ label_selectors: "! node-role.kubernetes.io/master"
+ register: kubernetes_computer_node_list
+
+- name: Set Fact first compute node Internal IP
+ ansible.builtin.set_fact:
+ first_node_ip: "{{ item.address }}"
+ when: "'Hostname' in item.type"
+ with_items:
+ "{{ (kubernetes_computer_node_list.resources | first).status.addresses }}"
+
+- name: Set Compute for the drain chaos
+ ansible.builtin.set_fact:
+ compute_chaos: "{{ first_node_ip }}"
+ when: compute_chaos is not defined
+
+- name: Prepare rbac file for drain experiment
+ ansible.builtin.template:
+ src: drain-rbac.yaml.j2
+ dest: /tmp/drain-rbac.yaml
+ mode: 0600
+
+- name: Prepare chaos file for drain experiment
+ ansible.builtin.template:
+ src: drain-chaos.yaml.j2
+ dest: /tmp/drain-chaos.yaml
+ mode: 0600
+
+- name: Apply drain rbac
+ community.kubernetes.k8s:
+ state: present
+ src: /tmp/drain-rbac.yaml
+
+- name: Cordon the Chosen node
+ ansible.builtin.shell: "kubectl cordon {{ compute_chaos }}"
+
+- name: Apply chaos drain experiment
+ community.kubernetes.k8s:
+ state: present
+ src: /tmp/drain-chaos.yaml
+
+- name: wait for the end of chaos
+ run_once: true
+ community.kubernetes.k8s_info:
+ kind: ChaosEngine
+ api_version: litmuschaos.io/v1alpha1
+ name: onap-chaos
+ namespace: "{{ onap_namespace }}"
+ register: chaosengine_status
+ changed_when:
+ &chaos_test chaosengine_status is defined and
+ chaosengine_status.resources[0].status.engineStatus == "completed"
+ until: *chaos_test
+ retries: 5
+ delay: 120
+
+- name: get results
+ run_once: true
+ community.kubernetes.k8s_info:
+ kind: ChaosResult
+ api_version: litmuschaos.io/v1alpha1
+ name: onap-chaos-node-drain
+ namespace: "{{ onap_namespace }}"
+ register: chaosresult_drain
+ changed_when:
+ &chaos_result chaosresult_drain is defined and
+ chaosresult_drain.resources[0].status.experimentStatus.verdict == "Pass"
+ until: *chaos_result
+ retries: 5
+ delay: 10
+
+- name: Uncordon the Chosen node
+ ansible.builtin.shell: "kubectl uncordon {{ compute_chaos }}"
diff --git a/roles/onap-chaos-tests/tasks/pod-delete-sdnc.yaml b/roles/onap-chaos-tests/tasks/pod-delete-sdnc.yaml
new file mode 100644
index 0000000..d46fdd1
--- /dev/null
+++ b/roles/onap-chaos-tests/tasks/pod-delete-sdnc.yaml
@@ -0,0 +1,76 @@
+---
+- name: Get compute node list
+ community.kubernetes.k8s_info:
+ kind: Node
+ label_selectors: "! node-role.kubernetes.io/master"
+ register: kubernetes_computer_node_list
+
+- name: Set Fact first compute node Internal IP
+ ansible.builtin.set_fact:
+ first_node_ip: "{{ item.address }}"
+ when: "'Hostname' in item.type"
+ with_items:
+ "{{ (kubernetes_computer_node_list.resources | first).status.addresses }}"
+
+- name: Set Compute for the drain chaos
+ ansible.builtin.set_fact:
+ compute_chaos: "{{ first_node_ip }}"
+ when: compute_chaos is not defined
+
+- name: Prepare rbac file for drain experiment
+ ansible.builtin.template:
+ src: drain-rbac.yaml.j2
+ dest: /tmp/drain-rbac.yaml
+ mode: 0600
+
+- name: Prepare chaos file for drain experiment
+ ansible.builtin.template:
+ src: drain-chaos.yaml.j2
+ dest: /tmp/drain-chaos.yaml
+ mode: 0600
+
+- name: Apply drain rbac
+ community.kubernetes.k8s:
+ state: present
+ src: /tmp/drain-rbac.yaml
+
+- name: Cordon the Chosen node
+ ansible.builtin.shell: "kubectl cordon {{ compute_chaos }}"
+
+- name: Apply chaos drain experiment
+ community.kubernetes.k8s:
+ state: present
+ src: /tmp/drain-chaos.yaml
+
+- name: wait for the end of chaos
+ run_once: true
+ community.kubernetes.k8s_info:
+ kind: ChaosEngine
+ api_version: litmuschaos.io/v1alpha1
+ name: onap-chaos
+ namespace: "{{ onap_namespace }}"
+ register: chaosengine_status
+ changed_when:
+ &chaos_test chaosengine_status is defined and
+ chaosengine_status.resources[0].status.engineStatus == "completed"
+ until: *chaos_test
+ retries: 5
+ delay: 120
+
+- name: get results
+ run_once: true
+ community.kubernetes.k8s_info:
+ kind: ChaosResult
+ api_version: litmuschaos.io/v1alpha1
+ name: onap-chaos-node-drain
+ namespace: "{{ onap_namespace }}"
+ register: chaosresult_drain
+ changed_when:
+ &chaos_result chaosresult_drain is defined and
+ chaosresult_drain.resources[0].status.experimentStatus.verdict == "Pass"
+ until: *chaos_result
+ retries: 5
+ delay: 10
+
+- name: Uncordon the Chosen node
+ ansible.builtin.shell: "kubectl uncordon {{ compute_chaos }}"
diff --git a/roles/onap-chaos-tests/tasks/pod-delete-so.yaml b/roles/onap-chaos-tests/tasks/pod-delete-so.yaml
new file mode 100644
index 0000000..d46fdd1
--- /dev/null
+++ b/roles/onap-chaos-tests/tasks/pod-delete-so.yaml
@@ -0,0 +1,76 @@
+---
+- name: Get compute node list
+ community.kubernetes.k8s_info:
+ kind: Node
+ label_selectors: "! node-role.kubernetes.io/master"
+ register: kubernetes_computer_node_list
+
+- name: Set Fact first compute node Internal IP
+ ansible.builtin.set_fact:
+ first_node_ip: "{{ item.address }}"
+ when: "'Hostname' in item.type"
+ with_items:
+ "{{ (kubernetes_computer_node_list.resources | first).status.addresses }}"
+
+- name: Set Compute for the drain chaos
+ ansible.builtin.set_fact:
+ compute_chaos: "{{ first_node_ip }}"
+ when: compute_chaos is not defined
+
+- name: Prepare rbac file for drain experiment
+ ansible.builtin.template:
+ src: drain-rbac.yaml.j2
+ dest: /tmp/drain-rbac.yaml
+ mode: 0600
+
+- name: Prepare chaos file for drain experiment
+ ansible.builtin.template:
+ src: drain-chaos.yaml.j2
+ dest: /tmp/drain-chaos.yaml
+ mode: 0600
+
+- name: Apply drain rbac
+ community.kubernetes.k8s:
+ state: present
+ src: /tmp/drain-rbac.yaml
+
+- name: Cordon the Chosen node
+ ansible.builtin.shell: "kubectl cordon {{ compute_chaos }}"
+
+- name: Apply chaos drain experiment
+ community.kubernetes.k8s:
+ state: present
+ src: /tmp/drain-chaos.yaml
+
+- name: wait for the end of chaos
+ run_once: true
+ community.kubernetes.k8s_info:
+ kind: ChaosEngine
+ api_version: litmuschaos.io/v1alpha1
+ name: onap-chaos
+ namespace: "{{ onap_namespace }}"
+ register: chaosengine_status
+ changed_when:
+ &chaos_test chaosengine_status is defined and
+ chaosengine_status.resources[0].status.engineStatus == "completed"
+ until: *chaos_test
+ retries: 5
+ delay: 120
+
+- name: get results
+ run_once: true
+ community.kubernetes.k8s_info:
+ kind: ChaosResult
+ api_version: litmuschaos.io/v1alpha1
+ name: onap-chaos-node-drain
+ namespace: "{{ onap_namespace }}"
+ register: chaosresult_drain
+ changed_when:
+ &chaos_result chaosresult_drain is defined and
+ chaosresult_drain.resources[0].status.experimentStatus.verdict == "Pass"
+ until: *chaos_result
+ retries: 5
+ delay: 10
+
+- name: Uncordon the Chosen node
+ ansible.builtin.shell: "kubectl uncordon {{ compute_chaos }}"
diff --git a/roles/onap-chaos-tests/tasks/prepare.yaml b/roles/onap-chaos-tests/tasks/prepare.yaml
new file mode 100644
index 0000000..827156e
--- /dev/null
+++ b/roles/onap-chaos-tests/tasks/prepare.yaml
@@ -0,0 +1,38 @@
+---
+- name: create directory for resiliency
+ ansible.builtin.file:
+ path: /tmp/resiliency
+ state: directory
+ mode: '0755'
+
+- name: Download Litmus manifest.
+ ansible.builtin.get_url:
+ url: https://litmuschaos.github.io/litmus/litmus-operator-v1.13.5.yaml
+ dest: /tmp/resiliency/litmus_manifest.yaml
+ mode: '0664'
+
+- name: Apply Litmus manifest
+ community.kubernetes.k8s:
+ state: present
+ src: /tmp/resiliency/litmus_manifest.yaml
+
+- name: Ensure litmus Chaos Operator is running
+ ansible.builtin.shell: "kubectl get pods -n litmus"
+
+- name: Download generic experiments manifest
+ ansible.builtin.get_url:
+ url: https://hub.litmuschaos.io/api/chaos/1.13.5?file=charts/generic/experiments.yaml
+ dest: /tmp/resiliency/litmus_experiments_manifest.yaml
+ mode: '0664'
+
+- name: Apply Litmus Experiment manifest
+ community.kubernetes.k8s:
+ namespace: "{{ onap_namespace }}"
+ state: present
+ src: /tmp/resiliency/litmus_experiments_manifest.yaml
+
+- name: "Copy resiliency script"
+ ansible.builtin.copy:
+ src: scripts/run_chaos_tests.sh
+ dest: /tmp/resiliency
+ mode: '755'
diff --git a/roles/onap-chaos-tests/tasks/reporting.yaml b/roles/onap-chaos-tests/tasks/reporting.yaml
new file mode 100644
index 0000000..ed97d53
--- /dev/null
+++ b/roles/onap-chaos-tests/tasks/reporting.yaml
@@ -0,0 +1 @@
+---
diff --git a/roles/onap-chaos-tests/templates/node-cpu-hog-chaos.yaml.j2 b/roles/onap-chaos-tests/templates/node-cpu-hog-chaos.yaml.j2
new file mode 100644
index 0000000..ce72420
--- /dev/null
+++ b/roles/onap-chaos-tests/templates/node-cpu-hog-chaos.yaml.j2
@@ -0,0 +1,30 @@
+apiVersion: litmuschaos.io/v1alpha1
+kind: ChaosEngine
+metadata:
+ name: node-cpu-hog
+ namespace: {{ onap_namespace }}
+spec:
+ # It can be true/false
+ annotationCheck: 'false'
+ # It can be active/stop
+ engineState: 'active'
+ #ex. values: ns1:name=percona,ns2:run=nginx
+ auxiliaryAppInfo: ''
+ chaosServiceAccount: node-cpu-hog-sa
+ # It can be delete/retain
+ jobCleanUpPolicy: 'delete'
+ experiments:
+ - name: node-cpu-hog
+ spec:
+ components:
+ env:
+ # set chaos duration (in sec) as desired
+ - name: TOTAL_CHAOS_DURATION
+ value: '120'
+
+ - name: NODE_CPU_CORE
+ value: '6'
+
+ # ENTER THE COMMA SEPARATED TARGET NODES NAME
+ - name: TARGET_NODES
+ value: {{ compute_chaos }}
diff --git a/roles/onap-chaos-tests/templates/node-cpu-hog-rbac.yaml.j2 b/roles/onap-chaos-tests/templates/node-cpu-hog-rbac.yaml.j2
new file mode 100644
index 0000000..e240b5e
--- /dev/null
+++ b/roles/onap-chaos-tests/templates/node-cpu-hog-rbac.yaml.j2
@@ -0,0 +1,49 @@
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+ name: node-cpu-hog-sa
+ namespace: {{ onap_namespace }}
+ labels:
+ name: node-cpu-hog-sa
+ app.kubernetes.io/part-of: litmus
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRole
+metadata:
+ name: node-cpu-hog-sa
+ labels:
+ name: node-cpu-hog-sa
+ app.kubernetes.io/part-of: litmus
+rules:
+- apiGroups: [""]
+ resources: ["pods","events"]
+ verbs: ["create","list","get","patch","update","delete","deletecollection"]
+- apiGroups: [""]
+ resources: ["pods/exec","pods/log"]
+ verbs: ["list","get","create"]
+- apiGroups: ["batch"]
+ resources: ["jobs"]
+ verbs: ["create","list","get","delete","deletecollection"]
+- apiGroups: ["litmuschaos.io"]
+ resources: ["chaosengines","chaosexperiments","chaosresults"]
+ verbs: ["create","list","get","patch","update"]
+- apiGroups: [""]
+ resources: ["nodes"]
+ verbs: ["get","list"]
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRoleBinding
+metadata:
+ name: node-cpu-hog-sa
+ labels:
+ name: node-cpu-hog-sa
+ app.kubernetes.io/part-of: litmus
+roleRef:
+ apiGroup: rbac.authorization.k8s.io
+ kind: ClusterRole
+ name: node-cpu-hog-sa
+subjects:
+- kind: ServiceAccount
+ name: node-cpu-hog-sa
+ namespace: {{ onap_namespace }} \ No newline at end of file
diff --git a/roles/onap-chaos-tests/templates/node-drain-chaos.yaml.j2 b/roles/onap-chaos-tests/templates/node-drain-chaos.yaml.j2
new file mode 100644
index 0000000..a90a5f1
--- /dev/null
+++ b/roles/onap-chaos-tests/templates/node-drain-chaos.yaml.j2
@@ -0,0 +1,28 @@
+apiVersion: litmuschaos.io/v1alpha1
+kind: ChaosEngine
+metadata:
+ name: node-drain
+ namespace: {{ onap_namespace }}
+spec:
+ # It can be true/false
+ annotationCheck: 'false'
+ # It can be active/stop
+ engineState: 'active'
+ #ex. values: ns1:name=percona,ns2:run=onap
+ auxiliaryAppInfo: ''
+ chaosServiceAccount: node-drain-sa
+ # It can be delete/retain
+ jobCleanUpPolicy: 'delete'
+ experiments:
+ - name: node-drain
+ spec:
+ components:
+ # nodeSelector:
+ # # provide the node labels
+ # kubernetes.io/hostname: 'node02'
+ env:
+ # enter the target node name
+ - name: TARGET_NODE
+ value: {{ compute_chaos }}
+ - name: TOTAL_CHAOS_DURATION
+ value: 120s
diff --git a/roles/onap-chaos-tests/templates/node-drain-rbac.yaml.j2 b/roles/onap-chaos-tests/templates/node-drain-rbac.yaml.j2
new file mode 100644
index 0000000..d7e4a78
--- /dev/null
+++ b/roles/onap-chaos-tests/templates/node-drain-rbac.yaml.j2
@@ -0,0 +1,53 @@
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+ name: node-drain-sa
+ namespace: {{ onap_namespace }}
+ labels:
+ name: node-drain-sa
+ app.kubernetes.io/part-of: litmus
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRole
+metadata:
+ name: node-drain-sa
+ labels:
+ name: node-drain-sa
+ app.kubernetes.io/part-of: litmus
+rules:
+- apiGroups: [""]
+ resources: ["pods","events"]
+ verbs: ["create","list","get","patch","update","delete","deletecollection"]
+- apiGroups: [""]
+ resources: ["pods/exec","pods/log","pods/eviction"]
+ verbs: ["list","get","create"]
+- apiGroups: ["batch"]
+ resources: ["jobs"]
+ verbs: ["create","list","get","delete","deletecollection"]
+- apiGroups: ["apps"]
+ resources: ["daemonsets"]
+ verbs: ["list","get","delete"]
+- apiGroups: ["litmuschaos.io"]
+ resources: ["chaosengines","chaosexperiments","chaosresults"]
+ verbs: ["create","list","get","patch","update"]
+- apiGroups: [""]
+ resources: ["nodes"]
+ verbs: ["patch","get","list"]
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRoleBinding
+metadata:
+ name: node-drain-sa
+ labels:
+ name: node-drain-sa
+ app.kubernetes.io/part-of: litmus
+roleRef:
+ apiGroup: rbac.authorization.k8s.io
+ kind: ClusterRole
+ name: node-drain-sa
+subjects:
+- kind: ServiceAccount
+ name: node-drain-sa
+ namespace: {{ onap_namespace }}
+
diff --git a/roles/onap-chaos-tests/templates/node-memory-hog-chaos.yaml.j2 b/roles/onap-chaos-tests/templates/node-memory-hog-chaos.yaml.j2
new file mode 100644
index 0000000..fb39b9b
--- /dev/null
+++ b/roles/onap-chaos-tests/templates/node-memory-hog-chaos.yaml.j2
@@ -0,0 +1,32 @@
+apiVersion: litmuschaos.io/v1alpha1
+kind: ChaosEngine
+metadata:
+ name: node-memory-hog
+ namespace: {{ onap_namespace }}
+spec:
+ # It can be true/false
+ annotationCheck: 'false'
+ # It can be active/stop
+ engineState: 'active'
+ #ex. values: ns1:name=percona,ns2:run=nginx
+ auxiliaryAppInfo: ''
+ chaosServiceAccount: node-memory-hog-sa
+ # It can be delete/retain
+ jobCleanUpPolicy: 'delete'
+ experiments:
+ - name: node-memory-hog
+ spec:
+ components:
+ env:
+ # set chaos duration (in sec) as desired
+ - name: TOTAL_CHAOS_DURATION
+ value: '120'
+
+ ## Specify the size as percent of total node capacity Ex: '30'
+ ## Note: For consuming memory in mebibytes change the variable to MEMORY_CONSUMPTION_MEBIBYTES
+ - name: MEMORY_CONSUMPTION_PERCENTAGE
+ value: '30'
+
+ # ENTER THE COMMA SEPARATED TARGET NODES NAME
+ - name: TARGET_NODES
+ value: {{ compute_chaos }}
diff --git a/roles/onap-chaos-tests/templates/node-memory-hog-rbac.yaml.j2 b/roles/onap-chaos-tests/templates/node-memory-hog-rbac.yaml.j2
new file mode 100644
index 0000000..9b21e05
--- /dev/null
+++ b/roles/onap-chaos-tests/templates/node-memory-hog-rbac.yaml.j2
@@ -0,0 +1,49 @@
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+ name: node-memory-hog-sa
+ namespace: {{ onap_namespace }}
+ labels:
+ name: node-memory-hog-sa
+ app.kubernetes.io/part-of: litmus
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRole
+metadata:
+ name: node-memory-hog-sa
+ labels:
+ name: node-memory-hog-sa
+ app.kubernetes.io/part-of: litmus
+rules:
+- apiGroups: [""]
+ resources: ["pods","events"]
+ verbs: ["create","list","get","patch","update","delete","deletecollection"]
+- apiGroups: [""]
+ resources: ["pods/exec","pods/log"]
+ verbs: ["create","list","get"]
+- apiGroups: ["batch"]
+ resources: ["jobs"]
+ verbs: ["create","list","get","delete","deletecollection"]
+- apiGroups: ["litmuschaos.io"]
+ resources: ["chaosengines","chaosexperiments","chaosresults"]
+ verbs: ["create","list","get","patch","update"]
+- apiGroups: [""]
+ resources: ["nodes"]
+ verbs: ["get","list"]
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRoleBinding
+metadata:
+ name: node-memory-hog-sa
+ labels:
+ name: node-memory-hog-sa
+ app.kubernetes.io/part-of: litmus
+roleRef:
+ apiGroup: rbac.authorization.k8s.io
+ kind: ClusterRole
+ name: node-memory-hog-sa
+subjects:
+- kind: ServiceAccount
+ name: node-memory-hog-sa
+ namespace: {{ onap_namespace }} \ No newline at end of file
diff --git a/roles/onap-chaos-tests/templates/pod-delete-aai-chaos.yaml.j2 b/roles/onap-chaos-tests/templates/pod-delete-aai-chaos.yaml.j2
new file mode 100644
index 0000000..1c6eb01
--- /dev/null
+++ b/roles/onap-chaos-tests/templates/pod-delete-aai-chaos.yaml.j2
@@ -0,0 +1,35 @@
+apiVersion: litmuschaos.io/v1alpha1
+kind: ChaosEngine
+metadata:
+ name: aai-chaos
+ namespace: {{ onap_namespace }}
+spec:
+ appinfo:
+ appns: {{ onap_namespace }}
+ applabel: 'component=onap-aai'
+ appkind: 'deployment'
+ # It can be active/stop
+ engineState: 'active'
+ chaosServiceAccount: pod-delete-sa
+ experiments:
+ - name: pod-delete
+ spec:
+ components:
+ env:
+ - name: TARGET_PODS
+ value: {{ pod_list.stdout_lines | join(', ') }}
+ # set chaos duration (in sec) as desired
+ - name: TOTAL_CHAOS_DURATION
+ value: '30'
+
+ # set chaos interval (in sec) as desired
+ - name: CHAOS_INTERVAL
+ value: '10'
+
+ # pod failures without '--force' & default terminationGracePeriodSeconds
+ - name: FORCE
+ value: 'true'
+
+ ## percentage of total pods to target
+ - name: PODS_AFFECTED_PERC
+ value: '100'
diff --git a/roles/onap-chaos-tests/templates/pod-delete-aai-rbac.yaml.j2 b/roles/onap-chaos-tests/templates/pod-delete-aai-rbac.yaml.j2
new file mode 100644
index 0000000..2b85d42
--- /dev/null
+++ b/roles/onap-chaos-tests/templates/pod-delete-aai-rbac.yaml.j2
@@ -0,0 +1,57 @@
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+ name: pod-delete-sa
+ namespace: {{ onap_namespace }}
+ labels:
+ name: pod-delete-sa
+ app.kubernetes.io/part-of: litmus
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: Role
+metadata:
+ name: pod-delete-sa
+ namespace: {{ onap_namespace }}
+ labels:
+ name: pod-delete-sa
+ app.kubernetes.io/part-of: litmus
+rules:
+- apiGroups: [""]
+ resources: ["pods","events"]
+ verbs: ["create","list","get","patch","update","delete","deletecollection"]
+- apiGroups: [""]
+ resources: ["pods/exec","pods/log","replicationcontrollers"]
+ verbs: ["create","list","get"]
+- apiGroups: ["batch"]
+ resources: ["jobs"]
+ verbs: ["create","list","get","delete","deletecollection"]
+- apiGroups: ["apps"]
+ resources: ["deployments","statefulsets","daemonsets","replicasets"]
+ verbs: ["list","get"]
+- apiGroups: ["apps.openshift.io"]
+ resources: ["deploymentconfigs"]
+ verbs: ["list","get"]
+- apiGroups: ["argoproj.io"]
+ resources: ["rollouts"]
+ verbs: ["list","get"]
+- apiGroups: ["litmuschaos.io"]
+ resources: ["chaosengines","chaosexperiments","chaosresults"]
+ verbs: ["create","list","get","patch","update"]
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: RoleBinding
+metadata:
+ name: pod-delete-sa
+ namespace: {{ onap_namespace }}
+ labels:
+ name: pod-delete-sa
+ app.kubernetes.io/part-of: litmus
+roleRef:
+ apiGroup: rbac.authorization.k8s.io
+ kind: Role
+ name: pod-delete-sa
+subjects:
+- kind: ServiceAccount
+ name: pod-delete-sa
+ namespace: {{ onap_namespace }} \ No newline at end of file
diff --git a/roles/onap-chaos-tests/templates/pod-delete-rbac.yaml.j2 b/roles/onap-chaos-tests/templates/pod-delete-rbac.yaml.j2
new file mode 100644
index 0000000..2b85d42
--- /dev/null
+++ b/roles/onap-chaos-tests/templates/pod-delete-rbac.yaml.j2
@@ -0,0 +1,57 @@
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+ name: pod-delete-sa
+ namespace: {{ onap_namespace }}
+ labels:
+ name: pod-delete-sa
+ app.kubernetes.io/part-of: litmus
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: Role
+metadata:
+ name: pod-delete-sa
+ namespace: {{ onap_namespace }}
+ labels:
+ name: pod-delete-sa
+ app.kubernetes.io/part-of: litmus
+rules:
+- apiGroups: [""]
+ resources: ["pods","events"]
+ verbs: ["create","list","get","patch","update","delete","deletecollection"]
+- apiGroups: [""]
+ resources: ["pods/exec","pods/log","replicationcontrollers"]
+ verbs: ["create","list","get"]
+- apiGroups: ["batch"]
+ resources: ["jobs"]
+ verbs: ["create","list","get","delete","deletecollection"]
+- apiGroups: ["apps"]
+ resources: ["deployments","statefulsets","daemonsets","replicasets"]
+ verbs: ["list","get"]
+- apiGroups: ["apps.openshift.io"]
+ resources: ["deploymentconfigs"]
+ verbs: ["list","get"]
+- apiGroups: ["argoproj.io"]
+ resources: ["rollouts"]
+ verbs: ["list","get"]
+- apiGroups: ["litmuschaos.io"]
+ resources: ["chaosengines","chaosexperiments","chaosresults"]
+ verbs: ["create","list","get","patch","update"]
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: RoleBinding
+metadata:
+ name: pod-delete-sa
+ namespace: {{ onap_namespace }}
+ labels:
+ name: pod-delete-sa
+ app.kubernetes.io/part-of: litmus
+roleRef:
+ apiGroup: rbac.authorization.k8s.io
+ kind: Role
+ name: pod-delete-sa
+subjects:
+- kind: ServiceAccount
+ name: pod-delete-sa
+ namespace: {{ onap_namespace }} \ No newline at end of file
diff --git a/roles/onap-stability-tests/tasks/main.yaml b/roles/onap-stability-tests/tasks/main.yaml
new file mode 100644
index 0000000..09c03f5
--- /dev/null
+++ b/roles/onap-stability-tests/tasks/main.yaml
@@ -0,0 +1,61 @@
+---
+- name: "Install stability tests prerequisites"
+ become: yes
+ ansible.builtin.apt:
+ name:
+ - python3-venv
+ - libssl-dev
+ state: latest
+
+- name: "Set variables for stability tests run"
+ ansible.builtin.set_fact:
+ stability_tests_output: "/tmp/stability/archives"
+ stability_tests_archives: "/tmp/stability"
+
+- name: "Delete directory with/for results"
+ ansible.builtin.file:
+ path: "{{ stability_tests_output }}"
+ state: absent
+
+- name: "Copy stability tests script"
+ ansible.builtin.copy:
+ src: scripts/run_stability_tests.sh
+ dest: "{{ ansible_user_dir }}/run_stability_tests.sh"
+ mode: '500'
+
+- name: "Copy netrc for lftool"
+ ansible.builtin.template:
+ src: .netrc.j2
+ dest: "{{ ansible_user_dir }}/.netrc"
+ mode: 0600
+
+- name: "Create directory for stability test execution"
+ ansible.builtin.file:
+ path: "{{ stability_tests_output }}"
+ state: directory
+ mode: '0755'
+ recurse: yes
+ owner: "{{ ansible_user }}"
+
+- name: "Launch stability tests & push artifacts"
+ ansible.builtin.shell:
+ cmd: "{{ ansible_user_dir }}/run_stability_tests.sh > {{ stability_tests_output }}/run_stability.log"
+ chdir: "{{ ansible_user_dir }}"
+ environment:
+ LF_RESULTS_BACKUP: '{{ lf_results_backup }}'
+ POD: '{{ pod }}'
+ CI_PIPELINE_CREATED_AT: '{{ ci_pipeline_created_at }}'
+ STABILITY_TESTS_LOCATION: '{{ stability_tests_output }}'
+ ARCHIVES_LOCATION: '{{ stability_tests_archives }}'
+ async: 259200 # 60*60*24*3 = 3 days
+  poll: 0 # don't wait for it
+ register: stability_tests
+
+- name: "Check if stability tests are running"
+ become: no
+ async_status:
+ jid: "{{ stability_tests.ansible_job_id }}"
+ register: stability_tests_result
+ until: stability_tests_result.started
+ retries: 10
+ delay: 10
diff --git a/roles/onap-stability-tests/templates/.netrc.j2 b/roles/onap-stability-tests/templates/.netrc.j2
new file mode 100644
index 0000000..e4c22e3
--- /dev/null
+++ b/roles/onap-stability-tests/templates/.netrc.j2
@@ -0,0 +1,3 @@
+machine nexus.onap.org
+login onap-integration
+password {{ lf_it_nexus_pwd }}
diff --git a/roles/xtesting-healthcheck-k8s-job/defaults/main.yaml b/roles/xtesting-healthcheck-k8s-job/defaults/main.yaml
new file mode 100644
index 0000000..7b9bf68
--- /dev/null
+++ b/roles/xtesting-healthcheck-k8s-job/defaults/main.yaml
@@ -0,0 +1,47 @@
+---
+internal_check_certs_deployment:
+ apiVersion: batch/v1
+ kind: Job
+ metadata:
+ name: "integration-onap-internal-check-certs"
+ namespace: "{{ onap_namespace }}"
+ spec:
+ template:
+ spec:
+ restartPolicy: Never
+ containers:
+ - name: xtesting-onap
+ image: "{{ docker_health_k8s_image }}:{{ testing_container_tag }}"
+ imagePullPolicy: Always
+ command: ["run_tests","-t","internal_check_certs","-r"]
+ env:
+ - name: INSTALLER_TYPE
+ value: "{{ deployment_name }}"
+ - name: TEST_ENVIRONMENT
+ value: internal_job
+ - name: DEPLOY_SCENARIO
+ value: "{{ deploy_scenario }}"
+ - name: NODE_NAME
+ value: "{{ node_name }}"
+ - name: TEST_DB_URL
+ value:
+ "{{ test_result_url }}"
+ - name: BUILD_TAG
+ value: "{{ build_tag }}"
+ - name: TAG
+ value: "{{ run_type }}"
+ volumeMounts:
+ - name: localtime
+ mountPath: /etc/localtime
+ readOnly: true
+ - name: job-save-results
+ mountPath:
+ /var/lib/xtesting/results/
+ volumes:
+ - name: localtime
+ hostPath:
+ path: /etc/localtime
+ - name: job-save-results
+ hostPath:
+ path: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+ backoffLimit: 0
diff --git a/roles/xtesting-healthcheck-k8s-job/tasks/main.yaml b/roles/xtesting-healthcheck-k8s-job/tasks/main.yaml
new file mode 100644
index 0000000..6ed429a
--- /dev/null
+++ b/roles/xtesting-healthcheck-k8s-job/tasks/main.yaml
@@ -0,0 +1,51 @@
+---
+# tasks file for functest (tests)
+
+##
+- block:
+ - name: create directories as root
+ become: yes
+ file:
+ path: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+ state: directory
+ mode: 0755
+
+ - name: Delete old logs
+ become: yes
+ file:
+ state: absent
+ path: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+
+ - name: Delete healthcheck job
+ k8s:
+ state: absent
+ api: batch/v1
+ kind: Job
+ namespace: onap
+ name: "integration-onap-internal-check-certs"
+ kubeconfig: "{{ global_local_path }}/{{ kube_conf_file }}"
+
+ - name: save internal check certs deployment to file
+ copy:
+ content: "{{ internal_check_certs_deployment | to_nice_yaml }}"
+ dest:
+ "{{ k8s_job__dir_path }}/healthcheck-internal-check-certs.yaml"
+
+ - name: start internal check certs job
+ k8s:
+ state: present
+ src: "{{ k8s_job__dir_path }}/healthcheck-internal-check-certs.yaml"
+ kubeconfig: "{{ global_local_path }}/{{ kube_conf_file }}"
+ - name: Wait until the healthcheck test is "completed"
+ wait_for:
+ timeout: "{{ run_timeout }}"
+ path: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}/xtesting.log"
+ search_regex: Result.EX_
+ always:
+ - name: "save healthcheck {{ run_type }} results for artifacts"
+ synchronize:
+ src: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+ use_ssh_args: true
+ dest: "./results/{{ run_tiers }}"
+ mode: pull
+ ignore_errors: True
diff --git a/roles/xtesting-healthcheck-k8s/defaults/main.yaml b/roles/xtesting-healthcheck-k8s/defaults/main.yaml
new file mode 100644
index 0000000..02190ea
--- /dev/null
+++ b/roles/xtesting-healthcheck-k8s/defaults/main.yaml
@@ -0,0 +1,9 @@
+---
+postgres_secret_name: postgres-postgresql
+postgres_user: helm
+postgres_db: helm
+postgres_port: 30347
+postgres_url: "postgresql://{{
+ postgres_svc }}.{{ postgres_namespace }}:{{ postgres_port }}/{{
+ postgres_db }}?user={{ postgres_user }}&password={{
+ postgres_password }}&sslmode=disable" \ No newline at end of file
diff --git a/roles/xtesting-healthcheck-k8s/tasks/main.yaml b/roles/xtesting-healthcheck-k8s/tasks/main.yaml
new file mode 100644
index 0000000..2f727e9
--- /dev/null
+++ b/roles/xtesting-healthcheck-k8s/tasks/main.yaml
@@ -0,0 +1,126 @@
+---
+# tasks file for functest (tests)
+
+##
+- block:
+ - name: Clean directory
+ ansible.builtin.file:
+ path: "{{ exec_local_path }}"
+ state: absent
+
+ - name: Create directory
+ ansible.builtin.file:
+ path: "{{ exec_local_path }}"
+ state: directory
+ mode: 0755
+
+ - name: check helm version
+ command: "helm version --template {% raw %}'{{.Version}}'{% endraw %}"
+ register: helm_version
+
+  # The previous command returns "v3.3.4" for Helm v3 and later, and
+  # "<no value>" for version 2.
+ - name: store helm version
+ ansible.builtin.set_fact:
+ helmv3: "{{ ('<no' in helm_version.stdout) | ternary(false, true) }}"
+
+ - name: retrieve helm postgres secret
+ community.kubernetes.k8s_info:
+ api_version: v1
+ kind: Secret
+ name: "{{ postgres_secret_name }}"
+ namespace: "{{ postgres_namespace }}"
+ register: postgres_secrets
+ when: helmv3 | bool and helmv3_use_sql | bool
+
+ - name: retrieve helm postgres password
+ set_fact:
+ postgres_password: "{{
+ postgres_secrets.resources[0].data['postgresql-password'] | b64decode }}"
+ when: helmv3 | bool and helmv3_use_sql | bool
+
+ - name: generate fake postgres_url
+ set_fact:
+ postgres_url: ""
+ when: not helmv3_use_sql | bool
+
+ - name: Create env file
+ ansible.builtin.template:
+ src: env-os.j2
+ dest: "{{ exec_local_path }}/env"
+ mode: "0644"
+
+ - name: create directories as root
+ become: yes
+ ansible.builtin.file:
+ path: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+ state: directory
+ mode: 0755
+
+ - name: Delete old logs
+ become: yes
+ ansible.builtin.file:
+ state: absent
+ path: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+
+ - name: set helm deploy log folder
+ ansible.builtin.set_fact:
+ helm_deploy_logs_path: "{{ helmv3 | bool | ternary(
+ helm3_deploy_logs_path, helm2_deploy_logs_path) }}"
+
+ - name: "clean {{ docker_healthcheck_k8s_name }} docker"
+ community.general.docker_container:
+ name: "{{ docker_healthcheck_k8s_name }}"
+ state: absent
+ force_kill: yes
+
+ - name: generate pre command to run
+ ansible.builtin.set_fact:
+ command: chmod 700 /root/.kube && chmod 600 /root/.kube/config
+
+ - name: generate command to run
+ ansible.builtin.set_fact:
+ command: "{{ command }} && run_tests --test all --report"
+
+ - name: "launch {{ docker_healthcheck_k8s_name }} docker"
+ community.general.docker_container:
+ container_default_behavior: no_defaults
+ name: "{{ docker_healthcheck_k8s_name }}"
+ image: "{{ docker_health_k8s_image }}:{{ docker_health_k8s_version }}"
+ env_file: "{{ exec_local_path }}/env"
+ state: started
+ command: "/bin/bash -c '{{ command }}'"
+ recreate: yes
+ volumes: "{{ volumes_healthcheck_k8s }}"
+ etc_hosts: "{{ etc_hosts }}"
+ detach: yes
+ pull: yes
+ keep_volumes: no
+
+ - name: wait for test docker to be finished
+ community.docker.docker_container_info:
+ name: "{{ docker_healthcheck_k8s_name }}"
+ register: container_info
+ until: container_info.container.State.Status == "exited"
+ retries: "{{ run_timeout }}"
+ delay: 1
+
+ - name: "{{ docker_healthcheck_k8s_name }} has failed"
+ ansible.builtin.fail:
+ msg: "The test {{ docker_healthcheck_k8s_name }} has failed"
+ when: container_info.container.State.ExitCode != 0
+ always:
+ - name: "save {{ docker_healthcheck_k8s_name }} results for artifacts"
+ ansible.posix.synchronize:
+ src: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+ dest: "./results/{{ run_tiers }}"
+ use_ssh_args: true
+ mode: pull
+ ignore_errors: True
+ when: not use_s3 | bool
+  # temporary workaround: wait for xtesting to handle the upload of
+  # thousands of files
+ - name: "push {{ docker_healthcheck_k8s_name }} results to S3"
+ command: "mc cp --recursive {{ res_local_path }}/{{ run_tiers }}/{{
+ run_type }} s3/{{ s3_raw_dst }}"
+ when: use_s3 | bool
diff --git a/roles/xtesting-healthcheck-k8s/templates/env-os.j2 b/roles/xtesting-healthcheck-k8s/templates/env-os.j2
new file mode 100644
index 0000000..eeb2bf2
--- /dev/null
+++ b/roles/xtesting-healthcheck-k8s/templates/env-os.j2
@@ -0,0 +1,17 @@
+INSTALLER_TYPE={{ deployment_name }}
+DEPLOY_SCENARIO= {{ scenario }}
+TEST_DB_URL={{ test_result_url }}
+NODE_NAME={{ node_name }}
+BUILD_TAG={{ build_tag }}
+ONAP_IP={{ onap_ip }}
+{% if project != 'oom' %}
+DEPLOY_ENVIRONMENT='gating_component'
+CHART={{ project }}
+{% endif %}
+{% if helmv3 | bool %}
+HELM_BIN=helm3
+{% if helmv3_use_sql | bool %}
+HELM_DRIVER=sql
+HELM_DRIVER_SQL_CONNECTION_STRING={{ postgres_url }}
+{% endif %}
+{% endif %} \ No newline at end of file
diff --git a/roles/xtesting-healthcheck/defaults/main.yaml b/roles/xtesting-healthcheck/defaults/main.yaml
new file mode 100644
index 0000000..6312a16
--- /dev/null
+++ b/roles/xtesting-healthcheck/defaults/main.yaml
@@ -0,0 +1,130 @@
+---
+tests:
+ - core
+
+healthcheck_secret:
+ apiVersion: v1
+ kind: Secret
+ metadata:
+ name: s3-keys
+ namespace: "{{ onap_namespace }}"
+ data:
+ access-key: "{{ s3_access_key | string | b64encode }}"
+ secret-key: "{{ s3_secret_key | string | b64encode }}"
+
+healthcheck_deployment:
+ apiVersion: batch/v1
+ kind: Job
+ metadata:
+ name: "integration-onap-{{ run_type }}"
+ namespace: "{{ onap_namespace }}"
+ spec:
+ backoffLimit: 0
+ template:
+ metadata:
+ annotations:
+ sidecar.istio.io/inject: "false"
+ spec:
+ restartPolicy: Never
+ containers:
+ - name: "functest-onap-{{ run_type }}"
+ image: "{{ testing_container }}:{{ testing_container_tag }}"
+ imagePullPolicy: Always
+ env: "{{ healthcheck_deployment_env }}"
+ volumeMounts:
+ - name: localtime
+ mountPath: /etc/localtime
+ readOnly: true
+ - name: robot-eteshare
+ mountPath: /share/config
+ - name: robot-save-results
+ mountPath: /var/lib/xtesting/results/
+ command:
+ - run_tests
+ args: "{{ args }}"
+ volumes: "{{ job_volumes }}"
+
+healthcheck_deployment_env_legacy:
+ - name: INSTALLER_TYPE
+ value: "{{ deployment_name }}"
+ - name: DEPLOY_SCENARIO
+ value: "{{ deploy_scenario }}"
+ - name: NODE_NAME
+ value: "{{ node_name }}"
+ - name: TEST_DB_URL
+ value: "{{ test_result_url }}"
+ - name: BUILD_TAG
+ value: "{{ build_tag }}"
+ - name: TAG
+ value: "{{ run_type }}"
+
+healthcheck_deployment_env_s3:
+ - name: INSTALLER_TYPE
+ value: "{{ deployment_name }}"
+ - name: DEPLOY_SCENARIO
+ value: "{{ deploy_scenario }}"
+ - name: NODE_NAME
+ value: "{{ node_name }}"
+ - name: TEST_DB_URL
+ value: "{{ test_result_url }}"
+ - name: BUILD_TAG
+ value: "{{ build_tag }}"
+ - name: TAG
+ value: "{{ run_type }}"
+ - name: S3_ENDPOINT_URL
+ value: "{{ s3_internal_url }}"
+ - name: S3_DST_URL
+ value: "{{ s3_dst }}"
+ - name: HTTP_DST_URL
+ value: "{{ s3_http_url_endpoint }}"
+ - name: AWS_ACCESS_KEY_ID
+ valueFrom:
+ secretKeyRef:
+ key: access-key
+ name: s3-keys
+ - name: AWS_SECRET_ACCESS_KEY
+ valueFrom:
+ secretKeyRef:
+ key: secret-key
+ name: s3-keys
+
+healthcheck_deployment_env: "{{ use_s3 | bool |
+ternary(healthcheck_deployment_env_s3, healthcheck_deployment_env_legacy) }}"
+
+args_legacy:
+ - --test
+ - "{{ run_type }}"
+ - --report
+
+args_s3:
+ - --test
+ - "{{ run_type }}"
+ - --push
+ - --report
+
+args: "{{ use_s3 | bool | ternary(args_s3, args_legacy) }}"
+
+volumes_legacy:
+ - name: localtime
+ hostPath:
+ path: /etc/localtime
+ - name: robot-eteshare
+ configMap:
+ name: "{{ onap_namespace }}-{{ robot_configmap }}"
+ defaultMode: 0755
+ - name: robot-save-results
+ hostPath:
+ path: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+
+volumes_s3:
+ - name: localtime
+ hostPath:
+ path: /etc/localtime
+ - name: robot-eteshare
+ configMap:
+ name: "{{ onap_namespace }}-{{ robot_configmap }}"
+ defaultMode: 0755
+ - name: robot-save-results
+ emptyDir: {}
+
+job_volumes: "{{ use_s3 | bool | ternary(volumes_s3, volumes_legacy) }}"
diff --git a/roles/xtesting-healthcheck/tasks/launch.yaml b/roles/xtesting-healthcheck/tasks/launch.yaml
new file mode 100644
index 0000000..5ec978b
--- /dev/null
+++ b/roles/xtesting-healthcheck/tasks/launch.yaml
@@ -0,0 +1,50 @@
+---
+- block:
+ - name: ensure secret is present
+ community.kubernetes.k8s:
+ state: present
+ src: "{{ k8s_job__dir_path }}/s3-keys-{{ run_type }}.yaml"
+ kubeconfig: "{{ global_local_path }}/{{ kube_conf_file }}"
+ when: use_s3 | bool
+
+ - name: start healthcheck job
+ community.kubernetes.k8s:
+ state: present
+ src: "{{ k8s_job__dir_path }}/healthcheck-{{ run_type }}.yaml"
+ kubeconfig: "{{ global_local_path }}/{{ kube_conf_file }}"
+
+ - name: wait for end of job
+ community.kubernetes.k8s_info:
+ kind: Job
+ name: "integration-onap-{{ run_type }}"
+ namespace: "{{ onap_namespace }}"
+ register: job_info
+ until: (job_info.resources[0].status.succeeded is defined and
+ job_info.resources[0].status.succeeded == 1) or
+ (job_info.resources[0].status.failed is defined and
+ job_info.resources[0].status.failed >= 1)
+ retries: "{{ run_timeout }}"
+ delay: 1
+
+ - name: job has failed
+ ansible.builtin.fail:
+ msg: "The job has failed"
+ when: job_info.resources[0].status.failed is defined and
+ job_info.resources[0].status.failed >= 1
+
+ always:
+ - name: "save healthcheck {{ run_type }} results for artifacts"
+ ansible.posix.synchronize:
+ src: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+ use_ssh_args: true
+ dest: "./results/{{ run_tiers }}"
+ mode: pull
+ rsync_opts:
+ - "--exclude=output.xml"
+ ignore_errors: True
+ when: not use_s3 | bool
+
+ - name: remove secret file
+ ansible.builtin.file:
+ path: "{{ k8s_job__dir_path }}/s3-keys-{{ run_type }}.yaml"
+ state: absent
diff --git a/roles/xtesting-healthcheck/tasks/main.yaml b/roles/xtesting-healthcheck/tasks/main.yaml
new file mode 100644
index 0000000..5fb373c
--- /dev/null
+++ b/roles/xtesting-healthcheck/tasks/main.yaml
@@ -0,0 +1,5 @@
+---
+# tasks file for ONAP healthcheck
+
+- import_tasks: prepare.yaml
+- import_tasks: launch.yaml
diff --git a/roles/xtesting-healthcheck/tasks/prepare.yaml b/roles/xtesting-healthcheck/tasks/prepare.yaml
new file mode 100644
index 0000000..712916e
--- /dev/null
+++ b/roles/xtesting-healthcheck/tasks/prepare.yaml
@@ -0,0 +1,52 @@
+---
+- name: create directories
+ ansible.builtin.file:
+ path: "{{ item }}"
+ state: directory
+ mode: 0755
+ loop:
+ - "{{ exec_local_path }}/"
+ - "{{ ansible_user_dir }}/oom/{{ onap_version }}/{{ run_tiers }}/"
+
+- name: create directories as root
+ become: yes
+ ansible.builtin.file:
+ path: "{{ item }}"
+ state: directory
+ mode: 0755
+ loop:
+ - "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+
+- name: Delete healthcheck job
+ community.kubernetes.k8s:
+ state: absent
+ api: batch/v1
+ kind: Job
+ namespace: "{{ onap_namespace }}"
+ name: "integration-onap-{{ run_type }}"
+ kubeconfig: "{{ global_local_path }}/{{ kube_conf_file }}"
+
+- name: Delete old logs
+ become: yes
+ ansible.builtin.file:
+ state: absent
+ path: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+
+- name: Override docker version for CPS (python3 migration)
+ ansible.builtin.set_fact:
+ testing_container:
+ "nexus3.onap.org:10003/onap/xtesting-smoke-usecases-robot-py3"
+ when: (run_type == "cps-healthcheck") or
+ (run_type == "cps-temporal-healthcheck") or
+ (run_type == "cps-dmi-plugin-healthcheck")
+
+- name: save healthcheck deployment to file
+ ansible.builtin.copy:
+ content: "{{ healthcheck_deployment | to_nice_yaml }}"
+ dest: "{{ k8s_job__dir_path }}/healthcheck-{{ run_type }}.yaml"
+
+- name: save secret for S3 to file
+ ansible.builtin.copy:
+ content: "{{ healthcheck_secret | to_nice_yaml }}"
+ dest: "{{ k8s_job__dir_path }}/s3-keys-{{ run_type }}.yaml"
+ when: use_s3 | bool
diff --git a/roles/xtesting-jumphost/tasks/main.yaml b/roles/xtesting-jumphost/tasks/main.yaml
new file mode 100644
index 0000000..0fa5e01
--- /dev/null
+++ b/roles/xtesting-jumphost/tasks/main.yaml
@@ -0,0 +1,101 @@
+---
+# tasks file for roles/prepare_test
+
+##
+# Install deps and docker
+##
+- name: Install needed packages
+ become: "yes"
+ ansible.builtin.apt:
+ name: "{{ apt_packages }}"
+ state: present
+ update_cache: yes
+
+- name: install pip dependencies
+ become: "yes"
+ ansible.builtin.pip:
+ name: "{{ item }}"
+ state: latest
+ loop: "{{ pip_packages }}"
+
+- name: retrieve linux type
+ ansible.builtin.set_fact:
+ distri: "{{ ansible_distribution.lower() }}"
+
+- name: retrieve docker apt key
+ ansible.builtin.apt_key:
+ url: "https://download.docker.com/linux/{{ distri }}/gpg"
+ id: 0EBFCD88
+ state: present
+
+- name: retrieve linux release
+ ansible.builtin.set_fact:
+ release: "{{ ansible_distribution_release.lower() }}"
+
+- name: add docker repo
+ ansible.builtin.apt_repository:
+ repo: >
+ deb [arch=amd64] https://download.docker.com/linux/{{ distri }}
+ {{ release }} stable
+ state: present
+
+- name: Wait for automatic system updates
+ become: yes
+ shell:
+ "if [ -e /bin/fuser ]; then while sudo fuser /var/lib/dpkg/lock >/dev/null \
+ 2>&1; do sleep 1; done; fi;"
+ changed_when: false
+
+- name: install docker
+ become: yes
+ ansible.builtin.apt:
+ update_cache: "yes"
+ name: "docker-ce"
+ state: present
+
+- name: add login user to docker group
+ become: true
+ ansible.builtin.user:
+ name: "{{ ansible_user }}"
+ groups: docker
+ append: yes
+
+- name: create xtesting directory
+ ansible.builtin.file:
+ path: "{{ global_local_path }}"
+ state: directory
+ mode: 0755
+
+- name: copy description file
+ ansible.builtin.copy:
+ src: "vars/kube-config"
+ dest: "{{ global_local_path }}/{{ kube_conf_file }}"
+ mode: 0644
+
+##
+# Setup containers
+##
+- name: remove container
+ community.general.docker_container:
+ name: "{{ docker_base_name }}*"
+ state: absent
+
+##
+# Setup S3
+# This is done as a workaround because xtesting cannot currently
+# handle pushing thousands of files.
+# checksum: sha256:https://dl.min.io/client/mc/release/linux-amd64/mc.sha256sum
+# doesn't work for now
+##
+- name: "retrieve mc (minio client)"
+ become: "yes"
+ ansible.builtin.get_url:
+ url: https://dl.min.io/client/mc/release/linux-amd64/mc
+ dest: /usr/local/bin/mc
+ mode: "0777"
+ when: use_s3 | bool
+
+- name: "set s3 alias"
+ command: "mc alias set s3 {{ s3_endpoint }} {{ s3_access_key }} {{
+ s3_secret_key }}"
+ when: use_s3 | bool \ No newline at end of file
diff --git a/roles/xtesting-onap-security/tasks/main.yaml b/roles/xtesting-onap-security/tasks/main.yaml
new file mode 100644
index 0000000..8148010
--- /dev/null
+++ b/roles/xtesting-onap-security/tasks/main.yaml
@@ -0,0 +1,88 @@
+---
+# tasks file for functest (tests)
+
+##
+- block:
+ - name: Create directory
+ ansible.builtin.file:
+ path: "{{ exec_local_path }}"
+ state: directory
+ mode: 0755
+
+ - name: Create env file
+ ansible.builtin.template:
+ src: env-os.j2
+ dest: "{{ exec_local_path }}/env"
+ mode: "0644"
+
+ - name: Delete old logs
+ become: yes
+ ansible.builtin.file:
+ state: absent
+ path: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+
+ - name: "clean {{ docker_onap_security_name }} docker"
+ community.general.docker_container:
+ name: "{{ docker_onap_security_name }}-{{ run_type }}"
+ state: absent
+ force_kill: yes
+
+ - name: generate pre command to run
+ ansible.builtin.set_fact:
+ command: "mkdir -p /var/lib/xtesting/results/{{ run_type }}"
+
+ - name: generate command to run
+ ansible.builtin.set_fact:
+ command: "{{ command }} && run_tests --test {{ run_type }} --report"
+
+ - name: add S3 upload to command
+ ansible.builtin.set_fact:
+ command: "{{ command }} --push"
+ when: use_s3 | bool
+
+ - name: "launch {{ docker_onap_security_name }} docker"
+ community.general.docker_container:
+ container_default_behavior: no_defaults
+ name: "{{ docker_onap_security_name }}-{{ run_type }}"
+ image: "{{ docker_onap_security_image }}:{{ docker_onap_security_version }}"
+ env_file: "{{ exec_local_path }}/env"
+ state: started
+ command: "/bin/bash -c '{{ command }}'"
+ recreate: yes
+ pid_mode: host
+ volumes: "{{ volumes_security }}"
+ detach: true
+ pull: yes
+ keep_volumes: no
+
+ - name: wait for test docker to be finished
+ community.docker.docker_container_info:
+ name: "{{ docker_onap_security_name }}-{{ run_type }}"
+ register: container_info
+ until: container_info.container.State.Status == "exited"
+ retries: "{{ run_timeout }}"
+ delay: 1
+
+ - name: "{{ docker_onap_security_name }} has failed"
+ ansible.builtin.fail:
+ msg: "The test {{ docker_onap_security_name }} has failed"
+ when: container_info.container.State.ExitCode != 0
+ always:
+ - name: retrieve container logs
+ shell: "docker logs {{ docker_onap_security_name }}-{{ run_type }}"
+ register: container_logs
+ ignore_errors: True
+
+ - name: display container logs
+ debug:
+ msg: "{{ container_logs.stdout }}"
+ ignore_errors: True
+
+ - name: "save {{ docker_onap_security_name }} results for artifacts"
+ ansible.posix.synchronize:
+ src: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+ dest: "./results/{{ run_tiers }}"
+ use_ssh_args: true
+ mode: pull
+ ignore_errors: True
+ when: not use_s3 | bool
diff --git a/roles/xtesting-onap-security/templates/env-os.j2 b/roles/xtesting-onap-security/templates/env-os.j2
new file mode 100644
index 0000000..06a31a1
--- /dev/null
+++ b/roles/xtesting-onap-security/templates/env-os.j2
@@ -0,0 +1,11 @@
+INSTALLER_TYPE={{ deployment_name }}
+TEST_DB_URL={{ test_result_url }}
+NODE_NAME={{ node_name }}
+BUILD_TAG={{ build_tag }}
+{% if use_s3 | bool %}
+S3_ENDPOINT_URL={{ s3_endpoint }}
+S3_DST_URL={{ s3_dst }}
+HTTP_DST_URL={{ s3_http_url_endpoint }}
+AWS_ACCESS_KEY_ID={{ s3_access_key }}
+AWS_SECRET_ACCESS_KEY={{ s3_secret_key }}
+{% endif %} \ No newline at end of file
diff --git a/roles/xtesting-onap-vnf/tasks/launch.yaml b/roles/xtesting-onap-vnf/tasks/launch.yaml
new file mode 100644
index 0000000..4220647
--- /dev/null
+++ b/roles/xtesting-onap-vnf/tasks/launch.yaml
@@ -0,0 +1,75 @@
+---
+# tasks file for functest (tests)
+
+##
+- block:
+ - name: generate pre command to run
+ ansible.builtin.set_fact:
+ command: "mkdir -p /var/lib/xtesting/results/{{ run_type }}"
+
+ - name: generate command to run
+ ansible.builtin.set_fact:
+ command: "{{ command }} && run_tests --test {{ run_type }} --report"
+
+ - name: add S3 upload to command
+ ansible.builtin.set_fact:
+ command: "{{ command }} --push"
+ when: use_s3 | bool
+
+ - name: generate a random number between 0 and 600
+ ansible.builtin.set_fact:
+ before_launch_wait_time: "{{ 600 | random }}"
+ when: random_wait and before_launch_wait_time is not defined
+
+ - name: "wait {{ before_launch_wait_time }}s in order to allow 'sequential' tests"
+ run_once: yes
+ wait_for:
+ timeout: "{{ before_launch_wait_time }}"
+ delegate_to: localhost
+ when: random_wait
+
+ - name: "launch onap-vnf docker for {{ run_type }}"
+ community.general.docker_container:
+ container_default_behavior: no_defaults
+ name: "{{ docker_vnf_name }}-{{ run_type }}"
+ image: "{{ docker_vnf_image }}:{{ docker_vnf_version }}"
+ env_file: "{{ exec_local_path }}/env"
+ state: started
+ command: "/bin/bash -c '{{ command }}'"
+ pull: yes
+ recreate: yes
+ volumes: "{{ volumes }}"
+ etc_hosts: "{{ etc_hosts }}"
+ detach: yes
+ keep_volumes: no
+
+ - name: wait for test docker to be finished
+ community.docker.docker_container_info:
+ name: "{{ docker_vnf_name }}-{{ run_type }}"
+ register: container_info
+ until: container_info.container.State.Status == "exited"
+ retries: "{{ run_timeout }}"
+ delay: 1
+
+ - name: "{{ run_type }} has failed"
+ ansible.builtin.fail:
+ msg: "The test {{ run_type }} has failed"
+ when: container_info.container.State.ExitCode != 0
+ always:
+ - name: retrieve container logs
+ shell: "docker logs {{ docker_vnf_name }}-{{ run_type }}"
+ register: container_logs
+ ignore_errors: True
+
+ - name: display container logs
+ debug:
+ msg: "{{ container_logs.stdout }}"
+ ignore_errors: True
+ - name: "save VNF results for artifacts"
+ ansible.posix.synchronize:
+ src: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+ dest: "./results/{{ run_tiers }}"
+ mode: pull
+ use_ssh_args: true
+ ignore_errors: True
+ when: not use_s3 | bool
diff --git a/roles/xtesting-onap-vnf/tasks/main.yaml b/roles/xtesting-onap-vnf/tasks/main.yaml
new file mode 100644
index 0000000..db10573
--- /dev/null
+++ b/roles/xtesting-onap-vnf/tasks/main.yaml
@@ -0,0 +1,5 @@
+---
+- import_tasks: prepare_cnf_test.yaml
+ when: run_type == "basic_cnf"
+- import_tasks: prepare.yaml
+- import_tasks: launch.yaml
diff --git a/roles/xtesting-onap-vnf/tasks/prepare.yaml b/roles/xtesting-onap-vnf/tasks/prepare.yaml
new file mode 100644
index 0000000..addab8c
--- /dev/null
+++ b/roles/xtesting-onap-vnf/tasks/prepare.yaml
@@ -0,0 +1,57 @@
+---
+# tasks file for roles/prepare_test
+
+##
+# Prepare config
+##
+- name: Clean directory
+ ansible.builtin.file:
+ path: "{{ exec_local_path }}"
+ state: absent
+
+- name: Create directory
+ ansible.builtin.file:
+ path: "{{ exec_local_path }}/vnf-services"
+ state: directory
+ mode: 0755
+
+- name: Create env file
+ ansible.builtin.template:
+ src: env-os.j2
+ dest: "{{ exec_local_path }}/env"
+ mode: "0644"
+
+- name: copy pythonsdk-tests configuration file
+ ansible.builtin.template:
+ src: settings.py.j2
+ dest: "{{ exec_local_path }}/settings.py"
+ mode: "0644"
+
+- name: create basic_vm configuration file
+ ansible.builtin.template:
+ src: basic_vm-service-{{ onap_version }}.yaml.j2
+ dest: "{{ exec_local_path }}/basic_vm-service.yaml"
+ mode: "0644"
+
+- name: create basic_vm_macro configuration file
+ ansible.builtin.template:
+ src: basic_vm_macro-service-{{ onap_version }}.yaml.j2
+ dest: "{{ exec_local_path }}/basic_vm_macro-service.yaml"
+ mode: "0644"
+
+- name: create directories as root
+ become: yes
+ ansible.builtin.file:
+ path: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+ state: directory
+ mode: 0755
+- name: Delete old logs
+ become: yes
+ ansible.builtin.file:
+ state: absent
+ path: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+
+- name: clean onap-vnf docker
+ community.general.docker_container:
+ name: "{{ docker_vnf_name }}-{{ run_type }}"
+ state: absent
diff --git a/roles/xtesting-onap-vnf/tasks/prepare_cnf_test.yaml b/roles/xtesting-onap-vnf/tasks/prepare_cnf_test.yaml
new file mode 100644
index 0000000..47ee366
--- /dev/null
+++ b/roles/xtesting-onap-vnf/tasks/prepare_cnf_test.yaml
@@ -0,0 +1,86 @@
+---
+- name: Check that cloud-site service does not exist
+ community.kubernetes.k8s_service:
+ name: cloud-site
+ namespace: onap
+ state: absent
+
+- name: Expose nodePort with created cloud-site service
+ shell: kubectl -n onap expose svc so-catalog-db-adapter --name cloud-site --type=NodePort
+
+- name: Get nodePort for cloud-site service
+ community.kubernetes.k8s_info:
+ kind: service
+ namespace: onap
+ name: cloud-site
+ register: output
+
+- name: set variable for api port
+ ansible.builtin.set_fact:
+ cloudsite_port: "{{ output.resources[0].spec.ports[0].nodePort }}"
+
+- name: Get DEFAULT Cloud Site
+ ansible.builtin.uri:
+ url: "{{ so_url }}:{{ cloudsite_port }}/\
+ cloudSite/DEFAULT"
+ method: GET
+ status_code: 200
+ body_format: json
+ validate_certs: "no"
+ return_content: "yes"
+ headers:
+ authorization: Basic YnBlbDpwYXNzd29yZDEk
+ Accept: application/json
+ Cache-Control: no-cache
+ register: output
+
+- name: Add cloud site CloudRegion
+ ansible.builtin.uri:
+ url: "{{ so_url }}:{{ cloudsite_port }}/\
+ cloudSite/{{ k8sRegionID }}"
+ method: PUT
+ status_code:
+ - 200
+ - 201
+ body_format: json
+ validate_certs: "no"
+ return_content: "yes"
+ headers:
+ authorization: Basic YnBlbDpwYXNzd29yZDEk
+ Accept: application/json
+ Cache-Control: no-cache
+ body:
+ id: "{{ k8sRegionID }}"
+ region_id: "{{ k8sRegionID }}"
+ aic_version: 2.5
+ clli: "{{ datacenter_id }}"
+ orchestrator: multicloud
+ identity_service_id: DEFAULT_KEYSTONE
+ identityService: "{{ output.json.identityService }}"
+
+- name: Remove created cloud-site service with NodePort
+ community.kubernetes.k8s_service:
+ name: cloud-site
+ namespace: onap
+ state: absent
+
+- name: delete cnf namespace
+ community.kubernetes.k8s:
+ state: absent
+ definition:
+ apiVersion: v1
+ kind: Namespace
+ metadata:
+ name: "{{ cnf_namespace }}"
+
+- name: wait for cnf namespace full deletion
+ shell: |
+ set -o pipefail && kubectl get namespace |
+ grep -c {{ cnf_namespace }} || true
+ args:
+ executable: /bin/bash
+ register: kube
+ changed_when: kube.stdout == '0'
+ until: kube.stdout == '0'
+ retries: 600
+ delay: 1
diff --git a/roles/xtesting-onap-vnf/templates/basic_vm-service-istanbul.yaml.j2 b/roles/xtesting-onap-vnf/templates/basic_vm-service-istanbul.yaml.j2
new file mode 100644
index 0000000..53ffc91
--- /dev/null
+++ b/roles/xtesting-onap-vnf/templates/basic_vm-service-istanbul.yaml.j2
@@ -0,0 +1,2 @@
+{% extends "basic_vm-service-master.yaml.j2" %}
+{% block heat_file_path %}onaptests/templates/heat-files/ubuntu20/ubuntu20agent.zip{% endblock %} \ No newline at end of file
diff --git a/roles/xtesting-onap-vnf/templates/basic_vm-service-jakarta.yaml.j2 b/roles/xtesting-onap-vnf/templates/basic_vm-service-jakarta.yaml.j2
new file mode 100644
index 0000000..d0ca6f2
--- /dev/null
+++ b/roles/xtesting-onap-vnf/templates/basic_vm-service-jakarta.yaml.j2
@@ -0,0 +1,40 @@
+---
+basic_vm:
+ vnfs:
+ - vnf_name: basic_vm
+ heat_files_to_upload: {% block heat_file_path %}templates/heat-files/ubuntu20/ubuntu20agent.zip{% endblock %}
+
+ vnf_parameters: [
+ {"name": "ubuntu20_image_name",
+ "value": "Ubuntu_2004"
+ },
+ {"name": "ubuntu20_key_name",
+ "value": "onap_dt"
+ },
+ {"name": "ubuntu20_pub_key",
+ "value": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAA\
+BAQDY15cdBmIs2XOpe4EiFCsaY6bmUmK/GysMoLl4UG51JCfJwvwoWCoA+6mDIbymZxhxq9IGx\
+ilp/yTA6WQ9s/5pBag1cUMJmFuda9PjOkXl04jgqh5tR6I+GZ97AvCg93KAECis5ubSqw1xOCj4\
+utfEUtPoF1OuzqM/lE5mY4N6VKXn+fT7pCD6cifBEs6JHhVNvs5OLLp/tO8Pa3kKYQOdyS0xc3r\
+h+t2lrzvKUSWGZbX+dLiFiEpjsUL3tDqzkEMNUn4pdv69OJuzWHCxRWPfdrY9Wg0j3mJesP29EBh\
+t+w+EC9/kBKq+1VKdmsXUXAcjEvjovVL8l1BrX3BY0R8D imported-openssh-key"
+ },
+ {"name": "ubuntu20_flavor_name",
+ "value": "m1.small"
+ },
+ {"name": "VM_name",
+ "value": "ubuntu20agent-VM-01"
+ },
+ {"name": "vnf_id",
+ "value": "ubuntu20agent-VNF-instance"
+ },
+ {"name": "vf_module_id",
+ "value": "ubuntu20agent-vfmodule-instance"
+ },
+ {"name": "vnf_name",
+ "value": "ubuntu20agent-VNF"
+ },
+ {"name": "admin_plane_net_name",
+ "value": "{{ openstack_public_net_id }}"
+ }
+ ]
diff --git a/roles/xtesting-onap-vnf/templates/basic_vm-service-master.yaml.j2 b/roles/xtesting-onap-vnf/templates/basic_vm-service-master.yaml.j2
new file mode 100644
index 0000000..d0ca6f2
--- /dev/null
+++ b/roles/xtesting-onap-vnf/templates/basic_vm-service-master.yaml.j2
@@ -0,0 +1,40 @@
+---
+basic_vm:
+ vnfs:
+ - vnf_name: basic_vm
+ heat_files_to_upload: {% block heat_file_path %}templates/heat-files/ubuntu20/ubuntu20agent.zip{% endblock %}
+
+ vnf_parameters: [
+ {"name": "ubuntu20_image_name",
+ "value": "Ubuntu_2004"
+ },
+ {"name": "ubuntu20_key_name",
+ "value": "onap_dt"
+ },
+ {"name": "ubuntu20_pub_key",
+ "value": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAA\
+BAQDY15cdBmIs2XOpe4EiFCsaY6bmUmK/GysMoLl4UG51JCfJwvwoWCoA+6mDIbymZxhxq9IGx\
+ilp/yTA6WQ9s/5pBag1cUMJmFuda9PjOkXl04jgqh5tR6I+GZ97AvCg93KAECis5ubSqw1xOCj4\
+utfEUtPoF1OuzqM/lE5mY4N6VKXn+fT7pCD6cifBEs6JHhVNvs5OLLp/tO8Pa3kKYQOdyS0xc3r\
+h+t2lrzvKUSWGZbX+dLiFiEpjsUL3tDqzkEMNUn4pdv69OJuzWHCxRWPfdrY9Wg0j3mJesP29EBh\
+t+w+EC9/kBKq+1VKdmsXUXAcjEvjovVL8l1BrX3BY0R8D imported-openssh-key"
+ },
+ {"name": "ubuntu20_flavor_name",
+ "value": "m1.small"
+ },
+ {"name": "VM_name",
+ "value": "ubuntu20agent-VM-01"
+ },
+ {"name": "vnf_id",
+ "value": "ubuntu20agent-VNF-instance"
+ },
+ {"name": "vf_module_id",
+ "value": "ubuntu20agent-vfmodule-instance"
+ },
+ {"name": "vnf_name",
+ "value": "ubuntu20agent-VNF"
+ },
+ {"name": "admin_plane_net_name",
+ "value": "{{ openstack_public_net_id }}"
+ }
+ ]
diff --git a/roles/xtesting-onap-vnf/templates/basic_vm_macro-service-istanbul.yaml.j2 b/roles/xtesting-onap-vnf/templates/basic_vm_macro-service-istanbul.yaml.j2
new file mode 100644
index 0000000..ada0289
--- /dev/null
+++ b/roles/xtesting-onap-vnf/templates/basic_vm_macro-service-istanbul.yaml.j2
@@ -0,0 +1,2 @@
+{% extends "basic_vm_macro-service-master.yaml.j2" %}
+{% block heat_file_path %}onaptests/templates/heat-files/ubuntu20/ubuntu20agent.zip{% endblock %} \ No newline at end of file
diff --git a/roles/xtesting-onap-vnf/templates/basic_vm_macro-service-jakarta.yaml.j2 b/roles/xtesting-onap-vnf/templates/basic_vm_macro-service-jakarta.yaml.j2
new file mode 100644
index 0000000..c35ca34
--- /dev/null
+++ b/roles/xtesting-onap-vnf/templates/basic_vm_macro-service-jakarta.yaml.j2
@@ -0,0 +1,55 @@
+---
+basic_vm_macro:
+ instantiation_type: "Macro"
+ vnfs:
+ - vnf_name: basic_vm_macro
+ properties:
+ controller_actor: "CDS"
+ skip_post_instantiation_configuration: False
+ sdnc_artifact_name: "vnf"
+ sdnc_model_version: "1.0.0"
+ sdnc_model_name: "ubuntu20"
+ vnf_artifact_type: "CONTROLLER_BLUEPRINT_ARCHIVE"
+ vnf_artifact_name: "BASIC_VM_enriched.zip"
+ vnf_artifact_label: "vnfcds"
+ vnf_artifact_file_path: "/tmp/BASIC_VM_enriched.zip"
+ heat_files_to_upload: {% block heat_file_path %}templates/heat-files/ubuntu20/ubuntu20agent.zip{% endblock %}
+
+ vf_module_parameters:
+ - vf_module_name: base_ubuntu20
+ parameters: [
+ {"name": "ubuntu20_image_name",
+ "value": "Ubuntu_2004"
+ },
+ {"name": "ubuntu20_key_name",
+ "value": "onap_dt"
+ },
+ {"name": "ubuntu20_pub_key",
+ "value": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAA\
+ BAQDY15cdBmIs2XOpe4EiFCsaY6bmUmK/GysMoLl4UG51JCfJwvwoWCoA+6mDIbymZxhxq9IGx\
+ ilp/yTA6WQ9s/5pBag1cUMJmFuda9PjOkXl04jgqh5tR6I+GZ97AvCg93KAECis5ubSqw1xOCj4\
+ utfEUtPoF1OuzqM/lE5mY4N6VKXn+fT7pCD6cifBEs6JHhVNvs5OLLp/tO8Pa3kKYQOdyS0xc3r\
+ h+t2lrzvKUSWGZbX+dLiFiEpjsUL3tDqzkEMNUn4pdv69OJuzWHCxRWPfdrY9Wg0j3mJesP29EBh\
+ t+w+EC9/kBKq+1VKdmsXUXAcjEvjovVL8l1BrX3BY0R8D imported-openssh-key"
+ },
+ {"name": "ubuntu20_flavor_name",
+ "value": "m1.small"
+ },
+ {"name": "VM_name",
+ "value": "basic_vm_macro-VM-01"
+ },
+ {"name": "vnf_id",
+ "value": "basic_vm_macro-VNF-instance"
+ },
+ {"name": "vf_module_id",
+ "value": "basic_vm_macro-vfmodule-instance"
+ },
+ {"name": "vnf_name",
+ "value": "basic_vm_macro-VNF"
+ },
+ {"name": "admin_plane_net_name",
+ "value": "{{ openstack_public_net_id }}"
+ },
+ {"name": "ubuntu20_name_0",
+ "value": "ubuntu20agent-VNF"}
+ ]
diff --git a/roles/xtesting-onap-vnf/templates/basic_vm_macro-service-master.yaml.j2 b/roles/xtesting-onap-vnf/templates/basic_vm_macro-service-master.yaml.j2
new file mode 100644
index 0000000..c35ca34
--- /dev/null
+++ b/roles/xtesting-onap-vnf/templates/basic_vm_macro-service-master.yaml.j2
@@ -0,0 +1,55 @@
+---
+basic_vm_macro:
+ instantiation_type: "Macro"
+ vnfs:
+ - vnf_name: basic_vm_macro
+ properties:
+ controller_actor: "CDS"
+ skip_post_instantiation_configuration: False
+ sdnc_artifact_name: "vnf"
+ sdnc_model_version: "1.0.0"
+ sdnc_model_name: "ubuntu20"
+ vnf_artifact_type: "CONTROLLER_BLUEPRINT_ARCHIVE"
+ vnf_artifact_name: "BASIC_VM_enriched.zip"
+ vnf_artifact_label: "vnfcds"
+ vnf_artifact_file_path: "/tmp/BASIC_VM_enriched.zip"
+ heat_files_to_upload: {% block heat_file_path %}templates/heat-files/ubuntu20/ubuntu20agent.zip{% endblock %}
+
+ vf_module_parameters:
+ - vf_module_name: base_ubuntu20
+ parameters: [
+ {"name": "ubuntu20_image_name",
+ "value": "Ubuntu_2004"
+ },
+ {"name": "ubuntu20_key_name",
+ "value": "onap_dt"
+ },
+ {"name": "ubuntu20_pub_key",
+ "value": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAA\
+ BAQDY15cdBmIs2XOpe4EiFCsaY6bmUmK/GysMoLl4UG51JCfJwvwoWCoA+6mDIbymZxhxq9IGx\
+ ilp/yTA6WQ9s/5pBag1cUMJmFuda9PjOkXl04jgqh5tR6I+GZ97AvCg93KAECis5ubSqw1xOCj4\
+ utfEUtPoF1OuzqM/lE5mY4N6VKXn+fT7pCD6cifBEs6JHhVNvs5OLLp/tO8Pa3kKYQOdyS0xc3r\
+ h+t2lrzvKUSWGZbX+dLiFiEpjsUL3tDqzkEMNUn4pdv69OJuzWHCxRWPfdrY9Wg0j3mJesP29EBh\
+ t+w+EC9/kBKq+1VKdmsXUXAcjEvjovVL8l1BrX3BY0R8D imported-openssh-key"
+ },
+ {"name": "ubuntu20_flavor_name",
+ "value": "m1.small"
+ },
+ {"name": "VM_name",
+ "value": "basic_vm_macro-VM-01"
+ },
+ {"name": "vnf_id",
+ "value": "basic_vm_macro-VNF-instance"
+ },
+ {"name": "vf_module_id",
+ "value": "basic_vm_macro-vfmodule-instance"
+ },
+ {"name": "vnf_name",
+ "value": "basic_vm_macro-VNF"
+ },
+ {"name": "admin_plane_net_name",
+ "value": "{{ openstack_public_net_id }}"
+ },
+ {"name": "ubuntu20_name_0",
+ "value": "ubuntu20agent-VNF"}
+ ]
diff --git a/roles/xtesting-onap-vnf/templates/clearwater-ims-service.yaml.j2 b/roles/xtesting-onap-vnf/templates/clearwater-ims-service.yaml.j2
new file mode 100644
index 0000000..bf950a7
--- /dev/null
+++ b/roles/xtesting-onap-vnf/templates/clearwater-ims-service.yaml.j2
@@ -0,0 +1,55 @@
+---
+ims:
+ tosca_file_from_SDC: service-Ims-template
+ version: "1.0"
+ subscription_type: "ims"
+ vnfs:
+ - vnf_name: Clearwater_vnf
+ heat_files_to_upload: onaptests/templates/heat-files/clearwater_ims/clearwater_ims.zip
+ vnf_parameters: [
+ {"vnf-parameter-name":"public_net_id",
+ "vnf-parameter-value":"{{ openstack_public_net_id }}"},
+ {"vnf-parameter-name":"bono_image_name",
+ "vnf-parameter-value":"Ubuntu_1404"},
+ {"vnf-parameter-name":"bono_flavor_name",
+ "vnf-parameter-value":"m1.medium"},
+ {"vnf-parameter-name":"dime_image_name",
+ "vnf-parameter-value":"Ubuntu_1404"},
+ {"vnf-parameter-name":"dime_flavor_name",
+ "vnf-parameter-value":"m1.medium"},
+ {"vnf-parameter-name":"dns_image_name",
+ "vnf-parameter-value":"Ubuntu_1404"},
+ {"vnf-parameter-name":"dns_flavor_name",
+ "vnf-parameter-value":"m1.medium"},
+ {"vnf-parameter-name":"ellis_image_name",
+ "vnf-parameter-value":"Ubuntu_1404"},
+ {"vnf-parameter-name":"ellis_flavor_name",
+ "vnf-parameter-value":"m1.medium"},
+ {"vnf-parameter-name":"homer_image_name",
+ "vnf-parameter-value":"Ubuntu_1404"},
+ {"vnf-parameter-name":"homer_flavor_name",
+ "vnf-parameter-value":"m1.medium"},
+ {"vnf-parameter-name":"robot_image_name",
+ "vnf-parameter-value":"Ubuntu_1404"},
+ {"vnf-parameter-name":"robot_flavor_name",
+ "vnf-parameter-value":"m1.medium"},
+ {"vnf-parameter-name":"sprout_image_name",
+ "vnf-parameter-value":"Ubuntu_1404"},
+ {"vnf-parameter-name":"sprout_flavor_name",
+ "vnf-parameter-value":"m1.medium"},
+ {"vnf-parameter-name":"vellum_image_name",
+ "vnf-parameter-value":"Ubuntu_1404"},
+ {"vnf-parameter-name":"vellum_flavor_name",
+ "vnf-parameter-value":"m1.medium"},
+ {"vnf-parameter-name":"dns_ip",
+ "vnf-parameter-value":"8.8.8.8"},
+ {"vnf-parameter-name": "clearwater_pub_key",
+ "vnf-parameter-value":"ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAr0If\
+62QHgf/xKzomkwBD9c1ol7edWpyG5+p9UBRE0D/bJcA5lyRpaYlcjxp3pfnN+WiVYfzjwHDjeDlyAO\
+pH2o3yrBCA9U+sU6PjhIH/BXFVkVQBY4xAmtjQnN3QCzjn8BA6PbaEt53OvvaYqtgg0yc5OOA0nyDl\
+cg/FU88I1MnhZvjTU90V4QEYKCMAyYcz6NdjGfC7PmpKIVmSWgHOdR59d5CGIRv6BnIWIBqXy+z+el\
+dbSKBrRVo/dv/H8Q0uwS6/rA2gLXeXhsQmEFZXsxwovypWm6t5hWMK/4cXA88AwylMd9xSroic398R\
+ZiO3QygZ9L7aQN2rnuAQHHOOaw=="
+ },
+ {"vnf-parameter-name":"clearwater_key_name",
+ "vnf-parameter-value":"onap_key"}]
diff --git a/roles/xtesting-onap-vnf/templates/env-os.j2 b/roles/xtesting-onap-vnf/templates/env-os.j2
new file mode 100644
index 0000000..5bbcdd4
--- /dev/null
+++ b/roles/xtesting-onap-vnf/templates/env-os.j2
@@ -0,0 +1,14 @@
+INSTALLER_TYPE={{ deployment_name }}
+TEST_DB_URL={{ test_result_url }}
+NODE_NAME={{ node_name }}
+BUILD_TAG={{ build_tag }}
+DEBUG=False
+OS_TEST_CLOUD={{ os_test_cloud }}
+ONAP_PYTHON_SDK_SETTINGS={{ vnf_settings }}
+{% if use_s3 | bool %}
+S3_ENDPOINT_URL={{ s3_endpoint }}
+S3_DST_URL={{ s3_dst }}
+HTTP_DST_URL={{ s3_http_url_endpoint }}
+AWS_ACCESS_KEY_ID={{ s3_access_key }}
+AWS_SECRET_ACCESS_KEY={{ s3_secret_key }}
+{% endif %}
diff --git a/roles/xtesting-onap-vnf/templates/settings.py.j2 b/roles/xtesting-onap-vnf/templates/settings.py.j2
new file mode 100644
index 0000000..c5eaba3
--- /dev/null
+++ b/roles/xtesting-onap-vnf/templates/settings.py.j2
@@ -0,0 +1,63 @@
+"""Specific settings module.""" # pylint: disable=bad-whitespace
+
+######################
+# #
+# ONAP INPUTS DATAS #
+# #
+######################
+
+
+# Variables to set logger information
+# Possible values for logging levels in onapsdk: INFO, DEBUG , WARNING, ERROR
+LOG_CONFIG = {
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "default": {
+ "class": "logging.Formatter",
+ "format": "%(asctime)s %(levelname)s %(lineno)d:%(filename)s(%(process)d) - %(message)s"
+ }
+ },
+ "handlers": {
+ "console": {
+ "level": "INFO",
+ "class": "logging.StreamHandler",
+ "formatter": "default"
+ },
+ "file": {
+ "level": "DEBUG",
+ "class": "logging.FileHandler",
+ "formatter": "default",
+ "filename": "/var/lib/xtesting/results/{{ run_type }}/pythonsdk.debug.log",
+ "mode": "w"
+ }
+ },
+ "root": {
+ "level": "DEBUG",
+ "handlers": ["console", "file"]
+ }
+}
+CLEANUP_FLAG = False
+
+# SOCK_HTTP = "socks5h://127.0.0.1:8080"
+REPORTING_FILE_PATH = "/var/lib/xtesting/results/{{ run_type }}/reporting.html"
+K8S_REGION_TYPE = "k8s"
+TILLER_HOST = "localhost"
+K8S_CONFIG = None # None means it will use default config (~/.kube/config)
+K8S_NAMESPACE = "onap" # Kubernetes namespace
+K8S_ONAP_NAMESPACE = "onap" # ONAP Kubernetes namespace
+K8S_ADDITIONAL_RESOURCES_NAMESPACE = K8S_ONAP_NAMESPACE # Resources created on tests namespace
+ORCHESTRATION_REQUEST_TIMEOUT = 60.0 * 30 # 30 minutes in seconds
+{% if use_ingress | bool %}
+AAI_URL = "https://aai-api.simpledemo.onap.org"
+CDS_URL = "https://cds-blueprintsprocessor-api.simpledemo.onap.org"
+MSB_URL = "https://msb-iag-ui.simpledemo.onap.org"
+SDC_BE_URL = "https://sdc-be-api.simpledemo.onap.org"
+SDC_FE_URL = "https://sdc-fe-ui.simpledemo.onap.org"
+SDNC_URL = "https://sdnc-api.simpledemo.onap.org"
+SO_URL = "https://so-api.simpledemo.onap.org"
+CLAMP_URL = "https://policy-ui.simpledemo.onap.org"
+VES_URL = "https://dcae-ves-collector-api.simpledemo.onap.org"
+DMAAP_URL = "https://dmaap-mr-api.simpledemo.onap.org"
+{% endif %}
+
diff --git a/roles/xtesting-pages/tasks/main.yaml b/roles/xtesting-pages/tasks/main.yaml
new file mode 100644
index 0000000..e9fe0d0
--- /dev/null
+++ b/roles/xtesting-pages/tasks/main.yaml
@@ -0,0 +1,69 @@
+---
+- name: install pip dependencies
+ become: "yes"
+ ansible.builtin.pip:
+ name: "{{ item }}"
+ state: latest
+ loop: "{{ pip_packages_pages }}"
+
+- name: Copy template dir to Jumphost to generate pages
+ ansible.builtin.copy:
+ src: "{{ doc_path }}/template"
+ dest: "{{ doc_dir_target }}/{{ doc_path }}"
+
+- name: Copy dashboard page generator to Jumphost
+ ansible.builtin.copy:
+ src: "{{ doc_path }}/generate_status.py"
+ dest: "{{ doc_dir_target }}/{{ doc_path }}"
+
+- name: "[LEGACY] Generate pages"
+ shell: "python3 generate_status.py -p {{ node_name }} -d {{ test_result_url }} -t {{ build_tag }} -m legacy"
+ args:
+ chdir: "{{ doc_dir_target }}/{{ doc_path }}"
+ when: not use_s3 | bool
+
+- name: "[S3] Generate pages"
+ shell: "python3 generate_status.py -p {{ node_name }} -d {{ test_result_url }} -t {{ build_tag }} -m s3"
+ args:
+ chdir: "{{ doc_dir_target }}/{{ doc_path }}"
+ when: use_s3 | bool
+ register: page_generation
+
+- name: Manage additional status pages
+ block:
+ - name: Copy stability page generator to Jumphost
+ ansible.builtin.copy:
+ src: "{{ doc_path }}/generate_stability_graphs.py"
+ dest: "{{ doc_dir_target }}/{{ doc_path }}"
+
+ - name: Copy docker version page generator to Jumphost
+ ansible.builtin.copy:
+ src: "{{ doc_path }}/generate_docker_version.py"
+ dest: "{{ doc_dir_target }}/{{ doc_path }}"
+
+ - name: "Generate stability page"
+ shell: "python3 generate_stability_graphs.py -v {{ onap_version }}"
+ args:
+ chdir: "{{ doc_dir_target }}/{{ doc_path }}"
+
+ - name: "Generate docker version page"
+ shell: "python3 generate_docker_version.py"
+ args:
+ chdir: "{{ doc_dir_target }}/{{ doc_path }}"
+ when: '"daily" in pod'
+ ignore_errors: yes
+
+- name: Fetch pages
+ ansible.posix.synchronize:
+ src: "{{ doc_dir_target }}/{{ doc_path }}/*"
+ dest: "{{ doc_path }}"
+ use_ssh_args: true
+ mode: pull
+ recursive: no
+ rsync_opts:
+ - "--exclude=*.py"
+
+- name: Delete remote doc dir
+ ansible.builtin.file:
+ path: "{{ doc_dir_target }}/{{ doc_path }}"
+ state: absent
diff --git a/scripts/.netrc b/scripts/.netrc
new file mode 100644
index 0000000..64b5c65
--- /dev/null
+++ b/scripts/.netrc
@@ -0,0 +1,3 @@
+machine nexus.onap.org
+login onap-integration
+password LF_IT_NEXUS_PWD
diff --git a/scripts/output_summary.sh b/scripts/output_summary.sh
new file mode 100755
index 0000000..d67f2aa
--- /dev/null
+++ b/scripts/output_summary.sh
@@ -0,0 +1,161 @@
+#!/bin/bash
+
+echo '_____________________________ Results ______________________'
+echo ''
+echo '************************************************************'
+echo '************************************************************'
+echo '************ Infrastructure-healthcheck Results ************'
+echo '************************************************************'
+echo '************************************************************'
+if [ -f "./public/$1/infrastructure-healthcheck/k8s/kubernetes-status/onap-k8s.log" ]; then
+ echo '--------> onap-k8s'
+ grep '>>>' "./public/$1/infrastructure-healthcheck/k8s/kubernetes-status/onap-k8s.log" | tr ',' '\n' | sed 's/>>>/ */; s/\[\([^]]\)/[\'$'\n - \\1/; s/^[ ]\([^\* ]\)/ - \\1/'
+else
+ echo '--------> onap-k8s NOT executed'
+fi
+if [ -f "./public/$1/infrastructure-healthcheck/k8s/onap-helm/onap-helm.log" ]; then
+ echo '--------> onap-helm'
+ grep '>>>' "./public/$1/infrastructure-healthcheck/k8s/onap-helm/onap-helm.log" | tr ',' '\n' | sed 's/>>>/ */; s/\[\([^]]\)/[\'$'\n - \\1/; s/^[ ]\([^\* ]\)/ - \\1/'
+else
+ echo '--------> onap-helm NOT executed'
+fi
+echo ''
+echo '************************************************************'
+echo '************************************************************'
+echo '********************* Healthcheck Results ******************'
+echo '************************************************************'
+echo '************************************************************'
+if [ -f "./public/$1/xtesting-healthcheck/full/xtesting.log" ]; then
+ echo '--------> robot full healthcheck tests'
+ sed -n '/xtesting.core.robotframework - INFO - $/,/Output/p' "./public/$1/xtesting-healthcheck/full/xtesting.log"
+else
+ echo '--------> robot full healthcheck tests NOT executed'
+fi
+echo ''
+echo '************************************************************'
+echo '************************************************************'
+echo '********************* Basic tests Results ******************'
+echo '************************************************************'
+echo '************************************************************'
+if [ -f "./public/$1/xtesting-healthcheck/healthdist/xtesting.log" ]; then
+ echo '--------> healthdist (vFW onboarding and distribution)'
+ sed -n '/xtesting.core.robotframework - INFO - $/,/Output/p' "./public/$1/xtesting-healthcheck/healthdist/xtesting.log" | grep '::' | grep '|'
+else
+ echo '--------> healthdist tests NOT executed'
+fi
+if [ -f "./public/$1/xtesting-healthcheck/postinstall/xtesting.log" ]; then
+ echo '--------> postinstall tests (dmaap and A&AI)'
+ sed -n '/xtesting.core.robotframework - INFO - $/,/Output/p' "./public/$1/xtesting-healthcheck/postinstall/xtesting.log" | grep '::' | grep '|'
+else
+ echo '--------> postinstall tests NOT executed'
+fi
+if [ -f "./public/$1/xtesting-smoke-usecases-robot/cmpv2/xtesting.log" ]; then
+ echo '--------> CMPv2 tests'
+ sed -n '/xtesting.core.robotframework - INFO - $/,/Output/p' "./public/$1/xtesting-smoke-usecases-robot/cmpv2/xtesting.log" | grep '::' | grep '|'
+else
+ echo '--------> CMPv2 tests NOT executed'
+fi
+if [ -f "./public/$1/xtesting-smoke-usecases-robot/dcaemod/xtesting.log" ]; then
+ echo '--------> DCAEMOD tests'
+ sed -n '/xtesting.core.robotframework - INFO - $/,/Output/p' "./public/$1/xtesting-smoke-usecases-robot/dcaemod/xtesting.log" | grep '|'
+else
+ echo '--------> DCAEMOD tests NOT executed'
+fi
+if [ -f "./public/$1/xtesting-smoke-usecases-robot/hv-ves/xtesting.log" ]; then
+ echo '--------> HV-VES tests'
+ sed -n '/xtesting.core.robotframework - INFO - $/,/Output/p' "./public/$1/xtesting-smoke-usecases-robot/hv-ves/xtesting.log" | grep '::' | grep '|'
+else
+ echo '--------> HV-VES tests NOT executed'
+fi
+if [ -f "./public/$1/xtesting-smoke-usecases-robot/ves-collector/xtesting.log" ]; then
+ echo '--------> VES collector tests'
+ sed -n '/xtesting.core.robotframework - INFO - $/,/Output/p' "./public/$1/xtesting-smoke-usecases-robot/ves-collector/xtesting.log" | grep '::' | grep '|'
+else
+ echo '--------> VES collector tests NOT executed'
+fi
+if [ -f "./public/$1/smoke-usecases/basic_onboard/xtesting.log" ]; then
+ echo "--------> Basic Onboard tests (SDC)"
+ NORMAL_RUN=$(grep -A2 RESULT "./public/$1/smoke-usecases/basic_onboard/xtesting.log" | grep basic_onboard | grep -v ERROR | awk {'print $2 ": " $8 " (" $6 ")"'})
+ if [ -z "$NORMAL_RUN" ]
+ then
+ RESULT=$(tail -n 1 "./public/$1/smoke-usecases/basic_onboard/xtesting.log" | cut -d'-' -f6 | cut -d':' -f 2)
+ echo "basic_onboard: $RESULT"
+ echo "basic_onboard hasn't finished well, check logs"
+ else
+ echo "$NORMAL_RUN"
+ fi
+
+else
+ echo "--------> Basic onboard tests NOT executed"
+fi
+if [ -f "./public/$1/smoke-usecases/basic_cds/xtesting.log" ]; then
+ echo "--------> CDS tests"
+ NORMAL_RUN=$(grep -A2 RESULT "./public/$1/smoke-usecases/basic_cds/xtesting.log" | grep basic_cds | grep -v ERROR | awk {'print $2 ": " $8 " (" $6 ")"'})
+ if [ -z "$NORMAL_RUN" ]
+ then
+ RESULT=$(tail -n 1 "./public/$1/smoke-usecases/basic_cds/xtesting.log" | cut -d'-' -f6 | cut -d':' -f 2)
+ echo "basic_cds: $RESULT"
+ echo "basic_cds hasn't finished well, check logs"
+ else
+ echo "$NORMAL_RUN"
+ fi
+else
+ echo "--------> CDS tests NOT executed"
+fi
+echo ''
+
+echo '************************************************************'
+echo '************************************************************'
+echo '******************** End to End usecases *******************'
+echo '************************************************************'
+echo '************************************************************'
+for test in pnf-registrate 5gbulkpm;do
+ if [ -f "./public/$1/xtesting-smoke-usecases-robot/$test/xtesting.log" ]; then
+ echo "--------> $test tests"
+ sed -n '/xtesting.core.robotframework - INFO - $/,/Output/p' "./public/$1/xtesting-smoke-usecases-robot/$test/xtesting.log" | grep '::' | grep '|'
+ else
+ echo "--------> $test tests NOT executed"
+ fi
+done
+
+for test in basic_vm basic_network basic_cnf basic_vm_macro basic_clamp pnf_macro cds_resource_resolution basic_cnf_macro;do
+ if [ -f "./public/$1/smoke-usecases/$test/xtesting.log" ]; then
+ echo "--------> $test tests"
+ NORMAL_RUN=$(grep -A2 RESULT "./public/$1/smoke-usecases/$test/xtesting.log" |grep $test | grep -v ERROR | awk {'print $2 ": " $8 " (" $6 ")"'})
+ if [ -z "$NORMAL_RUN" ]
+ then
+ RESULT=$(tail -n 1 "./public/$1/smoke-usecases/$test/xtesting.log" | cut -d'-' -f6 | cut -d':' -f 2)
+ echo "$test: $RESULT"
+ echo "$test hasn't finished well, check logs"
+ else
+ echo "$NORMAL_RUN"
+ fi
+ else
+ echo "--------> $test tests NOT executed"
+ fi
+done
+echo ''
+
+echo '************************************************************'
+echo '************************************************************'
+echo '********************** Security tests **********************'
+echo '************************************************************'
+echo '************************************************************'
+for test in nonssl_endpoints jdpw_ports kube_hunter root_pods unlimitted_pods;do
+ if [ -f "./public/$1/security/$test/xtesting.log" ]; then
+ echo "--------> $test tests"
+ NORMAL_RUN=$(grep -A2 RESULT "./public/$1/security/$test/xtesting.log" |grep $test | grep -v -E -- 'DEBUG|INFO|ERROR' | awk {'print $2 ": " $8 " (" $6 ")"'})
+ if [ -z "$NORMAL_RUN" ]
+ then
+ RESULT=$(tail -n 1 "./public/$1/security/$test/xtesting.log" | cut -d'-' -f6 | cut -d':' -f 2)
+ echo "$test: $RESULT"
+ echo "$test hasn't finished well, check logs"
+ else
+ echo "$NORMAL_RUN"
+ fi
+ else
+ echo "--------> $test tests NOT executed"
+ fi
+done
+echo ''
+echo '____________________________________________________________'
diff --git a/scripts/output_summary_s3.sh b/scripts/output_summary_s3.sh
new file mode 100755
index 0000000..411216c
--- /dev/null
+++ b/scripts/output_summary_s3.sh
@@ -0,0 +1,162 @@
+#!/bin/bash
+
+echo '_____________________________ Results ______________________'
+echo ''
+echo '************************************************************'
+echo '************************************************************'
+echo '************ Infrastructure-healthcheck Results ************'
+echo '************************************************************'
+echo '************************************************************'
+if [ -f "$1/k8s/k8s/kubernetes-status/onap-k8s.log" ]; then
+ echo '--------> onap-k8s'
+ grep '>>>' "$1/k8s/k8s/kubernetes-status/onap-k8s.log" | tr ',' '\n' | sed 's/>>>/ */; s/\[\([^]]\)/[\'$'\n - \\1/; s/^[ ]\([^\* ]\)/ - \\1/'
+else
+ echo '--------> onap-k8s NOT executed'
+fi
+if [ -f "$1/k8s/k8s/onap-helm/onap-helm.log" ]; then
+echo '--------> onap-helm'
+grep '>>>' "$1/k8s/k8s/onap-helm/onap-helm.log" | tr ',' '\n' | sed 's/>>>/ */; s/\[\([^]]\)/[\'$'\n - \\1/; s/^[ ]\([^\* ]\)/ - \\1/'
+else
+ echo '--------> onap-helm NOT executed'
+fi
+echo ''
+echo '************************************************************'
+echo '************************************************************'
+echo '********************* Healthcheck Results ******************'
+echo '************************************************************'
+echo '************************************************************'
+if [ -f "$1/full/xtesting.log" ]; then
+ echo '--------> robot full healthcheck tests'
+ sed -n '/xtesting.core.robotframework - INFO - $/,/Output/p' "$1/full/xtesting.log"
+else
+ echo '--------> robot full healthcheck tests NOT executed'
+fi
+echo ''
+echo '************************************************************'
+echo '************************************************************'
+echo '********************* Basic tests Results ******************'
+echo '************************************************************'
+echo '************************************************************'
+if [ -f "$1/healthdist/xtesting.log" ]; then
+ echo '--------> healthdist (vFW onboarding and distribution)'
+ sed -n '/xtesting.core.robotframework - INFO - $/,/Output/p' "$1/healthdist/xtesting.log" | grep '::' | grep '|'
+else
+ echo '--------> healthdist tests NOT executed'
+fi
+if [ -f "$1/postinstall/xtesting.log" ]; then
+ echo '--------> postinstall tests (dmaap and A&AI)'
+ sed -n '/xtesting.core.robotframework - INFO - $/,/Output/p' "$1/postinstall/xtesting.log" | grep '::' | grep '|'
+else
+ echo '--------> postinstall tests NOT executed'
+fi
+if [ -f "$1/cmpv2/xtesting.log" ]; then
+ echo '--------> CMPv2 tests'
+ sed -n '/xtesting.core.robotframework - INFO - $/,/Output/p' "$1/cmpv2/xtesting.log" | grep '::' | grep '|'
+else
+ echo '--------> CMPv2 tests NOT executed'
+fi
+if [ -f "$1/dcaemod/xtesting.log" ]; then
+ echo '--------> DCAEMOD tests'
+ sed -n '/xtesting.core.robotframework - INFO - $/,/Output/p' "$1/dcaemod/xtesting.log" | grep '::' | grep '|'
+else
+ echo '--------> DCAEMOD tests NOT executed'
+fi
+if [ -f "$1/hv-ves/xtesting.log" ]; then
+ echo '--------> HV-VES tests'
+ sed -n '/xtesting.core.robotframework - INFO - $/,/Output/p' "$1/hv-ves/xtesting.log" | grep '::' | grep '|'
+else
+ echo '--------> HV-VES tests NOT executed'
+fi
+if [ -f "$1/ves-collector/xtesting.log" ]; then
+ echo '--------> VES collector tests'
+ sed -n '/xtesting.core.robotframework - INFO - $/,/Output/p' "$1/ves-collector/xtesting.log" | grep '::' | grep '|'
+else
+ echo '--------> VES collector tests NOT executed'
+fi
+if [ -f "$1/basic_onboard/xtesting.log" ]; then
+ echo "--------> Basic Onboard tests (SDC)"
+ NORMAL_RUN=$(grep -A2 RESULT "$1/basic_onboard/xtesting.log" | grep basic_onboard | grep -v ERROR | awk {'print $2 ": " $8 " (" $6 ")"'})
+ if [ -z "$NORMAL_RUN" ]
+ then
+ RESULT=$(tail -n 1 "$1/basic_onboard/xtesting.log" | cut -d'-' -f6 | cut -d':' -f 2)
+ echo "basic_onboard: $RESULT"
+ echo "basic_onboard hasn't finished well, check logs"
+ else
+ echo "$NORMAL_RUN"
+ fi
+
+else
+ echo "--------> Basic onboard tests NOT executed"
+fi
+if [ -f "$1/basic_cds/xtesting.log" ]; then
+ echo "--------> CDS tests"
+ NORMAL_RUN=$(grep -A2 RESULT "$1/basic_cds/xtesting.log" | grep basic_cds | grep -v ERROR | awk {'print $2 ": " $8 " (" $6 ")"'})
+ if [ -z "$NORMAL_RUN" ]
+ then
+ RESULT=$(tail -n 1 "$1/basic_cds/xtesting.log" | cut -d'-' -f6 | cut -d':' -f 2)
+ echo "basic_cds: $RESULT"
+ echo "basic_cds hasn't finished well, check logs"
+ else
+ echo "$NORMAL_RUN"
+ fi
+
+else
+ echo "--------> CDS tests NOT executed"
+fi
+echo ''
+
+echo '************************************************************'
+echo '************************************************************'
+echo '******************** End to End usecases *******************'
+echo '************************************************************'
+echo '************************************************************'
+for test in pnf-registrate 5gbulkpm;do
+ if [ -f "$1/$test/xtesting.log" ]; then
+ echo "--------> $test tests"
+ sed -n '/xtesting.core.robotframework - INFO - $/,/Output/p' "$1/$test/xtesting.log" | grep '::' | grep '|'
+ else
+ echo "--------> $test tests NOT executed"
+ fi
+done
+
+for test in basic_vm basic_network basic_cnf basic_vm_macro basic_clamp pnf_macro cds_resource_resolution basic_cnf_macro;do
+ if [ -f "$1/$test/xtesting.log" ]; then
+ echo "--------> $test tests"
+ NORMAL_RUN=$(grep -A2 RESULT "$1/$test/xtesting.log" |grep $test | grep -v ERROR | awk {'print $2 ": " $8 " (" $6 ")"'})
+ if [ -z "$NORMAL_RUN" ]
+ then
+ RESULT=$(tail -n 1 "$1/$test/xtesting.log" | cut -d'-' -f6 | cut -d':' -f 2)
+ echo "$test: $RESULT"
+ echo "$test hasn't finished well, check logs"
+ else
+ echo "$NORMAL_RUN"
+ fi
+ else
+ echo "--------> $test tests NOT executed"
+ fi
+done
+echo ''
+
+echo '************************************************************'
+echo '************************************************************'
+echo '********************** Security tests **********************'
+echo '************************************************************'
+echo '************************************************************'
+for test in nonssl_endpoints jdpw_ports kube_hunter root_pods unlimitted_pods;do
+ if [ -f "$1/$test/xtesting.log" ]; then
+ echo "--------> $test tests"
+ NORMAL_RUN=$(grep -A2 RESULT "$1/$test/xtesting.log" |grep $test | grep -v -E -- 'DEBUG|INFO|ERROR' | awk {'print $2 ": " $8 " (" $6 ")"'})
+ if [ -z "$NORMAL_RUN" ]
+ then
+ RESULT=$(tail -n 1 "$1/$test/xtesting.log" | cut -d'-' -f6 | cut -d':' -f 2)
+ echo "$test: $RESULT"
+ echo "$test hasn't finished well, check logs"
+ else
+ echo "$NORMAL_RUN"
+ fi
+ else
+ echo "--------> $test tests NOT executed"
+ fi
+done
+echo ''
+echo '____________________________________________________________'
diff --git a/scripts/push_results_to_lf.sh b/scripts/push_results_to_lf.sh
new file mode 100755
index 0000000..5a0cbe7
--- /dev/null
+++ b/scripts/push_results_to_lf.sh
@@ -0,0 +1,118 @@
+#!/bin/bash
+# backup results to LF server
+
+NEXUS_URL=https://nexus.onap.org
+SILO=onap-integration
+ARCHIVES_DIR=/tmp
+
+# We need minio client in order to retrieve files when on S3 mode
+if [ -z "${S3_ENDPOINT_URL}" ]
+then
+ echo "S3 vars are not set, not installing mc"
+else
+ wget https://dl.min.io/client/mc/release/linux-amd64/mc
+ chmod +x mc
+ ./mc alias set s3 "${S3_ENDPOINT_URL}" "${S3_ACCESS_KEY}" "${S3_SECRET_KEY}"
+fi
+
+# We need lftools to push results to LF
+pip install lftools
+
+# create .netrc
+# netrc contains credentials to push artifacts to LF
+# the password is defined as a gitlab-ci variable
+if [ ! -f ~/.netrc ]; then
+ # If .netrc does not exist create one from the template
+ cp scripts/.netrc ~
+else
+ # if one already exists, save this config in /tmp
+ # and replace it by the template
+ mv ~/.netrc /tmp
+ cp scripts/.netrc ~
+fi
+sed -i 's/LF_IT_NEXUS_PWD/'$LF_RESULTS_BACKUP'/g' ~/.netrc
+chmod 600 ~/.netrc
+
+# prepare the archives
+echo "Prepare the archive for $pod"
+if [ -z "$GERRIT_REVIEW" ]
+then
+ if [[ $1 == *"weekly"* ]]
+ then
+ FREQUENCY="weekly"
+ else
+ FREQUENCY="daily"
+ fi
+ if [ -z "${CI_PIPELINE_CREATED_AT}" ]
+ then
+ NEXUS_PATH="${SILO}/$FREQUENCY/$pod/$(date +'%Y-%m')/$(date +'%d_%H-%M')"
+ else
+ NEXUS_PATH="${SILO}/$FREQUENCY/$pod/$(date -d${CI_PIPELINE_CREATED_AT} +'%Y-%m')/$(date -d${CI_PIPELINE_CREATED_AT} +'%d_%H-%M')"
+ fi
+else
+ if [ -z "$EXPERIMENTAL" ]
+ then
+ NEXUS_PATH="${SILO}/gating/$GERRIT_REVIEW-$GERRIT_PATCHSET"
+ else
+ NEXUS_PATH="${SILO}/experimental-gating/$GERRIT_REVIEW-$GERRIT_PATCHSET"
+ fi
+fi
+mkdir -p $ARCHIVES_DIR/archives
+
+if [ -z "${CI_PIPELINE_ID}" ]
+then
+ CI_PIPELINE_ID="64"
+fi
+
+if [ -z "${S3_ENDPOINT_URL}" ]
+then
+ echo "*** non S3 mode, use legacy method ***"
+ cp -rf $1/* $ARCHIVES_DIR/archives
+else
+ echo "*** S3 mode ***"
+ if [ -z "$GERRIT_REVIEW" ]
+ then
+ echo "** non gating result"
+ if [ -z "${CI_PIPELINE_CREATED_AT}" ]
+ then
+ DATE=$(date "+%Y-%m-%d")
+ else
+ DATE=$(date -d${CI_PIPELINE_CREATED_AT} "+%Y-%m-%d")
+ fi
+ IDENTIFIER="${pod}/${DATE}-${CI_PIPELINE_ID}"
+ if [ -z "$FREQUENCY" ]
+ then
+ TEST_TYPE="daily"
+ else
+ TEST_TYPE="${FREQUENCY}"
+ fi
+ else
+ echo "** gating result"
+ TEST_TYPE="gating"
+ IDENTIFIER="${GERRIT_REVIEW}-${GERRIT_PATCHSET}-${CI_PIPELINE_ID}"
+ fi
+ if [ -z "$EXPERIMENTAL" ]
+ then
+ echo "* not an experimental test"
+ else
+ echo "* experimental test"
+ TEST_TYPE="${TEST_TYPE}-experimental"
+ fi
+
+ S3_PATH="s3/onap/${TEST_TYPE}/${IDENTIFIER}/"
+ cp -rf $1/index.html $ARCHIVES_DIR/archives/index.html
+ ./mc cp --recursive "${S3_PATH}" $ARCHIVES_DIR/archives
+ scripts/output_summary_s3.sh $ARCHIVES_DIR/archives
+fi
+
+# Push results to LF nexus
+echo " call lftools"
+lftools deploy archives $NEXUS_URL $NEXUS_PATH $ARCHIVES_DIR
+echo "Results uploaded to $NEXUS_URL/content/sites/logs/$NEXUS_PATH"
+
+# clean
+rm -Rf $ARCHIVES_DIR/archives  # ARCHIVES_TMP_DIR was never defined; remove the dir actually created above
+# restore old .netrc
+if [ -f /tmp/.netrc ]; then
+ mv /tmp/.netrc ~/.netrc
+fi
diff --git a/scripts/run_chaos_tests.sh b/scripts/run_chaos_tests.sh
new file mode 100755
index 0000000..f57b97e
--- /dev/null
+++ b/scripts/run_chaos_tests.sh
@@ -0,0 +1,116 @@
+#!/usr/bin/env bash
+#
+# Copyright Orange (c) 2021 All rights reserved
+# This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+# This script will do the following:
+# - execute a list of chaos tests
+# - an aggregation of the results in an html page
+
+#set -euxo pipefail
+
+CHAOS_TESTS_LOCATION=${STABILITY_TESTS_LOCATION:-/tmp/resiliency}
+ARCHIVES_LOCATION=${ARCHIVES_LOCATION:-/tmp/resiliency/}
+CHAOS_DRAIN_SDC=$ARCHIVES_LOCATION/archives/chaos/chaos-drain
+
+prepare_chaos_tests() {
+ # create a dedicated virtualenv and install the chaos test
+ # framework (onaptests_chaos) from its source repository
+ local chaos_tests_location=$1
+ mkdir -p $CHAOS_DRAIN_SDC
+ cd $chaos_tests_location
+ python3 -m venv resiliency_tests_env
+ cd resiliency_tests_env
+ . bin/activate
+ pip install pip --upgrade
+ pip install git+https://gitlab.com/Orange-OpenSource/lfn/onap/integration/onaptests_chaos.git
+ cd $chaos_tests_location
+}
+
+launch_chaos_tests() {
+ local chaos_tests_location=$1
+ cd $chaos_tests_location
+
+ # the goal of this script is to run sequentially
+ # the selected resiliency tests
+
+ TEST_DIR=$(pwd)
+ NB_RETRY_MAX=10
+ TARGET_NODE="compute01-onap-master"
+
+ for test in node-cpu-hog node-memory-hog node-drain pod-delete-aai
+ do
+ echo "Setup $test RBAC"
+ rbac_file=$test"-rbac.yaml"
+ kubectl apply -f $TEST_DIR/$test/$rbac_file
+ echo "launch chaos for $test"
+ if [ $test = "node-drain" ]
+ then
+ kubectl cordon $TARGET_NODE
+ fi
+
+ chaos_file=$test"-chaos.yaml"
+ kubectl apply -f $TEST_DIR/$test/$chaos_file
+
+ # check the chaos is Completed
+ echo "Wait for chaos completion"
+ check_status=1
+ nb_retry=0
+ while [ $nb_retry -lt $NB_RETRY_MAX ] && [ $check_status -gt 0 ]
+ do
+ kubectl get chaosengine -n onap $test | grep Completed
+ check_status=$?
+ let "nb_retry++"
+ sleep 30
+ done
+ echo "Chaos $test completed"
+ done
+
+ # NOTE(review): after the loop, $test holds the last list entry
+ # ("pod-delete-aai"), so the old node-drain check here never matched
+ # and a drained node was left cordoned; uncordon unconditionally
+ kubectl uncordon $TARGET_NODE
+
+ sleep 120
+
+ # get the results, wait for the result of the last test to be Completed
+ # we expect that the previous ones are completed
+ check_status=1
+ nb_retry=0
+ while [ $nb_retry -lt $NB_RETRY_MAX ] && [ $check_status -gt 0 ]
+ do
+ kubectl describe chaosengine -n onap node-drain | grep Completed
+ check_status=$?
+ let "nb_retry++"
+ sleep 30
+ echo "Test still running...."
+ done
+
+ # we collect all the chaosresults in json files
+ for result in $(kubectl get chaosresult -n onap |awk {'print $1'} | grep -v NAME)
+ do
+ result_file=$result".json"
+ kubectl get chaosresult -n onap $result -o json > $result_file
+ done
+
+ # Cleanup chaos resources
+ kubectl delete chaosengine -n onap --all
+ kubectl delete chaosresult -n onap --all
+}
+
+generate_html_page() {
+ echo "Generate html page"
+ generate_chaos_reporting -r /tmp/resiliency/reporting_chaos.html
+}
+
+echo "Prepare chaos tests"
+prepare_chaos_tests $CHAOS_TESTS_LOCATION
+
+launch_chaos_tests $CHAOS_TESTS_LOCATION
+
+generate_html_page
diff --git a/scripts/run_stability_tests.sh b/scripts/run_stability_tests.sh
new file mode 100755
index 0000000..e69a56e
--- /dev/null
+++ b/scripts/run_stability_tests.sh
@@ -0,0 +1,99 @@
+#!/usr/bin/env bash
+#
+# Copyright Orange (c) 2021 All rights reserved
+# This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+# This script will do the following:
+# 1. prepare the benchmark env
+# 2. clone onaptests_bench
+# 3. run 2 tests:
+# - 5 // onboarding during 24h (basic_onboard)
+# - 10 // instantiation using a_la_carte bpmn during 24h (basic_vm)
+# 4. push results via lftools taking ARCHIVES_LOCATION as argument.
+# Requires CI_PIPELINE_CREATED_AT, $POD, $LF_RESULTS_BACKUP vars to be set
+#
+# Dependencies:
+# - python3-venv
+# - libssl-dev
+# - onaptests_bench
+
+set -euxo pipefail
+
+STABILITY_TESTS_LOCATION=${STABILITY_TESTS_LOCATION:-"$HOME"}
+ARCHIVES_LOCATION=${ARCHIVES_LOCATION:-/tmp/stability/}
+RESULTS_STABILITY_SDC=$ARCHIVES_LOCATION/archives/stability/results_sdc_5_24h/
+RESULTS_STABILITY_INSTANTIATION=$ARCHIVES_LOCATION/archives/stability/results_instantiation_10_24h/
+
+prepare_stability_tests() {
+ # create a dedicated virtualenv and install the stability test
+ # framework (onaptests_bench) from its source repository
+ local stability_tests_location=$1
+ mkdir -p $RESULTS_STABILITY_SDC
+ mkdir -p $RESULTS_STABILITY_INSTANTIATION
+ echo $CI_PIPELINE_CREATED_AT
+
+ cd /tmp
+ echo "Create virtualenv to launch stability tests"
+ python3 -m venv stability_tests_env
+ cd stability_tests_env
+ . bin/activate
+ echo "Install onaptests_bench as a python module"
+ export CRYPTOGRAPHY_DONT_BUILD_RUST=1
+ pip install pip --upgrade
+ pip install --no-cache-dir git+https://gitlab.com/Orange-OpenSource/lfn/onap/integration/onaptests_bench.git
+}
+
+launch_stability_tests() {
+ local stability_tests_location=$1
+ cd /tmp/stability_tests_env
+ . bin/activate
+ # Tests are launched sequentially
+ echo "===========> Launch Instantiation stability test"
+ run_stability_tests -t basic_vm -s 10 -d 1440 -r $RESULTS_STABILITY_INSTANTIATION
+ echo "===========> Launch SDC stability test"
+ run_stability_tests -t basic_onboard -s 5 -d 1440 -r $RESULTS_STABILITY_SDC
+}
+
+push_results() {
+ local archives_location=$1
+ local nexus_url="https://nexus.onap.org"
+ local nexus_path="onap-integration/weekly/$POD/$(date -d${CI_PIPELINE_CREATED_AT} +'%Y-%m')/$(date -d${CI_PIPELINE_CREATED_AT} +'%d_%H-%M')"
+ sudo chown -Rf debian:debian $ARCHIVES_LOCATION
+ echo "===========> Send Result to LF Backend"
+ echo "nexus url:"$nexus_url
+ echo "nexus_path"$nexus_path
+ lftools deploy archives $nexus_url $nexus_path $archives_location
+}
+
+echo "Prepare stability tests"
+prepare_stability_tests $STABILITY_TESTS_LOCATION
+
+launch_stability_tests $STABILITY_TESTS_LOCATION
+
+echo "push results to LF backend.."
+push_results ${ARCHIVES_LOCATION}
+
+# Once the stability tests results have been pushed to LF, we can
+# - sync the results of the tests checking the versions
+# - start the resiliency tests
+
+# push the versions if results exist
+if [ -f /dockerdata-nfs/onap/integration/security/versions/versions_reporting.html ]; then
+ mkdir -p /tmp/versions/archives/security/versions/
+ cp /dockerdata-nfs/onap/integration/security/versions/versions_reporting.html /tmp/versions/archives/security/versions/versions.html
+ push_results /tmp/versions
+fi
+
+# execute the resiliency tests then push the results to LF backend
+cd /tmp/resiliency
+./run_chaos_tests.sh
+if [ -f /tmp/resiliency/reporting_chaos.html ]; then
+ mkdir -p /tmp/resiliency/archives/resiliency
+ cp /tmp/resiliency/reporting_chaos.html /tmp/resiliency/archives/resiliency/reporting_chaos.html
+ push_results /tmp/resiliency
+fi
diff --git a/scripts/run_tern.sh b/scripts/run_tern.sh
new file mode 100755
index 0000000..d6fb3ce
--- /dev/null
+++ b/scripts/run_tern.sh
@@ -0,0 +1,184 @@
+#!/usr/bin/env bash
+#
+# Copyright Samsung Electronics (c) 2021 All rights reserved
+# This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+# This script will do the following:
+# 1. install tern in $TERN_LOCATION/ternenv (defaults to $HOME)
+# 2. query kubectl for all images from $K8NAMESPACE
+# 3. run tern analysis on each image while generating $HTML_REPORT in
+# current directory & placing results in $OUT directory. Each report
+# and log will have the image name with '/' substituted to '_'.
+# 4. push results via lftools taking ARCHIVES_LOCATION as argument.
+# Requires CI_PIPELINE_CREATED_AT, $POD, $LF_RESULTS_BACKUP vars to be set
+#
+# Dependencies:
+# - fuse-overlayfs
+# - attr
+# - python3-venv
+# - jq
+# - lftools (python package)
+
+set -euxo pipefail
+
+TERN_LOCATION=${TERN_LOCATION:-"$HOME"}
+OUT=${OUT:-tern}
+HTML_REPORT=${HTML_REPORT:-index.html}
+K8NAMESPACE=${K8NAMESPACE:-onap}
+ARCHIVES_LOCATION=${ARCHIVES_LOCATION:-/tmp/tern}
+
+install_tern() {
+ # current release has bug with image name parsing, need to install
+ # from source until release (end of March 2021)
+ local tern_location=$1
+ local initial_dir=$(pwd)
+ cd $tern_location
+ python3 -m venv ternenv
+ cd ternenv
+ . bin/activate
+ git clone https://github.com/tern-tools/tern --branch main || true
+ cd tern
+ git pull origin main
+ git checkout 52fd8f3ee915c0c637d82dbeb0856219780688c7
+ python3 -m pip install wheel
+ python3 -m pip install .
+ cd $initial_dir
+ echo "===========> Tern installed"
+}
+
+init_tern() {
+ local tern_location=$1
+ local initial_dir=$(pwd)
+ cd $tern_location
+ cd ternenv
+ . bin/activate
+ cd $initial_dir
+}
+
+
+print_head() {
+ local html_report=$1
+
+ echo '<!DOCTYPE html>
+ <html lang="en">
+ <head>
+ <meta charset="utf-8">
+ <title>ONAP Tern analysis</title>
+ </head>
+ <body>
+ <table>
+ <caption>Results</caption>
+ <thead>
+ <tr>
+ <th>Image</th>
+ <th>Version</th>
+ <th>Report</th>
+ <th>Log</th>
+ <th>Pkgs with GPLv3</th>
+ <th>Pkgs with undefined lic</th>
+ <th>Notes</th>
+ </tr>
+ </thead>
+ <tbody>' >> $html_report
+}
+
+print_tail() {
+ local html_report=$1
+
+ echo ' </tbody>
+ </table>
+ </body>
+ </html>' >> $html_report
+}
+
+print_image() {
+ local html_report=$1
+ local full_img_name=$2
+ local report=$3
+ local log=$4
+
+
+ local pkglicenses=""
+ local gplv3pkgs=""
+ local licnotfound=""
+ local notes=""
+
+ local img=${2%:*}
+ local ver=${2##*:}
+
+ if [[ -s "$report" ]]
+ then
+ pkglicenses=$(jq '.images | .[].image.layers | .[]?.packages | .[] | "\(.name) \(.pkg_licenses) \(.pkg_license)"' ${report}) || true
+ gplv3pkgs=$(echo "${pkglicenses}" |grep GPL-3 | awk '{ print substr($1,2); }' | tr '\n' ' ') || true
+ licnotfound=$(echo "${pkglicenses}" |grep -e ' \[\] \"' | awk '{ print substr($1,2); }' | tr '\n' ' ') || true
+ else
+ if [[ $(grep -m 1 -hEe "(Traceback|CRITICAL)" $log) ]];
+ then
+ notes='Report not generated, check logs for traceback/critical error'
+ fi
+ fi
+
+ echo " <tr>
+ <td>${img}</td>
+ <td>${ver}</td>
+ <td><a href="${report}">Report</a></td>
+ <td><a href="${log}">Log</a></td>
+ <td>${gplv3pkgs}</td>
+ <td>${licnotfound}</td>
+ <td>${notes}</td>
+ </tr>" >> $1
+}
+
+analyze() {
+ local img=$1
+ local report=$2
+ local log=$3
+ echo "$img analysis started"
+ tern report -f json -i ${img} 1> ${report} 2> ${log} || true
+}
+
+get_images() {
+ local namespace=$1
+ kubectl get pods --namespace $namespace \
+ -o jsonpath="{.items[*].spec.containers[*].image}" |\
+ tr -s '[[:space:]]' '\n' | sort | uniq -u
+}
+
+push_results() {
+ local archives_location=$1
+ local nexus_url="https://nexus.onap.org"
+ local nexus_path="onap-integration/weekly/$POD/$(date -d${CI_PIPELINE_CREATED_AT} +'%Y-%m')/$(date -d${CI_PIPELINE_CREATED_AT} +'%d_%H-%M')"
+ echo "===========> Send Result to LF Backend"
+ echo "nexus_url:"$nexus_url
+ echo "nexus_path"$nexus_path
+ cd $archives_location && lftools deploy archives $nexus_url $nexus_path $archives_location
+}
+
+images=( $(get_images $K8NAMESPACE) )
+
+mkdir -p $OUT
+rm -f $HTML_REPORT
+install_tern $TERN_LOCATION
+
+print_head ${HTML_REPORT}
+
+for (( i=0; i<${#images[@]}; i++ ))
+do
+
+ fname=${images[$i]//\//_}
+ report=${OUT}/${fname}".json"
+ log=${OUT}/${fname}".log"
+
+ analyze ${images[$i]} ${report} ${log}
+ print_image ${HTML_REPORT} ${images[$i]} ${report} ${log}
+done
+
+print_tail ${HTML_REPORT}
+echo "===========> Finished analysis of all images in "$K8NAMESPACE
+
+push_results ${ARCHIVES_LOCATION}
diff --git a/xtesting-healthcheck-k8s-job.yaml b/xtesting-healthcheck-k8s-job.yaml
new file mode 100644
index 0000000..8f8b696
--- /dev/null
+++ b/xtesting-healthcheck-k8s-job.yaml
@@ -0,0 +1,8 @@
+---
+- hosts: kube-master
+ run_once: "yes"
+ vars_files:
+ - "vars/pdf.yml"
+ - "vars/cluster.yml"
+ roles:
+ - xtesting-healthcheck-k8s-job
diff --git a/xtesting-healthcheck-k8s.yaml b/xtesting-healthcheck-k8s.yaml
new file mode 100644
index 0000000..cab171e
--- /dev/null
+++ b/xtesting-healthcheck-k8s.yaml
@@ -0,0 +1,8 @@
+---
+- hosts: kube-master
+ run_once: "yes"
+ vars_files:
+ - "vars/pdf.yml"
+ - "vars/cluster.yml"
+ roles:
+ - xtesting-healthcheck-k8s
diff --git a/xtesting-healthcheck.yaml b/xtesting-healthcheck.yaml
new file mode 100644
index 0000000..6c7d141
--- /dev/null
+++ b/xtesting-healthcheck.yaml
@@ -0,0 +1,7 @@
+---
+- hosts: kube-master
+ run_once: "yes"
+ vars_files:
+ - "vars/pdf.yml"
+ roles:
+ - xtesting-healthcheck
diff --git a/xtesting-jumphost.yaml b/xtesting-jumphost.yaml
new file mode 100644
index 0000000..43a10de
--- /dev/null
+++ b/xtesting-jumphost.yaml
@@ -0,0 +1,5 @@
+---
+# file: webservers.yml
+- hosts: kube-master
+ roles:
+ - xtesting-jumphost
diff --git a/xtesting-onap-security.yaml b/xtesting-onap-security.yaml
new file mode 100644
index 0000000..d84382d
--- /dev/null
+++ b/xtesting-onap-security.yaml
@@ -0,0 +1,7 @@
+---
+- hosts: kube-master
+ run_once: "yes"
+ vars_files:
+ - "vars/pdf.yml"
+ roles:
+ - xtesting-onap-security
diff --git a/xtesting-onap-vnf.yaml b/xtesting-onap-vnf.yaml
new file mode 100644
index 0000000..9d84483
--- /dev/null
+++ b/xtesting-onap-vnf.yaml
@@ -0,0 +1,9 @@
+---
+# file: webservers.yml
+- hosts: kube-master
+ run_once: "yes"
+ vars_files:
+ - "vars/pdf.yml"
+ - "vars/cluster.yml"
+ roles:
+ - xtesting-onap-vnf
diff --git a/xtesting-pages.yaml b/xtesting-pages.yaml
new file mode 100644
index 0000000..db85102
--- /dev/null
+++ b/xtesting-pages.yaml
@@ -0,0 +1,8 @@
+---
+# file: webservers.yml
+- hosts: kube-master
+ run_once: "yes"
+ roles:
+ - xtesting-pages
+ vars_files:
+ - "vars/pdf.yml"