Diffstat (limited to 'roles/xtesting-healthcheck')
-rw-r--r--  roles/xtesting-healthcheck/defaults/main.yaml | 130
-rw-r--r--  roles/xtesting-healthcheck/tasks/launch.yaml  |  50
-rw-r--r--  roles/xtesting-healthcheck/tasks/main.yaml    |   5
-rw-r--r--  roles/xtesting-healthcheck/tasks/prepare.yaml |  52
4 files changed, 237 insertions, 0 deletions
diff --git a/roles/xtesting-healthcheck/defaults/main.yaml b/roles/xtesting-healthcheck/defaults/main.yaml
new file mode 100644
index 0000000..6312a16
--- /dev/null
+++ b/roles/xtesting-healthcheck/defaults/main.yaml
@@ -0,0 +1,130 @@
+---
+tests:
+  - core
+
+healthcheck_secret:
+  apiVersion: v1
+  kind: Secret
+  metadata:
+    name: s3-keys
+    namespace: "{{ onap_namespace }}"
+  data:
+    access-key: "{{ s3_access_key | string | b64encode }}"
+    secret-key: "{{ s3_secret_key | string | b64encode }}"
+
+healthcheck_deployment:
+  apiVersion: batch/v1
+  kind: Job
+  metadata:
+    name: "integration-onap-{{ run_type }}"
+    namespace: "{{ onap_namespace }}"
+  spec:
+    backoffLimit: 0
+    template:
+      metadata:
+        annotations:
+          sidecar.istio.io/inject: "false"
+      spec:
+        restartPolicy: Never
+        containers:
+          - name: "functest-onap-{{ run_type }}"
+            image: "{{ testing_container }}:{{ testing_container_tag }}"
+            imagePullPolicy: Always
+            env: "{{ healthcheck_deployment_env }}"
+            volumeMounts:
+              - name: localtime
+                mountPath: /etc/localtime
+                readOnly: true
+              - name: robot-eteshare
+                mountPath: /share/config
+              - name: robot-save-results
+                mountPath: /var/lib/xtesting/results/
+            command:
+              - run_tests
+            args: "{{ args }}"
+        volumes: "{{ job_volumes }}"
+
+healthcheck_deployment_env_legacy:
+  - name: INSTALLER_TYPE
+    value: "{{ deployment_name }}"
+  - name: DEPLOY_SCENARIO
+    value: "{{ deploy_scenario }}"
+  - name: NODE_NAME
+    value: "{{ node_name }}"
+  - name: TEST_DB_URL
+    value: "{{ test_result_url }}"
+  - name: BUILD_TAG
+    value: "{{ build_tag }}"
+  - name: TAG
+    value: "{{ run_type }}"
+
+healthcheck_deployment_env_s3:
+  - name: INSTALLER_TYPE
+    value: "{{ deployment_name }}"
+  - name: DEPLOY_SCENARIO
+    value: "{{ deploy_scenario }}"
+  - name: NODE_NAME
+    value: "{{ node_name }}"
+  - name: TEST_DB_URL
+    value: "{{ test_result_url }}"
+  - name: BUILD_TAG
+    value: "{{ build_tag }}"
+  - name: TAG
+    value: "{{ run_type }}"
+  - name: S3_ENDPOINT_URL
+    value: "{{ s3_internal_url }}"
+  - name: S3_DST_URL
+    value: "{{ s3_dst }}"
+  - name: HTTP_DST_URL
+    value: "{{ s3_http_url_endpoint }}"
+  - name: AWS_ACCESS_KEY_ID
+    valueFrom:
+      secretKeyRef:
+        key: access-key
+        name: s3-keys
+  - name: AWS_SECRET_ACCESS_KEY
+    valueFrom:
+      secretKeyRef:
+        key: secret-key
+        name: s3-keys
+
+healthcheck_deployment_env: "{{ use_s3 | bool |
+  ternary(healthcheck_deployment_env_s3, healthcheck_deployment_env_legacy) }}"
+
+args_legacy:
+  - --test
+  - "{{ run_type }}"
+  - --report
+
+args_s3:
+  - --test
+  - "{{ run_type }}"
+  - --push
+  - --report
+
+args: "{{ use_s3 | bool | ternary(args_s3, args_legacy) }}"
+
+volumes_legacy:
+  - name: localtime
+    hostPath:
+      path: /etc/localtime
+  - name: robot-eteshare
+    configMap:
+      name: "{{ onap_namespace }}-{{ robot_configmap }}"
+      defaultMode: 0755
+  - name: robot-save-results
+    hostPath:
+      path: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+
+volumes_s3:
+  - name: localtime
+    hostPath:
+      path: /etc/localtime
+  - name: robot-eteshare
+    configMap:
+      name: "{{ onap_namespace }}-{{ robot_configmap }}"
+      defaultMode: 0755
+  - name: robot-save-results
+    emptyDir: {}
+
+job_volumes: "{{ use_s3 | bool | ternary(volumes_s3, volumes_legacy) }}"
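
Note: every legacy/S3 pair above is resolved through the same ternary pattern, so the single use_s3 flag switches env vars, CLI args, and volumes together while the Job template itself never changes. A minimal sketch of how the filter resolves (the debug task is hypothetical; the variable names are the role's own):

- ansible.builtin.debug:
    msg: "{{ use_s3 | bool | ternary(args_s3, args_legacy) }}"
  # use_s3=true  -> ['--test', run_type, '--push', '--report']
  # use_s3=false -> ['--test', run_type, '--report']
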
diff --git a/roles/xtesting-healthcheck/tasks/launch.yaml b/roles/xtesting-healthcheck/tasks/launch.yaml
new file mode 100644
index 0000000..5ec978b
--- /dev/null
+++ b/roles/xtesting-healthcheck/tasks/launch.yaml
@@ -0,0 +1,50 @@
+---
+- block:
+    - name: ensure secret is present
+      community.kubernetes.k8s:
+        state: present
+        src: "{{ k8s_job__dir_path }}/s3-keys-{{ run_type }}.yaml"
+        kubeconfig: "{{ global_local_path }}/{{ kube_conf_file }}"
+      when: use_s3 | bool
+
+    - name: start healthcheck job
+      community.kubernetes.k8s:
+        state: present
+        src: "{{ k8s_job__dir_path }}/healthcheck-{{ run_type }}.yaml"
+        kubeconfig: "{{ global_local_path }}/{{ kube_conf_file }}"
+
+    - name: wait for end of job
+      community.kubernetes.k8s_info:
+        kind: Job
+        name: "integration-onap-{{ run_type }}"
+        namespace: "{{ onap_namespace }}"
+      register: job_info
+      until: (job_info.resources[0].status.succeeded is defined and
+              job_info.resources[0].status.succeeded == 1) or
+             (job_info.resources[0].status.failed is defined and
+              job_info.resources[0].status.failed >= 1)
+      retries: "{{ run_timeout }}"
+      delay: 1
+
+    - name: job has failed
+      ansible.builtin.fail:
+        msg: "The job has failed"
+      when: job_info.resources[0].status.failed is defined and
+            job_info.resources[0].status.failed >= 1
+
+  always:
+    - name: "save healthcheck {{ run_type }} results for artifacts"
+      ansible.posix.synchronize:
+        src: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+        use_ssh_args: true
+        dest: "./results/{{ run_tiers }}"
+        mode: pull
+        rsync_opts:
+          - "--exclude=output.xml"
+      ignore_errors: true
+      when: not use_s3 | bool
+
+    - name: remove secret file
+      ansible.builtin.file:
+        path: "{{ k8s_job__dir_path }}/s3-keys-{{ run_type }}.yaml"
+        state: absent
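
Note: with delay: 1, run_timeout is effectively a budget in seconds, and the until condition exits as soon as the Job reports either outcome; the explicit fail task then turns a failed Job into a failed play, while the always section still collects results and removes the rendered secret. If a failure needs diagnosing, a task like the following could sit before the fail task (a hypothetical addition, not part of this change; kubectl selects the Job's pods via the job-name label):

- name: dump logs of the healthcheck pods
  ansible.builtin.command: >
    kubectl logs --namespace {{ onap_namespace }}
    --selector job-name=integration-onap-{{ run_type }} --tail=-1
  register: job_logs
  changed_when: false
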
diff --git a/roles/xtesting-healthcheck/tasks/main.yaml b/roles/xtesting-healthcheck/tasks/main.yaml
new file mode 100644
index 0000000..5fb373c
--- /dev/null
+++ b/roles/xtesting-healthcheck/tasks/main.yaml
@@ -0,0 +1,5 @@
+---
+# tasks file for ONAP healthcheck
+
+- import_tasks: prepare.yaml
+- import_tasks: launch.yaml
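
Note: main.yaml only fixes the ordering: prepare.yaml renders the manifests and cleans up any previous run, then launch.yaml applies and watches them. A playbook consuming the role could look like this (host pattern and variable values are illustrative, not taken from this change):

- hosts: orchestrator
  roles:
    - role: xtesting-healthcheck
      vars:
        run_type: core
        run_tiers: healthcheck
        use_s3: false
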
diff --git a/roles/xtesting-healthcheck/tasks/prepare.yaml b/roles/xtesting-healthcheck/tasks/prepare.yaml
new file mode 100644
index 0000000..712916e
--- /dev/null
+++ b/roles/xtesting-healthcheck/tasks/prepare.yaml
@@ -0,0 +1,52 @@
+---
+- name: create directories
+  ansible.builtin.file:
+    path: "{{ item }}"
+    state: directory
+    mode: 0755
+  loop:
+    - "{{ exec_local_path }}/"
+    - "{{ ansible_user_dir }}/oom/{{ onap_version }}/{{ run_tiers }}/"
+
+- name: create directories as root
+  become: yes
+  ansible.builtin.file:
+    path: "{{ item }}"
+    state: directory
+    mode: 0755
+  loop:
+    - "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+
+- name: Delete healthcheck job
+  community.kubernetes.k8s:
+    state: absent
+    api: batch/v1
+    kind: Job
+    namespace: "{{ onap_namespace }}"
+    name: "integration-onap-{{ run_type }}"
+    kubeconfig: "{{ global_local_path }}/{{ kube_conf_file }}"
+
+- name: Delete old logs
+  become: yes
+  ansible.builtin.file:
+    state: absent
+    path: "{{ res_local_path }}/{{ run_tiers }}/{{ run_type }}"
+
+- name: Override docker version for CPS (python3 migration)
+  ansible.builtin.set_fact:
+    testing_container:
+      "nexus3.onap.org:10003/onap/xtesting-smoke-usecases-robot-py3"
+  when: (run_type == "cps-healthcheck") or
+        (run_type == "cps-temporal-healthcheck") or
+        (run_type == "cps-dmi-plugin-healthcheck")
+
+- name: save healthcheck deployment to file
+  ansible.builtin.copy:
+    content: "{{ healthcheck_deployment | to_nice_yaml }}"
+    dest: "{{ k8s_job__dir_path }}/healthcheck-{{ run_type }}.yaml"
+
+- name: save secret for S3 to file
+  ansible.builtin.copy:
+    content: "{{ healthcheck_secret | to_nice_yaml }}"
+    dest: "{{ k8s_job__dir_path }}/s3-keys-{{ run_type }}.yaml"
+  when: use_s3 | bool
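
Note: rendering healthcheck_deployment with to_nice_yaml and applying it later via src keeps the exact manifest on disk, where launch.yaml's artifact collection can pick it up for debugging. The k8s module could equally consume the dict inline; a sketch of that alternative (same variables as the role, no intermediate file):

- name: start healthcheck job
  community.kubernetes.k8s:
    state: present
    definition: "{{ healthcheck_deployment }}"
    kubeconfig: "{{ global_local_path }}/{{ kube_conf_file }}"

The file-based approach was presumably kept so the rendered Job spec survives as an artifact of the run.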