Diffstat (limited to 'csit')
-rw-r--r--  csit/README.md                                                                    43
-rwxr-xr-x  csit/resources/scripts/build-csit-docker-image.sh                                  3
-rwxr-xr-x  csit/resources/scripts/cluster_setup.sh                                          250
-rwxr-xr-x  csit/resources/scripts/config_setup.sh                                           113
-rwxr-xr-x  csit/resources/scripts/get-cluster-info.sh                                       113
-rwxr-xr-x  csit/resources/scripts/robot_setup.sh                                            162
-rwxr-xr-x  csit/resources/scripts/run-test.sh                                                 3
-rwxr-xr-x  csit/resources/scripts/wait_for_rest.sh                                            6
-rw-r--r--  csit/resources/tests/apex-pdp-common.robot                                        30
-rw-r--r--  csit/resources/tests/apex-pdp-test.robot                                         134
-rw-r--r--  csit/resources/tests/apex-slas-3.robot                                             6
-rw-r--r--  csit/resources/tests/apex-slas.robot                                              18
-rw-r--r--  csit/resources/tests/api-test.robot                                                2
-rw-r--r--  csit/resources/tests/data/AcDocker.json                                         1081
-rw-r--r--  csit/resources/tests/data/AcK8s.json                                            1081
-rw-r--r--  csit/resources/tests/data/PMSHMultipleACTosca.yaml                               932
-rw-r--r--  csit/resources/tests/data/ac-instance-migration-fail.yaml                         99
-rw-r--r--  csit/resources/tests/data/acelement-usecase.yaml                                 799
-rw-r--r--  csit/resources/tests/data/onap.policy.opa.pdp.decision.empty_filter_response.json 61
-rw-r--r--  csit/resources/tests/data/onap.policy.opa.pdp.decision.filter_response.json       23
-rw-r--r--  csit/resources/tests/data/onap.policy.opa.pdp.decision.request.json                1
-rw-r--r--  csit/resources/tests/data/onap.policy.opa.pdp.decision.request.output.json         5
-rw-r--r--  csit/resources/tests/data/onap.policy.opa.pdp.decision.request_filter.json         1
-rw-r--r--  csit/resources/tests/data/onap.policy.opa.pdp.decision.request_filter_empty.json   1
-rw-r--r--  csit/resources/tests/drools-applications-test.robot                              112
-rw-r--r--  csit/resources/tests/opa-pdp-test.robot                                           64
-rw-r--r--  csit/resources/tests/policy-clamp-test.robot                                      49
-rwxr-xr-x  csit/run-k8s-csit.sh                                                             499
-rwxr-xr-x  csit/run-project-csit.sh                                                         114
-rwxr-xr-x  csit/run-s3p-tests.sh                                                            165
-rwxr-xr-x  csit/start-s3p-tests.sh                                                          116
31 files changed, 3558 insertions, 2528 deletions
diff --git a/csit/README.md b/csit/README.md
new file mode 100644
index 00000000..f0c44823
--- /dev/null
+++ b/csit/README.md
@@ -0,0 +1,43 @@
+# Running Policy Framework CSIT
+
+## Using Docker Compose environment
+
+Policy Framework Continuous System and Integration Tests are executed daily by Jenkins jobs
+targeting the master branch. The runs are available at https://jenkins.onap.org/view/policy/
+
+The CSIT suites are also used by developers as an additional guarantee that new code or changes
+delivered to the main code base do not affect the expected behaviour of already delivered
+functionality; new tests are added whenever new functionality is introduced.
+
+To execute the tests in a local environment, follow these steps:
+
+> All the instructions assume the docker repository was cloned to /git/policy/docker
+
+- after cloning the project, go to ../docker/csit
+- to run a test, execute the run-project-csit.sh script
+
+`./run-project-csit.sh <policy-component>`
+
+The options for `<policy-component>` are:
+- api
+- pap
+- apex-pdp
+- clamp (for runtime-acm and participants)
+- drools-pdp
+- drools-applications
+- xacml-pdp
+- distribution
+- opa-pdp
+
+The command above will download the latest SNAPSHOT version available for the `<policy-component>`
+being tested. The version is collected from [PF Release Data](https://github.com/onap/policy-parent/blob/master/integration/src/main/resources/release/pf_release_data.csv)
+
+To start the containers with images generated in the local environment, run the script with the
+`--local` flag:
+
+`./run-project-csit.sh api --local`
+
+The command above will start the docker containers for `policy-api` and `policy-db-migrator` using
+the latest images built in the local environment. When the `--local` flag is used, the script looks
+for local images of all the policy components needed by the test suites. Support services such as
+PostgreSQL, Kafka, Prometheus and Grafana will always be downloaded if not already present.
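For illustration, a typical local run following the steps above might look like this (a sketch; the clone path and suite name are taken from the README's own examples):

```bash
# Sketch of a local CSIT run, assuming the repository is cloned to /git/policy/docker
cd /git/policy/docker/csit

# Run a suite against the latest SNAPSHOT images from the ONAP registry
./run-project-csit.sh apex-pdp

# Run the same suite against locally built policy images; support services
# (PostgreSQL, Kafka, Prometheus, Grafana) are still pulled if not present
./run-project-csit.sh apex-pdp --local
```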
diff --git a/csit/resources/scripts/build-csit-docker-image.sh b/csit/resources/scripts/build-csit-docker-image.sh
index c80afc4f..8b5aa7d6 100755
--- a/csit/resources/scripts/build-csit-docker-image.sh
+++ b/csit/resources/scripts/build-csit-docker-image.sh
@@ -1,6 +1,6 @@
#!/bin/bash -x
#
-# Copyright 2024 Nordix Foundation.
+# Copyright 2024-2025 Nordix Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -37,7 +37,6 @@ fi
GERRIT_BRANCH=$(awk -F= '$1 == "defaultbranch" { print $2 }' "${WORKSPACE}"/.gitreview)
export ROBOT_DOCKER_IMAGE="policy-csit-robot"
-echo "Build docker image for robot framework"
cd ${WORKSPACE}/csit/resources || exit
docker image rm -f ${ROBOT_DOCKER_IMAGE}
diff --git a/csit/resources/scripts/cluster_setup.sh b/csit/resources/scripts/cluster_setup.sh
new file mode 100755
index 00000000..6d72bac3
--- /dev/null
+++ b/csit/resources/scripts/cluster_setup.sh
@@ -0,0 +1,250 @@
+#!/bin/bash
+# ============LICENSE_START=======================================================
+# Copyright (C) 2025 Nordix Foundation. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+WORKSPACE=$(git rev-parse --show-toplevel)
+export WORKSPACE
+
+export GERRIT_BRANCH=$(awk -F= '$1 == "defaultbranch" { print $2 }' "${WORKSPACE}"/.gitreview)
+
+# Source the shared config script
+source "$(dirname "$0")/config_setup.sh"
+
+KAFKA_DIR=${WORKSPACE}/helm/cp-kafka
+SET_VALUES=""
+
+ZK_CONTAINER="zookeeper-deployment"
+KAFKA_CONTAINER="kafka-deployment"
+
+function spin_microk8s_cluster() {
+ echo "Verify if Microk8s cluster is running.."
+ microk8s version
+ exitcode="${?}"
+
+ if [ "$exitcode" -ne 0 ]; then
+ echo "Microk8s cluster not available, Spinning up the cluster.."
+ sudo snap install microk8s --classic --channel=1.30/stable
+
+ if [ "${?}" -ne 0 ]; then
+ echo "Failed to install kubernetes cluster. Aborting.."
+ return 1
+ fi
+ echo "Microk8s cluster installed successfully"
+ sudo usermod -a -G microk8s $USER
+ echo "Enabling DNS and Storage plugins"
+ sudo microk8s.enable dns hostpath-storage
+ echo "Creating configuration file for Microk8s"
+ sudo mkdir -p $HOME/.kube
+ sudo chown -R $USER:$USER $HOME/.kube
+ sudo microk8s kubectl config view --raw >$HOME/.kube/config
+ sudo chmod 600 $HOME/.kube/config
+ echo "K8s installation completed"
+ echo "----------------------------------------"
+ else
+ echo "K8s cluster is already running"
+ echo "----------------------------------------"
+ fi
+
+ echo "Verify if kubectl is running.."
+ kubectl version
+ exitcode="${?}"
+
+ if [ "$exitcode" -ne 0 ]; then
+ echo "Kubectl not available, Installing.."
+ sudo snap install kubectl --classic --channel=1.30/stable
+
+ if [ "${?}" -ne 0 ]; then
+ echo "Failed to install Kubectl. Aborting.."
+ return 1
+ fi
+ echo "Kubectl installation completed"
+ echo "----------------------------------------"
+    else
+        echo "Kubectl is already running"
+        echo "----------------------------------------"
+    fi
+
+ echo "Verify if helm is running.."
+ helm version
+ exitcode="${?}"
+
+ if [ "$exitcode" -ne 0 ]; then
+ echo "Helm not available, Installing.."
+ sudo snap install helm --classic --channel=3.7
+
+ if [ "${?}" -ne 0 ]; then
+ echo "Failed to install Helm client. Aborting.."
+ return 1
+ fi
+ echo "Helm installation completed"
+ echo "----------------------------------------"
+ else
+ echo "Helm is already running"
+ echo "----------------------------------------"
+ return 0
+ fi
+}
+
+function install_kafka() {
+ echo "Installing Confluent kafka"
+ kubectl apply -f $KAFKA_DIR/zookeeper.yaml
+ kubectl apply -f $KAFKA_DIR/kafka.yaml
+ echo "----------------------------------------"
+}
+
+function uninstall_policy() {
+ echo "Removing the policy helm deployment"
+ helm uninstall csit-policy
+ helm uninstall prometheus
+ helm uninstall csit-robot
+ kubectl delete deploy $ZK_CONTAINER $KAFKA_CONTAINER
+ rm -rf ${WORKSPACE}/helm/policy/Chart.lock
+ if [ "$PROJECT" == "clamp" ] || [ "$PROJECT" == "policy-clamp" ]; then
+ helm uninstall policy-chartmuseum
+ helm repo remove chartmuseum-git policy-chartmuseum
+ fi
+ sudo rm -rf /dockerdata-nfs/mariadb-galera/
+ kubectl delete pvc --all
+ echo "Policy deployment deleted"
+ echo "Clean up docker"
+ docker image prune -f
+}
+
+function teardown_cluster() {
+ echo "Removing k8s cluster and k8s configuration file"
+ sudo snap remove microk8s;rm -rf $HOME/.kube/config
+ sudo snap remove helm;
+ sudo snap remove kubectl;
+ echo "MicroK8s Cluster removed"
+}
+
+function install_chartmuseum () {
+ echo "---------------------------------------------"
+ echo "Installing Chartmuseum helm repository..."
+ helm repo add chartmuseum-git https://chartmuseum.github.io/charts
+ helm repo update
+ helm install policy-chartmuseum chartmuseum-git/chartmuseum --set env.open.DISABLE_API=false --set service.type=NodePort --set service.nodePort=30208
+ helm plugin install https://github.com/chartmuseum/helm-push
+ echo "---------------------------------------------"
+}
+
+function get_pod_name() {
+ pods=$(kubectl get pods --no-headers -o custom-columns=':metadata.name' | grep $1)
+ read -rd '' -a pod_array <<< "$pods"
+ echo "${pod_array[@]}"
+}
+
+function wait_for_pods_running() {
+ local namespace="$1"
+ shift
+ local timeout_seconds="$1"
+ shift
+
+ IFS=',' read -ra pod_names <<< "$@"
+ shift
+
+ local pending_pods=("${pod_names[@]}")
+ local start_time
+ start_time=$(date +%s)
+
+ while [ ${#pending_pods[@]} -gt 0 ]; do
+ local current_time
+ current_time=$(date +%s)
+ local elapsed_time
+ elapsed_time=$((current_time - start_time))
+
+ if [ "$elapsed_time" -ge "$timeout_seconds" ]; then
+ echo "Timed out waiting for the pods to reach 'Running' state."
+ echo "Printing the current status of the deployment before exiting.."
+ kubectl get po;
+ kubectl describe pods;
+ echo "------------------------------------------------------------"
+ for pod in "${pending_pods[@]}"; do
+ echo "Logs of the pod $pod"
+ kubectl logs $pod
+ echo "---------------------------------------------------------"
+ done
+ exit 1
+ fi
+
+ local newly_running_pods=()
+
+ for pod_name_prefix in "${pending_pods[@]}"; do
+ local pod_names=$(get_pod_name "$pod_name_prefix")
+ IFS=' ' read -r -a pod_array <<< "$pod_names"
+ if [ "${#pod_array[@]}" -eq 0 ]; then
+                echo "*** Error: No pods found for the deployment $pod_name_prefix. Exiting ***"
+                return 1
+ fi
+ for pod in "${pod_array[@]}"; do
+ local pod_status
+ local pod_ready
+ pod_status=$(kubectl get pod "$pod" -n "$namespace" --no-headers -o custom-columns=STATUS:.status.phase 2>/dev/null)
+ pod_ready=$(kubectl get pod "$pod" -o jsonpath='{.status.containerStatuses[*].ready}')
+
+ if [ "$pod_status" == "Running" ] && { [ "$pod_ready" == "true" ] || [ "$pod_ready" == "true true" ]; }; then
+ echo "Pod '$pod' in namespace '$namespace' is now in 'Running' state and 'Readiness' is true"
+ else
+ newly_running_pods+=("$pod")
+ echo "Waiting for pod '$pod' in namespace '$namespace' to reach 'Running' and 'Ready' state..."
+ fi
+ done
+ done
+
+ pending_pods=("${newly_running_pods[@]}")
+
+ sleep 5
+ done
+
+ echo "All specified pods are in the 'Running and Ready' state. Exiting the function."
+}
+
+OPERATION="$1"
+PROJECT="$2"
+LOCALIMAGE="${3:-false}"
+
+if [ $OPERATION == "install" ]; then
+ spin_microk8s_cluster
+ if [ "${?}" -eq 0 ]; then
+ export KAFKA_CONTAINERS=($KAFKA_CONTAINER,$ZK_CONTAINER)
+ install_kafka
+ wait_for_pods_running default 300 $KAFKA_CONTAINERS
+ set_project_config "$PROJECT"
+ echo "Installing policy helm charts in the default namespace"
+ source ${WORKSPACE}/compose/get-k8s-versions.sh
+ if [ $LOCALIMAGE == "true" ]; then
+ echo "loading local image"
+ source ${WORKSPACE}/compose/get-versions.sh
+ ${WORKSPACE}/compose/loaddockerimage.sh
+ fi
+ cd ${WORKSPACE}/helm || exit
+ helm dependency build policy
+ helm install csit-policy policy ${SET_VALUES}
+ helm install prometheus prometheus
+ wait_for_pods_running default 900 ${READINESS_CONTAINERS[@]}
+ echo "Policy chart installation completed"
+ echo "-------------------------------------------"
+ fi
+elif [ $OPERATION == "uninstall" ]; then
+ uninstall_policy
+elif [ $OPERATION == "clean" ]; then
+ teardown_cluster
+else
+ echo "Invalid arguments provided. Usage: $0 [options..] {install {project_name} | uninstall | clean} {uselocalimage = true/false}"
+fi
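For reference, the usage string above implies invocations along the following lines (a sketch; paths assume the script is run from inside the cloned repository so that `git rev-parse --show-toplevel` resolves the workspace):

```bash
# Spin up microk8s, install Kafka and the policy charts for the clamp suite,
# loading locally built images into the cluster
./csit/resources/scripts/cluster_setup.sh install clamp true

# Remove the policy/prometheus/robot helm releases and the Kafka deployment
./csit/resources/scripts/cluster_setup.sh uninstall

# Tear down microk8s, kubectl and helm completely
./csit/resources/scripts/cluster_setup.sh clean
```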
diff --git a/csit/resources/scripts/config_setup.sh b/csit/resources/scripts/config_setup.sh
new file mode 100755
index 00000000..1cdd260b
--- /dev/null
+++ b/csit/resources/scripts/config_setup.sh
@@ -0,0 +1,113 @@
+#!/bin/bash
+# ============LICENSE_START=======================================================
+# Copyright (C) 2025 Nordix Foundation. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+export POLICY_CLAMP_ROBOT="policy-clamp-test.robot clamp-slas.robot"
+export POLICY_API_ROBOT="api-test.robot api-slas.robot"
+export POLICY_PAP_ROBOT="pap-test.robot pap-slas.robot"
+export POLICY_APEX_PDP_ROBOT="apex-pdp-test.robot apex-slas.robot"
+export POLICY_XACML_PDP_ROBOT="xacml-pdp-test.robot xacml-pdp-slas.robot"
+export POLICY_OPA_PDP_ROBOT="opa-pdp-test.robot"
+export POLICY_DROOLS_PDP_ROBOT="drools-pdp-test.robot"
+export POLICY_DISTRIBUTION_ROBOT="distribution-test.robot"
+
+export POLICY_API_CONTAINER="policy-api"
+export POLICY_PAP_CONTAINER="policy-pap"
+export POLICY_CLAMP_CONTAINER="policy-clamp-runtime-acm"
+export POLICY_APEX_CONTAINER="policy-apex-pdp"
+export POLICY_DROOLS_CONTAINER="policy-drools-pdp"
+export POLICY_XACML_CONTAINER="policy-xacml-pdp"
+export POLICY_OPA_CONTAINER="policy-opa-pdp"
+export POLICY_DISTRIBUTION_CONTAINER="policy-distribution"
+export POLICY_K8S_PPNT_CONTAINER="policy-clamp-ac-k8s-ppnt"
+export POLICY_HTTP_PPNT_CONTAINER="policy-clamp-ac-http-ppnt"
+export POLICY_SIM_PPNT_CONTAINER="policy-clamp-ac-sim-ppnt"
+export POLICY_PF_PPNT_CONTAINER="policy-clamp-ac-pf-ppnt"
+export JAEGER_CONTAINER="jaeger"
+
+function install_chartmuseum () {
+ echo "---------------------------------------------"
+ echo "Installing Chartmuseum helm repository..."
+ helm repo add chartmuseum-git https://chartmuseum.github.io/charts
+ helm repo update
+ helm install policy-chartmuseum chartmuseum-git/chartmuseum --set env.open.DISABLE_API=false --set service.type=NodePort --set service.nodePort=30208
+ helm plugin install https://github.com/chartmuseum/helm-push
+ echo "---------------------------------------------"
+}
+
+function set_project_config() {
+ echo "Setting project configuration for: $PROJECT"
+ case $PROJECT in
+ clamp | policy-clamp)
+ export ROBOT_FILE=$POLICY_CLAMP_ROBOT
+ export READINESS_CONTAINERS=($POLICY_CLAMP_CONTAINER,$POLICY_APEX_CONTAINER,$POLICY_PF_PPNT_CONTAINER,$POLICY_K8S_PPNT_CONTAINER,
+ $POLICY_HTTP_PPNT_CONTAINER,$POLICY_SIM_PPNT_CONTAINER,$JAEGER_CONTAINER)
+ export SET_VALUES="--set $POLICY_CLAMP_CONTAINER.enabled=true --set $POLICY_APEX_CONTAINER.enabled=true
+ --set $POLICY_PF_PPNT_CONTAINER.enabled=true --set $POLICY_K8S_PPNT_CONTAINER.enabled=true
+ --set $POLICY_HTTP_PPNT_CONTAINER.enabled=true --set $POLICY_SIM_PPNT_CONTAINER.enabled=true
+ --set $JAEGER_CONTAINER.enabled=true"
+ install_chartmuseum
+ ;;
+ api | policy-api)
+ export ROBOT_FILE=$POLICY_API_ROBOT
+ export READINESS_CONTAINERS=($POLICY_API_CONTAINER)
+ ;;
+ pap | policy-pap)
+ export ROBOT_FILE=$POLICY_PAP_ROBOT
+ export READINESS_CONTAINERS=($POLICY_APEX_CONTAINER,$POLICY_PAP_CONTAINER,$POLICY_API_CONTAINER,$POLICY_XACML_CONTAINER)
+ export SET_VALUES="--set $POLICY_APEX_CONTAINER.enabled=true --set $POLICY_XACML_CONTAINER.enabled=true"
+ ;;
+ apex-pdp | policy-apex-pdp)
+ export ROBOT_FILE=$POLICY_APEX_PDP_ROBOT
+ export READINESS_CONTAINERS=($POLICY_APEX_CONTAINER,$POLICY_API_CONTAINER,$POLICY_PAP_CONTAINER)
+ export SET_VALUES="--set $POLICY_APEX_CONTAINER.enabled=true"
+ ;;
+ xacml-pdp | policy-xacml-pdp)
+ export ROBOT_FILE=($POLICY_XACML_PDP_ROBOT)
+ export READINESS_CONTAINERS=($POLICY_API_CONTAINER,$POLICY_PAP_CONTAINER,$POLICY_XACML_CONTAINER)
+ export SET_VALUES="--set $POLICY_XACML_CONTAINER.enabled=true"
+ ;;
+ opa-pdp | policy-opa-pdp)
+ export ROBOT_FILE=($POLICY_OPA_PDP_ROBOT)
+ export READINESS_CONTAINERS=($POLICY_API_CONTAINER,$POLICY_PAP_CONTAINER,$POLICY_OPA_CONTAINER)
+ export SET_VALUES="--set $POLICY_OPA_CONTAINER.enabled=true"
+ ;;
+ drools-pdp | policy-drools-pdp)
+ export ROBOT_FILE=($POLICY_DROOLS_PDP_ROBOT)
+ export READINESS_CONTAINERS=($POLICY_DROOLS_CONTAINER)
+ export SET_VALUES="--set $POLICY_DROOLS_CONTAINER.enabled=true"
+ ;;
+ distribution | policy-distribution)
+ export ROBOT_FILE=($POLICY_DISTRIBUTION_ROBOT)
+ export READINESS_CONTAINERS=($POLICY_APEX_CONTAINER,$POLICY_API_CONTAINER,$POLICY_PAP_CONTAINER,$POLICY_DISTRIBUTION_CONTAINER)
+ export SET_VALUES="--set $POLICY_APEX_CONTAINER.enabled=true --set $POLICY_DISTRIBUTION_CONTAINER.enabled=true"
+ ;;
+ *)
+ echo "Unknown project supplied. Enabling all policy charts for the deployment"
+ export READINESS_CONTAINERS=($POLICY_APEX_CONTAINER,$POLICY_API_CONTAINER,$POLICY_PAP_CONTAINER,
+ $POLICY_DISTRIBUTION_CONTAINER,$POLICY_DROOLS_CONTAINER,$POLICY_XACML_CONTAINER,$POLICY_OPA_CONTAINER,
+ $POLICY_CLAMP_CONTAINER,$POLICY_PF_PPNT_CONTAINER,$POLICY_K8S_PPNT_CONTAINER,
+ $POLICY_HTTP_PPNT_CONTAINER,$POLICY_SIM_PPNT_CONTAINER)
+ export SET_VALUES="--set $POLICY_APEX_CONTAINER.enabled=true --set $POLICY_XACML_CONTAINER.enabled=true
+ --set $POLICY_OPA_CONTAINER.enabled=true --set $POLICY_DISTRIBUTION_CONTAINER.enabled=true --set $POLICY_DROOLS_CONTAINER.enabled=true
+ --set $POLICY_CLAMP_CONTAINER.enabled=true --set $POLICY_PF_PPNT_CONTAINER.enabled=true
+ --set $POLICY_K8S_PPNT_CONTAINER.enabled=true --set $POLICY_HTTP_PPNT_CONTAINER.enabled=true
+ --set $POLICY_SIM_PPNT_CONTAINER.enabled=true"
+ ;;
+ esac
+}
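A minimal sketch of how this shared configuration is consumed by a caller (this mirrors what cluster_setup.sh and robot_setup.sh do; the echo lines are purely illustrative):

```bash
#!/bin/bash
# Source the shared config and resolve the per-project settings
source "$(dirname "$0")/config_setup.sh"

PROJECT="apex-pdp"
set_project_config "$PROJECT"

# ROBOT_FILE, READINESS_CONTAINERS and SET_VALUES are now populated
echo "Robot suites:         $ROBOT_FILE"
echo "Readiness containers: ${READINESS_CONTAINERS[*]}"

# cluster_setup.sh then passes SET_VALUES straight to helm, e.g.:
# helm install csit-policy policy ${SET_VALUES}
```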
diff --git a/csit/resources/scripts/get-cluster-info.sh b/csit/resources/scripts/get-cluster-info.sh
index 539bf6a3..1252f3e4 100755
--- a/csit/resources/scripts/get-cluster-info.sh
+++ b/csit/resources/scripts/get-cluster-info.sh
@@ -1,7 +1,10 @@
#!/bin/bash
# ============LICENSE_START=======================================================
-# Copyright (C) 2023-2024 Nordix Foundation. All rights reserved.
+# Copyright (C) 2023-2025 Nordix Foundation. All rights reserved.
+# Modifications Copyright © 2024 Deutsche Telekom
# ================================================================================
+#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -32,15 +35,21 @@ export PF_PARTICIPANT_PORT=30008
export HTTP_PARTICIPANT_PORT=30009
export K8S_PARTICIPANT_PORT=30010
export SIM_PARTICIPANT_PORT=30011
+export OPA_PORT=30012
export SIMULATOR_PORT=30904
# Retrieve pod names
+function get_pod_name() {
+ microk8s kubectl get pods --no-headers -o custom-columns=':metadata.name' | grep $1
+}
+
function get_pod_names() {
export APEX_POD=$(get_pod_name apex)
export PAP_POD=$(get_pod_name pap)
export API_POD=$(get_pod_name api)
export DMAAP_POD=$(get_pod_name message-router)
export XACML_POD=$(get_pod_name xacml)
+ export OPA_POD=$(get_pod_name opa-pdp)
export DROOLS_POD=$(get_pod_name drools-pdp)
export DIST_POD=$(get_pod_name distribution)
export ACM_POD=$(get_pod_name acm-runtime)
@@ -51,6 +60,10 @@ function get_pod_names() {
}
# Retrieve service names
+function get_svc_name() {
+ microk8s kubectl get svc --no-headers -o custom-columns=':metadata.name' | grep $1
+}
+
function get_svc_names() {
export APEX_SVC=$(get_svc_name policy-apex-pdp)
export PAP_SVC=$(get_svc_name policy-pap)
@@ -58,6 +71,7 @@ function get_svc_names() {
export DMAAP_SVC=$(get_svc_name message-router)
export DROOLS_SVC=$(get_svc_name drools-pdp)
export XACML_SVC=$(get_svc_name policy-xacml-pdp)
+ export OPA_SVC=$(get_svc_name policy-opa-pdp)
export DIST_SVC=$(get_svc_name policy-distribution)
export ACM_SVC=$(get_svc_name policy-clamp-runtime-acm)
export POLICY_PPNT_SVC=$(get_svc_name policy-clamp-ac-pf-ppnt)
@@ -66,7 +80,40 @@ function get_svc_names() {
export POLICY_K8S_SVC=$(get_svc_name policy-clamp-ac-k8s-ppnt)
}
+# Assign set port values
+function patch_port() {
+ microk8s kubectl patch service "$1-svc" --namespace=default --type='json' --patch='[{"op": "replace", "path": "/spec/ports/0/nodePort", "value":'"$2"'}]'
+}
+
+function patch_ports() {
+ patch_port "$APEX_SVC" $APEX_PORT
+ patch_port "$API_SVC" $API_PORT
+ patch_port "$PAP_SVC" $PAP_PORT
+ patch_port "$ACM_SVC" $ACM_PORT
+ patch_port "$POLICY_PPNT_SVC" $PF_PARTICIPANT_PORT
+ patch_port "$POLICY_HTTP_SVC" $HTTP_PARTICIPANT_PORT
+ patch_port "$POLICY_SIM_SVC" $SIM_PARTICIPANT_PORT
+ patch_port "$POLICY_K8S_SVC" $K8S_PARTICIPANT_PORT
+ patch_port "$DIST_SVC" $DIST_PORT
+ patch_port "$DROOLS_SVC" $DROOLS_PORT
+ patch_port "$XACML_SVC" $XACML_PORT
+ patch_port "$OPA_SVC" $OPA_PORT
+}
+
+function setup_message_router_svc() {
+ microk8s kubectl expose service message-router --name message-router-svc --type NodePort --protocol TCP --port 3904 --target-port 3904
+ microk8s kubectl patch service message-router-svc --namespace=default --type='json' --patch='[{"op": "replace", "path": "/spec/ports/0/nodePort", "value":'"$SIMULATOR_PORT"'}]'
+}
+
# Expose services in order to perform tests from JMeter
+function expose_service() {
+ microk8s kubectl expose service $1 --name $1"-svc" --type NodePort --protocol TCP --port 6969 --target-port 6969
+}
+
+function expose_service_opa_pdp() {
+ microk8s kubectl expose service $1 --name $1"-svc" --type NodePort --protocol TCP --port 8282 --target-port 8282
+}
+
function expose_services() {
expose_service $APEX_SVC
expose_service $PAP_SVC
@@ -76,52 +123,48 @@ function expose_services() {
expose_service $DIST_SVC
expose_service $ACM_SVC
expose_service $POLICY_PPNT_SVC
- expose_service POLICY_HTTP_SVC
- expose_service POLICY_SIM_SVC
- expose_service POLICY_K8S_SVC
+ expose_service $POLICY_HTTP_SVC
+ expose_service $POLICY_SIM_SVC
+ expose_service $POLICY_K8S_SVC
+ expose_service_opa_pdp $OPA_SVC
setup_message_router_svc
sleep 2
patch_ports
}
-function get_pod_name() {
- microk8s kubectl get pods --no-headers -o custom-columns=':metadata.name' | grep $1
-}
+# Port forward Kafka to handle traffic to/from JMeter
+function setup_kafka_connection() {
+ # Get the Kafka pod name
+ KAFKA_POD=$(kubectl get pods -l app=kafka -o jsonpath="{.items[0].metadata.name}")
-function get_svc_name() {
- microk8s kubectl get svc --no-headers -o custom-columns=':metadata.name' | grep $1
-}
+ # Set up port forwarding
+ kubectl port-forward pod/$KAFKA_POD 29092:29092 &
+ PF_PID=$!
-function expose_service() {
- microk8s kubectl expose service $1 --name $1"-svc" --type NodePort --protocol TCP --port 6969 --target-port 6969
-}
+ # Wait for port forwarding to be established
+ sleep 5
-function patch_port() {
- microk8s kubectl patch service "$1-svc" --namespace=default --type='json' --patch='[{"op": "replace", "path": "/spec/ports/0/nodePort", "value":'"$2"'}]'
-}
+ KAFKA_POD_IP=$(kubectl get pod $KAFKA_POD -o jsonpath='{.status.podIP}')
-# Assign set port values
-function patch_ports() {
- patch_port "$APEX_SVC" $APEX_PORT
- patch_port "$API_SVC" $API_PORT
- patch_port "$PAP_SVC" $PAP_PORT
- patch_port "$ACM_SVC" $ACM_PORT
- patch_port "$POLICY_PPNT_SVC" $PF_PARTICIPANT_PORT
- patch_port "$POLICY_HTTP_SVC" $HTTP_PARTICIPANT_PORT
- patch_port "$POLICY_SIM_SVC" $SIM_PARTICIPANT_PORT
- patch_port "$POLICY_K8S_SVC" $K8S_PARTICIPANT_PORT
- patch_port "$DIST_SVC" $DIST_PORT
- patch_port "$DROOLS_SVC" $DROOLS_PORT
- patch_port "$XACML_SVC" $XACML_PORT
+ # Update hosts file
+ echo "127.0.0.1 $KAFKA_POD" | sudo tee -a /etc/hosts
+
+ export KAFKA_HOST="127.0.0.1"
+ export KAFKA_PORT="29092"
}
-function setup_message_router_svc() {
- microk8s kubectl expose service message-router --name message-router-svc --type NodePort --protocol TCP --port 3904 --target-port 3904
- microk8s kubectl patch service message-router-svc --namespace=default --type='json' --patch='[{"op": "replace", "path": "/spec/ports/0/nodePort", "value":'"$SIMULATOR_PORT"'}]'
+function teardown_kafka_connection() {
+ kill $PF_PID
+ sudo sed -i "/$KAFKA_POD/d" /etc/hosts
}
####MAIN###
-get_pod_names
-get_svc_names
-expose_services
+if [ "$1" = "teardown" ]; then
+ teardown_kafka_connection
+else
+ get_pod_names
+ get_svc_names
+ expose_services
+ setup_kafka_connection
+fi
\ No newline at end of file
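Following the main block above, the script now has two entry points (a sketch of each):

```bash
# Expose the policy services as NodePorts and port-forward Kafka for JMeter
./csit/resources/scripts/get-cluster-info.sh

# Afterwards, the teardown branch stops the Kafka port-forward and
# removes the /etc/hosts entry added during setup
./csit/resources/scripts/get-cluster-info.sh teardown
```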
diff --git a/csit/resources/scripts/robot_setup.sh b/csit/resources/scripts/robot_setup.sh
new file mode 100755
index 00000000..f5ef2f3d
--- /dev/null
+++ b/csit/resources/scripts/robot_setup.sh
@@ -0,0 +1,162 @@
+#!/bin/bash
+# ============LICENSE_START=======================================================
+# Copyright (C) 2025 Nordix Foundation. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+WORKSPACE=$(git rev-parse --show-toplevel)
+export WORKSPACE
+
+export ROBOT_FILE=""
+
+PROJECT="$1"
+CSIT_SCRIPT="scripts/run-test.sh"
+ROBOT_DOCKER_IMAGE="policy-csit-robot"
+ROBOT_LOG_DIR=${WORKSPACE}/csit/archives
+
+# Source the shared config script
+source "$(dirname "$0")/config_setup.sh"
+
+DISTRIBUTION_CSAR=${WORKSPACE}/csit/resources/tests/data/csar
+DIST_TEMP_FOLDER=/tmp/distribution
+
+function clone_models() {
+ local retry_count=3
+ local success=false
+ cd tests
+ for ((i = 1; i <= retry_count; i++)); do
+ git clone "https://gerrit.onap.org/r/policy/models" && success=true && break
+ echo "Retrying git clone ($i/$retry_count)..."
+ sleep 5
+ done
+
+ cd ../
+ if [ "$success" = false ]; then
+ echo "Error: failed to clone policy-models repository after $retry_count attempts"
+ exit 1
+ fi
+
+ sed -e 's!Measurement_vGMUX!ADifferentValue!' \
+ tests/models/models-examples/src/main/resources/policies/vCPE.policy.monitoring.input.tosca.json \
+ >tests/models/models-examples/src/main/resources/policies/vCPE.policy.monitoring.input.tosca.v1_2.json
+
+ sed -e 's!"version": "1.0.0"!"version": "2.0.0"!' \
+ -e 's!"policy-version": 1!"policy-version": 2!' \
+ tests/models/models-examples/src/main/resources/policies/vCPE.policy.monitoring.input.tosca.json \
+ >tests/models/models-examples/src/main/resources/policies/vCPE.policy.monitoring.input.tosca.v2.json
+
+}
+
+function copy_csar_file() {
+ zip -F ${DISTRIBUTION_CSAR}/sample_csar_with_apex_policy.csar \
+ --out ${DISTRIBUTION_CSAR}/csar_temp.csar -q
+ sudo rm -rf "${DIST_TEMP_FOLDER}"
+ sudo mkdir "${DIST_TEMP_FOLDER}"
+ sudo cp ${DISTRIBUTION_CSAR}/csar_temp.csar ${DISTRIBUTION_CSAR}/temp.csar
+ sudo mv ${DISTRIBUTION_CSAR}/temp.csar ${DIST_TEMP_FOLDER}/sample_csar_with_apex_policy.csar
+}
+
+function build_robot_image() {
+ echo "Build docker image for robot framework"
+ cd ${WORKSPACE}/csit/resources || exit
+ clone_models
+ if [ "${PROJECT}" == "distribution" ] || [ "${PROJECT}" == "policy-distribution" ]; then
+ copy_csar_file
+ fi
+ echo "Build robot framework docker image"
+ sudo apt install gnupg2 pass -y
+ export DOCKERPW=docker
+ echo "$DOCKERPW" | docker login -u docker --password-stdin nexus3.onap.org:10001
+ docker build . --file Dockerfile \
+ --build-arg CSIT_SCRIPT="$CSIT_SCRIPT" \
+ --build-arg ROBOT_FILE="$ROBOT_FILE" \
+ --tag "${ROBOT_DOCKER_IMAGE}" --no-cache
+ echo "---------------------------------------------"
+}
+
+function push_acelement_chart() {
+ echo "Pushing acelement chart to the chartmuseum repo..."
+ helm repo add policy-chartmuseum http://localhost:30208
+ cd tests || exit
+ local retry_count=3
+ local success=false
+ for ((i = 1; i <= retry_count; i++)); do
+ git clone "https://gerrit.onap.org/r/policy/clamp" && success=true && break
+ echo "Retrying git clone ($i/$retry_count)..."
+ sleep 5
+ done
+
+ ACELEMENT_CHART=${WORKSPACE}/csit/resources/tests/clamp/examples/src/main/resources/clamp/acm/acelement-helm/acelement
+ helm cm-push $ACELEMENT_CHART policy-chartmuseum
+ helm repo update
+ rm -rf ${WORKSPACE}/csit/resources/tests/clamp/
+ echo "-------------------------------------------"
+}
+
+function print_robot_log() {
+ count_pods=0
+ while [[ ${count_pods} -eq 0 ]]; do
+ echo "Waiting for pods to come up..."
+ sleep 5
+ count_pods=$(kubectl get pods --output name | wc -l)
+ done
+ robotpod=$(kubectl get po | grep policy-csit)
+ podName=$(echo "$robotpod" | awk '{print $1}')
+ echo "The robot tests will begin once the policy components {${READINESS_CONTAINERS[*]}} are up and running..."
+ kubectl wait --for=jsonpath='{.status.phase}'=Running --timeout=18m pod/"$podName"
+ echo "Policy deployment status:"
+ kubectl get po
+ kubectl get all -A
+ echo "Robot Test logs:"
+ kubectl logs -f "$podName"
+}
+
+function start_csit() {
+ build_robot_image
+ if [ "${?}" -eq 0 ]; then
+ echo "Importing robot image into microk8s registry"
+ docker save -o policy-csit-robot.tar ${ROBOT_DOCKER_IMAGE}:latest
+ sudo microk8s ctr image import policy-csit-robot.tar
+ rm -rf ${WORKSPACE}/csit/resources/policy-csit-robot.tar
+ rm -rf ${WORKSPACE}/csit/resources/tests/models/
+ echo "---------------------------------------------"
+ if [ "$PROJECT" == "clamp" ] || [ "$PROJECT" == "policy-clamp" ]; then
+ POD_READY_STATUS="0/1"
+ while [[ ${POD_READY_STATUS} != "1/1" ]]; do
+ echo "Waiting for chartmuseum pod to come up..."
+ sleep 5
+ POD_READY_STATUS=$(kubectl get pods | grep -e "policy-chartmuseum" | awk '{print $2}')
+ done
+ push_acelement_chart
+ fi
+ echo "Installing Robot framework pod for running CSIT"
+ cd ${WORKSPACE}/helm || exit
+ mkdir -p ${ROBOT_LOG_DIR}
+ helm install csit-robot robot --set robot="$ROBOT_FILE" --set "readiness={$(echo ${READINESS_CONTAINERS} | sed 's/[{}]//g' | sed 's/,$//')}" --set robotLogDir=$ROBOT_LOG_DIR
+ print_robot_log
+ fi
+}
+
+if [ "$PROJECT" ]; then
+ set_project_config "$PROJECT"
+ export ROBOT_LOG_DIR=${WORKSPACE}/csit/archives/${PROJECT}
+ echo "CSIT will be invoked from $ROBOT_FILE"
+ echo "Readiness containers: ${READINESS_CONTAINERS[*]}"
+ echo "-------------------------------------------"
+ start_csit
+else
+ echo "No project supplied for running CSIT"
+fi
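For reference, the script takes the project name as its only argument; a sketch of an invocation:

```bash
# Build the robot image, import it into microk8s, and start the CSIT robot pod
# for the apex-pdp suite; logs land under csit/archives/apex-pdp
./csit/resources/scripts/robot_setup.sh apex-pdp
```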
diff --git a/csit/resources/scripts/run-test.sh b/csit/resources/scripts/run-test.sh
index 1e756f6d..bc4dcc62 100755
--- a/csit/resources/scripts/run-test.sh
+++ b/csit/resources/scripts/run-test.sh
@@ -2,6 +2,7 @@
#
# ============LICENSE_START====================================================
# Copyright (C) 2023-2024 Nordix Foundation.
+# Modifications Copyright 2024 Deutsche Telekom
# =============================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -32,6 +33,7 @@ APEX_IP=policy-apex-pdp:${DEFAULT_PORT}
APEX_EVENTS_IP=policy-apex-pdp:23324
POLICY_PDPX_IP=policy-xacml-pdp:${DEFAULT_PORT}
+POLICY_OPA_IP=policy-opa-pdp:8282
POLICY_DROOLS_IP=policy-drools-pdp:9696
DROOLS_IP_1=policy-drools-apps:${DEFAULT_PORT}
@@ -59,6 +61,7 @@ ROBOT_VARIABLES="-v DATA:${DATA}
-v KAFKA_IP:${KAFKA_IP}
-v PROMETHEUS_IP:${PROMETHEUS_IP}
-v POLICY_PDPX_IP:${POLICY_PDPX_IP}
+-v POLICY_OPA_IP:${POLICY_OPA_IP}
-v POLICY_DROOLS_IP:${POLICY_DROOLS_IP}
-v DROOLS_IP:${DROOLS_IP_1}
-v DROOLS_IP_2:${DROOLS_IP_2}
diff --git a/csit/resources/scripts/wait_for_rest.sh b/csit/resources/scripts/wait_for_rest.sh
index 9732bc54..b51a7fe0 100755
--- a/csit/resources/scripts/wait_for_rest.sh
+++ b/csit/resources/scripts/wait_for_rest.sh
@@ -1,6 +1,6 @@
#!/bin/sh
# ============LICENSE_START====================================================
-# Copyright (C) 2023-2024 Nordix Foundation.
+# Copyright (C) 2023-2025 Nordix Foundation.
# =============================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -49,7 +49,7 @@ do
export port="$2"
shift
shift
- echo "Waiting for REST to come up on $host port $port..."
+ echo "Checking if REST port $port is open on $host ..."
while [ "$tmout" -gt 0 ]
do
if command -v docker > /dev/null 2>&1
@@ -67,7 +67,7 @@ do
fi
done
if [ $rc -ne 0 ]; then
- echo "$host port $port REST cannot be detected"
+ echo "REST port $port cannot be detected on host $host"
exit $rc
fi
done
diff --git a/csit/resources/tests/apex-pdp-common.robot b/csit/resources/tests/apex-pdp-common.robot
index 81924604..d88d16f8 100644
--- a/csit/resources/tests/apex-pdp-common.robot
+++ b/csit/resources/tests/apex-pdp-common.robot
@@ -1,26 +1,26 @@
*** Settings ***
-Library Collections
-Library RequestsLibrary
-Library OperatingSystem
-Library json
-Library Process
+Library Collections
+Library RequestsLibrary
+Library OperatingSystem
+Library json
+Library Process
Resource common-library.robot
*** Keywords ***
DeployPolicy
- [Documentation] Deploy the policy in apex-pdp engine
- ${postjson}= Get File ${CURDIR}/data/policy_deploy.json
- ${postjson}= evaluate json.loads('''${postjson}''') json
- set to dictionary ${postjson['groups'][0]['deploymentSubgroups'][0]['policies'][0]} name=${policyName}
- ${postjson}= evaluate json.dumps(${postjson}) json
- ${policyadmin}= PolicyAdminAuth
- PerformPostRequest ${POLICY_PAP_IP} /policy/pap/v1/pdps/deployments/batch 202 ${postjson} null ${policyadmin}
+ [Documentation] Deploy the policy in apex-pdp engine
+ ${postjson}= Get File ${CURDIR}/data/policy_deploy.json
+ ${postjson}= evaluate json.loads('''${postjson}''') json
+ set to dictionary ${postjson['groups'][0]['deploymentSubgroups'][0]['policies'][0]} name=${policyName}
+ ${postjson}= evaluate json.dumps(${postjson}) json
+ ${policyadmin}= PolicyAdminAuth
+ PerformPostRequest ${POLICY_PAP_IP} /policy/pap/v1/pdps/deployments/batch 202 ${postjson} null ${policyadmin}
RunEventOnApexEngine
[Documentation] Send event to verify policy execution
Create Session apexSession http://${APEX_EVENTS_IP} max_retries=1
- ${data}= Get Binary File ${CURDIR}/data/event.json
+ ${data}= Get Binary File ${CURDIR}/data/event.json
&{headers}= Create Dictionary Content-Type=application/json Accept=application/json
${resp}= PUT On Session apexSession /apex/FirstConsumer/EventIn data=${data} headers=${headers}
Should Be Equal As Strings ${resp.status_code} 200
@@ -32,8 +32,8 @@ CheckLogMessage
Should Contain ${result} ${expectedMsg}
ValidateEventExecution
- [Arguments] ${eventStartTime} ${eventEndTime} ${eventsNo}
- [Documentation] Check that X amount of events were exeuted per second
+ [Arguments] ${eventStartTime} ${eventEndTime} ${eventsNo}
+ [Documentation] Check that X amount of events were executed per second
${eventTimeTaken}= Subtract Date From Date ${eventEndTime} ${eventStartTime}
${eventResult}= Set Variable ${eventTimeTaken * ${1000}}
${eventsPerSecond}= Set Variable ${${1000} / ${eventResult}}
diff --git a/csit/resources/tests/apex-pdp-test.robot b/csit/resources/tests/apex-pdp-test.robot
index 3989ec46..cf4c713d 100644
--- a/csit/resources/tests/apex-pdp-test.robot
+++ b/csit/resources/tests/apex-pdp-test.robot
@@ -1,96 +1,96 @@
*** Settings ***
-Library Collections
-Library RequestsLibrary
-Library OperatingSystem
-Library json
-Library Process
+Library Collections
+Library RequestsLibrary
+Library OperatingSystem
+Library json
+Library Process
Resource common-library.robot
Resource apex-pdp-common.robot
*** Test Cases ***
Healthcheck
- [Documentation] Runs Apex PDP Health check
- ${hcauth}= PolicyAdminAuth
- ${resp}= PerformGetRequest ${APEX_IP} /policy/apex-pdp/v1/healthcheck 200 null ${hcauth}
- Should Be Equal As Strings ${resp.json()['code']} 200
- Set Suite Variable ${pdpName} ${resp.json()['name']}
+ [Documentation] Runs Apex PDP Health check
+ ${hcauth}= PolicyAdminAuth
+ ${resp}= PerformGetRequest ${APEX_IP} /policy/apex-pdp/v1/healthcheck 200 null ${hcauth}
+ Should Be Equal As Strings ${resp.json()['code']} 200
+ Set Suite Variable ${pdpName} ${resp.json()['name']}
ExecuteApexSampleDomainPolicy
- # [Tags] docker
- Set Test Variable ${policyName} onap.policies.native.apex.Sampledomain
- ${postjson}= Get File ${CURDIR}/data/${policyName}.json
- CreatePolicySuccessfully /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies ${postjson} ${policyName} 1.0.0
- DeployPolicy
- Wait Until Keyword Succeeds 2 min 5 sec QueryPolicyStatus ${policyName} defaultGroup apex ${pdpName} onap.policies.native.Apex
- Wait Until Keyword Succeeds 4 min 5 sec RunEventOnApexEngine
+ # [Tags] docker
+ Set Test Variable ${policyName} onap.policies.native.apex.Sampledomain
+ ${postjson}= Get File ${CURDIR}/data/${policyName}.json
+ CreatePolicySuccessfully /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies ${postjson} ${policyName} 1.0.0
+ DeployPolicy
+ Wait Until Keyword Succeeds 2 min 5 sec QueryPolicyStatus ${policyName} defaultGroup apex ${pdpName} onap.policies.native.Apex
+ Wait Until Keyword Succeeds 4 min 5 sec RunEventOnApexEngine
ExecuteApexTestPnfPolicy
- Set Test Variable ${policyName} onap.policies.apex.pnf.Test
- ${postjson}= Get File ${CURDIR}/data/${policyName}.json
- CreatePolicySuccessfully /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies ${postjson} ${policyName} 1.0.0
- DeployPolicy
- Wait Until Keyword Succeeds 2 min 5 sec QueryPolicyStatus ${policyName} defaultGroup apex ${pdpName} onap.policies.native.Apex
- GetKafkaTopic apex-cl-mgt
- Wait Until Keyword Succeeds 2 min 5 sec TriggerAndVerifyTestPnfPolicy apex-cl-mgt
+ Set Test Variable ${policyName} onap.policies.apex.pnf.Test
+ ${postjson}= Get File ${CURDIR}/data/${policyName}.json
+ CreatePolicySuccessfully /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies ${postjson} ${policyName} 1.0.0
+ DeployPolicy
+ Wait Until Keyword Succeeds 2 min 5 sec QueryPolicyStatus ${policyName} defaultGroup apex ${pdpName} onap.policies.native.Apex
+ GetKafkaTopic apex-cl-mgt
+ TriggerAndVerifyTestPnfPolicy apex-cl-mgt
#ExecuteApexTestVnfPolicy
-# Set Test Variable ${policyName} onap.policies.apex.vnf.Test
-# ${postjson}= Get File ${CURDIR}/data/${policyName}.json
-# CreatePolicySuccessfully /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies ${postjson} ${policyName} 1.0.0
-# DeployPolicy
-# Wait Until Keyword Succeeds 2 min 5 sec QueryPolicyStatus ${policyName} defaultGroup apex ${pdpName} onap.policies.native.Apex
-# GetTopic apex-cl-mgt
-# Wait Until Keyword Succeeds 2 min 5 sec TriggerAndVerifyTestVnfPolicy
+# Set Test Variable ${policyName} onap.policies.apex.vnf.Test
+# ${postjson}= Get File ${CURDIR}/data/${policyName}.json
+# CreatePolicySuccessfully /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies ${postjson} ${policyName} 1.0.0
+# DeployPolicy
+# Wait Until Keyword Succeeds 2 min 5 sec QueryPolicyStatus ${policyName} defaultGroup apex ${pdpName} onap.policies.native.Apex
+# GetKafkaTopic apex-cl-mgt
+# TriggerAndVerifyTestVnfPolicy apex-cl-mgt
ExecuteApexTestPnfPolicyWithMetadataSet
- Set Test Variable ${policyName} onap.policies.apex.pnf.metadataSet.Test
- ${postjson}= Get File ${CURDIR}/data/${policyName}.json
- CreatePolicySuccessfully /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies ${postjson} ${policyName} 1.0.0
- ${postjson}= Get File ${CURDIR}/data/onap.pnf.metadataSet.Test.json
- CreateNodeTemplate /policy/api/v1/nodetemplates 201 ${postjson} 1
- DeployPolicy
- Wait Until Keyword Succeeds 2 min 5 sec QueryPolicyStatus ${policyName} defaultGroup apex ${pdpName} onap.policies.native.Apex
- GetKafkaTopic apex-cl-mgt2
- Wait Until Keyword Succeeds 2 min 5 sec TriggerAndVerifyTestPnfPolicy apex-cl-mgt2
+ Set Test Variable ${policyName} onap.policies.apex.pnf.metadataSet.Test
+ ${postjson}= Get File ${CURDIR}/data/${policyName}.json
+ CreatePolicySuccessfully /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies ${postjson} ${policyName} 1.0.0
+ ${postjson}= Get File ${CURDIR}/data/onap.pnf.metadataSet.Test.json
+ CreateNodeTemplate /policy/api/v1/nodetemplates 201 ${postjson} 1
+ DeployPolicy
+ Wait Until Keyword Succeeds 2 min 5 sec QueryPolicyStatus ${policyName} defaultGroup apex ${pdpName} onap.policies.native.Apex
+ GetKafkaTopic apex-cl-mgt2
+ TriggerAndVerifyTestPnfPolicy apex-cl-mgt2
Metrics
- [Documentation] Verify policy-apex-pdp is exporting prometheus metrics
- ${auth}= PolicyAdminAuth
- ${resp}= PerformGetRequest ${APEX_IP} /metrics 200 null ${auth}
- Should Contain ${resp.text} pdpa_policy_deployments_total{operation="deploy",status="TOTAL",} 3.0
- Should Contain ${resp.text} pdpa_policy_deployments_total{operation="deploy",status="SUCCESS",} 3.0
- Should Contain ${resp.text} pdpa_policy_executions_total{status="SUCCESS",} 6.0
- Should Contain ${resp.text} pdpa_policy_executions_total{status="TOTAL",} 6.0
- Should Match ${resp.text} *pdpa_engine_event_executions{engine_instance_id="NSOApexEngine-*:0.0.1",}*
- Should Match ${resp.text} *pdpa_engine_event_executions{engine_instance_id="MyApexEngine-*:0.0.1",}*
- Should Match ${resp.text} *pdpa_engine_state{engine_instance_id=*,} 2.0*
- Should Contain ${resp.text} pdpa_engine_event_executions
- Should Contain ${resp.text} pdpa_engine_average_execution_time_seconds
- Should Contain ${resp.text} pdpa_engine_last_execution_time_bucket
- Should Contain ${resp.text} pdpa_engine_last_execution_time_count
- Should Contain ${resp.text} pdpa_engine_last_execution_time_sum
- Should Match ${resp.text} *pdpa_engine_last_start_timestamp_epoch{engine_instance_id="NSOApexEngine-*:0.0.1",}*E12*
- Should Match ${resp.text} *pdpa_engine_last_start_timestamp_epoch{engine_instance_id="MyApexEngine-*:0.0.1",}*E12*
- Should Contain ${resp.text} jvm_threads_current
+ [Documentation] Verify policy-apex-pdp is exporting prometheus metrics
+ ${auth}= PolicyAdminAuth
+ ${resp}= PerformGetRequest ${APEX_IP} /metrics 200 null ${auth}
+ Should Contain ${resp.text} pdpa_policy_deployments_total{operation="deploy",status="TOTAL",} 3.0
+ Should Contain ${resp.text} pdpa_policy_deployments_total{operation="deploy",status="SUCCESS",} 3.0
+ Should Contain ${resp.text} pdpa_policy_executions_total{status="SUCCESS",} 3.0
+ Should Contain ${resp.text} pdpa_policy_executions_total{status="TOTAL",} 3.0
+ Should Match ${resp.text} *pdpa_engine_event_executions{engine_instance_id="NSOApexEngine-*:0.0.1",}*
+ Should Match ${resp.text} *pdpa_engine_event_executions{engine_instance_id="MyApexEngine-*:0.0.1",}*
+ Should Match ${resp.text} *pdpa_engine_state{engine_instance_id=*,} 2.0*
+ Should Contain ${resp.text} pdpa_engine_event_executions
+ Should Contain ${resp.text} pdpa_engine_average_execution_time_seconds
+ Should Contain ${resp.text} pdpa_engine_last_execution_time_bucket
+ Should Contain ${resp.text} pdpa_engine_last_execution_time_count
+ Should Contain ${resp.text} pdpa_engine_last_execution_time_sum
+ Should Match ${resp.text} *pdpa_engine_last_start_timestamp_epoch{engine_instance_id="NSOApexEngine-*:0.0.1",}*E12*
+ Should Match ${resp.text} *pdpa_engine_last_start_timestamp_epoch{engine_instance_id="MyApexEngine-*:0.0.1",}*E12*
+ Should Contain ${resp.text} jvm_threads_current
*** Keywords ***
TriggerAndVerifyTestPnfPolicy
[Documentation] Send TestPnf policy trigger event to Kafka and read notifications to verify policy execution
[Arguments] ${topic}
- ${data}= Get Binary File ${CURDIR}/data/VesEventForPnfPolicy.json
+ ${data}= Get Binary File ${CURDIR}/data/VesEventForPnfPolicy.json
${resp}= Run Process ${CURDIR}/kafka_producer.py unauthenticated.dcae_cl_output ${data} ${KAFKA_IP}
- Run Keyword CheckLogMessage ${topic} ACTIVE VES event has been received. Going to fetch details from AAI.
- Run Keyword CheckLogMessage ${topic} SUCCESS Received response from AAI successfully. Hostname in AAI matches with the one in Ves event. Going to make the update-config request to CDS.
- Run Keyword CheckLogMessage ${topic} FINAL_SUCCESS Successfully processed the VES event. Hostname is updated.
+ Wait Until Keyword Succeeds 4 x 10 sec CheckLogMessage ${topic} ACTIVE VES event has been received. Going to fetch details from AAI.
+ Wait Until Keyword Succeeds 4 x 10 sec CheckLogMessage ${topic} SUCCESS Received response from AAI successfully. Hostname in AAI matches with the one in Ves event. Going to make the update-config request to CDS.
+ Wait Until Keyword Succeeds 4 x 10 sec CheckLogMessage ${topic} FINAL_SUCCESS Successfully processed the VES event. Hostname is updated.
TriggerAndVerifyTestVnfPolicy
[Documentation] Send TestVnf policy trigger event to Kafka and read notifications to verify policy execution
[Arguments] ${topic}
- ${data}= Get Binary File ${CURDIR}/data/VesEventForVnfPolicy.json
+ ${data}= Get Binary File ${CURDIR}/data/VesEventForVnfPolicy.json
${resp}= Run Process ${CURDIR}/kafka_producer.py unauthenticated.dcae_policy_example_output ${data} ${KAFKA_IP}
- Run Keyword CheckLogMessage ${topic} ACTIVE VES event has been received. Going to fetch VNF details from AAI.
- Run Keyword CheckLogMessage ${topic} SUCCESS VNF details are received from AAI successfully. Sending ConfigModify request to CDS.
- Run Keyword CheckLogMessage ${topic} SUCCESS ConfigModify request is successful. Sending restart request to CDS.
- Run Keyword CheckLogMessage ${topic} FINAL_SUCCESS Successfully processed the VES Event. Restart is complete.
+ Wait Until Keyword Succeeds 4 x 10 sec CheckLogMessage ${topic} ACTIVE VES event has been received. Going to fetch VNF details from AAI.
+ Wait Until Keyword Succeeds 4 x 10 sec CheckLogMessage ${topic} SUCCESS VNF details are received from AAI successfully. Sending ConfigModify request to CDS.
+ Wait Until Keyword Succeeds 4 x 10 sec CheckLogMessage ${topic} SUCCESS ConfigModify request is successful. Sending restart request to CDS.
+ Wait Until Keyword Succeeds 4 x 10 sec CheckLogMessage ${topic} FINAL_SUCCESS Successfully processed the VES Event. Restart is complete.
diff --git a/csit/resources/tests/apex-slas-3.robot b/csit/resources/tests/apex-slas-3.robot
index 1ceb9b77..c8fc2582 100644
--- a/csit/resources/tests/apex-slas-3.robot
+++ b/csit/resources/tests/apex-slas-3.robot
@@ -17,7 +17,7 @@ Healthcheck
Set Suite Variable ${pdpName} ${resp.json()['name']}
ValidatePolicyExecutionAndEventRateLowComplexity
- [Documentation] Validate that a moderate complexity policity can be executed in less than 100ms and minimum 30 events triggered per second
+ [Documentation] Validate that a moderate complexity policy can be executed in less than 100ms and minimum 30 events triggered per second
Set Test Variable ${policyName} onap.policies.apex.pnf.Test
${postjson}= Get File ${CURDIR}/data/${policyName}.json
CreatePolicySuccessfully /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies ${postjson} ${policyName} 1.0.0
@@ -31,7 +31,7 @@ ValidatePolicyExecutionAndEventRateLowComplexity
ValidateEventExecution ${eventStartTime} ${eventEndTime} 30
ValidatePolicyExecutionAndEventRateHighComplexity
- [Documentation] Validate that a high complexity policity can be executed in less than 5000ms and minimum 0.6 events triggered per second
+ [Documentation] Validate that a high complexity policy can be executed in less than 5000ms and minimum 0.6 events triggered per second
Set Test Variable ${policyName} onap.policies.apex.pnf.metadataSet.Test
${postjson}= Get File ${CURDIR}/data/${policyName}.json
CreatePolicySuccessfully /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies ${postjson} ${policyName} 1.0.0
@@ -47,7 +47,7 @@ ValidatePolicyExecutionAndEventRateHighComplexity
ValidateEventExecution ${eventStartTime} ${eventEndTime} 0.6
ValidatePolicyExecutionAndEventRateModerateComplexity
- [Documentation] Validate that a low complexity policity can be executed in less than 1000ms and minimum 3 events triggered per second
+ [Documentation] Validate that a low complexity policy can be executed in less than 1000ms and minimum 3 events triggered per second
Set Test Variable ${policyName} onap.policies.native.apex.Sampledomain
${postjson}= Get File ${CURDIR}/data/${policyName}.json
CreatePolicySuccessfully /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies ${postjson} ${policyName} 1.0.0
diff --git a/csit/resources/tests/apex-slas.robot b/csit/resources/tests/apex-slas.robot
index 760bc348..e27f0da2 100644
--- a/csit/resources/tests/apex-slas.robot
+++ b/csit/resources/tests/apex-slas.robot
@@ -10,11 +10,11 @@ Resource apex-pdp-common.robot
*** Test Cases ***
Healthcheck
- [Documentation] Runs Apex PDP Health check
- ${hcauth}= PolicyAdminAuth
- ${resp}= PerformGetRequest ${APEX_IP} /policy/apex-pdp/v1/healthcheck 200 null ${hcauth}
- Should Be Equal As Strings ${resp.json()['code']} 200
- Set Suite Variable ${pdpName} ${resp.json()['name']}
+ [Documentation] Runs Apex PDP Health check
+ ${hcauth}= PolicyAdminAuth
+ ${resp}= PerformGetRequest ${APEX_IP} /policy/apex-pdp/v1/healthcheck 200 null ${hcauth}
+ Should Be Equal As Strings ${resp.json()['code']} 200
+ Set Suite Variable ${pdpName} ${resp.json()['name']}
ValidatePolicyExecutionAndEventRateLowComplexity
[Documentation] Validate that a moderate complexity policy can be executed in less than 100ms and minimum 10 events triggered per second
@@ -23,8 +23,8 @@ ValidatePolicyExecutionAndEventRateLowComplexity
CreatePolicySuccessfully /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies ${postjson} ${policyName} 1.0.0
DeployPolicy
Wait Until Keyword Succeeds 2 min 5 sec QueryPolicyStatus ${policyName} defaultGroup apex ${pdpName} onap.policies.native.Apex
- GetKafkaTopic apex-cl-mgt
- ${data}= Get Binary File ${CURDIR}/data/VesEventForPnfPolicy.json
+ GetKafkaTopic apex-cl-mgt
+ ${data}= Get Binary File ${CURDIR}/data/VesEventForPnfPolicy.json
${eventStartTime}= Get Current Date
${resp}= Run Process ${CURDIR}/kafka_producer.py unauthenticated.dcae_cl_output ${data} ${KAFKA_IP}
${eventEndTime}= Get Current Date
@@ -50,8 +50,8 @@ ValidatePolicyExecutionAndEventRateHighComplexity
CreateNodeTemplate /policy/api/v1/nodetemplates 201 ${postjson} 1
DeployPolicy
Wait Until Keyword Succeeds 2 min 5 sec QueryPolicyStatus ${policyName} defaultGroup apex ${pdpName} onap.policies.native.Apex
- GetKafkaTopic apex-cl-mgt2
- ${data}= Get Binary File ${CURDIR}/data/VesEventForVnfPolicy.json
+ GetKafkaTopic apex-cl-mgt2
+ ${data}= Get Binary File ${CURDIR}/data/VesEventForVnfPolicy.json
${eventStartTime}= Get Current Date
${resp}= Run Process ${CURDIR}/kafka_producer.py unauthenticated.dcae_policy_example_output ${data} ${KAFKA_IP}
${eventEndTime}= Get Current Date
diff --git a/csit/resources/tests/api-test.robot b/csit/resources/tests/api-test.robot
index 29c2fba4..e1b8fd47 100644
--- a/csit/resources/tests/api-test.robot
+++ b/csit/resources/tests/api-test.robot
@@ -16,7 +16,7 @@ Healthcheck
RetrievePolicyTypes
[Documentation] Retrieve all policy types
- FetchPolicyTypes /policy/api/v1/policytypes 37
+ FetchPolicyTypes /policy/api/v1/policytypes 38
CreateTCAPolicyTypeV1
[Documentation] Create an existing policy type with modification and keeping the same version should result in error.
diff --git a/csit/resources/tests/data/AcDocker.json b/csit/resources/tests/data/AcDocker.json
index 57148256..25f0c712 100644
--- a/csit/resources/tests/data/AcDocker.json
+++ b/csit/resources/tests/data/AcDocker.json
@@ -12,14 +12,1083 @@
},
"description": "Policy Automation Composition Element for the Demo",
"properties": {
- "policy_type_id": {
- "name": "onap.policies.native.Apex",
- "version": "1.0.0"
+ "tosca_definitions_version": "tosca_simple_yaml_1_3",
+ "name": "NULL",
+ "version": "0.0.0",
+ "data_types": {
+ "onap.datatypes.native.apex.EngineService": {
+ "derived_from": "tosca.datatypes.Root",
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "Specifies the engine name",
+ "required": false,
+ "default": "ApexEngineService"
+ },
+ "version": {
+ "type": "string",
+ "description": "Specifies the engine version in double dotted format",
+ "required": false,
+ "default": "1.0.0"
+ },
+ "id": {
+ "type": "integer",
+ "description": "Specifies the engine id",
+ "required": true
+ },
+ "instance_count": {
+ "type": "integer",
+ "description": "Specifies the number of engine threads that should be run",
+ "required": true
+ },
+ "deployment_port": {
+ "type": "integer",
+ "description": "Specifies the port to connect to for engine administration",
+ "required": false,
+ "default": 1
+ },
+ "policy_model_file_name": {
+ "type": "string",
+ "description": "The name of the file from which to read the APEX policy model",
+ "required": false
+ },
+ "policy_type_impl": {
+ "type": "string",
+ "description": "The policy type implementation from which to read the APEX policy model",
+ "required": false
+ },
+ "periodic_event_period": {
+ "type": "string",
+ "description": "The time interval in milliseconds for the periodic scanning event, 0 means don't scan",
+ "required": false
+ },
+ "engine": {
+ "type": "onap.datatypes.native.apex.engineservice.Engine",
+ "description": "The parameters for all engines in the APEX engine service",
+ "required": true
+ }
+ }
+ },
+ "onap.datatypes.native.apex.EventHandler": {
+ "derived_from": "tosca.datatypes.Root",
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "Specifies the event handler name, if not specified this is set to the key name",
+ "required": false
+ },
+ "carrier_technology": {
+ "type": "onap.datatypes.native.apex.CarrierTechnology",
+ "description": "Specifies the carrier technology of the event handler (such as REST/Web Socket/Kafka)",
+ "required": true
+ },
+ "event_protocol": {
+ "type": "onap.datatypes.native.apex.EventProtocol",
+ "description": "Specifies the event protocol of events for the event handler (such as Yaml/JSON/XML/POJO)",
+ "required": true
+ },
+ "event_name": {
+ "type": "string",
+ "description": "Specifies the event name for events on this event handler, if not specified, the event name is read from or written to the event being received or sent",
+ "required": false
+ },
+ "event_name_filter": {
+ "type": "string",
+ "description": "Specifies a filter as a regular expression, events that do not match the filter are dropped, the default is to let all events through",
+ "required": false
+ },
+ "synchronous_mode": {
+ "type": "boolean",
+                        "description": "Specifies the event handler is synchronous (receive event and send response)",
+ "required": false,
+ "default": false
+ },
+ "synchronous_peer": {
+ "type": "string",
+ "description": "The peer event handler (output for input or input for output) of this event handler in synchronous mode, this parameter is mandatory if the event handler is in synchronous mode",
+ "required": false
+ },
+ "synchronous_timeout": {
+ "type": "integer",
+                        "description": "The timeout in milliseconds for responses to be issued by APEX to requests, this parameter is mandatory if the event handler is in synchronous mode",
+ "required": false
+ },
+ "requestor_mode": {
+ "type": "boolean",
+ "description": "Specifies the event handler is in requestor mode (send event and wait for response mode)",
+ "required": false,
+ "default": false
+ },
+ "requestor_peer": {
+ "type": "string",
+ "description": "The peer event handler (output for input or input for output) of this event handler in requestor mode, this parameter is mandatory if the event handler is in requestor mode",
+ "required": false
+ },
+ "requestor_timeout": {
+ "type": "integer",
+ "description": "The timeout in milliseconds for wait for responses to requests, this parameter is mandatory if the event handler is in requestor mode",
+ "required": false
+ }
+ }
+ },
+ "onap.datatypes.native.apex.CarrierTechnology": {
+ "derived_from": "tosca.datatypes.Root",
+ "properties": {
+ "label": {
+ "type": "string",
+ "description": "The label (name) of the carrier technology (such as REST, Kafka, WebSocket)",
+ "required": true
+ },
+ "plugin_parameter_class_name": {
+ "type": "string",
+ "description": "The class name of the class that overrides default handling of event input or output for this carrier technology, defaults to the supplied input or output class",
+ "required": false
+ }
+ }
+ },
+ "onap.datatypes.native.apex.EventProtocol": {
+ "derived_from": "tosca.datatypes.Root",
+ "properties": {
+ "label": {
+ "type": "string",
+ "description": "The label (name) of the event protocol (such as Yaml, JSON, XML, or POJO)",
+ "required": true
+ },
+ "event_protocol_plugin_class": {
+ "type": "string",
+ "description": "The class name of the class that overrides default handling of the event protocol for this carrier technology, defaults to the supplied event protocol class",
+ "required": false
+ }
+ }
+ },
+ "onap.datatypes.native.apex.Environment": {
+ "derived_from": "tosca.datatypes.Root",
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "The name of the environment variable",
+ "required": true
+ },
+ "value": {
+ "type": "string",
+ "description": "The value of the environment variable",
+ "required": true
+ }
+ }
+ },
+ "onap.datatypes.native.apex.engineservice.Engine": {
+ "derived_from": "tosca.datatypes.Root",
+ "properties": {
+ "context": {
+ "type": "onap.datatypes.native.apex.engineservice.engine.Context",
+ "description": "The properties for handling context in APEX engines, defaults to using Java maps for context",
+ "required": false
+ },
+ "executors": {
+ "type": "map",
+ "description": "The plugins for policy executors used in engines such as javascript, MVEL, Jython",
+ "required": true,
+ "entry_schema": {
+ "description": "The plugin class path for this policy executor",
+ "type": "string"
+ }
+ }
+ }
+ },
+ "onap.datatypes.native.apex.engineservice.engine.Context": {
+ "derived_from": "tosca.datatypes.Root",
+ "properties": {
+ "distributor": {
+ "type": "onap.datatypes.native.apex.Plugin",
+ "description": "The plugin to be used for distributing context between APEX PDPs at runtime",
+ "required": false
+ },
+ "schemas": {
+ "type": "map",
+ "description": "The plugins for context schemas available in APEX PDPs such as Java and Avro",
+ "required": false,
+ "entry_schema": {
+ "type": "onap.datatypes.native.apex.Plugin"
+ }
+ },
+ "locking": {
+ "type": "onap.datatypes.native.apex.Plugin",
+ "description": "The plugin to be used for locking context in and between APEX PDPs at runtime",
+ "required": false
+ },
+ "persistence": {
+ "type": "onap.datatypes.native.apex.Plugin",
+ "description": "The plugin to be used for persisting context for APEX PDPs at runtime",
+ "required": false
+ }
+ }
+ },
+ "onap.datatypes.native.apex.Plugin": {
+ "derived_from": "tosca.datatypes.Root",
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "The name of the executor such as Javascript, Jython or MVEL",
+ "required": true
+ },
+ "plugin_class_name": {
+ "type": "string",
+ "description": "The class path of the plugin class for this executor"
+ }
+ }
+ }
},
- "policy_id": {
- "get_input": "acm_element_policy"
+ "policy_types": {
+ "onap.policies.Native": {
+ "derived_from": "tosca.policies.Root",
+ "description": "a base policy type for all native PDP policies",
+ "version": "1.0.0",
+ "name": "onap.policies.Native"
+ },
+ "onap.policies.native.Apex": {
+ "derived_from": "onap.policies.Native",
+ "description": "a policy type for native apex policies",
+ "version": "1.0.0",
+ "name": "onap.policies.native.Apex",
+ "properties": {
+ "engine_service": {
+ "type": "onap.datatypes.native.apex.EngineService",
+ "description": "APEX Engine Service Parameters"
+ },
+ "inputs": {
+ "type": "map",
+ "description": "Inputs for handling events coming into the APEX engine",
+ "entry_schema": {
+ "type": "onap.datatypes.native.apex.EventHandler"
+ }
+ },
+ "outputs": {
+ "type": "map",
+ "description": "Outputs for handling events going out of the APEX engine",
+ "entry_schema": {
+ "type": "onap.datatypes.native.apex.EventHandler"
+ }
+ },
+ "environment": {
+ "type": "list",
+ "description": "Envioronmental parameters for the APEX engine",
+ "entry_schema": {
+ "type": "onap.datatypes.native.apex.Environment"
+ }
+ }
+ }
+ }
+ },
+ "topology_template": {
+ "policies": [
+ {
+ "onap.policies.native.apex.ac.element": {
+ "type": "onap.policies.native.Apex",
+ "type_version": "1.0.0",
+ "properties": {
+ "engineServiceParameters": {
+ "name": "MyApexEngine",
+ "version": "0.0.1",
+ "id": 45,
+ "instanceCount": 2,
+ "deploymentPort": 12561,
+ "engineParameters": {
+ "executorParameters": {
+ "JAVASCRIPT": {
+ "parameterClassName": "org.onap.policy.apex.plugins.executor.javascript.JavascriptExecutorParameters"
+ }
+ },
+ "contextParameters": {
+ "parameterClassName": "org.onap.policy.apex.context.parameters.ContextParameters",
+ "schemaParameters": {
+ "Json": {
+ "parameterClassName": "org.onap.policy.apex.plugins.context.schema.json.JsonSchemaHelperParameters"
+ }
+ }
+ }
+ },
+ "policy_type_impl": {
+ "policies": {
+ "key": {
+ "name": "APEXacElementPolicy_Policies",
+ "version": "0.0.1"
+ },
+ "policyMap": {
+ "entry": [
+ {
+ "key": {
+ "name": "ReceiveEventPolicy",
+ "version": "0.0.1"
+ },
+ "value": {
+ "policyKey": {
+ "name": "ReceiveEventPolicy",
+ "version": "0.0.1"
+ },
+ "template": "Freestyle",
+ "state": {
+ "entry": [
+ {
+ "key": "DecideForwardingState",
+ "value": {
+ "stateKey": {
+ "parentKeyName": "ReceiveEventPolicy",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "NULL",
+ "localName": "DecideForwardingState"
+ },
+ "trigger": {
+ "name": "AcElementEvent",
+ "version": "0.0.1"
+ },
+ "stateOutputs": {
+ "entry": [
+ {
+ "key": "CreateForwardPayload",
+ "value": {
+ "key": {
+ "parentKeyName": "ReceiveEventPolicy",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "DecideForwardingState",
+ "localName": "CreateForwardPayload"
+ },
+ "outgoingEvent": {
+ "name": "KafkaResponseStatusEvent",
+ "version": "0.0.1"
+ },
+ "outgoingEventReference": [
+ {
+ "name": "KafkaResponseStatusEvent",
+ "version": "0.0.1"
+ }
+ ],
+ "nextState": {
+ "parentKeyName": "NULL",
+ "parentKeyVersion": "0.0.0",
+ "parentLocalName": "NULL",
+ "localName": "NULL"
+ }
+ }
+ }
+ ]
+ },
+ "contextAlbumReference": [],
+ "taskSelectionLogic": {
+ "key": {
+ "parentKeyName": "NULL",
+ "parentKeyVersion": "0.0.0",
+ "parentLocalName": "NULL",
+ "localName": "NULL"
+ },
+ "logicFlavour": "UNDEFINED",
+ "logic": ""
+ },
+ "stateFinalizerLogicMap": {
+ "entry": []
+ },
+ "defaultTask": {
+ "name": "ForwardPayloadTask",
+ "version": "0.0.1"
+ },
+ "taskReferences": {
+ "entry": [
+ {
+ "key": {
+ "name": "ForwardPayloadTask",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "parentKeyName": "ReceiveEventPolicy",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "DecideForwardingState",
+ "localName": "ReceiveEventPolicy"
+ },
+ "outputType": "DIRECT",
+ "output": {
+ "parentKeyName": "ReceiveEventPolicy",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "DecideForwardingState",
+ "localName": "CreateForwardPayload"
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "firstState": "DecideForwardingState"
+ }
+ }
+ ]
+ }
+ },
+ "tasks": {
+ "key": {
+ "name": "APEXacElementPolicy_Tasks",
+ "version": "0.0.1"
+ },
+ "taskMap": {
+ "entry": [
+ {
+ "key": {
+ "name": "ForwardPayloadTask",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "ForwardPayloadTask",
+ "version": "0.0.1"
+ },
+ "inputEvent": {
+ "key": {
+ "name": "AcElementEvent",
+ "version": "0.0.1"
+ },
+ "nameSpace": "org.onap.policy.apex.ac.element",
+ "source": "Kafka",
+ "target": "APEX",
+ "parameter": {
+ "entry": [
+ {
+ "key": "KafkaResponseEvent",
+ "value": {
+ "key": {
+ "parentKeyName": "AcElementEvent",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "NULL",
+ "localName": "KafkaResponseEvent"
+ },
+ "fieldSchemaKey": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ },
+ "optional": false
+ }
+ }
+ ]
+ },
+ "toscaPolicyState": "ENTRY"
+ },
+ "outputEvents": {
+ "entry": [
+ {
+ "key": "KafkaResponseStatusEvent",
+ "value": {
+ "key": {
+ "name": "KafkaResponseStatusEvent",
+ "version": "0.0.1"
+ },
+ "nameSpace": "org.onap.policy.apex.ac.element",
+ "source": "APEX",
+ "target": "Kafka",
+ "parameter": {
+ "entry": [
+ {
+ "key": "KafkaResponseStatusEvent",
+ "value": {
+ "key": {
+ "parentKeyName": "KafkaResponseStatusEvent",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "NULL",
+ "localName": "KafkaResponseStatusEvent"
+ },
+ "fieldSchemaKey": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ },
+ "optional": false
+ }
+ }
+ ]
+ },
+ "toscaPolicyState": ""
+ }
+ }
+ ]
+ },
+ "taskParameters": {
+ "entry": []
+ },
+ "contextAlbumReference": [
+ {
+ "name": "ACElementAlbum",
+ "version": "0.0.1"
+ }
+ ],
+ "taskLogic": {
+ "key": {
+ "parentKeyName": "ForwardPayloadTask",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "NULL",
+ "localName": "TaskLogic"
+ },
+ "logicFlavour": "JAVASCRIPT",
+ "logic": "/*\n * ============LICENSE_START=======================================================\n * Copyright (C) 2022 Nordix. All rights reserved.\n * ================================================================================\n * Licensed under the Apache License, Version 2.0 (the 'License');\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an 'AS IS' BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * SPDX-License-Identifier: Apache-2.0\n * ============LICENSE_END=========================================================\n */\n\nexecutor.logger.info(executor.subject.id);\nexecutor.logger.info(executor.inFields);\n\nvar msgResponse = executor.inFields.get('KafkaResponseEvent');\nexecutor.logger.info('Task in progress with mesages: ' + msgResponse);\n\nvar elementId = msgResponse.get('elementId').get('name');\n\nif (msgResponse.get('messageType') == 'STATUS' &&\n (elementId == 'onap.policy.clamp.ac.startertobridge'\n || elementId == 'onap.policy.clamp.ac.bridgetosink')) {\n\n var receiverId = '';\n if (elementId == 'onap.policy.clamp.ac.startertobridge') {\n receiverId = 'onap.policy.clamp.ac.bridge';\n } else {\n receiverId = 'onap.policy.clamp.ac.sink';\n }\n\n var elementIdResponse = new java.util.HashMap();\n elementIdResponse.put('name', receiverId);\n elementIdResponse.put('version', msgResponse.get('elementId').get('version'));\n\n var kafkaResponse = new java.util.HashMap();\n kafkaResponse.put('elementId', elementIdResponse);\n\n var message = msgResponse.get('message') + ' trace added from policy';\n kafkaResponse.put('message', message);\n kafkaResponse.put('messageType', 'STATUS');\n kafkaResponse.put('messageId', msgResponse.get('messageId'));\n kafkaResponse.put('timestamp', msgResponse.get('timestamp'));\n\n executor.logger.info('Sending forwarding Event to Ac element: ' + kafkaResponse);\n\n executor.outFields.put('KafkaResponseStatusEvent', kafkaResponse);\n}\n\ntrue;"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "events": {
+ "key": {
+ "name": "APEXacElementPolicy_Events",
+ "version": "0.0.1"
+ },
+ "eventMap": {
+ "entry": [
+ {
+ "key": {
+ "name": "AcElementEvent",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "AcElementEvent",
+ "version": "0.0.1"
+ },
+ "nameSpace": "org.onap.policy.apex.ac.element",
+ "source": "Kafka",
+ "target": "APEX",
+ "parameter": {
+ "entry": [
+ {
+ "key": "KafkaResponseEvent",
+ "value": {
+ "key": {
+ "parentKeyName": "AcElementEvent",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "NULL",
+ "localName": "KafkaResponseEvent"
+ },
+ "fieldSchemaKey": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ },
+ "optional": false
+ }
+ }
+ ]
+ },
+ "toscaPolicyState": "ENTRY"
+ }
+ },
+ {
+ "key": {
+ "name": "KafkaResponseStatusEvent",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "KafkaResponseStatusEvent",
+ "version": "0.0.1"
+ },
+ "nameSpace": "org.onap.policy.apex.ac.element",
+ "source": "APEX",
+ "target": "Kafka",
+ "parameter": {
+ "entry": [
+ {
+ "key": "KafkaResponseStatusEvent",
+ "value": {
+ "key": {
+ "parentKeyName": "KafkaResponseStatusEvent",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "NULL",
+ "localName": "KafkaResponseStatusEvent"
+ },
+ "fieldSchemaKey": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ },
+ "optional": false
+ }
+ }
+ ]
+ },
+ "toscaPolicyState": ""
+ }
+ },
+ {
+ "key": {
+ "name": "LogEvent",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "LogEvent",
+ "version": "0.0.1"
+ },
+ "nameSpace": "org.onap.policy.apex.ac.element",
+ "source": "APEX",
+ "target": "file",
+ "parameter": {
+ "entry": [
+ {
+ "key": "final_status",
+ "value": {
+ "key": {
+ "parentKeyName": "LogEvent",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "NULL",
+ "localName": "final_status"
+ },
+ "fieldSchemaKey": {
+ "name": "SimpleStringType",
+ "version": "0.0.1"
+ },
+ "optional": false
+ }
+ },
+ {
+ "key": "message",
+ "value": {
+ "key": {
+ "parentKeyName": "LogEvent",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "NULL",
+ "localName": "message"
+ },
+ "fieldSchemaKey": {
+ "name": "SimpleStringType",
+ "version": "0.0.1"
+ },
+ "optional": false
+ }
+ }
+ ]
+ },
+ "toscaPolicyState": ""
+ }
+ }
+ ]
+ }
+ },
+ "albums": {
+ "key": {
+ "name": "APEXacElementPolicy_Albums",
+ "version": "0.0.1"
+ },
+ "albums": {
+ "entry": [
+ {
+ "key": {
+ "name": "ACElementAlbum",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "ACElementAlbum",
+ "version": "0.0.1"
+ },
+ "scope": "policy",
+ "isWritable": true,
+ "itemSchema": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "schemas": {
+ "key": {
+ "name": "APEXacElementPolicy_Schemas",
+ "version": "0.0.1"
+ },
+ "schemas": {
+ "entry": [
+ {
+ "key": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ },
+ "schemaFlavour": "Json",
+ "schemaDefinition": "{\n \"$schema\": \"http://json-schema.org/draft-04/schema#\",\n \"type\": \"object\",\n \"properties\": {\n \"elementId\": {\n \"type\": \"object\",\n \"properties\": {\n \"name\": {\n \"type\": \"string\"\n },\n \"version\": {\n \"type\": \"string\"\n }\n },\n \"required\": [\n \"name\",\n \"version\"\n ]\n },\n \"message\": {\n \"type\": \"string\"\n },\n \"messageType\": {\n \"type\": \"string\"\n }\n },\n \"required\": [\n \"elementId\",\n \"message\",\n \"messageType\"\n ]\n}"
+ }
+ },
+ {
+ "key": {
+ "name": "SimpleIntType",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "SimpleIntType",
+ "version": "0.0.1"
+ },
+ "schemaFlavour": "Java",
+ "schemaDefinition": "java.lang.Integer"
+ }
+ },
+ {
+ "key": {
+ "name": "SimpleStringType",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "SimpleStringType",
+ "version": "0.0.1"
+ },
+ "schemaFlavour": "Java",
+ "schemaDefinition": "java.lang.String"
+ }
+ },
+ {
+ "key": {
+ "name": "UUIDType",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "UUIDType",
+ "version": "0.0.1"
+ },
+ "schemaFlavour": "Java",
+ "schemaDefinition": "java.util.UUID"
+ }
+ }
+ ]
+ }
+ },
+ "key": {
+ "name": "APEXacElementPolicy",
+ "version": "0.0.1"
+ },
+ "keyInformation": {
+ "key": {
+ "name": "APEXacElementPolicy_KeyInfo",
+ "version": "0.0.1"
+ },
+ "keyInfoMap": {
+ "entry": [
+ {
+ "key": {
+ "name": "ACElementAlbum",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "ACElementAlbum",
+ "version": "0.0.1"
+ },
+ "UUID": "7cddfab8-6d3f-3f7f-8ac3-e2eb5979c900",
+ "description": "Generated description for concept referred to by key \"ACElementAlbum:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ },
+ "UUID": "dab78794-b666-3929-a75b-70d634b04fe5",
+ "description": "Generated description for concept referred to by key \"ACEventType:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "APEXacElementPolicy",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "APEXacElementPolicy",
+ "version": "0.0.1"
+ },
+ "UUID": "da478611-7d77-3c46-b4be-be968769ba4e",
+ "description": "Generated description for concept referred to by key \"APEXacElementPolicy:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "APEXacElementPolicy_Albums",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "APEXacElementPolicy_Albums",
+ "version": "0.0.1"
+ },
+ "UUID": "fa8dc15e-8c8d-3de3-a0f8-585b76511175",
+ "description": "Generated description for concept referred to by key \"APEXacElementPolicy_Albums:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "APEXacElementPolicy_Events",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "APEXacElementPolicy_Events",
+ "version": "0.0.1"
+ },
+ "UUID": "8508cd65-8dd2-342d-a5c6-1570810dbe2b",
+ "description": "Generated description for concept referred to by key \"APEXacElementPolicy_Events:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "APEXacElementPolicy_KeyInfo",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "APEXacElementPolicy_KeyInfo",
+ "version": "0.0.1"
+ },
+ "UUID": "09e6927d-c5ac-3779-919f-9333994eed22",
+ "description": "Generated description for concept referred to by key \"APEXacElementPolicy_KeyInfo:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "APEXacElementPolicy_Policies",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "APEXacElementPolicy_Policies",
+ "version": "0.0.1"
+ },
+ "UUID": "cade3c9a-1600-3642-a6f4-315612187f46",
+ "description": "Generated description for concept referred to by key \"APEXacElementPolicy_Policies:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "APEXacElementPolicy_Schemas",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "APEXacElementPolicy_Schemas",
+ "version": "0.0.1"
+ },
+ "UUID": "5bb4a8e9-35fa-37db-9a49-48ef036a7ba9",
+ "description": "Generated description for concept referred to by key \"APEXacElementPolicy_Schemas:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "APEXacElementPolicy_Tasks",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "APEXacElementPolicy_Tasks",
+ "version": "0.0.1"
+ },
+ "UUID": "2527eeec-0d1f-3094-ad3f-212622b12836",
+ "description": "Generated description for concept referred to by key \"APEXacElementPolicy_Tasks:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "AcElementEvent",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "AcElementEvent",
+ "version": "0.0.1"
+ },
+ "UUID": "32c013e2-2740-3986-a626-cbdf665b63e9",
+ "description": "Generated description for concept referred to by key \"AcElementEvent:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "KafkaResponseStatusEvent",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "KafkaResponseStatusEvent",
+ "version": "0.0.1"
+ },
+ "UUID": "2715cb6c-2778-3461-8b69-871e79f95935",
+ "description": "Generated description for concept referred to by key \"KafkaResponseStatusEvent:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "ForwardPayloadTask",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "ForwardPayloadTask",
+ "version": "0.0.1"
+ },
+ "UUID": "51defa03-1ecf-3314-bf34-2a652bce57fa",
+ "description": "Generated description for concept referred to by key \"ForwardPayloadTask:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "LogEvent",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "LogEvent",
+ "version": "0.0.1"
+ },
+ "UUID": "c540f048-96af-35e3-a36e-e9c29377cba7",
+ "description": "Generated description for concept referred to by key \"LogEvent:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "ReceiveEventPolicy",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "ReceiveEventPolicy",
+ "version": "0.0.1"
+ },
+ "UUID": "568b7345-9de1-36d3-b6a3-9b857e6809a1",
+ "description": "Generated description for concept referred to by key \"ReceiveEventPolicy:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "SimpleIntType",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "SimpleIntType",
+ "version": "0.0.1"
+ },
+ "UUID": "153791fd-ae0a-36a7-88a5-309a7936415d",
+ "description": "Generated description for concept referred to by key \"SimpleIntType:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "SimpleStringType",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "SimpleStringType",
+ "version": "0.0.1"
+ },
+ "UUID": "8a4957cf-9493-3a76-8c22-a208e23259af",
+ "description": "Generated description for concept referred to by key \"SimpleStringType:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "UUIDType",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "UUIDType",
+ "version": "0.0.1"
+ },
+ "UUID": "6a8cc68e-dfc8-3403-9c6d-071c886b319c",
+ "description": "Generated description for concept referred to by key \"UUIDType:0.0.1\""
+ }
+ }
+ ]
+ }
+ }
+ }
+ },
+ "eventInputParameters": {
+ "KafkaConsumer": {
+ "carrierTechnologyParameters": {
+ "carrierTechnology": "KAFKA",
+ "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.kafka.KafkaCarrierTechnologyParameters",
+ "parameters": {
+ "bootstrapServers": "kafka:9092",
+ "groupId": "apex-group-id",
+ "enableAutoCommit": true,
+ "autoCommitTime": 1000,
+ "sessionTimeout": 30000,
+ "consumerPollTime": 100,
+ "consumerTopicList": [
+ "ac_element_msg"
+ ],
+ "keyDeserializer": "org.apache.kafka.common.serialization.StringDeserializer",
+ "valueDeserializer": "org.apache.kafka.common.serialization.StringDeserializer"
+ }
+ },
+ "eventProtocolParameters": {
+ "eventProtocol": "JSON",
+ "parameters": {
+ "pojoField": "KafkaResponseEvent"
+ }
+ },
+ "eventName": "AcElementEvent",
+ "eventNameFilter": "AcElementEvent"
+ }
+ },
+ "eventOutputParameters": {
+ "logOutputter": {
+ "carrierTechnologyParameters": {
+ "carrierTechnology": "FILE",
+ "parameters": {
+ "fileName": "outputevents.log"
+ }
+ },
+ "eventProtocolParameters": {
+ "eventProtocol": "JSON"
+ }
+ },
+ "KafkaReplyProducer": {
+ "carrierTechnologyParameters": {
+ "carrierTechnology": "KAFKA",
+ "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.kafka.KafkaCarrierTechnologyParameters",
+ "parameters": {
+ "bootstrapServers": "kafka:9092",
+ "acks": "all",
+ "retries": 0,
+ "batchSize": 16384,
+ "lingerTime": 1,
+ "bufferMemory": 33554432,
+ "producerTopic": "policy_update_msg",
+ "keySerializer": "org.apache.kafka.common.serialization.StringSerializer",
+ "valueSerializer": "org.apache.kafka.common.serialization.StringSerializer"
+ }
+ },
+ "eventProtocolParameters": {
+ "eventProtocol": "JSON",
+ "parameters": {
+ "pojoField": "KafkaResponseStatusEvent"
+ }
+ },
+ "eventNameFilter": "(LogEvent|KafkaResponseStatusEvent)"
+ }
+ }
+ },
+ "name": "onap.policies.native.apex.ac.element",
+ "version": "1.0.0"
+ }
+ }
+ ]
}
}
}
}
-}
\ No newline at end of file
+}
diff --git a/csit/resources/tests/data/AcK8s.json b/csit/resources/tests/data/AcK8s.json
index 3a7e3a33..8585faf5 100644
--- a/csit/resources/tests/data/AcK8s.json
+++ b/csit/resources/tests/data/AcK8s.json
@@ -12,12 +12,1081 @@
},
"description": "Starter Automation Composition Element for the Demo",
"properties": {
- "policy_type_id": {
- "name": "onap.policies.operational.pm-subscription-handler",
- "version": "1.0.0"
+ "tosca_definitions_version": "tosca_simple_yaml_1_3",
+ "name": "NULL",
+ "version": "0.0.0",
+ "data_types": {
+ "onap.datatypes.native.apex.EngineService": {
+ "derived_from": "tosca.datatypes.Root",
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "Specifies the engine name",
+ "required": false,
+ "default": "ApexEngineService"
+ },
+ "version": {
+ "type": "string",
+ "description": "Specifies the engine version in double dotted format",
+ "required": false,
+ "default": "1.0.0"
+ },
+ "id": {
+ "type": "integer",
+ "description": "Specifies the engine id",
+ "required": true
+ },
+ "instance_count": {
+ "type": "integer",
+ "description": "Specifies the number of engine threads that should be run",
+ "required": true
+ },
+ "deployment_port": {
+ "type": "integer",
+ "description": "Specifies the port to connect to for engine administration",
+ "required": false,
+ "default": 1
+ },
+ "policy_model_file_name": {
+ "type": "string",
+ "description": "The name of the file from which to read the APEX policy model",
+ "required": false
+ },
+ "policy_type_impl": {
+ "type": "string",
+ "description": "The policy type implementation from which to read the APEX policy model",
+ "required": false
+ },
+ "periodic_event_period": {
+ "type": "string",
+ "description": "The time interval in milliseconds for the periodic scanning event, 0 means don't scan",
+ "required": false
+ },
+ "engine": {
+ "type": "onap.datatypes.native.apex.engineservice.Engine",
+ "description": "The parameters for all engines in the APEX engine service",
+ "required": true
+ }
+ }
+ },
+ "onap.datatypes.native.apex.EventHandler": {
+ "derived_from": "tosca.datatypes.Root",
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "Specifies the event handler name, if not specified this is set to the key name",
+ "required": false
+ },
+ "carrier_technology": {
+ "type": "onap.datatypes.native.apex.CarrierTechnology",
+ "description": "Specifies the carrier technology of the event handler (such as REST/Web Socket/Kafka)",
+ "required": true
+ },
+ "event_protocol": {
+ "type": "onap.datatypes.native.apex.EventProtocol",
+ "description": "Specifies the event protocol of events for the event handler (such as Yaml/JSON/XML/POJO)",
+ "required": true
+ },
+ "event_name": {
+ "type": "string",
+ "description": "Specifies the event name for events on this event handler, if not specified, the event name is read from or written to the event being received or sent",
+ "required": false
+ },
+ "event_name_filter": {
+ "type": "string",
+ "description": "Specifies a filter as a regular expression, events that do not match the filter are dropped, the default is to let all events through",
+ "required": false
+ },
+ "synchronous_mode": {
+ "type": "boolean",
+ "description": "Specifies the event handler is syncronous (receive event and send response)",
+ "required": false,
+ "default": false
+ },
+ "synchronous_peer": {
+ "type": "string",
+ "description": "The peer event handler (output for input or input for output) of this event handler in synchronous mode, this parameter is mandatory if the event handler is in synchronous mode",
+ "required": false
+ },
+ "synchronous_timeout": {
+ "type": "integer",
+ "description": "The timeout in milliseconds for responses to be issued by APEX torequests, this parameter is mandatory if the event handler is in synchronous mode",
+ "required": false
+ },
+ "requestor_mode": {
+ "type": "boolean",
+ "description": "Specifies the event handler is in requestor mode (send event and wait for response mode)",
+ "required": false,
+ "default": false
+ },
+ "requestor_peer": {
+ "type": "string",
+ "description": "The peer event handler (output for input or input for output) of this event handler in requestor mode, this parameter is mandatory if the event handler is in requestor mode",
+ "required": false
+ },
+ "requestor_timeout": {
+ "type": "integer",
+ "description": "The timeout in milliseconds for wait for responses to requests, this parameter is mandatory if the event handler is in requestor mode",
+ "required": false
+ }
+ }
+ },
+ "onap.datatypes.native.apex.CarrierTechnology": {
+ "derived_from": "tosca.datatypes.Root",
+ "properties": {
+ "label": {
+ "type": "string",
+ "description": "The label (name) of the carrier technology (such as REST, Kafka, WebSocket)",
+ "required": true
+ },
+ "plugin_parameter_class_name": {
+ "type": "string",
+ "description": "The class name of the class that overrides default handling of event input or output for this carrier technology, defaults to the supplied input or output class",
+ "required": false
+ }
+ }
+ },
+ "onap.datatypes.native.apex.EventProtocol": {
+ "derived_from": "tosca.datatypes.Root",
+ "properties": {
+ "label": {
+ "type": "string",
+ "description": "The label (name) of the event protocol (such as Yaml, JSON, XML, or POJO)",
+ "required": true
+ },
+ "event_protocol_plugin_class": {
+ "type": "string",
+ "description": "The class name of the class that overrides default handling of the event protocol for this carrier technology, defaults to the supplied event protocol class",
+ "required": false
+ }
+ }
+ },
+ "onap.datatypes.native.apex.Environment": {
+ "derived_from": "tosca.datatypes.Root",
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "The name of the environment variable",
+ "required": true
+ },
+ "value": {
+ "type": "string",
+ "description": "The value of the environment variable",
+ "required": true
+ }
+ }
+ },
+ "onap.datatypes.native.apex.engineservice.Engine": {
+ "derived_from": "tosca.datatypes.Root",
+ "properties": {
+ "context": {
+ "type": "onap.datatypes.native.apex.engineservice.engine.Context",
+ "description": "The properties for handling context in APEX engines, defaults to using Java maps for context",
+ "required": false
+ },
+ "executors": {
+ "type": "map",
+ "description": "The plugins for policy executors used in engines such as javascript, MVEL, Jython",
+ "required": true,
+ "entry_schema": {
+ "description": "The plugin class path for this policy executor",
+ "type": "string"
+ }
+ }
+ }
+ },
+ "onap.datatypes.native.apex.engineservice.engine.Context": {
+ "derived_from": "tosca.datatypes.Root",
+ "properties": {
+ "distributor": {
+ "type": "onap.datatypes.native.apex.Plugin",
+ "description": "The plugin to be used for distributing context between APEX PDPs at runtime",
+ "required": false
+ },
+ "schemas": {
+ "type": "map",
+ "description": "The plugins for context schemas available in APEX PDPs such as Java and Avro",
+ "required": false,
+ "entry_schema": {
+ "type": "onap.datatypes.native.apex.Plugin"
+ }
+ },
+ "locking": {
+ "type": "onap.datatypes.native.apex.Plugin",
+ "description": "The plugin to be used for locking context in and between APEX PDPs at runtime",
+ "required": false
+ },
+ "persistence": {
+ "type": "onap.datatypes.native.apex.Plugin",
+ "description": "The plugin to be used for persisting context for APEX PDPs at runtime",
+ "required": false
+ }
+ }
+ },
+ "onap.datatypes.native.apex.Plugin": {
+ "derived_from": "tosca.datatypes.Root",
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "The name of the executor such as Javascript, Jython or MVEL",
+ "required": true
+ },
+ "plugin_class_name": {
+ "type": "string",
+ "description": "The class path of the plugin class for this executor"
+ }
+ }
+ }
},
- "policy_id": {
- "get_input": "acm_element_policy"
+ "policy_types": {
+ "onap.policies.Native": {
+ "derived_from": "tosca.policies.Root",
+ "description": "a base policy type for all native PDP policies",
+ "version": "1.0.0",
+ "name": "onap.policies.Native"
+ },
+ "onap.policies.native.Apex": {
+ "derived_from": "onap.policies.Native",
+ "description": "a policy type for native apex policies",
+ "version": "1.0.0",
+ "name": "onap.policies.native.Apex",
+ "properties": {
+ "engine_service": {
+ "type": "onap.datatypes.native.apex.EngineService",
+ "description": "APEX Engine Service Parameters"
+ },
+ "inputs": {
+ "type": "map",
+ "description": "Inputs for handling events coming into the APEX engine",
+ "entry_schema": {
+ "type": "onap.datatypes.native.apex.EventHandler"
+ }
+ },
+ "outputs": {
+ "type": "map",
+ "description": "Outputs for handling events going out of the APEX engine",
+ "entry_schema": {
+ "type": "onap.datatypes.native.apex.EventHandler"
+ }
+ },
+ "environment": {
+ "type": "list",
+ "description": "Envioronmental parameters for the APEX engine",
+ "entry_schema": {
+ "type": "onap.datatypes.native.apex.Environment"
+ }
+ }
+ }
+ }
+ },
+ "topology_template": {
+ "policies": [
+ {
+ "onap.policies.native.apex.ac.element": {
+ "type": "onap.policies.native.Apex",
+ "type_version": "1.0.0",
+ "properties": {
+ "engineServiceParameters": {
+ "name": "MyApexEngine",
+ "version": "0.0.1",
+ "id": 45,
+ "instanceCount": 2,
+ "deploymentPort": 12561,
+ "engineParameters": {
+ "executorParameters": {
+ "JAVASCRIPT": {
+ "parameterClassName": "org.onap.policy.apex.plugins.executor.javascript.JavascriptExecutorParameters"
+ }
+ },
+ "contextParameters": {
+ "parameterClassName": "org.onap.policy.apex.context.parameters.ContextParameters",
+ "schemaParameters": {
+ "Json": {
+ "parameterClassName": "org.onap.policy.apex.plugins.context.schema.json.JsonSchemaHelperParameters"
+ }
+ }
+ }
+ },
+ "policy_type_impl": {
+ "policies": {
+ "key": {
+ "name": "APEXacElementPolicy_Policies",
+ "version": "0.0.1"
+ },
+ "policyMap": {
+ "entry": [
+ {
+ "key": {
+ "name": "ReceiveEventPolicy",
+ "version": "0.0.1"
+ },
+ "value": {
+ "policyKey": {
+ "name": "ReceiveEventPolicy",
+ "version": "0.0.1"
+ },
+ "template": "Freestyle",
+ "state": {
+ "entry": [
+ {
+ "key": "DecideForwardingState",
+ "value": {
+ "stateKey": {
+ "parentKeyName": "ReceiveEventPolicy",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "NULL",
+ "localName": "DecideForwardingState"
+ },
+ "trigger": {
+ "name": "AcElementEvent",
+ "version": "0.0.1"
+ },
+ "stateOutputs": {
+ "entry": [
+ {
+ "key": "CreateForwardPayload",
+ "value": {
+ "key": {
+ "parentKeyName": "ReceiveEventPolicy",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "DecideForwardingState",
+ "localName": "CreateForwardPayload"
+ },
+ "outgoingEvent": {
+ "name": "KafkaResponseStatusEvent",
+ "version": "0.0.1"
+ },
+ "outgoingEventReference": [
+ {
+ "name": "KafkaResponseStatusEvent",
+ "version": "0.0.1"
+ }
+ ],
+ "nextState": {
+ "parentKeyName": "NULL",
+ "parentKeyVersion": "0.0.0",
+ "parentLocalName": "NULL",
+ "localName": "NULL"
+ }
+ }
+ }
+ ]
+ },
+ "contextAlbumReference": [],
+ "taskSelectionLogic": {
+ "key": {
+ "parentKeyName": "NULL",
+ "parentKeyVersion": "0.0.0",
+ "parentLocalName": "NULL",
+ "localName": "NULL"
+ },
+ "logicFlavour": "UNDEFINED",
+ "logic": ""
+ },
+ "stateFinalizerLogicMap": {
+ "entry": []
+ },
+ "defaultTask": {
+ "name": "ForwardPayloadTask",
+ "version": "0.0.1"
+ },
+ "taskReferences": {
+ "entry": [
+ {
+ "key": {
+ "name": "ForwardPayloadTask",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "parentKeyName": "ReceiveEventPolicy",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "DecideForwardingState",
+ "localName": "ReceiveEventPolicy"
+ },
+ "outputType": "DIRECT",
+ "output": {
+ "parentKeyName": "ReceiveEventPolicy",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "DecideForwardingState",
+ "localName": "CreateForwardPayload"
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "firstState": "DecideForwardingState"
+ }
+ }
+ ]
+ }
+ },
+ "tasks": {
+ "key": {
+ "name": "APEXacElementPolicy_Tasks",
+ "version": "0.0.1"
+ },
+ "taskMap": {
+ "entry": [
+ {
+ "key": {
+ "name": "ForwardPayloadTask",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "ForwardPayloadTask",
+ "version": "0.0.1"
+ },
+ "inputEvent": {
+ "key": {
+ "name": "AcElementEvent",
+ "version": "0.0.1"
+ },
+ "nameSpace": "org.onap.policy.apex.ac.element",
+ "source": "Kafka",
+ "target": "APEX",
+ "parameter": {
+ "entry": [
+ {
+ "key": "KafkaResponseEvent",
+ "value": {
+ "key": {
+ "parentKeyName": "AcElementEvent",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "NULL",
+ "localName": "KafkaResponseEvent"
+ },
+ "fieldSchemaKey": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ },
+ "optional": false
+ }
+ }
+ ]
+ },
+ "toscaPolicyState": "ENTRY"
+ },
+ "outputEvents": {
+ "entry": [
+ {
+ "key": "KafkaResponseStatusEvent",
+ "value": {
+ "key": {
+ "name": "KafkaResponseStatusEvent",
+ "version": "0.0.1"
+ },
+ "nameSpace": "org.onap.policy.apex.ac.element",
+ "source": "APEX",
+ "target": "Kafka",
+ "parameter": {
+ "entry": [
+ {
+ "key": "KafkaResponseStatusEvent",
+ "value": {
+ "key": {
+ "parentKeyName": "KafkaResponseStatusEvent",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "NULL",
+ "localName": "KafkaResponseStatusEvent"
+ },
+ "fieldSchemaKey": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ },
+ "optional": false
+ }
+ }
+ ]
+ },
+ "toscaPolicyState": ""
+ }
+ }
+ ]
+ },
+ "taskParameters": {
+ "entry": []
+ },
+ "contextAlbumReference": [
+ {
+ "name": "ACElementAlbum",
+ "version": "0.0.1"
+ }
+ ],
+ "taskLogic": {
+ "key": {
+ "parentKeyName": "ForwardPayloadTask",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "NULL",
+ "localName": "TaskLogic"
+ },
+ "logicFlavour": "JAVASCRIPT",
+ "logic": "/*\n * ============LICENSE_START=======================================================\n * Copyright (C) 2022 Nordix. All rights reserved.\n * ================================================================================\n * Licensed under the Apache License, Version 2.0 (the 'License');\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an 'AS IS' BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * SPDX-License-Identifier: Apache-2.0\n * ============LICENSE_END=========================================================\n */\n\nexecutor.logger.info(executor.subject.id);\nexecutor.logger.info(executor.inFields);\n\nvar msgResponse = executor.inFields.get('KafkaResponseEvent');\nexecutor.logger.info('Task in progress with mesages: ' + msgResponse);\n\nvar elementId = msgResponse.get('elementId').get('name');\n\nif (msgResponse.get('messageType') == 'STATUS' &&\n (elementId == 'onap.policy.clamp.ac.startertobridge'\n || elementId == 'onap.policy.clamp.ac.bridgetosink')) {\n\n var receiverId = '';\n if (elementId == 'onap.policy.clamp.ac.startertobridge') {\n receiverId = 'onap.policy.clamp.ac.bridge';\n } else {\n receiverId = 'onap.policy.clamp.ac.sink';\n }\n\n var elementIdResponse = new java.util.HashMap();\n elementIdResponse.put('name', receiverId);\n elementIdResponse.put('version', msgResponse.get('elementId').get('version'));\n\n var kafkaResponse = new java.util.HashMap();\n kafkaResponse.put('elementId', elementIdResponse);\n\n var message = msgResponse.get('message') + ' trace added from policy';\n kafkaResponse.put('message', message);\n kafkaResponse.put('messageType', 'STATUS');\n kafkaResponse.put('messageId', msgResponse.get('messageId'));\n kafkaResponse.put('timestamp', msgResponse.get('timestamp'));\n\n executor.logger.info('Sending forwarding Event to Ac element: ' + kafkaResponse);\n\n executor.outFields.put('KafkaResponseStatusEvent', kafkaResponse);\n}\n\ntrue;"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "events": {
+ "key": {
+ "name": "APEXacElementPolicy_Events",
+ "version": "0.0.1"
+ },
+ "eventMap": {
+ "entry": [
+ {
+ "key": {
+ "name": "AcElementEvent",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "AcElementEvent",
+ "version": "0.0.1"
+ },
+ "nameSpace": "org.onap.policy.apex.ac.element",
+ "source": "Kafka",
+ "target": "APEX",
+ "parameter": {
+ "entry": [
+ {
+ "key": "KafkaResponseEvent",
+ "value": {
+ "key": {
+ "parentKeyName": "AcElementEvent",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "NULL",
+ "localName": "KafkaResponseEvent"
+ },
+ "fieldSchemaKey": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ },
+ "optional": false
+ }
+ }
+ ]
+ },
+ "toscaPolicyState": "ENTRY"
+ }
+ },
+ {
+ "key": {
+ "name": "KafkaResponseStatusEvent",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "KafkaResponseStatusEvent",
+ "version": "0.0.1"
+ },
+ "nameSpace": "org.onap.policy.apex.ac.element",
+ "source": "APEX",
+ "target": "Kafka",
+ "parameter": {
+ "entry": [
+ {
+ "key": "KafkaResponseStatusEvent",
+ "value": {
+ "key": {
+ "parentKeyName": "KafkaResponseStatusEvent",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "NULL",
+ "localName": "KafkaResponseStatusEvent"
+ },
+ "fieldSchemaKey": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ },
+ "optional": false
+ }
+ }
+ ]
+ },
+ "toscaPolicyState": ""
+ }
+ },
+ {
+ "key": {
+ "name": "LogEvent",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "LogEvent",
+ "version": "0.0.1"
+ },
+ "nameSpace": "org.onap.policy.apex.ac.element",
+ "source": "APEX",
+ "target": "file",
+ "parameter": {
+ "entry": [
+ {
+ "key": "final_status",
+ "value": {
+ "key": {
+ "parentKeyName": "LogEvent",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "NULL",
+ "localName": "final_status"
+ },
+ "fieldSchemaKey": {
+ "name": "SimpleStringType",
+ "version": "0.0.1"
+ },
+ "optional": false
+ }
+ },
+ {
+ "key": "message",
+ "value": {
+ "key": {
+ "parentKeyName": "LogEvent",
+ "parentKeyVersion": "0.0.1",
+ "parentLocalName": "NULL",
+ "localName": "message"
+ },
+ "fieldSchemaKey": {
+ "name": "SimpleStringType",
+ "version": "0.0.1"
+ },
+ "optional": false
+ }
+ }
+ ]
+ },
+ "toscaPolicyState": ""
+ }
+ }
+ ]
+ }
+ },
+ "albums": {
+ "key": {
+ "name": "APEXacElementPolicy_Albums",
+ "version": "0.0.1"
+ },
+ "albums": {
+ "entry": [
+ {
+ "key": {
+ "name": "ACElementAlbum",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "ACElementAlbum",
+ "version": "0.0.1"
+ },
+ "scope": "policy",
+ "isWritable": true,
+ "itemSchema": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "schemas": {
+ "key": {
+ "name": "APEXacElementPolicy_Schemas",
+ "version": "0.0.1"
+ },
+ "schemas": {
+ "entry": [
+ {
+ "key": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ },
+ "schemaFlavour": "Json",
+ "schemaDefinition": "{\n \"$schema\": \"http://json-schema.org/draft-04/schema#\",\n \"type\": \"object\",\n \"properties\": {\n \"elementId\": {\n \"type\": \"object\",\n \"properties\": {\n \"name\": {\n \"type\": \"string\"\n },\n \"version\": {\n \"type\": \"string\"\n }\n },\n \"required\": [\n \"name\",\n \"version\"\n ]\n },\n \"message\": {\n \"type\": \"string\"\n },\n \"messageType\": {\n \"type\": \"string\"\n }\n },\n \"required\": [\n \"elementId\",\n \"message\",\n \"messageType\"\n ]\n}"
+ }
+ },
+ {
+ "key": {
+ "name": "SimpleIntType",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "SimpleIntType",
+ "version": "0.0.1"
+ },
+ "schemaFlavour": "Java",
+ "schemaDefinition": "java.lang.Integer"
+ }
+ },
+ {
+ "key": {
+ "name": "SimpleStringType",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "SimpleStringType",
+ "version": "0.0.1"
+ },
+ "schemaFlavour": "Java",
+ "schemaDefinition": "java.lang.String"
+ }
+ },
+ {
+ "key": {
+ "name": "UUIDType",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "UUIDType",
+ "version": "0.0.1"
+ },
+ "schemaFlavour": "Java",
+ "schemaDefinition": "java.util.UUID"
+ }
+ }
+ ]
+ }
+ },
+ "key": {
+ "name": "APEXacElementPolicy",
+ "version": "0.0.1"
+ },
+ "keyInformation": {
+ "key": {
+ "name": "APEXacElementPolicy_KeyInfo",
+ "version": "0.0.1"
+ },
+ "keyInfoMap": {
+ "entry": [
+ {
+ "key": {
+ "name": "ACElementAlbum",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "ACElementAlbum",
+ "version": "0.0.1"
+ },
+ "UUID": "7cddfab8-6d3f-3f7f-8ac3-e2eb5979c900",
+ "description": "Generated description for concept referred to by key \"ACElementAlbum:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "ACEventType",
+ "version": "0.0.1"
+ },
+ "UUID": "dab78794-b666-3929-a75b-70d634b04fe5",
+ "description": "Generated description for concept referred to by key \"ACEventType:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "APEXacElementPolicy",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "APEXacElementPolicy",
+ "version": "0.0.1"
+ },
+ "UUID": "da478611-7d77-3c46-b4be-be968769ba4e",
+ "description": "Generated description for concept referred to by key \"APEXacElementPolicy:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "APEXacElementPolicy_Albums",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "APEXacElementPolicy_Albums",
+ "version": "0.0.1"
+ },
+ "UUID": "fa8dc15e-8c8d-3de3-a0f8-585b76511175",
+ "description": "Generated description for concept referred to by key \"APEXacElementPolicy_Albums:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "APEXacElementPolicy_Events",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "APEXacElementPolicy_Events",
+ "version": "0.0.1"
+ },
+ "UUID": "8508cd65-8dd2-342d-a5c6-1570810dbe2b",
+ "description": "Generated description for concept referred to by key \"APEXacElementPolicy_Events:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "APEXacElementPolicy_KeyInfo",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "APEXacElementPolicy_KeyInfo",
+ "version": "0.0.1"
+ },
+ "UUID": "09e6927d-c5ac-3779-919f-9333994eed22",
+ "description": "Generated description for concept referred to by key \"APEXacElementPolicy_KeyInfo:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "APEXacElementPolicy_Policies",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "APEXacElementPolicy_Policies",
+ "version": "0.0.1"
+ },
+ "UUID": "cade3c9a-1600-3642-a6f4-315612187f46",
+ "description": "Generated description for concept referred to by key \"APEXacElementPolicy_Policies:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "APEXacElementPolicy_Schemas",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "APEXacElementPolicy_Schemas",
+ "version": "0.0.1"
+ },
+ "UUID": "5bb4a8e9-35fa-37db-9a49-48ef036a7ba9",
+ "description": "Generated description for concept referred to by key \"APEXacElementPolicy_Schemas:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "APEXacElementPolicy_Tasks",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "APEXacElementPolicy_Tasks",
+ "version": "0.0.1"
+ },
+ "UUID": "2527eeec-0d1f-3094-ad3f-212622b12836",
+ "description": "Generated description for concept referred to by key \"APEXacElementPolicy_Tasks:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "AcElementEvent",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "AcElementEvent",
+ "version": "0.0.1"
+ },
+ "UUID": "32c013e2-2740-3986-a626-cbdf665b63e9",
+ "description": "Generated description for concept referred to by key \"AcElementEvent:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "KafkaResponseStatusEvent",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "KafkaResponseStatusEvent",
+ "version": "0.0.1"
+ },
+ "UUID": "2715cb6c-2778-3461-8b69-871e79f95935",
+ "description": "Generated description for concept referred to by key \"KafkaResponseStatusEvent:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "ForwardPayloadTask",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "ForwardPayloadTask",
+ "version": "0.0.1"
+ },
+ "UUID": "51defa03-1ecf-3314-bf34-2a652bce57fa",
+ "description": "Generated description for concept referred to by key \"ForwardPayloadTask:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "LogEvent",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "LogEvent",
+ "version": "0.0.1"
+ },
+ "UUID": "c540f048-96af-35e3-a36e-e9c29377cba7",
+ "description": "Generated description for concept referred to by key \"LogEvent:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "ReceiveEventPolicy",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "ReceiveEventPolicy",
+ "version": "0.0.1"
+ },
+ "UUID": "568b7345-9de1-36d3-b6a3-9b857e6809a1",
+ "description": "Generated description for concept referred to by key \"ReceiveEventPolicy:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "SimpleIntType",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "SimpleIntType",
+ "version": "0.0.1"
+ },
+ "UUID": "153791fd-ae0a-36a7-88a5-309a7936415d",
+ "description": "Generated description for concept referred to by key \"SimpleIntType:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "SimpleStringType",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "SimpleStringType",
+ "version": "0.0.1"
+ },
+ "UUID": "8a4957cf-9493-3a76-8c22-a208e23259af",
+ "description": "Generated description for concept referred to by key \"SimpleStringType:0.0.1\""
+ }
+ },
+ {
+ "key": {
+ "name": "UUIDType",
+ "version": "0.0.1"
+ },
+ "value": {
+ "key": {
+ "name": "UUIDType",
+ "version": "0.0.1"
+ },
+ "UUID": "6a8cc68e-dfc8-3403-9c6d-071c886b319c",
+ "description": "Generated description for concept referred to by key \"UUIDType:0.0.1\""
+ }
+ }
+ ]
+ }
+ }
+ }
+ },
+ "eventInputParameters": {
+ "KafkaConsumer": {
+ "carrierTechnologyParameters": {
+ "carrierTechnology": "KAFKA",
+ "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.kafka.KafkaCarrierTechnologyParameters",
+ "parameters": {
+ "bootstrapServers": "kafka:9092",
+ "groupId": "apex-group-id",
+ "enableAutoCommit": true,
+ "autoCommitTime": 1000,
+ "sessionTimeout": 30000,
+ "consumerPollTime": 100,
+ "consumerTopicList": [
+ "ac_element_msg"
+ ],
+ "keyDeserializer": "org.apache.kafka.common.serialization.StringDeserializer",
+ "valueDeserializer": "org.apache.kafka.common.serialization.StringDeserializer"
+ }
+ },
+ "eventProtocolParameters": {
+ "eventProtocol": "JSON",
+ "parameters": {
+ "pojoField": "KafkaResponseEvent"
+ }
+ },
+ "eventName": "AcElementEvent",
+ "eventNameFilter": "AcElementEvent"
+ }
+ },
+ "eventOutputParameters": {
+ "logOutputter": {
+ "carrierTechnologyParameters": {
+ "carrierTechnology": "FILE",
+ "parameters": {
+ "fileName": "outputevents.log"
+ }
+ },
+ "eventProtocolParameters": {
+ "eventProtocol": "JSON"
+ }
+ },
+ "KafkaReplyProducer": {
+ "carrierTechnologyParameters": {
+ "carrierTechnology": "KAFKA",
+ "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.kafka.KafkaCarrierTechnologyParameters",
+ "parameters": {
+ "bootstrapServers": "kafka:9092",
+ "acks": "all",
+ "retries": 0,
+ "batchSize": 16384,
+ "lingerTime": 1,
+ "bufferMemory": 33554432,
+ "producerTopic": "policy_update_msg",
+ "keySerializer": "org.apache.kafka.common.serialization.StringSerializer",
+ "valueSerializer": "org.apache.kafka.common.serialization.StringSerializer"
+ }
+ },
+ "eventProtocolParameters": {
+ "eventProtocol": "JSON",
+ "parameters": {
+ "pojoField": "KafkaResponseStatusEvent"
+ }
+ },
+ "eventNameFilter": "(LogEvent|KafkaResponseStatusEvent)"
+ }
+ }
+ },
+ "name": "onap.policies.native.apex.ac.element",
+ "version": "1.0.0"
+ }
+ }
+ ]
}
}
},
@@ -208,4 +1277,4 @@
}
}
}
-}
\ No newline at end of file
+}
diff --git a/csit/resources/tests/data/PMSHMultipleACTosca.yaml b/csit/resources/tests/data/PMSHMultipleACTosca.yaml
deleted file mode 100644
index c6f61e72..00000000
--- a/csit/resources/tests/data/PMSHMultipleACTosca.yaml
+++ /dev/null
@@ -1,932 +0,0 @@
-tosca_definitions_version: tosca_simple_yaml_1_3
-data_types:
- onap.datatypes.ToscaConceptIdentifier:
- derived_from: tosca.datatypes.Root
- properties:
- name:
- type: string
- required: true
- version:
- type: string
- required: true
- onap.datatype.acm.Target:
- derived_from: tosca.datatypes.Root
- description: >-
- Definition for a entity in A&AI to perform an Automation Composition
- operation on
- properties:
- targetType:
- type: string
- description: Category for the target type
- required: true
- constraints:
- - valid_values:
- - VNF
- - VM
- - VFMODULE
- - PNF
- entityIds:
- type: map
- description: >
- Map of values that identify the resource. If none are provided, it is
- assumed that the
-
- entity that generated the ONSET event will be the target.
- required: false
- metadata:
- clamp_possible_values: 'ClampExecution:CSAR_RESOURCES'
- entry_schema:
- type: string
- onap.datatype.acm.Actor:
- derived_from: tosca.datatypes.Root
- description: An actor/operation/target definition
- properties:
- actor:
- type: string
- description: The actor performing the operation.
- required: true
- metadata:
- clamp_possible_values: 'Dictionary:DefaultActors,ClampExecution:CDS/actor'
- operation:
- type: string
- description: The operation the actor is performing.
- metadata:
- clamp_possible_values: 'Dictionary:DefaultOperations,ClampExecution:CDS/operation'
- required: true
- target:
- type: onap.datatype.acm.Target
- description: The resource the operation should be performed on.
- required: true
- payload:
- type: map
- description: Name/value pairs of payload information passed by Policy to the actor
- required: false
- metadata:
- clamp_possible_values: 'ClampExecution:CDS/payload'
- entry_schema:
- type: string
- onap.datatype.acm.Operation:
- derived_from: tosca.datatypes.Root
- description: An operation supported by an actor
- properties:
- id:
- type: string
- description: Unique identifier for the operation
- required: true
- description:
- type: string
- description: A user-friendly description of the intent for the operation
- required: false
- operation:
- type: onap.datatype.acm.Actor
- description: The definition of the operation to be performed.
- required: true
- timeout:
- type: integer
- description: The amount of time for the actor to perform the operation.
- required: true
- retries:
- type: integer
- description: >-
- The number of retries the actor should attempt to perform the
- operation.
- required: true
- default: 0
- success:
- type: string
- description: >-
- Points to the operation to invoke on success. A value of
- "final_success" indicates and end to the operation.
- required: false
- default: final_success
- failure:
- type: string
- description: Points to the operation to invoke on Actor operation failure.
- required: false
- default: final_failure
- failure_timeout:
- type: string
- description: >-
- Points to the operation to invoke when the time out for the operation
- occurs.
- required: false
- default: final_failure_timeout
- failure_retries:
- type: string
- description: >-
- Points to the operation to invoke when the current operation has
- exceeded its max retries.
- required: false
- default: final_failure_retries
- failure_exception:
- type: string
- description: >-
- Points to the operation to invoke when the current operation causes an
- exception.
- required: false
- default: final_failure_exception
- failure_guard:
- type: string
- description: >-
- Points to the operation to invoke when the current operation is
- blocked due to guard policy enforcement.
- required: false
- default: final_failure_guard
- onap.datatypes.monitoring.managedObjectDNsBasic:
- constraints: []
- properties:
- DN:
- name: DN
- type: string
- typeVersion: 0.0.0
- description: Managed object distinguished name
- required: true
- constraints: []
- metadata: {}
- name: onap.datatypes.monitoring.managedObjectDNsBasic
- version: 0.0.0
- derived_from: tosca.datatypes.Root
- metadata: {}
- onap.datatypes.monitoring.managedObjectDNsBasics:
- constraints: []
- properties:
- managedObjectDNsBasic:
- name: managedObjectDNsBasic
- type: map
- typeVersion: 0.0.0
- description: Managed object distinguished name object
- required: true
- constraints: []
- entry_schema:
- type: onap.datatypes.monitoring.managedObjectDNsBasic
- typeVersion: 0.0.0
- constraints: []
- metadata: {}
- name: onap.datatypes.monitoring.managedObjectDNsBasics
- version: 0.0.0
- derived_from: tosca.datatypes.Root
- metadata: {}
- onap.datatypes.monitoring.measurementGroup:
- constraints: []
- properties:
- measurementTypes:
- name: measurementTypes
- type: list
- typeVersion: 0.0.0
- description: List of measurement types
- required: true
- constraints: []
- entry_schema:
- type: onap.datatypes.monitoring.measurementTypes
- typeVersion: 0.0.0
- constraints: []
- metadata: {}
- managedObjectDNsBasic:
- name: managedObjectDNsBasic
- type: list
- typeVersion: 0.0.0
- description: List of managed object distinguished names
- required: true
- constraints: []
- entry_schema:
- type: onap.datatypes.monitoring.managedObjectDNsBasics
- typeVersion: 0.0.0
- constraints: []
- metadata: {}
- name: onap.datatypes.monitoring.measurementGroup
- version: 0.0.0
- derived_from: tosca.datatypes.Root
- metadata: {}
- onap.datatypes.monitoring.measurementGroups:
- constraints: []
- properties:
- measurementGroup:
- name: measurementGroup
- type: map
- typeVersion: 0.0.0
- description: Measurement Group
- required: true
- constraints: []
- entry_schema:
- type: onap.datatypes.monitoring.measurementGroup
- typeVersion: 0.0.0
- constraints: []
- metadata: {}
- name: onap.datatypes.monitoring.measurementGroups
- version: 0.0.0
- derived_from: tosca.datatypes.Root
- metadata: {}
- onap.datatypes.monitoring.measurementType:
- constraints: []
- properties:
- measurementType:
- name: measurementType
- type: string
- typeVersion: 0.0.0
- description: Measurement type
- required: true
- constraints: []
- metadata: {}
- name: onap.datatypes.monitoring.measurementType
- version: 0.0.0
- derived_from: tosca.datatypes.Root
- metadata: {}
- onap.datatypes.monitoring.measurementTypes:
- constraints: []
- properties:
- measurementType:
- name: measurementType
- type: map
- typeVersion: 0.0.0
- description: Measurement type object
- required: true
- constraints: []
- entry_schema:
- type: onap.datatypes.monitoring.measurementType
- typeVersion: 0.0.0
- constraints: []
- metadata: {}
- name: onap.datatypes.monitoring.measurementTypes
- version: 0.0.0
- derived_from: tosca.datatypes.Root
- metadata: {}
- onap.datatypes.monitoring.nfFilter:
- constraints: []
- properties:
- modelNames:
- name: modelNames
- type: list
- typeVersion: 0.0.0
- description: List of model names
- required: true
- constraints: []
- entry_schema:
- type: string
- typeVersion: 0.0.0
- constraints: []
- metadata: {}
- modelInvariantIDs:
- name: modelInvariantIDs
- type: list
- typeVersion: 0.0.0
- description: List of model invariant IDs
- required: true
- constraints: []
- entry_schema:
- type: string
- typeVersion: 0.0.0
- constraints: []
- metadata: {}
- modelVersionIDs:
- name: modelVersionIDs
- type: list
- typeVersion: 0.0.0
- description: List of model version IDs
- required: true
- constraints: []
- entry_schema:
- type: string
- typeVersion: 0.0.0
- constraints: []
- metadata: {}
- nfNames:
- name: nfNames
- type: list
- typeVersion: 0.0.0
- description: List of network functions
- required: true
- constraints: []
- entry_schema:
- type: string
- typeVersion: 0.0.0
- constraints: []
- metadata: {}
- name: onap.datatypes.monitoring.nfFilter
- version: 0.0.0
- derived_from: tosca.datatypes.Root
- metadata: {}
- onap.datatypes.monitoring.subscription:
- constraints: []
- properties:
- measurementGroups:
- name: measurementGroups
- type: list
- typeVersion: 0.0.0
- description: Measurement Groups
- required: true
- constraints: []
- entry_schema:
- type: onap.datatypes.monitoring.measurementGroups
- typeVersion: 0.0.0
- constraints: []
- metadata: {}
- fileBasedGP:
- name: fileBasedGP
- type: integer
- typeVersion: 0.0.0
- description: File based granularity period
- required: true
- constraints: []
- metadata: {}
- fileLocation:
- name: fileLocation
- type: string
- typeVersion: 0.0.0
- description: ROP file location
- required: true
- constraints: []
- metadata: {}
- subscriptionName:
- name: subscriptionName
- type: string
- typeVersion: 0.0.0
- description: Name of the subscription
- required: true
- constraints: []
- metadata: {}
- administrativeState:
- name: administrativeState
- type: string
- typeVersion: 0.0.0
- description: State of the subscription
- required: true
- constraints:
- - valid_values:
- - LOCKED
- - UNLOCKED
- metadata: {}
- nfFilter:
- name: nfFilter
- type: map
- typeVersion: 0.0.0
- description: Network function filter
- required: true
- constraints: []
- entry_schema:
- type: onap.datatypes.monitoring.nfFilter
- typeVersion: 0.0.0
- constraints: []
- metadata: {}
- name: onap.datatypes.monitoring.subscription
- version: 0.0.0
- derived_from: tosca.datatypes.Root
- metadata: {}
- org.onap.datatypes.policy.clamp.acm.httpAutomationCompositionElement.RestRequest:
- version: 1.0.0
- derived_from: tosca.datatypes.Root
- properties:
- restRequestId:
- type: onap.datatypes.ToscaConceptIdentifier
- typeVersion: 1.0.0
- required: true
- description: The name and version of a REST request to be sent to a REST endpoint
- httpMethod:
- type: string
- required: true
- constraints:
- - valid_values:
- - POST
- - PUT
- - GET
- - DELETE
- description: The REST method to use
- path:
- type: string
- required: true
- description: The path of the REST request relative to the base URL
- body:
- type: string
- required: false
- description: The body of the REST request for PUT and POST requests
- expectedResponse:
- type: integer
- required: true
- constraints: []
- description: The expected HTTP status code for the REST request
- org.onap.datatypes.policy.clamp.acm.httpAutomationCompositionElement.ConfigurationEntity:
- version: 1.0.0
- derived_from: tosca.datatypes.Root
- properties:
- configurationEntityId:
- type: onap.datatypes.ToscaConceptIdentifier
- typeVersion: 1.0.0
- required: true
- description: >-
- The name and version of a Configuration Entity to be handled by the
- HTTP Automation Composition Element
- restSequence:
- type: list
- entry_schema:
- type: >-
- org.onap.datatypes.policy.clamp.acm.httpAutomationCompositionElement.RestRequest
- typeVersion: 1.0.0
- description: A sequence of REST commands to send to the REST endpoint
- org.onap.datatypes.policy.clamp.acm.a1PmsAutomationCompositionElement.A1PolicyServiceEntity:
- version: 1.0.0
- derived_from: tosca.datatypes.Root
- properties:
- a1PolicyServiceEntityId:
- type: onap.datatypes.ToscaConceptIdentifier
- typeVersion: 1.0.0
- required: true
- description: The name and version of a Configuration Entity to be handled by the A1 PMS Automation Composition Element
- clientId:
- type: string
- required: true
- description: Client Id to be created
- callbackUrl:
- type: string
- required: true
- description: The callback URL to get registered
- keepAliveIntervalSeconds:
- type: integer
- required: true
- description: Keep alive interval time for the callback URL
-policy_types:
- onap.policies.Monitoring:
- derived_from: tosca.policies.Root
- description: a base policy type for all policies that govern monitoring provisioning
- version: 1.0.0
- name: onap.policies.Monitoring
- onap.policies.Sirisha:
- derived_from: tosca.policies.Root
- description: a base policy type for all policies that govern monitoring provisioning
- version: 1.0.0
- name: onap.policies.Sirisha
- onap.policies.monitoring.dcae-pm-subscription-handler:
- properties:
- pmsh_policy:
- name: pmsh_policy
- type: onap.datatypes.monitoring.subscription
- typeVersion: 0.0.0
- description: PMSH Policy JSON
- required: false
- constraints: []
- metadata: {}
- name: onap.policies.monitoring.dcae-pm-subscription-handler
- version: 1.0.0
- derived_from: onap.policies.Monitoring
- metadata: {}
- onap.policies.acm.operational.Common:
- derived_from: tosca.policies.Root
- version: 1.0.0
- name: onap.policies.acm.operational.Common
- description: >
- Operational Policy for Automation Composition execution. Originated in
- Frankfurt to support TOSCA Compliant
-
- Policy Types. This does NOT support the legacy Policy YAML policy type.
- properties:
- id:
- type: string
- description: The unique Automation Composition id.
- required: true
- timeout:
- type: integer
- description: >
- Overall timeout for executing all the operations. This timeout should
- equal or exceed the total
-
- timeout for each operation listed.
- required: true
- abatement:
- type: boolean
- description: >-
- Whether an abatement event message will be expected for the Automation
- Composition from DCAE.
- required: true
- default: false
- trigger:
- type: string
- description: >-
- Initial operation to execute upon receiving an Onset event message for
- the Automation Composition.
- required: true
- operations:
- type: list
- description: >-
- List of operations to be performed when Automation Composition is
- triggered.
- required: true
- entry_schema:
- type: onap.datatype.acm.Operation
- onap.policies.acm.operational.common.Apex:
- derived_from: onap.policies.acm.operational.Common
- type_version: 1.0.0
- version: 1.0.0
- name: onap.policies.acm.operational.common.Apex
- description: Operational policies for Apex PDP
- properties:
- engineServiceParameters:
- type: string
- description: >-
- The engine parameters like name, instanceCount, policy implementation,
- parameters etc.
- required: true
- eventInputParameters:
- type: string
- description: The event input parameters.
- required: true
- eventOutputParameters:
- type: string
- description: The event output parameters.
- required: true
- javaProperties:
- type: string
- description: Name/value pairs of properties to be set for APEX if needed.
- required: false
-node_types:
- org.onap.policy.clamp.acm.Participant:
- version: 1.0.1
- derived_from: tosca.nodetypes.Root
- properties:
- provider:
- type: string
- required: false
- org.onap.policy.clamp.acm.AutomationCompositionElement:
- version: 1.0.1
- derived_from: tosca.nodetypes.Root
- properties:
- provider:
- type: string
- required: false
- metadata:
- common: true
- description: >-
- Specifies the organization that provides the automation composition
- element
- participant_id:
- type: onap.datatypes.ToscaConceptIdentifier
- required: true
- metadata:
- common: true
- participantType:
- type: onap.datatypes.ToscaConceptIdentifier
- required: true
- metadata:
- common: true
- description: >-
- The identity of the participant type that hosts this type of
- Automation Composition Element
- startPhase:
- type: integer
- required: false
- constraints:
- - greater_or_equal: 0
- metadata:
- common: true
- description: >-
- A value indicating the start phase in which this automation
- composition element will be started, the first start phase is zero.
- Automation Composition Elements are started in their start_phase order
- and stopped in reverse start phase order. Automation Composition
- Elements with the same start phase are started and stopped
- simultaneously
- uninitializedToPassiveTimeout:
- type: integer
- required: false
- constraints:
- - greater_or_equal: 0
- default: 60
- metadata:
- common: true
- description: >-
- The maximum time in seconds to wait for a state change from
- uninitialized to passive
- passiveToRunningTimeout:
- type: integer
- required: false
- constraints:
- - greater_or_equal: 0
- default: 60
- metadata:
- common: true
- description: >-
- The maximum time in seconds to wait for a state change from passive to
- running
- runningToPassiveTimeout:
- type: integer
- required: false
- constraints:
- - greater_or_equal: 0
- default: 60
- metadata:
- common: true
- description: >-
- The maximum time in seconds to wait for a state change from running to
- passive
- passiveToUninitializedTimeout:
- type: integer
- required: false
- constraints:
- - greater_or_equal: 0
- default: 60
- metadata:
- common: true
- description: >-
- The maximum time in seconds to wait for a state change from passive to
- uninitialized
- org.onap.policy.clamp.acm.AutomationComposition:
- version: 1.0.1
- derived_from: tosca.nodetypes.Root
- properties:
- provider:
- type: string
- required: false
- metadata:
- common: true
- description: >-
- Specifies the organization that provides the automation composition
- element
- elements:
- type: list
- required: true
- metadata:
- common: true
- entry_schema:
- type: onap.datatypes.ToscaConceptIdentifier
- description: >-
- Specifies a list of automation composition element definitions that
- make up this automation composition definition
- org.onap.policy.clamp.acm.PolicyAutomationCompositionElement:
- version: 1.0.1
- derived_from: org.onap.policy.clamp.acm.AutomationCompositionElement
- properties:
- policy_type_id:
- type: onap.datatypes.ToscaConceptIdentifier
- required: true
- policy_id:
- type: onap.datatypes.ToscaConceptIdentifier
- required: false
- org.onap.policy.clamp.acm.CDSAutomationCompositionElement:
- version: 1.0.1
- derived_from: org.onap.policy.clamp.acm.AutomationCompositionElement
- properties:
- cds_blueprint_id:
- type: onap.datatypes.ToscaConceptIdentifier
- required: true
- org.onap.policy.clamp.acm.K8SMicroserviceAutomationCompositionElement:
- version: 1.0.1
- derived_from: org.onap.policy.clamp.acm.AutomationCompositionElement
- properties:
- chart:
- type: string
- required: true
- configs:
- type: list
- required: false
- requirements:
- type: string
- required: false
- templates:
- type: list
- required: false
- entry_schema: null
- values:
- type: string
- required: true
- org.onap.policy.clamp.acm.HttpAutomationCompositionElement:
- version: 1.0.1
- derived_from: org.onap.policy.clamp.acm.AutomationCompositionElement
- properties:
- baseUrl:
- type: string
- required: true
- description: >-
- The base URL to be prepended to each path, identifies the host for the
- REST endpoints.
- httpHeaders:
- type: map
- required: false
- entry_schema:
- type: string
- description: HTTP headers to send on REST requests
- configurationEntities:
- type: map
- required: true
- entry_schema:
- type: >-
- org.onap.datatypes.policy.clamp.acm.httpAutomationCompositionElement.ConfigurationEntity
- typeVersion: 1.0.0
- description: >-
- The configuration entities the Automation Composition Element is
- managing and their associated REST requests
- org.onap.policy.clamp.acm.A1PMSAutomationCompositionElement:
- version: 1.0.1
- derived_from: org.onap.policy.clamp.acm.AutomationCompositionElement
- properties:
- policyServiceEntities:
- type: list
- required: true
- entry_schema:
- type: org.onap.datatypes.policy.clamp.acm.a1pmsAutomationCompositionElement.A1PolicyServiceEntity
- typeVersion: 1.0.0
- description: The configuration entities of A1 PMS policy services
-topology_template:
- inputs:
- pmsh_monitoring_policy:
- type: onap.datatypes.ToscaConceptIdentifier
- description: The ID of the PMSH monitoring policy to use
- default:
- name: >-
- MICROSERVICE_vLoadBalancerMS_v1_0_dcae-pm-subscription-handler_1_0_0test
- version: 1.0.0
- pmsh_operational_policy:
- type: onap.datatypes.ToscaConceptIdentifier
- description: The ID of the PMSH operational policy to use
- default:
- name: operational.apex.pmcontrol
- version: 1.0.0
- node_templates:
- org.onap.policy.clamp.acm.PolicyParticipant:
- version: 2.3.1
- type: org.onap.policy.clamp.acm.Participant
- type_version: 1.0.1
- description: Participant for DCAE microservices
- properties:
- provider: ONAP
- org.onap.domain.pmsh.PMSH_MonitoringPolicyAutomationCompositionElement:
- version: 1.2.3
- type: org.onap.policy.clamp.acm.PolicyAutomationCompositionElement
- type_version: 1.0.0
- description: >-
- Automation composition element for the monitoring policy for Performance
- Management Subscription Handling
- properties:
- provider: Ericsson
- participant_id:
- name: org.onap.PM_Policy
- version: 1.0.0
- participantType:
- name: org.onap.policy.clamp.acm.PolicyParticipant
- version: 2.3.1
- policy_type_id:
- name: onap.policies.monitoring.pm-subscription-handler
- version: 1.0.0
- policy_id:
- get_input: pmsh_monitoring_policy
- org.onap.domain.pmsh.PMSH_OperationalPolicyAutomationCompositionElement:
- version: 1.2.3
- type: org.onap.policy.clamp.acm.PolicyAutomationCompositionElement
- type_version: 1.0.0
- description: >-
- Automation composition element for the operational policy for
- Performance Management Subscription Handling
- properties:
- provider: Ericsson
- participant_id:
- name: org.onap.PM_Policy
- version: 1.0.0
- participantType:
- name: org.onap.policy.clamp.acm.PolicyParticipant
- version: 2.3.1
- policy_type_id:
- name: onap.policies.operational.pm-subscription-handler
- version: 1.0.0
- policy_id:
- get_input: pmsh_operational_policy
- org.onap.policy.clamp.acm.KubernetesParticipant:
- version: 2.3.4
- type: org.onap.policy.clamp.acm.Participant
- type_version: 1.0.1
- description: Participant for K8S
- properties:
- provider: ONAP
- org.onap.domain.database.PMSH_K8SMicroserviceAutomationCompositionElement:
- version: 1.2.3
- type: org.onap.policy.clamp.acm.K8SMicroserviceAutomationCompositionElement
- type_version: 1.0.0
- description: Automation composition element for the K8S microservice for PMSH
- properties:
- provider: ONAP
- participant_id:
- name: K8sParticipant0
- version: 1.0.0
- participantType:
- name: org.onap.policy.clamp.acm.KubernetesParticipant
- version: 2.3.4
- chart:
- chartId:
- name: dcae-pmsh
- version: 10.0.0
- namespace: onap
- releaseName: onap-dcae-pmsh
- repository:
- repoName: chartmuseum
- address: 'http://chart-museum:80'
- userName: onapinitializer
- password: demo123456!
- overrideParams:
- global.masterPassword: test
- org.onap.policy.clamp.acm.HttpParticipant:
- version: 2.3.4
- type: org.onap.policy.clamp.acm.Participant
- type_version: 1.0.1
- description: Participant for Http requests
- properties:
- provider: ONAP
- org.onap.domain.database.Http_PMSHMicroserviceAutomationCompositionElement:
- version: 1.2.3
- type: org.onap.policy.clamp.acm.HttpAutomationCompositionElement
- type_version: 1.0.1
- description: >-
- Automation composition element for the http requests of PMSH
- microservice
- properties:
- provider: ONAP
- participant_id:
- name: HttpParticipant0
- version: 1.0.0
- participantType:
- name: org.onap.policy.clamp.acm.HttpParticipant
- version: 2.3.4
- uninitializedToPassiveTimeout: 180
- startPhase: 1
- baseUrl: 'https://dcae-pmsh:8443'
- httpHeaders:
- Content-Type: application/json
- configurationEntities:
- - configurationEntityId:
- name: entity1
- version: 1.0.1
- restSequence:
- - restRequestId:
- name: request1
- version: 1.0.1
- httpMethod: POST
- path: subscription
- body: >-
- { "subscription":{ "subscriptionName":"new_sub_01",
- "operationalPolicyName":"pmsh-operational-policy",
- "controlLoopName":"controlLoop-name", "nfFilter":{ "nfNames":[
- "^pnf*" ], "modelInvariantIDs":[
- "7129e420-d396-4efb-af02-6b83499b12f5" ], "modelVersionIDs":[
- "e80a6ae3-cafd-4d24-850d-e14c084a5ca7" ], "modelNames":[
- "pnf_134" ] }, "measurementGroups":[ { "measurementGroup":{
- "measurementGroupName":"msgroup_01",
- "administrativeState":"UNLOCKED", "fileBasedGP":15,
- "fileLocation":"/pm/pm.xml", "measurementTypes":[ {
- "measurementType":"EutranCell.*" }, {
- "measurementType":"EutranCellRelation.pmCounter1" }, {
- "measurementType":"EutranCellRelation.pmCounter2" } ],
- "managedObjectDNsBasic":[ {
- "DN":"ManagedElement=1,ENodeBFunction=1,EUtranCell=CityCenter1"
- }, {
- "DN":"ManagedElement=1,ENodeBFunction=1,EUtranCell=CityCenter1,
- EUtranCellRelation=CityCenter2" }, {
- "DN":"ManagedElement=1,ENodeBFunction=1,EUtranCell=CityCenter1,
- EUtranCellRelation=CityCenter3" } ] } } ] } }
- expectedResponse: 201
- org.onap.k8s.acm.A1PMSAutomationCompositionParticipant:
- version: 2.3.4
- type: org.onap.policy.clamp.acm.Participant
- type_version: 1.0.1
- description: Participant for A1 PMS requests
- properties:
- provider: ONAP
- org.onap.domain.database.PMSH_A1PMSAutomationCompositionElement:
- version: 1.2.3
- type: org.onap.policy.clamp.acm.A1PMSAutomationCompositionElement
- type_version: 1.0.1
- description: Automation composition element for the A1 PMS Requests
- properties:
- provider: ONAP
- participantType:
- name: org.onap.policy.clamp.acm.A1PMSParticipant
- version: 2.3.4
- policyServiceEntities:
- - a1PolicyServiceEntityId:
- name: entity1
- version: 1.0.1
- clientId: firstService
- callbackUrl: http://localhost
- keepAliveIntervalSeconds: 0
- - a1PolicyServiceEntityId:
- name: entity2
- version: 1.0.1
- clientId: secondService
- callbackUrl: http://127.0.0.1
- keepAliveIntervalSeconds: 0
- org.onap.domain.sample.GenericK8s_AutomationCompositionDefinition:
- version: 1.2.3
- type: org.onap.policy.clamp.acm.AutomationComposition
- type_version: 1.0.0
- description: Automation composition for Hello World
- properties:
- provider: ONAP
- elements:
- - name: >-
- org.onap.domain.database.PMSH_K8SMicroserviceAutomationCompositionElement
- version: 1.2.3
- - name: >-
- org.onap.domain.database.Http_PMSHMicroserviceAutomationCompositionElement
- version: 1.2.3
- - name: >-
- org.onap.domain.database.PMSH_A1PMSAutomationCompositionElement
- version: 1.2.3
- - name: >-
- org.onap.domain.pmsh.PMSH_MonitoringPolicyAutomationCompositionElement
- version: 1.2.3
- - name: >-
- org.onap.domain.pmsh.PMSH_OperationalPolicyAutomationCompositionElement
- version: 1.2.3
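The TOSCA service template removed above is the kind of composition definition that the clamp CSIT commissions against the ACM runtime before creating instances. As a rough, standalone sketch only — the host, path and credentials below are assumptions rather than values taken from the suite (the robot resources hold the authoritative ones) — such a template could be commissioned with:

```bash
# Sketch only: commission a composition definition against the ACM runtime.
# ACM_RUNTIME_URL, ACM_USER and ACM_PASS are illustrative placeholders; the
# commissioning path is assumed to be /onap/policy/clamp/acm/v2/compositions
# and may differ between releases.
curl -sk -u "${ACM_USER}:${ACM_PASS}" \
  -H 'Content-Type: application/yaml' \
  --data-binary @PMSHMultipleACTosca.yaml \
  -X POST "${ACM_RUNTIME_URL}/onap/policy/clamp/acm/v2/compositions"
```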
diff --git a/csit/resources/tests/data/ac-instance-migration-fail.yaml b/csit/resources/tests/data/ac-instance-migration-fail.yaml
new file mode 100644
index 00000000..1e6bd78c
--- /dev/null
+++ b/csit/resources/tests/data/ac-instance-migration-fail.yaml
@@ -0,0 +1,99 @@
+# ============LICENSE_START=======================================================
+# Copyright (C) 2024 Nordix Foundation.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the License);
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an AS IS BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+name: Instance-Migration-Fail
+version: 1.0.1
+compositionId: COMPOSITIONIDPLACEHOLDER
+instanceId: INSTACEIDPLACEHOLDER
+compositionTargetId: COMPOSITIONTARGETIDPLACEHOLDER
+description: Demo automation composition instance 0
+elements:
+
+ 709c62b3-8918-41b9-a747-d21eb79c6c34:
+ id: 709c62b3-8918-41b9-a747-d21eb79c6c34
+ definition:
+ name: onap.policy.clamp.ac.element.Sim_StarterAutomationCompositionElement
+ version: 1.2.3
+ description: Starter Automation Composition Element for the Demo
+ properties:
+ baseUrl: http://address:30800
+ httpHeaders:
+ Content-Type: application/json
+ Authorization: Basic YWNtVXNlcjp6YiFYenRHMzQ=
+ configurationEntities:
+ - configurationEntityId:
+ name: onap.policy.clamp.ac.starter
+ version: 1.0.0
+ restSequence:
+ - restRequestId:
+ name: request1
+ version: 1.0.1
+ httpMethod: POST
+ path: /onap/policy/clamp/acelement/v2/activate
+ body: '{ "receiverId": { "name": "onap.policy.clamp.ac.startertobridge", "version": "1.0.0" }, "timerMs": 20000, "elementType": "STARTER", "topicParameterGroup": { "server": "message-router:3904", "listenerTopic": "POLICY_UPDATE_MSG", "publisherTopic": "AC_ELEMENT_MSG", "fetchTimeout": 15000, "topicCommInfrastructure": "dmaap" } }'
+ expectedResponse: 201
+ myParameterToUpdate: text
+
+ 709c62b3-8918-41b9-a747-d21eb79c6c35:
+ id: 709c62b3-8918-41b9-a747-d21eb79c6c35
+ definition:
+ name: onap.policy.clamp.ac.element.Sim_BridgeAutomationCompositionElement
+ version: 1.2.3
+ description: Bridge Automation Composition Element for the Demo
+ properties:
+ baseUrl: http://address:30801
+ httpHeaders:
+ Content-Type: application/json
+ Authorization: Basic YWNtVXNlcjp6YiFYenRHMzQ=
+ configurationEntities:
+ - configurationEntityId:
+ name: onap.policy.clamp.ac.bridge
+ version: 1.0.0
+ restSequence:
+ - restRequestId:
+ name: request2
+ version: 1.0.1
+ httpMethod: POST
+ path: /onap/policy/clamp/acelement/v2/activate
+ body: '{ "receiverId": { "name": "onap.policy.clamp.ac.bridgetosink", "version": "1.0.0" }, "timerMs": 20000, "elementType": "BRIDGE", "topicParameterGroup": { "server": "message-router:3904", "listenerTopic": "POLICY_UPDATE_MSG", "publisherTopic": "AC_ELEMENT_MSG", "fetchTimeout": 15000, "topicCommInfrastructure": "dmaap" } }'
+ expectedResponse: 201
+ myParameterToUpdate: text
+
+ 709c62b3-8918-41b9-a747-d21eb79c6c36:
+ id: 709c62b3-8918-41b9-a747-d21eb79c6c36
+ definition:
+ name: onap.policy.clamp.ac.element.Sim_SinkAutomationCompositionElement
+ version: 1.2.3
+ description: Sink Automation Composition Element for the Demo
+ properties:
+ baseUrl: http://address:30802
+ httpHeaders:
+ Content-Type: application/json
+ Authorization: Basic YWNtVXNlcjp6YiFYenRHMzQ=
+ configurationEntities:
+ - configurationEntityId:
+ name: onap.policy.clamp.ac.sink
+ version: 1.0.0
+ restSequence:
+ - restRequestId:
+ name: request3
+ version: 1.0.1
+ httpMethod: POST
+ path: /onap/policy/clamp/acelement/v2/activate
+ body: '{ "receiverId": { "name": "onap.policy.clamp.ac.sink", "version": "1.0.0" }, "timerMs": 20000, "elementType": "SINK", "topicParameterGroup": { "server": "message-router", "listenerTopic": "POLICY_UPDATE_MSG", "publisherTopic": "AC_ELEMENT_MSG", "fetchTimeout": 15000, "topicCommInfrastructure": "dmaap" } }'
+ expectedResponse: 201
+ myParameterToUpdate: text
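The three placeholder tokens at the top of this new file (COMPOSITIONIDPLACEHOLDER, INSTACEIDPLACEHOLDER and COMPOSITIONTARGETIDPLACEHOLDER) have to be replaced with real UUIDs before the instance document is posted. A minimal sketch of that substitution, assuming the UUIDs are already available in shell variables (the robot suite performs the equivalent replacement with its own keywords):

```bash
# Sketch only: resolve the placeholder tokens before using the instance file.
# Variable names are illustrative; the tokens are copied verbatim from the file above.
sed -e "s/COMPOSITIONIDPLACEHOLDER/${COMPOSITION_ID}/" \
    -e "s/INSTACEIDPLACEHOLDER/${INSTANCE_ID}/" \
    -e "s/COMPOSITIONTARGETIDPLACEHOLDER/${TARGET_COMPOSITION_ID}/" \
    ac-instance-migration-fail.yaml > ac-instance-migration-fail.resolved.yaml
```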
diff --git a/csit/resources/tests/data/acelement-usecase.yaml b/csit/resources/tests/data/acelement-usecase.yaml
index 937ed6e6..b727a68f 100644
--- a/csit/resources/tests/data/acelement-usecase.yaml
+++ b/csit/resources/tests/data/acelement-usecase.yaml
@@ -27,176 +27,6 @@ data_types:
type: string
required: true
- onap.datatypes.native.apex.EngineService:
- derived_from: tosca.datatypes.Root
- properties:
- name:
- type: string
- description: Specifies the engine name
- required: false
- default: "ApexEngineService"
- version:
- type: string
- description: Specifies the engine version in double dotted format
- required: false
- default: "1.0.0"
- id:
- type: integer
- description: Specifies the engine id
- required: true
- instance_count:
- type: integer
- description: Specifies the number of engine threads that should be run
- required: true
- deployment_port:
- type: integer
- description: Specifies the port to connect to for engine administration
- required: false
- default: 1
- policy_model_file_name:
- type: string
- description: The name of the file from which to read the APEX policy model
- required: false
- policy_type_impl:
- type: string
- description: The policy type implementation from which to read the APEX policy model
- required: false
- periodic_event_period:
- type: string
- description: The time interval in milliseconds for the periodic scanning event, 0 means don't scan
- required: false
- engine:
- type: onap.datatypes.native.apex.engineservice.Engine
- description: The parameters for all engines in the APEX engine service
- required: true
- onap.datatypes.native.apex.EventHandler:
- derived_from: tosca.datatypes.Root
- properties:
- name:
- type: string
- description: Specifies the event handler name, if not specified this is set to the key name
- required: false
- carrier_technology:
- type: onap.datatypes.native.apex.CarrierTechnology
- description: Specifies the carrier technology of the event handler (such as REST/Web Socket/Kafka)
- required: true
- event_protocol:
- type: onap.datatypes.native.apex.EventProtocol
- description: Specifies the event protocol of events for the event handler (such as Yaml/JSON/XML/POJO)
- required: true
- event_name:
- type: string
- description: Specifies the event name for events on this event handler, if not specified, the event name is read from or written to the event being received or sent
- required: false
- event_name_filter:
- type: string
- description: Specifies a filter as a regular expression, events that do not match the filter are dropped, the default is to let all events through
- required: false
- synchronous_mode:
- type: boolean
- description: Specifies the event handler is synchronous (receive event and send response)
- required: false
- default: false
- synchronous_peer:
- type: string
- description: The peer event handler (output for input or input for output) of this event handler in synchronous mode, this parameter is mandatory if the event handler is in synchronous mode
- required: false
- synchronous_timeout:
- type: integer
- description: The timeout in milliseconds for responses to be issued by APEX to requests, this parameter is mandatory if the event handler is in synchronous mode
- required: false
- requestor_mode:
- type: boolean
- description: Specifies the event handler is in requestor mode (send event and wait for response mode)
- required: false
- default: false
- requestor_peer:
- type: string
- description: The peer event handler (output for input or input for output) of this event handler in requestor mode, this parameter is mandatory if the event handler is in requestor mode
- required: false
- requestor_timeout:
- type: integer
- description: The timeout in milliseconds for wait for responses to requests, this parameter is mandatory if the event handler is in requestor mode
- required: false
- onap.datatypes.native.apex.CarrierTechnology:
- derived_from: tosca.datatypes.Root
- properties:
- label:
- type: string
- description: The label (name) of the carrier technology (such as REST, Kafka, WebSocket)
- required: true
- plugin_parameter_class_name:
- type: string
- description: The class name of the class that overrides default handling of event input or output for this carrier technology, defaults to the supplied input or output class
- required: false
- onap.datatypes.native.apex.EventProtocol:
- derived_from: tosca.datatypes.Root
- properties:
- label:
- type: string
- description: The label (name) of the event protocol (such as Yaml, JSON, XML, or POJO)
- required: true
- event_protocol_plugin_class:
- type: string
- description: The class name of the class that overrides default handling of the event protocol for this carrier technology, defaults to the supplied event protocol class
- required: false
- onap.datatypes.native.apex.Environment:
- derived_from: tosca.datatypes.Root
- properties:
- name:
- type: string
- description: The name of the environment variable
- required: true
- value:
- type: string
- description: The value of the environment variable
- required: true
- onap.datatypes.native.apex.engineservice.Engine:
- derived_from: tosca.datatypes.Root
- properties:
- context:
- type: onap.datatypes.native.apex.engineservice.engine.Context
- description: The properties for handling context in APEX engines, defaults to using Java maps for context
- required: false
- executors:
- type: map
- description: The plugins for policy executors used in engines such as javascript, MVEL, Jython
- required: true
- entry_schema:
- description: The plugin class path for this policy executor
- type: string
- onap.datatypes.native.apex.engineservice.engine.Context:
- derived_from: tosca.datatypes.Root
- properties:
- distributor:
- type: onap.datatypes.native.apex.Plugin
- description: The plugin to be used for distributing context between APEX PDPs at runtime
- required: false
- schemas:
- type: map
- description: The plugins for context schemas available in APEX PDPs such as Java and Avro
- required: false
- entry_schema:
- type: onap.datatypes.native.apex.Plugin
- locking:
- type: onap.datatypes.native.apex.Plugin
- description: The plugin to be used for locking context in and between APEX PDPs at runtime
- required: false
- persistence:
- type: onap.datatypes.native.apex.Plugin
- description: The plugin to be used for persisting context for APEX PDPs at runtime
- required: false
- onap.datatypes.native.apex.Plugin:
- derived_from: tosca.datatypes.Root
- properties:
- name:
- type: string
- description: The name of the executor such as Javascript, Jython or MVEL
- required: true
- plugin_class_name:
- type: string
- description: The class path of the plugin class for this executor
-
org.onap.datatypes.policy.clamp.acm.httpAutomationCompositionElement.RestRequest:
version: 1.0.0
derived_from: tosca.datatypes.Root
@@ -244,37 +74,6 @@ data_types:
type_version: 1.0.0
description: A sequence of REST commands to send to the REST endpoint
-policy_types:
- onap.policies.Native:
- derived_from: tosca.policies.Root
- description: a base policy type for all native PDP policies
- version: 1.0.0
- name: onap.policies.Native
- onap.policies.native.Apex:
- derived_from: onap.policies.Native
- description: a policy type for native apex policies
- version: 1.0.0
- name: onap.policies.native.Apex
- properties:
- engine_service:
- type: onap.datatypes.native.apex.EngineService
- description: APEX Engine Service Parameters
- inputs:
- type: map
- description: Inputs for handling events coming into the APEX engine
- entry_schema:
- type: onap.datatypes.native.apex.EventHandler
- outputs:
- type: map
- description: Outputs for handling events going out of the APEX engine
- entry_schema:
- type: onap.datatypes.native.apex.EventHandler
- environment:
- type: list
- description: Environmental parameters for the APEX engine
- entry_schema:
- type: onap.datatypes.native.apex.Environment
-
node_types:
org.onap.policy.clamp.acm.Participant:
version: 1.0.1
@@ -536,601 +335,3 @@ topology_template:
version: 1.2.3
- name: onap.policy.clamp.ac.element.Http_SinkAutomationCompositionElement
version: 1.2.3
-
- policies:
- - onap.policies.native.apex.ac.element:
- type: onap.policies.native.Apex
- type_version: 1.0.0
- properties:
- engineServiceParameters:
- name: MyApexEngine
- version: 0.0.1
- id: 45
- instanceCount: 2
- deploymentPort: 12561
- engineParameters:
- executorParameters:
- JAVASCRIPT:
- parameterClassName: org.onap.policy.apex.plugins.executor.javascript.JavascriptExecutorParameters
- contextParameters:
- parameterClassName: org.onap.policy.apex.context.parameters.ContextParameters
- schemaParameters:
- Json:
- parameterClassName: org.onap.policy.apex.plugins.context.schema.json.JsonSchemaHelperParameters
- policy_type_impl:
- policies:
- key:
- name: APEXacElementPolicy_Policies
- version: 0.0.1
- policyMap:
- entry:
- - key:
- name: ReceiveEventPolicy
- version: 0.0.1
- value:
- policyKey:
- name: ReceiveEventPolicy
- version: 0.0.1
- template: Freestyle
- state:
- entry:
- - key: DecideForwardingState
- value:
- stateKey:
- parentKeyName: ReceiveEventPolicy
- parentKeyVersion: 0.0.1
- parentLocalName: 'NULL'
- localName: DecideForwardingState
- trigger:
- name: AcElementEvent
- version: 0.0.1
- stateOutputs:
- entry:
- - key: CreateForwardPayload
- value:
- key:
- parentKeyName: ReceiveEventPolicy
- parentKeyVersion: 0.0.1
- parentLocalName: DecideForwardingState
- localName: CreateForwardPayload
- outgoingEvent:
- name: DmaapResponseStatusEvent
- version: 0.0.1
- outgoingEventReference:
- - name: DmaapResponseStatusEvent
- version: 0.0.1
- nextState:
- parentKeyName: 'NULL'
- parentKeyVersion: 0.0.0
- parentLocalName: 'NULL'
- localName: 'NULL'
- contextAlbumReference: []
- taskSelectionLogic:
- key:
- parentKeyName: 'NULL'
- parentKeyVersion: 0.0.0
- parentLocalName: 'NULL'
- localName: 'NULL'
- logicFlavour: UNDEFINED
- logic: ''
- stateFinalizerLogicMap:
- entry: []
- defaultTask:
- name: ForwardPayloadTask
- version: 0.0.1
- taskReferences:
- entry:
- - key:
- name: ForwardPayloadTask
- version: 0.0.1
- value:
- key:
- parentKeyName: ReceiveEventPolicy
- parentKeyVersion: 0.0.1
- parentLocalName: DecideForwardingState
- localName: ReceiveEventPolicy
- outputType: DIRECT
- output:
- parentKeyName: ReceiveEventPolicy
- parentKeyVersion: 0.0.1
- parentLocalName: DecideForwardingState
- localName: CreateForwardPayload
- firstState: DecideForwardingState
- tasks:
- key:
- name: APEXacElementPolicy_Tasks
- version: 0.0.1
- taskMap:
- entry:
- - key:
- name: ForwardPayloadTask
- version: 0.0.1
- value:
- key:
- name: ForwardPayloadTask
- version: 0.0.1
- inputEvent:
- key:
- name: AcElementEvent
- version: 0.0.1
- nameSpace: org.onap.policy.apex.ac.element
- source: Dmaap
- target: APEX
- parameter:
- entry:
- - key: DmaapResponseEvent
- value:
- key:
- parentKeyName: AcElementEvent
- parentKeyVersion: 0.0.1
- parentLocalName: 'NULL'
- localName: DmaapResponseEvent
- fieldSchemaKey:
- name: ACEventType
- version: 0.0.1
- optional: false
- toscaPolicyState: ENTRY
- outputEvents:
- entry:
- - key: DmaapResponseStatusEvent
- value:
- key:
- name: DmaapResponseStatusEvent
- version: 0.0.1
- nameSpace: org.onap.policy.apex.ac.element
- source: APEX
- target: Dmaap
- parameter:
- entry:
- - key: DmaapResponseStatusEvent
- value:
- key:
- parentKeyName: DmaapResponseStatusEvent
- parentKeyVersion: 0.0.1
- parentLocalName: 'NULL'
- localName: DmaapResponseStatusEvent
- fieldSchemaKey:
- name: ACEventType
- version: 0.0.1
- optional: false
- toscaPolicyState: ''
- taskParameters:
- entry: []
- contextAlbumReference:
- - name: ACElementAlbum
- version: 0.0.1
- taskLogic:
- key:
- parentKeyName: ForwardPayloadTask
- parentKeyVersion: 0.0.1
- parentLocalName: 'NULL'
- localName: TaskLogic
- logicFlavour: JAVASCRIPT
- logic: "/*\n * ============LICENSE_START=======================================================\n\
- \ * Copyright (C) 2022 Nordix. All rights reserved.\n * ================================================================================\n\
- \ * Licensed under the Apache License, Version 2.0 (the 'License');\n\
- \ * you may not use this file except in compliance with the\
- \ License.\n * You may obtain a copy of the License at\n *\n\
- \ * http://www.apache.org/licenses/LICENSE-2.0\n *\n\
- \ * Unless required by applicable law or agreed to in writing,\
- \ software\n * distributed under the License is distributed\
- \ on an 'AS IS' BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS\
- \ OF ANY KIND, either express or implied.\n * See the License\
- \ for the specific language governing permissions and\n *\
- \ limitations under the License.\n *\n * SPDX-License-Identifier:\
- \ Apache-2.0\n * ============LICENSE_END=========================================================\n\
- \ */\n\nexecutor.logger.info(executor.subject.id);\nexecutor.logger.info(executor.inFields);\n\
- \nvar msgResponse = executor.inFields.get('DmaapResponseEvent');\n\
- executor.logger.info('Task in progress with messages: ' + msgResponse);\n\
- \nvar elementId = msgResponse.get('elementId').get('name');\n\
- \nif (msgResponse.get('messageType') == 'STATUS' &&\n (elementId\
- \ == 'onap.policy.clamp.ac.startertobridge'\n || elementId\
- \ == 'onap.policy.clamp.ac.bridgetosink')) {\n\n var receiverId\
- \ = '';\n if (elementId == 'onap.policy.clamp.ac.startertobridge')\
- \ {\n receiverId = 'onap.policy.clamp.ac.bridge';\n\
- \ } else {\n receiverId = 'onap.policy.clamp.ac.sink';\n\
- \ }\n\n var elementIdResponse = new java.util.HashMap();\n\
- \ elementIdResponse.put('name', receiverId);\n elementIdResponse.put('version',\
- \ msgResponse.get('elementId').get('version'));\n\n var\
- \ dmaapResponse = new java.util.HashMap();\n dmaapResponse.put('elementId',\
- \ elementIdResponse);\n\n var message = msgResponse.get('message')\
- \ + ' trace added from policy';\n dmaapResponse.put('message',\
- \ message);\n dmaapResponse.put('messageType', 'STATUS');\n\
- \ dmaapResponse.put('messageId', msgResponse.get('messageId'));\n\
- \ dmaapResponse.put('timestamp', msgResponse.get('timestamp'));\n\
- \n executor.logger.info('Sending forwarding Event to Ac\
- \ element: ' + dmaapResponse);\n\n executor.outFields.put('DmaapResponseStatusEvent',\
- \ dmaapResponse);\n}\n\ntrue;"
- events:
- key:
- name: APEXacElementPolicy_Events
- version: 0.0.1
- eventMap:
- entry:
- - key:
- name: AcElementEvent
- version: 0.0.1
- value:
- key:
- name: AcElementEvent
- version: 0.0.1
- nameSpace: org.onap.policy.apex.ac.element
- source: Dmaap
- target: APEX
- parameter:
- entry:
- - key: DmaapResponseEvent
- value:
- key:
- parentKeyName: AcElementEvent
- parentKeyVersion: 0.0.1
- parentLocalName: 'NULL'
- localName: DmaapResponseEvent
- fieldSchemaKey:
- name: ACEventType
- version: 0.0.1
- optional: false
- toscaPolicyState: ENTRY
- - key:
- name: DmaapResponseStatusEvent
- version: 0.0.1
- value:
- key:
- name: DmaapResponseStatusEvent
- version: 0.0.1
- nameSpace: org.onap.policy.apex.ac.element
- source: APEX
- target: Dmaap
- parameter:
- entry:
- - key: DmaapResponseStatusEvent
- value:
- key:
- parentKeyName: DmaapResponseStatusEvent
- parentKeyVersion: 0.0.1
- parentLocalName: 'NULL'
- localName: DmaapResponseStatusEvent
- fieldSchemaKey:
- name: ACEventType
- version: 0.0.1
- optional: false
- toscaPolicyState: ''
- - key:
- name: LogEvent
- version: 0.0.1
- value:
- key:
- name: LogEvent
- version: 0.0.1
- nameSpace: org.onap.policy.apex.ac.element
- source: APEX
- target: file
- parameter:
- entry:
- - key: final_status
- value:
- key:
- parentKeyName: LogEvent
- parentKeyVersion: 0.0.1
- parentLocalName: 'NULL'
- localName: final_status
- fieldSchemaKey:
- name: SimpleStringType
- version: 0.0.1
- optional: false
- - key: message
- value:
- key:
- parentKeyName: LogEvent
- parentKeyVersion: 0.0.1
- parentLocalName: 'NULL'
- localName: message
- fieldSchemaKey:
- name: SimpleStringType
- version: 0.0.1
- optional: false
- toscaPolicyState: ''
- albums:
- key:
- name: APEXacElementPolicy_Albums
- version: 0.0.1
- albums:
- entry:
- - key:
- name: ACElementAlbum
- version: 0.0.1
- value:
- key:
- name: ACElementAlbum
- version: 0.0.1
- scope: policy
- isWritable: true
- itemSchema:
- name: ACEventType
- version: 0.0.1
- schemas:
- key:
- name: APEXacElementPolicy_Schemas
- version: 0.0.1
- schemas:
- entry:
- - key:
- name: ACEventType
- version: 0.0.1
- value:
- key:
- name: ACEventType
- version: 0.0.1
- schemaFlavour: Json
- schemaDefinition: "{\n \"$schema\": \"http://json-schema.org/draft-04/schema#\"\
- ,\n \"type\": \"object\",\n \"properties\": {\n \
- \ \"elementId\": {\n \"type\": \"object\",\n \
- \ \"properties\": {\n \"name\": {\n \
- \ \"type\": \"string\"\n },\n\
- \ \"version\": {\n \"type\"\
- : \"string\"\n }\n },\n \
- \ \"required\": [\n \"name\",\n \
- \ \"version\"\n ]\n },\n \"message\"\
- : {\n \"type\": \"string\"\n },\n \"\
- messageType\": {\n \"type\": \"string\"\n \
- \ }\n },\n \"required\": [\n \"elementId\",\n \
- \ \"message\",\n \"messageType\"\n ]\n}"
- - key:
- name: SimpleIntType
- version: 0.0.1
- value:
- key:
- name: SimpleIntType
- version: 0.0.1
- schemaFlavour: Java
- schemaDefinition: java.lang.Integer
- - key:
- name: SimpleStringType
- version: 0.0.1
- value:
- key:
- name: SimpleStringType
- version: 0.0.1
- schemaFlavour: Java
- schemaDefinition: java.lang.String
- - key:
- name: UUIDType
- version: 0.0.1
- value:
- key:
- name: UUIDType
- version: 0.0.1
- schemaFlavour: Java
- schemaDefinition: java.util.UUID
- key:
- name: APEXacElementPolicy
- version: 0.0.1
- keyInformation:
- key:
- name: APEXacElementPolicy_KeyInfo
- version: 0.0.1
- keyInfoMap:
- entry:
- - key:
- name: ACElementAlbum
- version: 0.0.1
- value:
- key:
- name: ACElementAlbum
- version: 0.0.1
- UUID: 7cddfab8-6d3f-3f7f-8ac3-e2eb5979c900
- description: Generated description for concept referred to by
- key "ACElementAlbum:0.0.1"
- - key:
- name: ACEventType
- version: 0.0.1
- value:
- key:
- name: ACEventType
- version: 0.0.1
- UUID: dab78794-b666-3929-a75b-70d634b04fe5
- description: Generated description for concept referred to by
- key "ACEventType:0.0.1"
- - key:
- name: APEXacElementPolicy
- version: 0.0.1
- value:
- key:
- name: APEXacElementPolicy
- version: 0.0.1
- UUID: da478611-7d77-3c46-b4be-be968769ba4e
- description: Generated description for concept referred to by
- key "APEXacElementPolicy:0.0.1"
- - key:
- name: APEXacElementPolicy_Albums
- version: 0.0.1
- value:
- key:
- name: APEXacElementPolicy_Albums
- version: 0.0.1
- UUID: fa8dc15e-8c8d-3de3-a0f8-585b76511175
- description: Generated description for concept referred to by
- key "APEXacElementPolicy_Albums:0.0.1"
- - key:
- name: APEXacElementPolicy_Events
- version: 0.0.1
- value:
- key:
- name: APEXacElementPolicy_Events
- version: 0.0.1
- UUID: 8508cd65-8dd2-342d-a5c6-1570810dbe2b
- description: Generated description for concept referred to by
- key "APEXacElementPolicy_Events:0.0.1"
- - key:
- name: APEXacElementPolicy_KeyInfo
- version: 0.0.1
- value:
- key:
- name: APEXacElementPolicy_KeyInfo
- version: 0.0.1
- UUID: 09e6927d-c5ac-3779-919f-9333994eed22
- description: Generated description for concept referred to by
- key "APEXacElementPolicy_KeyInfo:0.0.1"
- - key:
- name: APEXacElementPolicy_Policies
- version: 0.0.1
- value:
- key:
- name: APEXacElementPolicy_Policies
- version: 0.0.1
- UUID: cade3c9a-1600-3642-a6f4-315612187f46
- description: Generated description for concept referred to by
- key "APEXacElementPolicy_Policies:0.0.1"
- - key:
- name: APEXacElementPolicy_Schemas
- version: 0.0.1
- value:
- key:
- name: APEXacElementPolicy_Schemas
- version: 0.0.1
- UUID: 5bb4a8e9-35fa-37db-9a49-48ef036a7ba9
- description: Generated description for concept referred to by
- key "APEXacElementPolicy_Schemas:0.0.1"
- - key:
- name: APEXacElementPolicy_Tasks
- version: 0.0.1
- value:
- key:
- name: APEXacElementPolicy_Tasks
- version: 0.0.1
- UUID: 2527eeec-0d1f-3094-ad3f-212622b12836
- description: Generated description for concept referred to by
- key "APEXacElementPolicy_Tasks:0.0.1"
- - key:
- name: AcElementEvent
- version: 0.0.1
- value:
- key:
- name: AcElementEvent
- version: 0.0.1
- UUID: 32c013e2-2740-3986-a626-cbdf665b63e9
- description: Generated description for concept referred to by
- key "AcElementEvent:0.0.1"
- - key:
- name: DmaapResponseStatusEvent
- version: 0.0.1
- value:
- key:
- name: DmaapResponseStatusEvent
- version: 0.0.1
- UUID: 2715cb6c-2778-3461-8b69-871e79f95935
- description: Generated description for concept referred to by
- key "DmaapResponseStatusEvent:0.0.1"
- - key:
- name: ForwardPayloadTask
- version: 0.0.1
- value:
- key:
- name: ForwardPayloadTask
- version: 0.0.1
- UUID: 51defa03-1ecf-3314-bf34-2a652bce57fa
- description: Generated description for concept referred to by
- key "ForwardPayloadTask:0.0.1"
- - key:
- name: LogEvent
- version: 0.0.1
- value:
- key:
- name: LogEvent
- version: 0.0.1
- UUID: c540f048-96af-35e3-a36e-e9c29377cba7
- description: Generated description for concept referred to by
- key "LogEvent:0.0.1"
- - key:
- name: ReceiveEventPolicy
- version: 0.0.1
- value:
- key:
- name: ReceiveEventPolicy
- version: 0.0.1
- UUID: 568b7345-9de1-36d3-b6a3-9b857e6809a1
- description: Generated description for concept referred to by
- key "ReceiveEventPolicy:0.0.1"
- - key:
- name: SimpleIntType
- version: 0.0.1
- value:
- key:
- name: SimpleIntType
- version: 0.0.1
- UUID: 153791fd-ae0a-36a7-88a5-309a7936415d
- description: Generated description for concept referred to by
- key "SimpleIntType:0.0.1"
- - key:
- name: SimpleStringType
- version: 0.0.1
- value:
- key:
- name: SimpleStringType
- version: 0.0.1
- UUID: 8a4957cf-9493-3a76-8c22-a208e23259af
- description: Generated description for concept referred to by
- key "SimpleStringType:0.0.1"
- - key:
- name: UUIDType
- version: 0.0.1
- value:
- key:
- name: UUIDType
- version: 0.0.1
- UUID: 6a8cc68e-dfc8-3403-9c6d-071c886b319c
- description: Generated description for concept referred to by
- key "UUIDType:0.0.1"
- eventInputParameters:
- DmaapConsumer:
- carrierTechnologyParameters:
- carrierTechnology: KAFKA
- parameterClassName: org.onap.policy.apex.plugins.event.carrier.kafka.KafkaCarrierTechnologyParameters
- parameters:
- bootstrapServers: kafka:9092
- groupId: clamp-grp
- enableAutoCommit: true
- autoCommitTime: 1000
- sessionTimeout: 30000
- consumerPollTime: 100
- consumerTopicList:
- - ac_element_msg
- keyDeserializer: org.apache.kafka.common.serialization.StringDeserializer
- valueDeserializer: org.apache.kafka.common.serialization.StringDeserializer
- kafkaProperties: [ ]
- eventProtocolParameters:
- eventProtocol: JSON
- parameters:
- pojoField: DmaapResponseEvent
- eventName: AcElementEvent
- eventNameFilter: AcElementEvent
- eventOutputParameters:
- logOutputter:
- carrierTechnologyParameters:
- carrierTechnology: FILE
- parameters:
- fileName: outputevents.log
- eventProtocolParameters:
- eventProtocol: JSON
- DmaapReplyProducer:
- carrierTechnologyParameters:
- carrierTechnology: KAFKA
- parameterClassName: org.onap.policy.apex.plugins.event.carrier.kafka.KafkaCarrierTechnologyParameters
- parameters:
- bootstrapServers: kafka:9092
- acks: all
- retries: 0
- batchSize: 16384
- lingerTime: 1
- bufferMemory: 33554432
- producerTopic: policy_update_msg
- keySerializer: org.apache.kafka.common.serialization.StringSerializer
- valueSerializer: org.apache.kafka.common.serialization.StringSerializer
- kafkaProperties: [ ]
- eventProtocolParameters:
- eventProtocol: JSON
- parameters:
- pojoField: DmaapResponseStatusEvent
- eventNameFilter: LogEvent|DmaapResponseStatusEvent
- name: onap.policies.native.apex.ac.element
- version: 1.0.0
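The event handler parameters removed above wire APEX to Kafka: events are consumed from the ac_element_msg topic and status responses are published to policy_update_msg on kafka:9092. When debugging a CSIT run it can help to watch those topics directly; a sketch, assuming a standard Kafka image where kafka-console-consumer.sh is on the PATH and the broker container is named kafka (both assumptions about the compose setup):

```bash
# Sketch only: tail the topics the removed APEX policy consumed from and published to.
# Container name and consumer script location are assumptions, not taken from the suite.
docker exec kafka kafka-console-consumer.sh \
  --bootstrap-server kafka:9092 --topic ac_element_msg --from-beginning &
docker exec kafka kafka-console-consumer.sh \
  --bootstrap-server kafka:9092 --topic policy_update_msg --from-beginning
```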
diff --git a/csit/resources/tests/data/onap.policy.opa.pdp.decision.empty_filter_response.json b/csit/resources/tests/data/onap.policy.opa.pdp.decision.empty_filter_response.json
new file mode 100644
index 00000000..040f0624
--- /dev/null
+++ b/csit/resources/tests/data/onap.policy.opa.pdp.decision.empty_filter_response.json
@@ -0,0 +1,61 @@
+{
+ "output": {
+ "allow": true,
+ "role_grants": {
+ "billing": [
+ {
+ "action": "read",
+ "type": "finance"
+ },
+ {
+ "action": "update",
+ "type": "finance"
+ }
+ ],
+ "customer": [
+ {
+ "action": "read",
+ "type": "dog"
+ },
+ {
+ "action": "read",
+ "type": "cat"
+ },
+ {
+ "action": "adopt",
+ "type": "dog"
+ },
+ {
+ "action": "adopt",
+ "type": "cat"
+ }
+ ],
+ "employee": [
+ {
+ "action": "read",
+ "type": "dog"
+ },
+ {
+ "action": "read",
+ "type": "cat"
+ },
+ {
+ "action": "update",
+ "type": "dog"
+ },
+ {
+ "action": "update",
+ "type": "cat"
+ }
+ ]
+ },
+ "user_is_admin": true,
+ "user_is_granted": [],
+ "user_roles": {
+ "alice": ["admin"],
+ "bob": ["employee", "billing"],
+ "eve": ["customer"]
+ }
+ },
+ "policyName": "role"
+}
diff --git a/csit/resources/tests/data/onap.policy.opa.pdp.decision.filter_response.json b/csit/resources/tests/data/onap.policy.opa.pdp.decision.filter_response.json
new file mode 100644
index 00000000..d19f5fed
--- /dev/null
+++ b/csit/resources/tests/data/onap.policy.opa.pdp.decision.filter_response.json
@@ -0,0 +1,23 @@
+{
+ "output": {
+ "role_grants": {
+ "billing": [
+ {"action": "read", "type": "finance"},
+ {"action": "update", "type": "finance"}
+ ],
+ "customer": [
+ {"action": "read", "type": "dog"},
+ {"action": "read", "type": "cat"},
+ {"action": "adopt", "type": "dog"},
+ {"action": "adopt", "type": "cat"}
+ ],
+ "employee": [
+ {"action": "read", "type": "dog"},
+ {"action": "read", "type": "cat"},
+ {"action": "update", "type": "dog"},
+ {"action": "update", "type": "cat"}
+ ]
+ }
+ },
+ "policyName": "role"
+}
diff --git a/csit/resources/tests/data/onap.policy.opa.pdp.decision.request.json b/csit/resources/tests/data/onap.policy.opa.pdp.decision.request.json
new file mode 100644
index 00000000..748a5529
--- /dev/null
+++ b/csit/resources/tests/data/onap.policy.opa.pdp.decision.request.json
@@ -0,0 +1 @@
+{"onapName":"CDS","onapComponent":"CDS","onapInstance":"CDS","currentDate": "2024-11-22", "currentTime": "2024-11-22T11:34:56Z", "timeZone": "UTC", "timeOffset": "+05:30", "currentDateTime": "2024-11-22T12:08:00Z", "policyName":"role","input":{"user":"alice","action":"write","object":"id123","type":"dog"}}
diff --git a/csit/resources/tests/data/onap.policy.opa.pdp.decision.request.output.json b/csit/resources/tests/data/onap.policy.opa.pdp.decision.request.output.json
new file mode 100644
index 00000000..bf035c4b
--- /dev/null
+++ b/csit/resources/tests/data/onap.policy.opa.pdp.decision.request.output.json
@@ -0,0 +1,5 @@
+{
+ "errorMessage": "Policy Filter is nil.",
+ "policyName": "",
+ "responseCode": "bad_request"
+}
diff --git a/csit/resources/tests/data/onap.policy.opa.pdp.decision.request_filter.json b/csit/resources/tests/data/onap.policy.opa.pdp.decision.request_filter.json
new file mode 100644
index 00000000..1a621c26
--- /dev/null
+++ b/csit/resources/tests/data/onap.policy.opa.pdp.decision.request_filter.json
@@ -0,0 +1 @@
+{"onapName":"CDS","onapComponent":"CDS","onapInstance":"CDS","currentDate": "2024-11-22", "currentTime": "2024-11-22T11:34:56Z", "timeZone": "UTC", "timeOffset": "+05:30", "currentDateTime": "2024-11-22T12:08:00Z", "policyName":"role","policyFilter": ["role_grants"],"input":{"user":"alice","action":"write","object":"id123","type":"dog"}}
diff --git a/csit/resources/tests/data/onap.policy.opa.pdp.decision.request_filter_empty.json b/csit/resources/tests/data/onap.policy.opa.pdp.decision.request_filter_empty.json
new file mode 100644
index 00000000..fec2ba48
--- /dev/null
+++ b/csit/resources/tests/data/onap.policy.opa.pdp.decision.request_filter_empty.json
@@ -0,0 +1 @@
+{"onapName":"CDS","onapComponent":"CDS","onapInstance":"CDS","currentDate": "2024-11-22", "currentTime": "2024-11-22T11:34:56Z", "timeZone": "UTC", "timeOffset": "+05:30", "currentDateTime": "2024-11-22T12:08:00Z", "policyName":"role","policyFilter": [""],"input":{"user":"alice","action":"write","object":"id123","type":"dog"}}
diff --git a/csit/resources/tests/drools-applications-test.robot b/csit/resources/tests/drools-applications-test.robot
index 4ceb288b..61d77591 100644
--- a/csit/resources/tests/drools-applications-test.robot
+++ b/csit/resources/tests/drools-applications-test.robot
@@ -1,10 +1,10 @@
*** Settings ***
-Library Collections
-Library String
-Library RequestsLibrary
-Library OperatingSystem
-Library Process
-Library json
+Library Collections
+Library String
+Library RequestsLibrary
+Library OperatingSystem
+Library Process
+Library json
Resource common-library.robot
*** Test Cases ***
@@ -29,18 +29,18 @@ Controller
AssertTopicsOnKafkaClient
[Documentation] Verify that the Policy topics policy-pdp-pap and policy-cl-mgt are available on kafka
- GetKafkaTopic policy-pdp-pap
- GetKafkaTopic policy-cl-mgt
+ GetKafkaTopic policy-pdp-pap
+ GetKafkaTopic policy-cl-mgt
CheckTopics
- [Documentation] List the topics registered with TopicManager
+ [Documentation] List the topics registered with TopicManager
${resp}= PerformGetRequestOnDrools /policy/pdp/engine/topics ${DROOLS_IP_2} 200
Should Contain ${resp.text} policy-cl-mgt
Should Contain ${resp.text} policy-pdp-pap
Should Contain ${resp.text} dcae_topic
CheckEngineFeatures
- [Documentation] List the features available in the drools engine
+ [Documentation] List the features available in the drools engine
${resp}= PerformGetRequestOnDrools /policy/pdp/engine/features ${DROOLS_IP_2} 200
Should Contain ${resp.text} "org.onap.policy.drools.lifecycle.LifecycleFeature"
Should Contain ${resp.text} "org.onap.policy.drools.apps.controlloop.feature.usecases.UsecasesFeature"
@@ -98,7 +98,7 @@ DeployXacmlPolicies
[Documentation] Deploys the Policies to Xacml
DeployPolicy deploy.xacml.policies.json
Sleep 5s
- @{otherMessages}= Create List deployed-policies onap.scaleout.tca onap.restart.tca
+ @{otherMessages}= Create List deployed-policies onap.scaleout.tca onap.restart.tca
AssertMessageFromTopic policy-notification onap.vfirewall.tca ${otherMessages}
VerifyDeployedXacmlPolicies
@@ -106,14 +106,14 @@ VerifyDeployedXacmlPolicies
${resp}= GetDeployedPolicies
@{policies}= Create List onap.vfirewall.tca onap.scaleout.tca onap.restart.tca
FOR ${policy} IN @{policies}
- Should Contain ${resp.text} ${policy}
+ Should Contain ${resp.text} ${policy}
END
DeployDroolsPolicies
[Documentation] Deploys the Policies to Drools
DeployPolicy deploy.drools.policies.json
Sleep 5s
- @{otherMessages}= Create List deployed-policies operational.scaleout operational.restart
+ @{otherMessages}= Create List deployed-policies operational.scaleout operational.restart
AssertMessageFromTopic policy-notification operational.modifyconfig ${otherMessages}
VerifyDeployedDroolsPolicies
@@ -121,68 +121,68 @@ VerifyDeployedDroolsPolicies
${resp}= GetDeployedPolicies
@{policies}= Create List operational.modifyconfig operational.scaleout operational.restart
FOR ${policy} IN @{policies}
- Should Contain ${resp.text} ${policy}
+ Should Contain ${resp.text} ${policy}
END
VcpeExecute
[Documentation] Executes VCPE Policy
- OnSet ${CURDIR}/data/drools/vcpeOnset.json
+ OnSet ${CURDIR}/data/drools/vcpeOnset.json
${policyExecuted}= Set Variable ControlLoop-vCPE-48f0c2c3-a172-4192-9ae3-052274181b6e
@{otherMessages}= Create List ACTIVE
- AssertMessageFromTopic policy-cl-mgt ${policyExecuted} ${otherMessages}
+ AssertMessageFromTopic policy-cl-mgt ${policyExecuted} ${otherMessages}
@{otherMessages}= Create List ${policyExecuted} OPERATION
- AssertMessageFromTopic policy-cl-mgt Sending guard query for APPC Restart ${otherMessages}
+ AssertMessageFromTopic policy-cl-mgt Sending guard query for APPC Restart ${otherMessages}
- AssertMessageFromTopic policy-cl-mgt Guard result for APPC Restart is Permit ${otherMessages}
+ AssertMessageFromTopic policy-cl-mgt Guard result for APPC Restart is Permit ${otherMessages}
- AssertMessageFromTopic policy-cl-mgt actor=APPC,operation=Restart ${otherMessages}
+ AssertMessageFromTopic policy-cl-mgt actor=APPC,operation=Restart ${otherMessages}
# @{otherMessages}= Create List OPERATION: SUCCESS actor=APPC,operation=Restart
-# AssertMessageFromTopic policy-cl-mgt ${policyExecuted} ${otherMessages}
+# AssertMessageFromTopic policy-cl-mgt ${policyExecuted} ${otherMessages}
#
# @{otherMessages}= Create List FINAL: SUCCESS APPC Restart
-# AssertMessageFromTopic policy-cl-mgt ${policyExecuted} ${otherMessages}
+# AssertMessageFromTopic policy-cl-mgt ${policyExecuted} ${otherMessages}
VdnsExecute
[Documentation] Executes VDNS Policy
- OnSet ${CURDIR}/data/drools/vdnsOnset.json
+ OnSet ${CURDIR}/data/drools/vdnsOnset.json
${policyExecuted}= Set Variable ControlLoop-vDNS-6f37f56d-a87d-4b85-b6a9-cc953cf779b3
@{otherMessages}= Create List ACTIVE
- AssertMessageFromTopic policy-cl-mgt ${policyExecuted} ${otherMessages}
+ AssertMessageFromTopic policy-cl-mgt ${policyExecuted} ${otherMessages}
@{otherMessages}= Create List ${policyExecuted} OPERATION
- AssertMessageFromTopic policy-cl-mgt Sending guard query for SO VF Module Create ${otherMessages}
+ AssertMessageFromTopic policy-cl-mgt Sending guard query for SO VF Module Create ${otherMessages}
- AssertMessageFromTopic policy-cl-mgt Guard result for SO VF Module Create is Permit ${otherMessages}
+ AssertMessageFromTopic policy-cl-mgt Guard result for SO VF Module Create is Permit ${otherMessages}
- AssertMessageFromTopic policy-cl-mgt actor=SO,operation=VF Module Create ${otherMessages}
+ AssertMessageFromTopic policy-cl-mgt actor=SO,operation=VF Module Create ${otherMessages}
@{otherMessages}= Create List ${policyExecuted} OPERATION: SUCCESS
- AssertMessageFromTopic policy-cl-mgt actor=SO,operation=VF Module Create ${otherMessages}
+ AssertMessageFromTopic policy-cl-mgt actor=SO,operation=VF Module Create ${otherMessages}
@{otherMessages}= Create List ${policyExecuted} FINAL: SUCCESS SO
- AssertMessageFromTopic policy-cl-mgt VF Module Create ${otherMessages}
+ AssertMessageFromTopic policy-cl-mgt VF Module Create ${otherMessages}
VfwExecute
[Documentation] Executes VFW Policy
- OnSet ${CURDIR}/data/drools/vfwOnset.json
+ OnSet ${CURDIR}/data/drools/vfwOnset.json
${policyExecuted}= Set Variable ControlLoop-vFirewall-d0a1dfc6-94f5-4fd4-a5b5-4630b438850a
@{otherMessages}= Create List ACTIVE
- AssertMessageFromTopic policy-cl-mgt ${policyExecuted} ${otherMessages}
+ AssertMessageFromTopic policy-cl-mgt ${policyExecuted} ${otherMessages}
@{otherMessages}= Create List ${policyExecuted} OPERATION
- AssertMessageFromTopic policy-cl-mgt Sending guard query for APPC ModifyConfig ${otherMessages}
+ AssertMessageFromTopic policy-cl-mgt Sending guard query for APPC ModifyConfig ${otherMessages}
- AssertMessageFromTopic policy-cl-mgt Guard result for APPC ModifyConfig is Permit ${otherMessages}
+ AssertMessageFromTopic policy-cl-mgt Guard result for APPC ModifyConfig is Permit ${otherMessages}
- AssertMessageFromTopic policy-cl-mgt actor=APPC,operation=ModifyConfig ${otherMessages}
+ AssertMessageFromTopic policy-cl-mgt actor=APPC,operation=ModifyConfig ${otherMessages}
# @{otherMessages}= Create List OPERATION: SUCCESS actor=APPC,operation=ModifyConfig
-# AssertMessageFromTopic policy-cl-mgt ${policyExecuted} ${otherMessages}
+# AssertMessageFromTopic policy-cl-mgt ${policyExecuted} ${otherMessages}
#
# @{otherMessages}= Create List FINAL: SUCCESS APPC ModifyConfig
-# AssertMessageFromTopic policy-cl-mgt ${policyExecuted} ${otherMessages}
+# AssertMessageFromTopic policy-cl-mgt ${policyExecuted} ${otherMessages}
*** Keywords ***
@@ -192,25 +192,25 @@ VerifyController
Should Be Equal As Strings ${resp.json()['usecases']} 1
PerformGetRequestOnDrools
- [Arguments] ${url} ${domain} ${expectedstatus}
- ${auth}= Create List demo@people.osaaf.org demo123456!
- Log Creating session http://${domain}
- ${session}= Create Session policy http://${domain} auth=${auth}
- ${headers}= Create Dictionary Accept=application/json Content-Type=application/json
- ${resp}= GET On Session policy ${url} headers=${headers} expected_status=${expectedstatus}
- Log Received response from policy ${resp.text}
- RETURN ${resp}
+ [Arguments] ${url} ${domain} ${expectedstatus}
+ ${auth}= Create List demo@people.osaaf.org demo123456!
+ Log Creating session http://${domain}
+ ${session}= Create Session policy http://${domain} auth=${auth}
+ ${headers}= Create Dictionary Accept=application/json Content-Type=application/json
+ ${resp}= GET On Session policy ${url} headers=${headers} expected_status=${expectedstatus}
+ Log Received response from policy ${resp.text}
+ RETURN ${resp}
PerformPostRequest
- [Arguments] ${url} ${domain} ${file} ${folder} ${contenttype} ${expectedstatus}
- ${auth}= Create List policyadmin zb!XztG34
- ${body}= Get file ${folder}/${file}
- Log Creating session http://${domain}
- ${session}= Create Session policy http://${domain} auth=${auth}
- ${headers}= Create Dictionary Accept=application/${contenttype} Content-Type=application/${contenttype}
- ${resp}= POST On Session policy ${url} data=${body} headers=${headers} expected_status=${expectedstatus}
- Log Received response from policy ${resp.text}
- RETURN ${resp}
+ [Arguments] ${url} ${domain} ${file} ${folder} ${contenttype} ${expectedstatus}
+ ${auth}= Create List policyadmin zb!XztG34
+ ${body}= Get file ${folder}/${file}
+ Log Creating session http://${domain}
+ ${session}= Create Session policy http://${domain} auth=${auth}
+ ${headers}= Create Dictionary Accept=application/${contenttype} Content-Type=application/${contenttype}
+ ${resp}= POST On Session policy ${url} data=${body} headers=${headers} expected_status=${expectedstatus}
+ Log Received response from policy ${resp.text}
+ RETURN ${resp}
OnSet
[Arguments] ${file}
@@ -220,18 +220,18 @@ OnSet
RETURN ${resp.stdout}
CreatePolicy
- [Arguments] ${policyFile} ${contenttype}
+ [Arguments] ${policyFile} ${contenttype}
PerformPostRequest /policy/api/v1/policies ${POLICY_API_IP} ${policyFile} ${DATA} ${contenttype} 201
DeployPolicy
- [Arguments] ${policyName}
+ [Arguments] ${policyName}
PerformPostRequest /policy/pap/v1/pdps/deployments/batch ${POLICY_PAP_IP} ${policyName} ${CURDIR}/data/drools json 202
AssertMessageFromTopic
- [Arguments] ${topic} ${topicMessage} ${otherMessages}
+ [Arguments] ${topic} ${topicMessage} ${otherMessages}
${response}= Wait Until Keyword Succeeds 4 x 10 sec CheckKafkaTopic ${topic} ${topicMessage}
FOR ${msg} IN @{otherMessages}
- Should Contain ${response} ${msg}
+ Should Contain ${response} ${msg}
END
GetDeployedPolicies
diff --git a/csit/resources/tests/opa-pdp-test.robot b/csit/resources/tests/opa-pdp-test.robot
new file mode 100644
index 00000000..8e059ffd
--- /dev/null
+++ b/csit/resources/tests/opa-pdp-test.robot
@@ -0,0 +1,64 @@
+*** Settings ***
+Library RequestsLibrary
+Library Collections
+Library OperatingSystem
+Library Process
+Library json
+Resource common-library.robot
+
+*** Variables ***
+${OPA_PDP_HOST} /policy/pdpo/v1/healthcheck
+${url} /policy/pdpo/v1/decision
+
+*** Test Cases ***
+Healthcheck
+ [Documentation] Verify OPA PDP health check
+ PdpxGetReq ${OPA_PDP_HOST} <Response [200]>
+
+ValidatingPolicyWithoutPolicyFilter
+    [Documentation]    Validating the policy without giving a policy filter
+ ValidatePolicyResponseWithoutFilter onap.policy.opa.pdp.decision.request.json 400 onap.policy.opa.pdp.decision.request.output.json
+
+ValidatingPolicyWithPolicyFilter
+ [Documentation] Validating the policy with policy filter
+ ValidatePolicyResponse onap.policy.opa.pdp.decision.request_filter.json 200 onap.policy.opa.pdp.decision.filter_response.json
+
+ValidatingPolicyWithEmptyPolicyFilter
+ [Documentation] Validating the policy with empty policy filter
+ ValidatePolicyResponse onap.policy.opa.pdp.decision.request_filter_empty.json 200 onap.policy.opa.pdp.decision.empty_filter_response.json
+
+*** Keywords ***
+PdpxGetReq
+    [Documentation]    Verify that the health check response is successful
+ [Arguments] ${url} ${status}
+ ${hcauth}= PolicyAdminAuth
+ ${resp}= PerformGetRequest ${POLICY_OPA_IP} ${url} 200 null ${hcauth}
+ Should Be Equal As Strings ${resp} ${status}
+
+ValidatePolicyResponse
+ [Documentation] Validating the output for the policy
+ [Arguments] ${jsonfile} ${status} ${jsonfile1}
+ ${expectedStatus}= Set Variable ${status}
+ ${postjson}= Get file ${CURDIR}/data/${jsonfile}
+ ${expected_data}= Get file ${CURDIR}/data/${jsonfile1}
+ ${hcauth}= PolicyAdminAuth
+ ${resp}= PerformPostRequest ${POLICY_OPA_IP} ${url} ${expectedStatus} ${postjson} abbrev=true ${hcauth}
+ ${response_data}= Get From Dictionary ${resp.json()} output
+ ${expected_value}= Evaluate json.loads('''${expected_data}''') json
+ ${expected_output}= Get From Dictionary ${expected_value} output
+ Dictionaries Should Be Equal ${response_data} ${expected_output}
+
+ValidatePolicyResponseWithoutFilter
+ [Documentation] Validating the output for the policy
+ [Arguments] ${jsonfile} ${status} ${jsonfile1}
+ ${expectedStatus}= Set Variable ${status}
+ ${postjson}= Get file ${CURDIR}/data/${jsonfile}
+ ${expected_data}= Get file ${CURDIR}/data/${jsonfile1}
+ ${hcauth}= PolicyAdminAuth
+ ${resp}= PerformPostRequest ${POLICY_OPA_IP} ${url} ${expectedStatus} ${postjson} abbrev=true ${hcauth}
+ ${response_data}= Get From Dictionary ${resp.json()} responseCode
+ ${expected_value}= Evaluate json.loads('''${expected_data}''') json
+ ${expected_output}= Get From Dictionary ${expected_value} responseCode
+ Should Be Equal As Strings ${response_data} ${expected_output}
+
+
diff --git a/csit/resources/tests/policy-clamp-test.robot b/csit/resources/tests/policy-clamp-test.robot
index 1c2ea031..b2e30fc8 100644
--- a/csit/resources/tests/policy-clamp-test.robot
+++ b/csit/resources/tests/policy-clamp-test.robot
@@ -283,6 +283,37 @@ AutomationCompositionMigrationTo
VerifyMigratedElementsRuntime ${compositionToId} ${instanceMigrationId}
VerifyMigratedElementsSim ${instanceMigrationId}
+FailAutomationCompositionMigration
+ [Documentation] Fail Migration of an automation composition.
+ SetParticipantSimFail
+ ${auth}= ClampAuth
+ ${postyaml}= Get file ${CURDIR}/data/ac-instance-migration-fail.yaml
+ ${updatedpostyaml}= Replace String ${postyaml} COMPOSITIONIDPLACEHOLDER ${compositionToId}
+ ${updatedpostyaml}= Replace String ${updatedpostyaml} COMPOSITIONTARGETIDPLACEHOLDER ${compositionFromId}
+ ${updatedpostyaml}= Replace String ${updatedpostyaml} INSTACEIDPLACEHOLDER ${instanceMigrationId}
+ ${updatedpostyaml}= Replace String ${updatedpostyaml} TEXTPLACEHOLDER TextForMigration
+ ${resp}= MakeYamlPostRequest ACM ${POLICY_RUNTIME_ACM_IP} /onap/policy/clamp/acm/v2/compositions/${compositionToId}/instances ${updatedpostyaml} ${auth}
+ Should Be Equal As Strings ${resp.status_code} 200
+ Wait Until Keyword Succeeds 2 min 5 sec VerifyStateChangeResult ${compositionToId} ${instanceMigrationId} FAILED
+
+FailDePrimeACDefinitionsFrom
+ [Documentation] Fail DePrime automation composition definition migration From.
+ SetParticipantSimFail
+ ${auth}= ClampAuth
+ ${postjson}= Get file ${CURDIR}/data/ACDepriming.json
+ ${resp}= MakeJsonPutRequest ACM ${POLICY_RUNTIME_ACM_IP} /onap/policy/clamp/acm/v2/compositions/${compositionFromId} ${postjson} ${auth}
+ Should Be Equal As Strings ${resp.status_code} 202
+ Wait Until Keyword Succeeds 2 min 5 sec VerifyFailedPriming ${compositionFromId}
+
+DePrimeACDefinitionsFrom
+ [Documentation] DePrime automation composition definition migration From.
+ SetParticipantSimSuccess
+ ${auth}= ClampAuth
+ ${postjson}= Get file ${CURDIR}/data/ACDepriming.json
+ ${resp}= MakeJsonPutRequest ACM ${POLICY_RUNTIME_ACM_IP} /onap/policy/clamp/acm/v2/compositions/${compositionFromId} ${postjson} ${auth}
+ Should Be Equal As Strings ${resp.status_code} 202
+ Wait Until Keyword Succeeds 2 min 5 sec VerifyPriming ${compositionFromId} COMMISSIONED
+
UnDeployAutomationComposition
[Documentation] UnDeploy automation composition.
${auth}= ClampAuth
@@ -352,24 +383,6 @@ DePrimeACDefinitions
Should Be Equal As Strings ${resp.status_code} 202
Wait Until Keyword Succeeds 2 min 5 sec VerifyPriming ${compositionId} COMMISSIONED
-FailDePrimeACDefinitionsFrom
- [Documentation] Fail DePrime automation composition definition migration From.
- SetParticipantSimFail
- ${auth}= ClampAuth
- ${postjson}= Get file ${CURDIR}/data/ACDepriming.json
- ${resp}= MakeJsonPutRequest ACM ${POLICY_RUNTIME_ACM_IP} /onap/policy/clamp/acm/v2/compositions/${compositionFromId} ${postjson} ${auth}
- Should Be Equal As Strings ${resp.status_code} 202
- Wait Until Keyword Succeeds 2 min 5 sec VerifyFailedPriming ${compositionFromId}
-
-DePrimeACDefinitionsFrom
- [Documentation] DePrime automation composition definition migration From.
- SetParticipantSimSuccess
- ${auth}= ClampAuth
- ${postjson}= Get file ${CURDIR}/data/ACDepriming.json
- ${resp}= MakeJsonPutRequest ACM ${POLICY_RUNTIME_ACM_IP} /onap/policy/clamp/acm/v2/compositions/${compositionFromId} ${postjson} ${auth}
- Should Be Equal As Strings ${resp.status_code} 202
- Wait Until Keyword Succeeds 2 min 5 sec VerifyPriming ${compositionFromId} COMMISSIONED
-
DePrimeACDefinitionsTo
[Documentation] DePrime automation composition definition migration To.
${auth}= ClampAuth
diff --git a/csit/run-k8s-csit.sh b/csit/run-k8s-csit.sh
index feab0437..2d5dfed3 100755
--- a/csit/run-k8s-csit.sh
+++ b/csit/run-k8s-csit.sh
@@ -1,8 +1,10 @@
#!/bin/bash
#
# ============LICENSE_START====================================================
-# Copyright (C) 2022-2024 Nordix Foundation.
+# Copyright (C) 2022-2025 Nordix Foundation.
+# Modifications Copyright © 2024 Deutsche Telekom
# =============================================================================
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -22,445 +24,82 @@
# Runs CSITs in kubernetes.
WORKSPACE=$(git rev-parse --show-toplevel)
-export WORKSPACE
-
-export GERRIT_BRANCH=$(awk -F= '$1 == "defaultbranch" { print $2 }' "${WORKSPACE}"/.gitreview)
-
-CSIT_SCRIPT="scripts/run-test.sh"
-ROBOT_DOCKER_IMAGE="policy-csit-robot"
-POLICY_CLAMP_ROBOT="policy-clamp-test.robot clamp-slas.robot"
-POLICY_API_ROBOT="api-test.robot api-slas.robot"
-POLICY_PAP_ROBOT="pap-test.robot pap-slas.robot"
-POLICY_APEX_PDP_ROBOT="apex-pdp-test.robot apex-slas.robot"
-POLICY_XACML_PDP_ROBOT="xacml-pdp-test.robot xacml-pdp-slas.robot"
-POLICY_DROOLS_PDP_ROBOT="drools-pdp-test.robot"
-POLICY_DISTRIBUTION_ROBOT="distribution-test.robot"
-
-POLICY_API_CONTAINER="policy-api"
-POLICY_PAP_CONTAINER="policy-pap"
-POLICY_CLAMP_CONTAINER="policy-clamp-runtime-acm"
-POLICY_APEX_CONTAINER="policy-apex-pdp"
-POLICY_DROOLS_CONTAINER="policy-drools-pdp"
-POLICY_XACML_CONTAINER="policy-xacml-pdp"
-POLICY_DISTRIBUTION_CONTAINER="policy-distribution"
-POLICY_K8S_PPNT_CONTAINER="policy-clamp-ac-k8s-ppnt"
-POLICY_HTTP_PPNT_CONTAINER="policy-clamp-ac-http-ppnt"
-POLICY_SIM_PPNT_CONTAINER="policy-clamp-ac-sim-ppnt"
-POLICY_PF_PPNT_CONTAINER="policy-clamp-ac-pf-ppnt"
-JAEGER_CONTAINER="jaeger"
-KAFKA_CONTAINER="kafka-deployment"
-ZK_CONTAINER="zookeeper-deployment"
-KAFKA_DIR=${WORKSPACE}/helm/cp-kafka
-SET_VALUES=""
-
-DISTRIBUTION_CSAR=${WORKSPACE}/csit/resources/tests/data/csar
-DIST_TEMP_FOLDER=/tmp/distribution
-
-export PROJECT=""
-export ROBOT_FILE=""
-export ROBOT_LOG_DIR=${WORKSPACE}/csit/archives
-export READINESS_CONTAINERS=()
-
-
-function spin_microk8s_cluster() {
- echo "Verify if Microk8s cluster is running.."
- microk8s version
- exitcode="${?}"
-
- if [ "$exitcode" -ne 0 ]; then
- echo "Microk8s cluster not available, Spinning up the cluster.."
- sudo snap install microk8s --classic --channel=1.30/stable
-
- if [ "${?}" -ne 0 ]; then
- echo "Failed to install kubernetes cluster. Aborting.."
- return 1
- fi
- echo "Microk8s cluster installed successfully"
- sudo usermod -a -G microk8s $USER
- echo "Enabling DNS and Storage plugins"
- sudo microk8s.enable dns hostpath-storage
- echo "Creating configuration file for Microk8s"
- sudo mkdir -p $HOME/.kube
- sudo chown -R $USER:$USER $HOME/.kube
- sudo microk8s kubectl config view --raw >$HOME/.kube/config
- sudo chmod 600 $HOME/.kube/config
- echo "K8s installation completed"
- echo "----------------------------------------"
- else
- echo "K8s cluster is already running"
- echo "----------------------------------------"
- fi
- echo "Verify if kubectl is running.."
- kubectl version
- exitcode="${?}"
-
- if [ "$exitcode" -ne 0 ]; then
- echo "Kubectl not available, Installing.."
- sudo snap install kubectl --classic --channel=1.30/stable
-
- if [ "${?}" -ne 0 ]; then
- echo "Failed to install Kubectl. Aborting.."
- return 1
- fi
- echo "Kubectl installation completed"
- echo "----------------------------------------"
- else
- echo "Kubectl is already running"
- echo "----------------------------------------"
- return 0
- fi
-
- echo "Verify if helm is running.."
- helm version
- exitcode="${?}"
-
- if [ "$exitcode" -ne 0 ]; then
- echo "Helm not available, Installing.."
- sudo snap install helm --classic --channel=3.7
-
- if [ "${?}" -ne 0 ]; then
- echo "Failed to install Helm client. Aborting.."
- return 1
- fi
- echo "Helm installation completed"
- echo "----------------------------------------"
- else
- echo "Helm is already running"
- echo "----------------------------------------"
- return 0
- fi
+function print_usage() {
+ echo "Usage: $0 [OPTIONS] OPERATION PROJECT"
+ echo ""
+ echo "OPTIONS:"
+ echo " -c, --cluster-only Install cluster only, without running robot tests"
+ echo " -l, --local-image Use local Docker image"
+ echo " -h, --help Display this help message"
+ echo ""
+ echo "OPERATION:"
+ echo " install Install the cluster and optionally run robot tests"
+ echo " uninstall Uninstall the policy deployment"
+ echo " clean Teardown the cluster"
+ echo ""
+ echo "PROJECT:"
+ echo " Specify the project name (e.g., clamp, api, pap, etc.)"
}
-function install_kafka() {
- echo "Installing Confluent kafka"
- kubectl apply -f $KAFKA_DIR/zookeeper.yaml
- kubectl apply -f $KAFKA_DIR/kafka.yaml
- echo "----------------------------------------"
-}
+CLUSTER_ONLY=false
+LOCAL_IMAGE=false
+
+# Parse command-line options
+while [[ $# -gt 0 ]]; do
+ case $1 in
+ -c|--cluster-only)
+ CLUSTER_ONLY=true
+ shift
+ ;;
+ -l|--local-image)
+ LOCAL_IMAGE=true
+ shift
+ ;;
+ -h|--help)
+ print_usage
+ exit 0
+ ;;
+ *)
+ break
+ ;;
+ esac
+done
-function uninstall_policy() {
- echo "Removing the policy helm deployment"
- helm uninstall csit-policy
- helm uninstall prometheus
- helm uninstall csit-robot
- kubectl delete deploy $ZK_CONTAINER $KAFKA_CONTAINER
- rm -rf ${WORKSPACE}/helm/policy/Chart.lock
- if [ "$PROJECT" == "clamp" ] || [ "$PROJECT" == "policy-clamp" ]; then
- helm uninstall policy-chartmuseum
- helm repo remove chartmuseum-git policy-chartmuseum
- fi
- sudo rm -rf /dockerdata-nfs/mariadb-galera/
- kubectl delete pvc --all
- echo "Policy deployment deleted"
- echo "Clean up docker"
- docker image prune -f
-}
+# Check for required arguments
+if [ $# -lt 2 ]; then
+ echo "Error: Insufficient arguments"
+ print_usage
+ exit 1
+fi
-function teardown_cluster() {
- echo "Removing k8s cluster and k8s configuration file"
- sudo snap remove microk8s;rm -rf $HOME/.kube/config
- sudo snap remove helm;
- sudo snap remove kubectl;
- echo "MicroK8s Cluster removed"
-}
+OPERATION=$1
+PROJECT=$2
-function build_robot_image() {
- echo "Build docker image for robot framework"
- cd ${WORKSPACE}/csit/resources || exit
- clone_models
- if [ "${PROJECT}" == "distribution" ] || [ "${PROJECT}" == "policy-distribution" ]; then
- copy_csar_file
- fi
- echo "Build robot framework docker image"
- docker login -u docker -p docker nexus3.onap.org:10001
- docker build . --file Dockerfile \
- --build-arg CSIT_SCRIPT="$CSIT_SCRIPT" \
- --build-arg ROBOT_FILE="$ROBOT_FILE" \
- --tag "${ROBOT_DOCKER_IMAGE}" --no-cache
- echo "---------------------------------------------"
-}
+# Set local image flag
+if [ "$LOCAL_IMAGE" = true ]; then
+ LOCAL_IMAGE_ARG="true"
+else
+ LOCAL_IMAGE_ARG="false"
+fi
-function start_csit() {
- build_robot_image
- if [ "${?}" -eq 0 ]; then
- echo "Importing robot image into microk8s registry"
- docker save -o policy-csit-robot.tar ${ROBOT_DOCKER_IMAGE}:latest
- sudo microk8s ctr image import policy-csit-robot.tar
- rm -rf ${WORKSPACE}/csit/resources/policy-csit-robot.tar
- rm -rf ${WORKSPACE}/csit/resources/tests/models/
- echo "---------------------------------------------"
- if [ "$PROJECT" == "clamp" ] || [ "$PROJECT" == "policy-clamp" ]; then
- POD_READY_STATUS="0/1"
- while [[ ${POD_READY_STATUS} != "1/1" ]]; do
- echo "Waiting for chartmuseum pod to come up..."
- sleep 5
- POD_READY_STATUS=$(kubectl get pods | grep -e "policy-chartmuseum" | awk '{print $2}')
- done
- push_acelement_chart
+# Execute the appropriate script based on the operation
+case $OPERATION in
+ install)
+ ${WORKSPACE}/csit/resources/scripts/cluster_setup.sh install $PROJECT $LOCAL_IMAGE_ARG
+ if [ "$CLUSTER_ONLY" = false ]; then
+ ${WORKSPACE}/csit/resources/scripts/robot_setup.sh $PROJECT
fi
- echo "Installing Robot framework pod for running CSIT"
- cd ${WORKSPACE}/helm
- mkdir -p ${ROBOT_LOG_DIR}
- helm install csit-robot robot --set robot="$ROBOT_FILE" --set "readiness={${READINESS_CONTAINERS[*]}}" --set robotLogDir=$ROBOT_LOG_DIR
- print_robot_log
- fi
-}
-
-function print_robot_log() {
- count_pods=0
- while [[ ${count_pods} -eq 0 ]]; do
- echo "Waiting for pods to come up..."
- sleep 5
- count_pods=$(kubectl get pods --output name | wc -l)
- done
- robotpod=$(kubectl get po | grep policy-csit)
- podName=$(echo "$robotpod" | awk '{print $1}')
- echo "The robot tests will begin once the policy components {${READINESS_CONTAINERS[*]}} are up and running..."
- kubectl wait --for=jsonpath='{.status.phase}'=Running --timeout=18m pod/"$podName"
- echo "Policy deployment status:"
- kubectl get po
- kubectl get all -A
- echo "Robot Test logs:"
- kubectl logs -f "$podName"
-}
-
-function clone_models() {
-
- # download models examples
- git clone -b "${GERRIT_BRANCH}" --single-branch https://github.com/onap/policy-models.git "${WORKSPACE}"/csit/resources/tests/models
-
- # create a couple of variations of the policy definitions
- sed -e 's!Measurement_vGMUX!ADifferentValue!' \
- tests/models/models-examples/src/main/resources/policies/vCPE.policy.monitoring.input.tosca.json \
- >tests/models/models-examples/src/main/resources/policies/vCPE.policy.monitoring.input.tosca.v1_2.json
-
- sed -e 's!"version": "1.0.0"!"version": "2.0.0"!' \
- -e 's!"policy-version": 1!"policy-version": 2!' \
- tests/models/models-examples/src/main/resources/policies/vCPE.policy.monitoring.input.tosca.json \
- >tests/models/models-examples/src/main/resources/policies/vCPE.policy.monitoring.input.tosca.v2.json
-}
-
-function copy_csar_file() {
- zip -F ${DISTRIBUTION_CSAR}/sample_csar_with_apex_policy.csar \
- --out ${DISTRIBUTION_CSAR}/csar_temp.csar -q
- # Remake temp directory
- sudo rm -rf "${DIST_TEMP_FOLDER}"
- sudo mkdir "${DIST_TEMP_FOLDER}"
- sudo cp ${DISTRIBUTION_CSAR}/csar_temp.csar ${DISTRIBUTION_CSAR}/temp.csar
- sudo mv ${DISTRIBUTION_CSAR}/temp.csar ${DIST_TEMP_FOLDER}/sample_csar_with_apex_policy.csar
-}
-
-function set_project_config() {
- echo "Setting project configuration for: $PROJECT"
- case $PROJECT in
-
- clamp | policy-clamp)
- export ROBOT_FILE=$POLICY_CLAMP_ROBOT
- export READINESS_CONTAINERS=($POLICY_CLAMP_CONTAINER,$POLICY_APEX_CONTAINER,$POLICY_PF_PPNT_CONTAINER,$POLICY_K8S_PPNT_CONTAINER,
- $POLICY_HTTP_PPNT_CONTAINER,$POLICY_SIM_PPNT_CONTAINER,$JAEGER_CONTAINER)
- export SET_VALUES="--set $POLICY_CLAMP_CONTAINER.enabled=true --set $POLICY_APEX_CONTAINER.enabled=true
- --set $POLICY_PF_PPNT_CONTAINER.enabled=true --set $POLICY_K8S_PPNT_CONTAINER.enabled=true
- --set $POLICY_HTTP_PPNT_CONTAINER.enabled=true --set $POLICY_SIM_PPNT_CONTAINER.enabled=true
- --set $JAEGER_CONTAINER.enabled=true"
- install_chartmuseum
- ;;
-
- api | policy-api)
- export ROBOT_FILE=$POLICY_API_ROBOT
- export READINESS_CONTAINERS=($POLICY_API_CONTAINER)
- ;;
-
- pap | policy-pap)
- export ROBOT_FILE=$POLICY_PAP_ROBOT
- export READINESS_CONTAINERS=($POLICY_APEX_CONTAINER,$POLICY_PAP_CONTAINER,$POLICY_API_CONTAINER,$POLICY_XACML_CONTAINER)
- export SET_VALUES="--set $POLICY_APEX_CONTAINER.enabled=true --set $POLICY_XACML_CONTAINER.enabled=true"
;;
-
- apex-pdp | policy-apex-pdp)
- export ROBOT_FILE=$POLICY_APEX_PDP_ROBOT
- export READINESS_CONTAINERS=($POLICY_APEX_CONTAINER,$POLICY_API_CONTAINER,$POLICY_PAP_CONTAINER)
- export SET_VALUES="--set $POLICY_APEX_CONTAINER.enabled=true"
- ;;
-
- xacml-pdp | policy-xacml-pdp)
- export ROBOT_FILE=($POLICY_XACML_PDP_ROBOT)
- export READINESS_CONTAINERS=($POLICY_API_CONTAINER,$POLICY_PAP_CONTAINER,$POLICY_XACML_CONTAINER)
- export SET_VALUES="--set $POLICY_XACML_CONTAINER.enabled=true"
+ uninstall)
+ ${WORKSPACE}/csit/resources/scripts/cluster_setup.sh uninstall
;;
-
- drools-pdp | policy-drools-pdp)
- export ROBOT_FILE=($POLICY_DROOLS_PDP_ROBOT)
- export READINESS_CONTAINERS=($POLICY_DROOLS_CONTAINER)
- export SET_VALUES="--set $POLICY_DROOLS_CONTAINER.enabled=true"
- ;;
-
- distribution | policy-distribution)
- export ROBOT_FILE=($POLICY_DISTRIBUTION_ROBOT)
- export READINESS_CONTAINERS=($POLICY_APEX_CONTAINER,$POLICY_API_CONTAINER,$POLICY_PAP_CONTAINER,$POLICY_DISTRIBUTION_CONTAINER)
- export SET_VALUES="--set $POLICY_APEX_CONTAINER.enabled=true --set $POLICY_DISTRIBUTION_CONTAINER.enabled=true"
+ clean)
+ ${WORKSPACE}/csit/resources/scripts/cluster_setup.sh clean
;;
-
*)
- echo "Unknown project supplied. Enabling all policy charts for the deployment"
- export READINESS_CONTAINERS=($POLICY_APEX_CONTAINER,$POLICY_API_CONTAINER,$POLICY_PAP_CONTAINER,
- $POLICY_DISTRIBUTION_CONTAINER,$POLICY_DROOLS_CONTAINER,$POLICY_XACML_CONTAINER,
- $POLICY_CLAMP_CONTAINER,$POLICY_PF_PPNT_CONTAINER,$POLICY_K8S_PPNT_CONTAINER,
- $POLICY_HTTP_PPNT_CONTAINER,$POLICY_SIM_PPNT_CONTAINER)
- export SET_VALUES="--set $POLICY_APEX_CONTAINER.enabled=true --set $POLICY_XACML_CONTAINER.enabled=true
- --set $POLICY_DISTRIBUTION_CONTAINER.enabled=true --set $POLICY_DROOLS_CONTAINER.enabled=true
- --set $POLICY_CLAMP_CONTAINER.enabled=true --set $POLICY_PF_PPNT_CONTAINER.enabled=true
- --set $POLICY_K8S_PPNT_CONTAINER.enabled=true --set $POLICY_HTTP_PPNT_CONTAINER.enabled=true
- --set $POLICY_SIM_PPNT_CONTAINER.enabled=true"
+ echo "Error: Invalid operation"
+ print_usage
+ exit 1
;;
- esac
-
-}
-
-function install_chartmuseum () {
- echo "---------------------------------------------"
- echo "Installing Chartmuseum helm repository..."
- helm repo add chartmuseum-git https://chartmuseum.github.io/charts
- helm repo update
- helm install policy-chartmuseum chartmuseum-git/chartmuseum --set env.open.DISABLE_API=false --set service.type=NodePort --set service.nodePort=30208
- helm plugin install https://github.com/chartmuseum/helm-push
- echo "---------------------------------------------"
-}
-
-function push_acelement_chart() {
- echo "Pushing acelement chart to the chartmuseum repo..."
- helm repo add policy-chartmuseum http://localhost:30208
-
- # download clamp repo
- git clone -b "${GERRIT_BRANCH}" --single-branch https://github.com/onap/policy-clamp.git "${WORKSPACE}"/csit/resources/tests/clamp
- ACELEMENT_CHART=${WORKSPACE}/csit/resources/tests/clamp/examples/src/main/resources/clamp/acm/acelement-helm/acelement
- helm cm-push $ACELEMENT_CHART policy-chartmuseum
- helm repo update
- rm -rf ${WORKSPACE}/csit/resources/tests/clamp/
- echo "-------------------------------------------"
-}
-
-function get_pod_name() {
- pods=$(kubectl get pods --no-headers -o custom-columns=':metadata.name' | grep $1)
- read -rd '' -a pod_array <<< "$pods"
- echo "${pod_array[@]}"
-}
-
-wait_for_pods_running() {
- local namespace="$1"
- shift
- local timeout_seconds="$1"
- shift
-
- IFS=',' read -ra pod_names <<< "$@"
- shift
-
- local pending_pods=("${pod_names[@]}")
- local start_time
- start_time=$(date +%s)
-
- while [ ${#pending_pods[@]} -gt 0 ]; do
- local current_time
- current_time=$(date +%s)
- local elapsed_time
- elapsed_time=$((current_time - start_time))
-
- if [ "$elapsed_time" -ge "$timeout_seconds" ]; then
- echo "Timed out waiting for the pods to reach 'Running' state."
- echo "Printing the current status of the deployment before exiting.."
- kubectl get po;
- kubectl describe pods;
- echo "------------------------------------------------------------"
- for pod in "${pending_pods[@]}"; do
- echo "Logs of the pod $pod"
- kubectl logs $pod
- echo "---------------------------------------------------------"
- done
- exit 1
- fi
-
- local newly_running_pods=()
-
- for pod_name_prefix in "${pending_pods[@]}"; do
- local pod_names=$(get_pod_name "$pod_name_prefix")
- IFS=' ' read -r -a pod_array <<< "$pod_names"
- if [ "${#pod_array[@]}" -eq 0 ]; then
- echo "*** Error: No pods found for the deployment $pod_name_prefix . Exiting ***"
- return -1
- fi
- for pod in "${pod_array[@]}"; do
- local pod_status
- local pod_ready
- pod_status=$(kubectl get pod "$pod" -n "$namespace" --no-headers -o custom-columns=STATUS:.status.phase 2>/dev/null)
- pod_ready=$(kubectl get pod "$pod" -o jsonpath='{.status.containerStatuses[*].ready}')
-
- if [ "$pod_status" == "Running" ] && [ "$pod_ready" == "true" ]; then
- echo "Pod '$pod' in namespace '$namespace' is now in 'Running' state and 'Readiness' is true"
- else
- newly_running_pods+=("$pod")
- echo "Waiting for pod '$pod' in namespace '$namespace' to reach 'Running' and 'Ready' state..."
- fi
-
- done
- done
-
- pending_pods=("${newly_running_pods[@]}")
-
- sleep 5
- done
-
- echo "All specified pods are in the 'Running and Ready' state. Exiting the function."
-}
-
-OPERATION="$1"
-PROJECT="$2"
-if [ -z "$3" ]
-then
- LOCALIMAGE="false"
-else
- LOCALIMAGE="$3"
-fi
-
-
-if [ $OPERATION == "install" ]; then
- spin_microk8s_cluster
- if [ "${?}" -eq 0 ]; then
- export KAFKA_CONTAINERS=($KAFKA_CONTAINER,$ZK_CONTAINER)
- install_kafka
- wait_for_pods_running default 300 $KAFKA_CONTAINERS
- set_project_config
- echo "Installing policy helm charts in the default namespace"
- source ${WORKSPACE}/compose/get-k8s-versions.sh
- if [ $LOCALIMAGE == "true" ]; then
- echo "loading local image"
- source ${WORKSPACE}/compose/get-versions.sh
- ${WORKSPACE}/compose/loaddockerimage.sh
- fi
- cd ${WORKSPACE}/helm || exit
- helm dependency build policy
- helm install csit-policy policy ${SET_VALUES}
- helm install prometheus prometheus
- wait_for_pods_running default 900 ${READINESS_CONTAINERS[@]}
- echo "Policy chart installation completed"
- echo "-------------------------------------------"
- fi
-
- if [ "$PROJECT" ]; then
- export ROBOT_LOG_DIR=${WORKSPACE}/csit/archives/${PROJECT}
- echo "CSIT will be invoked from $ROBOT_FILE"
- echo "Readiness containers: ${READINESS_CONTAINERS[*]}"
- echo "-------------------------------------------"
- start_csit
- else
- echo "No project supplied for running CSIT"
- fi
-
-elif [ $OPERATION == "uninstall" ]; then
- uninstall_policy
-
-elif [ $OPERATION == "clean" ]; then
- teardown_cluster
-
-else
- echo "Invalid arguments provided. Usage: $0 [options..] {install {project_name} | uninstall | clean} {uselocalimage = true/false}"
-fi
+esac
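The rewritten run-k8s-csit.sh above is now a thin dispatcher over cluster_setup.sh and robot_setup.sh. A minimal usage sketch derived from its print_usage text and option parsing (project names are only examples; both OPERATION and PROJECT must be supplied):

    # install the cluster and the policy charts for clamp, then run the robot suites
    ./run-k8s-csit.sh install clamp

    # install the cluster and charts only, skipping the robot run
    ./run-k8s-csit.sh --cluster-only install clamp

    # deploy using locally built docker images
    ./run-k8s-csit.sh --local-image install api

    # remove the policy deployment, or tear the cluster down completely
    ./run-k8s-csit.sh uninstall clamp
    ./run-k8s-csit.sh clean clamp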
diff --git a/csit/run-project-csit.sh b/csit/run-project-csit.sh
index e355d5ff..9b2c7695 100755
--- a/csit/run-project-csit.sh
+++ b/csit/run-project-csit.sh
@@ -3,7 +3,8 @@
# Copyright 2016-2017 Huawei Technologies Co., Ltd.
# Modification Copyright 2019 © Samsung Electronics Co., Ltd.
# Modification Copyright 2021 © AT&T Intellectual Property.
-# Modification Copyright 2021-2024 Nordix Foundation.
+# Modification Copyright 2021-2025 Nordix Foundation.
+# Modifications Copyright 2024 Deutsche Telekom
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -18,20 +19,26 @@
# limitations under the License.
#
+SKIP_BUILDING_ROBOT_IMG=false
+DO_NOT_TEARDOWN=false
+
# even with forced finish, clean up docker containers
function on_exit(){
rm -rf ${CSAR_DIR}/csar_temp.csar
- # teardown of compose containers for acm-replicas doesn't work with normal stop-compose script
- if [ "${ACM_REPLICA_TEARDOWN}" = true ]; then
- source ${DOCKER_COMPOSE_DIR}/start-acm-replica.sh --stop --replicas=2
- elif [ "${APEX_REPLICA_TEARDOWN}" = true ]; then
- source ${DOCKER_COMPOSE_DIR}/start-multiple-pdp.sh --stop --replicas=2
- else
- source ${DOCKER_COMPOSE_DIR}/stop-compose.sh ${PROJECT}
+ if [ "${DO_NOT_TEARDOWN}" = false ]; then
+ # teardown of compose containers for acm-replicas doesn't work with normal stop-compose script
+ if [ "${ACM_REPLICA_TEARDOWN}" = true ]; then
+ source ${DOCKER_COMPOSE_DIR}/start-acm-replica.sh --stop --replicas=2
+ elif [ "${APEX_REPLICA_TEARDOWN}" = true ]; then
+ source ${DOCKER_COMPOSE_DIR}/start-multiple-pdp.sh --stop --replicas=2
+ else
+ source ${DOCKER_COMPOSE_DIR}/stop-compose.sh ${PROJECT}
+ fi
+
+ mv ${DOCKER_COMPOSE_DIR}/*.log ${ROBOT_LOG_DIR}
fi
- mv ${DOCKER_COMPOSE_DIR}/*.log ${ROBOT_LOG_DIR}
exit $RC
}
@@ -90,7 +97,8 @@ function check_rest_endpoint() {
function setup_clamp() {
export ROBOT_FILES="policy-clamp-test.robot clamp-slas.robot"
source ${DOCKER_COMPOSE_DIR}/start-compose.sh policy-clamp-runtime-acm --grafana
- sleep 30
+    echo "Waiting 2 minutes for acm-runtime and participants to start..."
+ sleep 120
check_rest_endpoint "${ACM_PORT}"
}
@@ -100,8 +108,8 @@ function setup_clamp_replica() {
export TEST_ENV="docker"
export PROJECT=clamp
source ${DOCKER_COMPOSE_DIR}/start-acm-replica.sh --start --replicas=2
- echo "Waiting a minute for the replicas to be started..."
- sleep 60
+ echo "Waiting 2 minutes for the replicas to be started..."
+ sleep 120
# checking on apex-pdp status because acm-r replicas only start after apex-pdp is running
check_rest_endpoint ${PAP_PORT}
check_rest_endpoint ${APEX_PORT}
@@ -112,31 +120,37 @@ function setup_clamp_replica() {
function setup_api() {
export ROBOT_FILES="api-test.robot api-slas.robot"
source ${DOCKER_COMPOSE_DIR}/start-compose.sh api --grafana
- sleep 10
+ echo "Waiting 1 minute for policy-api to start..."
+ sleep 60
check_rest_endpoint ${API_PORT}
}
function setup_pap() {
export ROBOT_FILES="pap-test.robot pap-slas.robot"
source ${DOCKER_COMPOSE_DIR}/start-compose.sh apex-pdp --grafana
- sleep 10
+ echo "Waiting 1 minute for policy-pap to start..."
+ sleep 60
check_rest_endpoint ${PAP_PORT}
+ check_rest_endpoint ${APEX_PORT}
+ apex_healthcheck
}
function setup_apex() {
export ROBOT_FILES="apex-pdp-test.robot apex-slas.robot"
source ${DOCKER_COMPOSE_DIR}/start-compose.sh apex-pdp --grafana
- sleep 10
+ echo "Waiting 1 minute for apex-pdp to start..."
+ sleep 60
check_rest_endpoint ${PAP_PORT}
check_rest_endpoint ${APEX_PORT}
apex_healthcheck
}
function setup_apex_medium() {
- export SUITES="apex-slas-3.robot"
+ export ROBOT_FILES="apex-slas-3.robot"
export APEX_REPLICA_TEARDOWN=true
source ${DOCKER_COMPOSE_DIR}/start-multiple-pdp.sh --start --replicas=3
- sleep 10
+ echo "Waiting 1 minute for apex-pdp to start..."
+ sleep 60
check_rest_endpoint ${PAP_PORT}
check_rest_endpoint ${APEX_PORT}
apex_healthcheck
@@ -146,7 +160,8 @@ function setup_apex_large() {
export ROBOT_FILES="apex-slas-10.robot"
export APEX_REPLICA_TEARDOWN=true
source ${DOCKER_COMPOSE_DIR}/start-multiple-pdp.sh --start --replicas=10
- sleep 10
+ echo "Waiting 1 minute for apex-pdp to start..."
+ sleep 60
check_rest_endpoint ${PAP_PORT}
check_rest_endpoint ${APEX_PORT}
apex_healthcheck
@@ -155,25 +170,35 @@ function setup_apex_large() {
function setup_drools_apps() {
export ROBOT_FILES="drools-applications-test.robot drools-applications-slas.robot"
source ${DOCKER_COMPOSE_DIR}/start-compose.sh drools-applications --grafana
- sleep 10
+ echo "Waiting 1 minute for drools-pdp and drools-applications to start..."
+ sleep 60
check_rest_endpoint ${PAP_PORT}
- sleep 10
check_rest_endpoint ${DROOLS_APPS_PORT}
- sleep 10
check_rest_endpoint ${DROOLS_APPS_TELEMETRY_PORT}
}
function setup_xacml_pdp() {
export ROBOT_FILES="xacml-pdp-test.robot xacml-pdp-slas.robot"
source ${DOCKER_COMPOSE_DIR}/start-compose.sh xacml-pdp --grafana
- sleep 10
+ echo "Waiting 1 minute for xacml-pdp to start..."
+ sleep 60
check_rest_endpoint "${XACML_PORT}"
}
+function setup_opa_pdp() {
+ export ROBOT_FILES="opa-pdp-test.robot"
+ export PROJECT="opa-pdp"
+ source ${DOCKER_COMPOSE_DIR}/start-compose.sh opa-pdp
+ echo "Waiting 3 minutes for OPA-PDP to start..."
+ sleep 180
+ check_rest_endpoint "${OPA_PDP_PORT}"
+}
+
function setup_drools_pdp() {
export ROBOT_FILES="drools-pdp-test.robot"
source ${DOCKER_COMPOSE_DIR}/start-compose.sh drools-pdp --grafana
- sleep 30
+ echo "Waiting 1 minute for drools-pdp to start..."
+ sleep 60
check_rest_endpoint ${DROOLS_TELEMETRY_PORT}
}
@@ -186,8 +211,11 @@ function setup_distribution() {
export ROBOT_FILES="distribution-test.robot"
source ${DOCKER_COMPOSE_DIR}/start-compose.sh distribution --grafana
- sleep 10
+ echo "Waiting 1 minute for distribution to start..."
+ sleep 60
check_rest_endpoint "${DIST_PORT}"
+ check_rest_endpoint ${APEX_PORT}
+ apex_healthcheck
}
function build_robot_image() {
@@ -224,10 +252,6 @@ function set_project_config() {
setup_apex
;;
- apex-pdp-postgres | policy-apex-pdp-postgres)
- setup_apex
- ;;
-
apex-pdp-medium | policy-apex-pdp-medium)
setup_apex_medium
;;
@@ -240,6 +264,10 @@ function set_project_config() {
setup_xacml_pdp
;;
+ opa-pdp | policy-opa-pdp)
+ setup_opa_pdp
+ ;;
+
drools-pdp | policy-drools-pdp)
setup_drools_pdp
;;
@@ -262,6 +290,33 @@ function set_project_config() {
# ensure that teardown and other finalizing steps are always executed
trap on_exit EXIT
+# start the script
+
+# Parse the command-line arguments
+while [[ $# -gt 0 ]]
+do
+ key="$1"
+
+ case $key in
+ --skip-build-csit)
+ export SKIP_BUILDING_ROBOT_IMG=true
+ shift
+ ;;
+ --local)
+ export USE_LOCAL_IMAGES=true
+ shift
+ ;;
+ --no-exit)
+ export DO_NOT_TEARDOWN=true
+ shift
+ ;;
+ *)
+ export PROJECT="${1}"
+ shift
+ ;;
+ esac
+done
+
# setup all directories used for test resources
if [ -z "${WORKSPACE}" ]; then
WORKSPACE=$(git rev-parse --show-toplevel)
@@ -269,7 +324,6 @@ if [ -z "${WORKSPACE}" ]; then
fi
export GERRIT_BRANCH=$(awk -F= '$1 == "defaultbranch" { print $2 }' "${WORKSPACE}"/.gitreview)
-export PROJECT="${1}"
export ROBOT_LOG_DIR="${WORKSPACE}/csit/archives/${PROJECT}"
export SCRIPTS="${WORKSPACE}/csit/resources/scripts"
export CSAR_DIR="${WORKSPACE}/csit/resources/tests/data/csar"
@@ -306,7 +360,7 @@ unset http_proxy https_proxy
export ROBOT_FILES
# use a separate script to build a CSIT docker image, to isolate the test run
-if [ "${2}" == "--skip-build-csit" ]; then
+if [ "${SKIP_BUILDING_ROBOT_IMG}" == "true" ]; then
echo "Skipping build csit robot image"
else
build_robot_image
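With the positional project argument replaced by the flag parser above, run-project-csit.sh now accepts the project name and flags in any order. An illustrative set of invocations (project names are examples):

    ./run-project-csit.sh opa-pdp                  # run the new OPA-PDP compose suite
    ./run-project-csit.sh clamp --local            # use locally built docker images
    ./run-project-csit.sh api --skip-build-csit    # reuse a previously built robot image
    ./run-project-csit.sh apex-pdp --no-exit       # leave the compose stack running after the tests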
diff --git a/csit/run-s3p-tests.sh b/csit/run-s3p-tests.sh
new file mode 100755
index 00000000..4dce3b7e
--- /dev/null
+++ b/csit/run-s3p-tests.sh
@@ -0,0 +1,165 @@
+#!/bin/bash
+# ============LICENSE_START=======================================================
+# Copyright (C) 2023-2025 Nordix Foundation. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+# This script will be used to automatically trigger the S3P
+# tests for policy components.
+
+script_start_time=$(date +%s)
+log_file="${TESTDIR:-$(pwd)}/s3p_test_log_$(date +%Y%m%d_%H%M%S).log"
+files_processed=0
+errors_encountered=0
+warnings_issued=0
+
+# Function to log messages
+log_message() {
+ local level="$1"
+ local message="$2"
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] [$level] $message" | tee -a "$log_file"
+}
+
+# Start Kubernetes
+function start_kubernetes() {
+ log_message "INFO" "Starting Kubernetes cluster for $PROJECT"
+ bash resources/scripts/cluster_setup.sh install $PROJECT
+ if [ $? -eq 0 ]; then
+ log_message "INFO" "Kubernetes cluster started successfully"
+ else
+ log_message "ERROR" "Failed to start Kubernetes cluster"
+ ((errors_encountered++))
+ fi
+ bash resources/scripts/get-cluster-info.sh
+}
+
+function install_jmeter() {
+ log_message "INFO" "Installing JMeter"
+ cd ${TESTDIR}/automate-s3p-test || { log_message "ERROR" "Failed to change directory"; ((errors_encountered++)); return 1; }
+
+ sudo apt-get update
+ sudo apt install curl -y
+ sudo apt install -y default-jdk
+
+ curl -O https://archive.apache.org/dist/jmeter/binaries/apache-jmeter-5.6.2.tgz
+ tar -xvf apache-jmeter-5.6.2.tgz
+ mv apache-jmeter-5.6.2 apache-jmeter
+
+ echo 'export JVM_ARGS="-Xms2g -Xmx4g"' > apache-jmeter/bin/setenv.sh
+ echo 'export HEAP="-Xms1G -Xmx2G -XX:MaxMetaspaceSize=512m"' >> apache-jmeter/bin/setenv.sh
+
+ rm -rf apache-jmeter/docs apache-jmeter/printable_docs
+
+ cd apache-jmeter/lib || { log_message "ERROR" "Failed to change directory"; ((errors_encountered++)); return 1; }
+ curl -O https://repo1.maven.org/maven2/kg/apc/cmdrunner/2.2.1/cmdrunner-2.2.1.jar
+ curl -O https://repo1.maven.org/maven2/org/apache/kafka/kafka-clients/3.9.0/kafka-clients-3.9.0.jar
+ curl -O https://repo1.maven.org/maven2/org/apache/kafka/kafka_2.13/3.9.0/kafka_2.13-3.9.0.jar
+
+ sudo cp -r ../../apache-jmeter /opt/
+
+ export JMETER_HOME="/opt/apache-jmeter"
+ export PATH="$JMETER_HOME/bin:$PATH"
+
+ log_message "INFO" "JMeter installation completed"
+ ((files_processed+=7))
+}
+
+function on_exit() {
+ local exit_status=$?
+ local end_time=$(date +%s)
+ local runtime=$((end_time - script_start_time))
+
+ log_message "INFO" "=============== Exit Report ==============="
+ log_message "INFO" "Script execution completed at $(date)"
+ log_message "INFO" "Exit status: $exit_status"
+ log_message "INFO" "Total runtime: $runtime seconds"
+ log_message "INFO" "Operations summary:"
+ log_message "INFO" " - Files processed: $files_processed"
+ log_message "INFO" " - Errors encountered: $errors_encountered"
+ log_message "INFO" " - Warnings issued: $warnings_issued"
+ log_message "INFO" "Resource usage:"
+ ps -p $$ -o %cpu,%mem,etime >> "$log_file"
+ log_message "INFO" "Full log available at: $log_file"
+ log_message "INFO" "============================================"
+}
+
+function show_usage() {
+    echo "Usage: $0 {test <jmx_file> <project> | clean}"
+ echo "Options:"
+    echo "  test <jmx_file> <project>  Start the environment for <project> and run the specified JMX test plan"
+ echo " clean Uninstall the environment and remove temporary folders"
+}
+
+function teardown() {
+ log_message "INFO" "Starting teardown process"
+
+ log_message "INFO" "Tearing down Kubernetes cluster"
+ bash resources/scripts/cluster_setup.sh uninstall
+
+ log_message "INFO" "Deleting created services"
+ microk8s kubectl get svc | awk '/svc/{system("microk8s kubectl delete svc " $1)}'
+
+ log_message "INFO" "Teardown process completed"
+}
+
+function main() {
+ PROJECT="$3"
+ case "$1" in
+ clean)
+ log_message "INFO" "Uninstalling environment and removing temp folders"
+ teardown
+ ;;
+ test)
+ if [ -z "$2" ]; then
+ log_message "ERROR" "JMX file not specified for test option"
+ show_usage
+ ((errors_encountered++))
+ exit 1
+ fi
+ log_message "INFO" "Starting K8s Environment"
+ start_kubernetes
+
+ log_message "INFO" "Installing JMeter"
+ install_jmeter
+
+ log_message "INFO" "Executing tests"
+ cd "${TESTDIR}/automate-s3p-test" || { log_message "ERROR" "Failed to change directory"; ((errors_encountered++)); exit 1; }
+ nohup jmeter -n -t "$2" -l s3pTestResults.jtl
+ if [ $? -eq 0 ]; then
+ log_message "INFO" "JMeter test completed successfully"
+ ((files_processed++))
+ else
+ log_message "ERROR" "JMeter test failed"
+ ((errors_encountered++))
+ fi
+ ;;
+ *)
+ log_message "WARNING" "Invalid option provided"
+ show_usage
+ ((warnings_issued++))
+ exit 1
+ ;;
+ esac
+}
+
+# Set TESTDIR if not already set
+TESTDIR=${TESTDIR:-$(pwd)}
+
+# Set up trap for exit
+trap on_exit EXIT
+
+# Call the main function with all script arguments
+main "$@"
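The new run-s3p-tests.sh expects the operation, the JMX test plan and the project as positional arguments (main() reads the project from $3, and the plan is resolved after changing into ${TESTDIR}/automate-s3p-test). A hedged example with a hypothetical plan name:

    # start the k8s deployment for apex-pdp, install JMeter and execute the plan
    ./run-s3p-tests.sh test stability.jmx apex-pdp

    # uninstall the deployment and remove the created services
    ./run-s3p-tests.sh clean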
diff --git a/csit/start-s3p-tests.sh b/csit/start-s3p-tests.sh
deleted file mode 100755
index 41974601..00000000
--- a/csit/start-s3p-tests.sh
+++ /dev/null
@@ -1,116 +0,0 @@
-#!/bin/bash
-# ============LICENSE_START=======================================================
-# Copyright (C) 2023 Nordix Foundation. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-# ============LICENSE_END=========================================================
-
-# This script will be used to automatically trigger the S3P
-# tests for policy components.
-
-# Start Kubernetes
-function start_kubernetes() {
- bash run-k8s-csit.sh install
- bash resources/scripts/get-cluster-info.sh
-}
-
-function install_jmeter() {
-
- #NOTE: $TESTDIR is set by the component triggering this script
- cd ${TESTDIR}/automate-s3p-test
-
- sudo apt-get update
-
- # Install curl
- sudo apt install curl -y
-
- # Install JDK
- sudo apt install -y default-jdk
-
- # Install JMeter
- curl -O https://archive.apache.org/dist/jmeter/binaries/apache-jmeter-5.6.2.tgz
- tar -xvf apache-jmeter-5.6.2.tgz
- mv apache-jmeter-5.6.2 apache-jmeter
-
- # Define your desired heap size values
- echo 'export JVM_ARGS="-Xms2g -Xmx4g"' > apache-jmeter/bin/setenv.sh
- echo 'export HEAP="-Xms1G -Xmx2G -XX:MaxMetaspaceSize=512m"' >> apache-jmeter/bin/setenv.sh
-
-
- # Remove unnecessary files
- rm -rf apache-jmeter/docs apache-jmeter/printable_docs
-
- # Install CMD Runner
- cd apache-jmeter/lib
- curl -O https://repo1.maven.org/maven2/kg/apc/cmdrunner/2.2.1/cmdrunner-2.2.1.jar
-
- # Move JMeter to /opt
- sudo cp -r ../../apache-jmeter /opt/
-
- # Add JMeter Path Variable
- export JMETER_HOME="/opt/apache-jmeter"
- export PATH="$JMETER_HOME/bin:$PATH"
-}
-
-function on_exit() {
- # TODO: Generate report
- echo "Generating report..."
-}
-
-function teardown() {
- echo "Removing temp directories.."
-
- rm -r ${TESTDIR}/automate-s3p-test
-
- echo "Removed directories"
-
- echo "Tearing down kubernetes cluster..."
- bash run-k8s-csit.sh uninstall
-
- # DELETE created services
- microk8s kubectl get svc | awk '/svc/{system("microk8s kubectl delete svc " $1)}'
-}
-
-#===MAIN===#
-
-if [ $1 == "run" ]
-then
-
- echo "==========================="
- echo "Starting K8s Environment"
- echo "==========================="
- start_kubernetes
-
- echo "==========================="
- echo "Installing JMeter"
- echo "==========================="
- install_jmeter
-
- # Run the JMX test plan
- echo "==========================="
- echo "Executing tests"
- echo "==========================="
- cd ${TESTDIR}/automate-s3p-test || exit
- nohup jmeter -n -t $2 -l s3pTestResults.jtl
-
- # TODO: Generate report on on_exit()
-
-elif [ $1 == "uninstall" ]
-then
- echo "Uninstalling environment and removing temp folders..."
- teardown
-else
- echo "Invalid arguments provided. Usage: $0 [option..] {run | uninstall}"
-fi