From f2e4da7e296548fb3980fd212e3a67dc83254e1d Mon Sep 17 00:00:00 2001 From: rameshiyer27 Date: Sat, 13 Jan 2024 21:26:09 +0000 Subject: Add kafka support in Policy CSIT Issue-ID: POLICY-4402 Signed-off-by: zrrmmua Change-Id: I802c19a3c9817d304164eba634adb8c119aa4ced --- compose/config/apex-pdp/OnapPfConfig.json | 17 +++--- .../custom/feature-lifecycle.properties | 27 ++++---- compose/config/drools-applications/env/base.conf | 11 ++-- .../env/feature-pooling-dmaap.conf | 3 +- compose/config/drools-pdp/custom/noop.pre.sh | 3 +- compose/config/drools-pdp/env/base.conf | 9 +-- compose/config/pap/papParameters.yaml | 22 +++---- compose/config/pap/papParametersPostgres.yaml | 22 +++---- compose/config/sim-all/simParameters.json | 18 +++--- compose/config/xacml-pdp/defaultConfig.json | 15 ++--- compose/docker-compose.postgres.yml | 35 +++++++++-- compose/docker-compose.yml | 24 ++++---- compose/export-ports.sh | 3 +- csit/resources/scripts/kafka_consumer.py | 71 ---------------------- csit/resources/scripts/kafka_producer.py | 41 ------------- csit/resources/scripts/make_topics.py | 41 ------------- csit/resources/scripts/run-test.sh | 5 +- csit/resources/scripts/setup-apex-pdp-postgres.sh | 4 +- csit/resources/scripts/setup-apex-pdp.sh | 6 +- .../resources/scripts/setup-drools-applications.sh | 4 +- csit/resources/scripts/setup-xacml-pdp.sh | 4 +- csit/resources/tests/apex-pdp-common.robot | 4 +- csit/resources/tests/apex-pdp-test.robot | 46 +++++++------- csit/resources/tests/apex-slas.robot | 14 ++--- csit/resources/tests/common-library.robot | 6 +- .../tests/data/onap.policies.apex.pnf.Test.json | 38 +++++++++--- .../onap.policies.apex.pnf.metadataSet.Test.json | 38 +++++++++--- csit/resources/tests/distribution-test.robot | 8 +-- .../resources/tests/drools-applications-test.robot | 29 ++++----- csit/resources/tests/kafka_consumer.py | 66 ++++++++++++++++++++ csit/resources/tests/kafka_producer.py | 41 +++++++++++++ csit/resources/tests/make_topics.py | 41 +++++++++++++ csit/resources/tests/pap-test.robot | 1 + csit/resources/tests/policy-clamp-test.robot | 1 + csit/resources/tests/xacml-pdp-test.robot | 11 ++-- csit/run-k8s-csit.sh | 4 +- 36 files changed, 397 insertions(+), 336 deletions(-) delete mode 100755 csit/resources/scripts/kafka_consumer.py delete mode 100755 csit/resources/scripts/kafka_producer.py delete mode 100755 csit/resources/scripts/make_topics.py create mode 100755 csit/resources/tests/kafka_consumer.py create mode 100755 csit/resources/tests/kafka_producer.py create mode 100755 csit/resources/tests/make_topics.py diff --git a/compose/config/apex-pdp/OnapPfConfig.json b/compose/config/apex-pdp/OnapPfConfig.json index e943dd37..6242b906 100644 --- a/compose/config/apex-pdp/OnapPfConfig.json +++ b/compose/config/apex-pdp/OnapPfConfig.json @@ -10,7 +10,7 @@ }, "pdpStatusParameters": { "pdpGroup": "defaultGroup", - "timeIntervalMs": 120000, + "timeIntervalMs": 20000, "pdpType": "apex", "description": "Pdp Heartbeat", "supportedPolicyTypes": [ @@ -27,21 +27,22 @@ "topicParameterGroup": { "topicSources": [ { - "topic": "POLICY-PDP-PAP", + "topic": "policy-pdp-pap", "servers": [ - "message-router" + "kafka:9092" ], - "topicCommInfrastructure": "dmaap", - "useHttps": false + "topicCommInfrastructure": "kafka", + "useHttps": false, + "fetchTimeout": 15000 } ], "topicSinks": [ { - "topic": "POLICY-PDP-PAP", + "topic": "policy-pdp-pap", "servers": [ - "message-router" + "kafka:9092" ], - "topicCommInfrastructure": "dmaap", + "topicCommInfrastructure": "kafka", "useHttps": false } ] diff --git a/compose/config/drools-applications/custom/feature-lifecycle.properties b/compose/config/drools-applications/custom/feature-lifecycle.properties index 3e862970..ec23bee6 100644 --- a/compose/config/drools-applications/custom/feature-lifecycle.properties +++ b/compose/config/drools-applications/custom/feature-lifecycle.properties @@ -2,6 +2,7 @@ # ONAP # ================================================================================ # Copyright (C) 2021 AT&T Intellectual Property. All rights reserved. +# Modification Copyright 2024 Nordix Foundation. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -22,17 +23,19 @@ lifecycle.pdp.type=${envd:POLICY_PDP_PAP_TYPE:drools} # Mandatory policy types that this PDP-D must support at a minimum lifecycle.pdp.policytypes=${envd:POLICY_PDP_PAP_POLICYTYPES} -dmaap.source.topics=POLICY-PDP-PAP -dmaap.sink.topics=POLICY-PDP-PAP +kafka.source.topics=policy-pdp-pap +kafka.sink.topics=policy-pdp-pap +kafka.source.topics.fetchTimeout=15000 +kafka.source.topics.policy-pdp-pap.fetchTimeout=15000 -dmaap.source.topics.POLICY-PDP-PAP.servers=${envd:DMAAP_SERVERS} -dmaap.source.topics.POLICY-PDP-PAP.effectiveTopic=${envd:POLICY_PDP_PAP_TOPIC} -dmaap.source.topics.POLICY-PDP-PAP.apiKey=${envd:POLICY_PDP_PAP_API_KEY} -dmaap.source.topics.POLICY-PDP-PAP.apiSecret=${envd:POLICY_PDP_PAP_API_SECRET} -dmaap.source.topics.POLICY-PDP-PAP.https=${envd:DMAAP_HTTPS:false} +kafka.source.topics.policy-pdp-pap.servers=kafka:9092 +kafka.source.topics.policy-pdp-pap.effectiveTopic=${envd:POLICY_PDP_PAP_TOPIC} +kafka.source.topics.policy-pdp-pap.apiKey=${envd:POLICY_PDP_PAP_API_KEY} +kafka.source.topics.policy-pdp-pap.apiSecret=${envd:POLICY_PDP_PAP_API_SECRET} +kafka.source.topics.policy-pdp-pap.https=${envd:DMAAP_HTTPS:false} -dmaap.sink.topics.POLICY-PDP-PAP.servers=${envd:DMAAP_SERVERS} -dmaap.sink.topics.POLICY-PDP-PAP.effectiveTopic=${envd:POLICY_PDP_PAP_TOPIC} -dmaap.sink.topics.POLICY-PDP-PAP.apiKey=${envd:POLICY_PDP_PAP_API_KEY} -dmaap.sink.topics.POLICY-PDP-PAP.apiSecret=${envd:POLICY_PDP_PAP_API_SECRET} -dmaap.sink.topics.POLICY-PDP-PAP.https=${envd:DMAAP_HTTPS:false} +kafka.sink.topics.policy-pdp-pap.servers=kafka:9092 +kafka.sink.topics.policy-pdp-pap.effectiveTopic=${envd:POLICY_PDP_PAP_TOPIC} +kafka.sink.topics.policy-pdp-pap.apiKey=${envd:POLICY_PDP_PAP_API_KEY} +kafka.sink.topics.policy-pdp-pap.apiSecret=${envd:POLICY_PDP_PAP_API_SECRET} +kafka.sink.topics.policy-pdp-pap.https=${envd:DMAAP_HTTPS:false} diff --git a/compose/config/drools-applications/env/base.conf b/compose/config/drools-applications/env/base.conf index ca944dbb..0b92602f 100644 --- a/compose/config/drools-applications/env/base.conf +++ b/compose/config/drools-applications/env/base.conf @@ -1,6 +1,7 @@ # ============LICENSE_START================================================== # Copyright (C) 2020-2021 AT&T Intellectual Property. All rights reserved. # Modifications Copyright (C) 2021 Bell Canada. All rights reserved. +# Modification Copyright 2024 Nordix Foundation. # =========================================================================== # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -71,7 +72,7 @@ LIVENESS_CONTROLLERS=* # PDP-D DMaaP configuration channel -PDPD_CONFIGURATION_TOPIC=PDPD-CONFIGURATION +PDPD_CONFIGURATION_TOPIC=pdpd-configuration PDPD_CONFIGURATION_API_KEY= PDPD_CONFIGURATION_API_SECRET= PDPD_CONFIGURATION_CONSUMER_GROUP= @@ -80,7 +81,7 @@ PDPD_CONFIGURATION_PARTITION_KEY= # PAP-PDP configuration channel -POLICY_PDP_PAP_TOPIC=POLICY-PDP-PAP +POLICY_PDP_PAP_TOPIC=policy-pdp-pap POLICY_PDP_PAP_API_KEY= POLICY_PDP_PAP_API_SECRET= @@ -102,13 +103,13 @@ PDP_HTTPS=false # DCAE DMaaP -DCAE_TOPIC=unauthenticated.DCAE_CL_OUTPUT -DCAE_SERVERS=message-router +DCAE_TOPIC=unauthenticated.dcae_cl_output +DCAE_SERVERS=kafka:9092 DCAE_CONSUMER_GROUP=dcae.policy.shared # Open DMaaP -DMAAP_SERVERS=message-router +DMAAP_SERVERS=kafka:9092 DMAAP_HTTPS=false # AAI diff --git a/compose/config/drools-applications/env/feature-pooling-dmaap.conf b/compose/config/drools-applications/env/feature-pooling-dmaap.conf index 3808e3d9..f90a9f04 100644 --- a/compose/config/drools-applications/env/feature-pooling-dmaap.conf +++ b/compose/config/drools-applications/env/feature-pooling-dmaap.conf @@ -1,5 +1,6 @@ # ============LICENSE_START======================================================= # Copyright (C) 2020 AT&T Intellectual Property. All rights reserved. +# Modification Copyright 2024 Nordix Foundation. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,4 +15,4 @@ # limitations under the License. # ============LICENSE_END========================================================= -POOLING_TOPIC=POOLING +POOLING_TOPIC=pooling diff --git a/compose/config/drools-pdp/custom/noop.pre.sh b/compose/config/drools-pdp/custom/noop.pre.sh index 63bdc4cf..2be33980 100755 --- a/compose/config/drools-pdp/custom/noop.pre.sh +++ b/compose/config/drools-pdp/custom/noop.pre.sh @@ -1,6 +1,7 @@ #!/bin/bash -x # ============LICENSE_START======================================================= # Copyright (C) 2020 AT&T Intellectual Property. All rights reserved. +# Modification Copyright 2024 Nordix Foundation. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,7 +16,7 @@ # limitations under the License. # ============LICENSE_END========================================================= -sed -i "s/^dmaap/noop/g" \ +sed -i "s/^dmaap/kafka/g" \ ${POLICY_HOME}/config/engine.properties \ ${POLICY_HOME}/config/feature-lifecycle.properties diff --git a/compose/config/drools-pdp/env/base.conf b/compose/config/drools-pdp/env/base.conf index 98cc10f8..d301d830 100644 --- a/compose/config/drools-pdp/env/base.conf +++ b/compose/config/drools-pdp/env/base.conf @@ -1,6 +1,7 @@ # ============LICENSE_START======================================================= # Copyright (C) 2020-2021 AT&T Intellectual Property. All rights reserved. # Modifications Copyright (C) 2021 Bell Canada. All rights reserved. +# Modification Copyright 2024 Nordix Foundation. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -68,7 +69,7 @@ PROMETHEUS=true # PDP-D DMaaP configuration channel -PDPD_CONFIGURATION_TOPIC=PDPD-CONFIGURATION +PDPD_CONFIGURATION_TOPIC=pdpd-configuration PDPD_CONFIGURATION_API_KEY= PDPD_CONFIGURATION_API_SECRET= PDPD_CONFIGURATION_CONSUMER_GROUP= @@ -77,7 +78,7 @@ PDPD_CONFIGURATION_PARTITION_KEY= # PAP-PDP configuration channel -POLICY_PDP_PAP_TOPIC=POLICY-PDP-PAP +POLICY_PDP_PAP_TOPIC=policy-pdp-pap POLICY_PDP_PAP_API_KEY= POLICY_PDP_PAP_API_SECRET= @@ -101,13 +102,13 @@ GUARD_DISABLED=false # DCAE DMaaP -DCAE_TOPIC=unauthenticated.DCAE_CL_OUTPUT +DCAE_TOPIC=unauthenticated.dcae_cl_output DCAE_SERVERS= DCAE_CONSUMER_GROUP=dcae.policy.shared # Open DMaaP -DMAAP_SERVERS=mr.api.simpledemo.onap.org +DMAAP_SERVERS=kafka:9092 DMAAP_HTTPS=false # AAI diff --git a/compose/config/pap/papParameters.yaml b/compose/config/pap/papParameters.yaml index 9e8077a7..1f42a575 100644 --- a/compose/config/pap/papParameters.yaml +++ b/compose/config/pap/papParameters.yaml @@ -32,9 +32,9 @@ pap: name: PapGroup aaf: false topic: - pdp-pap.name: POLICY-PDP-PAP - notification.name: POLICY-NOTIFICATION - heartbeat.name: POLICY-HEARTBEAT + pdp-pap.name: policy-pdp-pap + notification.name: policy-notification + heartbeat.name: policy-heartbeat pdpParameters: heartBeatMs: 120000 updateParameters: @@ -48,28 +48,28 @@ pap: topicSources: - topic: ${pap.topic.pdp-pap.name} servers: - - message-router - topicCommInfrastructure: dmaap + - kafka:9092 + topicCommInfrastructure: kafka useHttps: false fetchTimeout: 15000 - topic: ${pap.topic.heartbeat.name} effectiveTopic: ${pap.topic.pdp-pap.name} consumerGroup: policy-pap servers: - - message-router - topicCommInfrastructure: dmaap + - kafka:9092 + topicCommInfrastructure: kafka useHttps: false fetchTimeout: 15000 topicSinks: - topic: ${pap.topic.pdp-pap.name} servers: - - message-router - topicCommInfrastructure: dmaap + - kafka:9092 + topicCommInfrastructure: kafka useHttps: false - topic: ${pap.topic.notification.name} servers: - - message-router - topicCommInfrastructure: dmaap + - kafka:9092 + topicCommInfrastructure: kafka useHttps: false healthCheckRestClientParameters: - clientName: api diff --git a/compose/config/pap/papParametersPostgres.yaml b/compose/config/pap/papParametersPostgres.yaml index 1959e877..15e77f14 100644 --- a/compose/config/pap/papParametersPostgres.yaml +++ b/compose/config/pap/papParametersPostgres.yaml @@ -32,9 +32,9 @@ pap: name: PapGroup aaf: false topic: - pdp-pap.name: POLICY-PDP-PAP - notification.name: POLICY-NOTIFICATION - heartbeat.name: POLICY-HEARTBEAT + pdp-pap.name: policy-pdp-pap + notification.name: policy-notification + heartbeat.name: policy-heartbeat pdpParameters: heartBeatMs: 120000 updateParameters: @@ -48,28 +48,28 @@ pap: topicSources: - topic: ${pap.topic.pdp-pap.name} servers: - - message-router - topicCommInfrastructure: dmaap + - kafka:9092 + topicCommInfrastructure: kafka useHttps: false fetchTimeout: 15000 - topic: ${pap.topic.heartbeat.name} effectiveTopic: ${pap.topic.pdp-pap.name} consumerGroup: policy-pap servers: - - message-router - topicCommInfrastructure: dmaap + - kafka:9092 + topicCommInfrastructure: kafka useHttps: false fetchTimeout: 15000 topicSinks: - topic: ${pap.topic.pdp-pap.name} servers: - - message-router - topicCommInfrastructure: dmaap + - kafka:9092 + topicCommInfrastructure: kafka useHttps: false - topic: ${pap.topic.notification.name} servers: - - message-router - topicCommInfrastructure: dmaap + - kafka:9092 + topicCommInfrastructure: kafka useHttps: false healthCheckRestClientParameters: - clientName: api diff --git a/compose/config/sim-all/simParameters.json b/compose/config/sim-all/simParameters.json index 41fc9057..2bc5812f 100644 --- a/compose/config/sim-all/simParameters.json +++ b/compose/config/sim-all/simParameters.json @@ -47,7 +47,7 @@ ], "topicSinks": [ { - "topic": "APPC-CL", + "topic": "appc-cl", "servers": ["${HOST_NAME}"], "topicCommInfrastructure": "DMAAP", "useHttps": false, @@ -55,7 +55,7 @@ "apiSecret": "some-secret" }, { - "topic": "APPC-LCM-WRITE", + "topic": "appc-lcm-write", "servers": ["${HOST_NAME}"], "topicCommInfrastructure": "DMAAP", "useHttps": false, @@ -65,7 +65,7 @@ ], "topicSources": [ { - "topic": "APPC-CL", + "topic": "appc-cl", "servers": ["${HOST_NAME}"], "topicCommInfrastructure": "DMAAP", "useHttps": false, @@ -73,7 +73,7 @@ "apiSecret": "some-secret" }, { - "topic": "APPC-LCM-READ", + "topic": "appc-lcm-read", "servers": ["${HOST_NAME}"], "topicCommInfrastructure": "DMAAP", "useHttps": false, @@ -85,14 +85,14 @@ { "name": "APPC Legacy simulator", "providerClass": "org.onap.policy.simulators.AppcLegacyTopicServer", - "sink": "APPC-CL", - "source": "APPC-CL" + "sink": "appc-cl", + "source": "appc-cl" }, { - "name": "APPC-LCM simulator", + "name": "appc-lcm-simulator", "providerClass": "org.onap.policy.simulators.AppcLcmTopicServer", - "sink": "APPC-LCM-WRITE", - "source": "APPC-LCM-READ" + "sink": "appc-lcm-write", + "source": "appc-lcm-read" } ], "grpcServer": { diff --git a/compose/config/xacml-pdp/defaultConfig.json b/compose/config/xacml-pdp/defaultConfig.json index d6f3bd99..ba587f1a 100644 --- a/compose/config/xacml-pdp/defaultConfig.json +++ b/compose/config/xacml-pdp/defaultConfig.json @@ -24,15 +24,16 @@ }, "topicParameterGroup": { "topicSources" : [{ - "topic" : "POLICY-PDP-PAP", - "servers" : [ "message-router" ], - "topicCommInfrastructure" : "dmaap", - "useHttps" : false + "topic" : "policy-pdp-pap", + "servers" : [ "kafka:9092" ], + "topicCommInfrastructure" : "kafka", + "useHttps" : false, + "fetchTimeout": 15000 }], "topicSinks" : [{ - "topic" : "POLICY-PDP-PAP", - "servers" : [ "message-router" ], - "topicCommInfrastructure" : "dmaap", + "topic" : "policy-pdp-pap", + "servers" : [ "kafka:9092" ], + "topicCommInfrastructure" : "kafka", "useHttps" : false }] } diff --git a/compose/docker-compose.postgres.yml b/compose/docker-compose.postgres.yml index 4ae95582..4fe40024 100644 --- a/compose/docker-compose.postgres.yml +++ b/compose/docker-compose.postgres.yml @@ -1,6 +1,6 @@ # # ===========LICENSE_START==================================================== -# Copyright (C) 2023 Nordix Foundation. +# Copyright (C) 2023-2024 Nordix Foundation. # ============================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -58,7 +58,6 @@ services: networks: default: aliases: - - message-router - aai-sim - grpc-sim - sdnc-sim @@ -72,7 +71,6 @@ services: - 6668 - 6669 - 6670 - - 3904 - 6680 ports: - ${DMAAP_PORT}:3904 @@ -99,7 +97,7 @@ services: container_name: policy-pap depends_on: - postgres - - simulator + - kafka - api hostname: policy-pap ports: @@ -113,7 +111,7 @@ services: command: [ '-c', './policy-pap.sh', 'postgres', '5432', - 'message-router', '3904', + 'kafka', '9092', 'api', '6969' ] apex-pdp: @@ -122,6 +120,7 @@ services: depends_on: - postgres - simulator + - kafka - pap hostname: policy-apex-pdp ports: @@ -135,7 +134,7 @@ services: command: [ '-c', '/opt/app/policy/apex-pdp/bin/apexOnapPf.sh -c /opt/app/policy/apex-pdp/etc/onappf/config/OnapPfConfig.json', 'postgres', '5432', - 'message-router', '3904', + 'kafka', '9092', 'pap', '6969' ] prometheus: @@ -146,3 +145,27 @@ services: - ${PROMETHEUS_PORT}:9090 volumes: - ./metrics/prometheus.yml:/etc/prometheus/prometheus.yml + + zookeeper: + image: confluentinc/cp-zookeeper:latest + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + ZOOKEEPER_TICK_TIME: 2000 + ports: + - 2181:2181 + + kafka: + image: confluentinc/cp-kafka:latest + container_name: kafka + depends_on: + - zookeeper + ports: + - 29092:29092 + - 9092:9092 + environment: + KAFKA_BROKER_ID: 1 + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT + KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 diff --git a/compose/docker-compose.yml b/compose/docker-compose.yml index 3814f215..dbd62985 100644 --- a/compose/docker-compose.yml +++ b/compose/docker-compose.yml @@ -2,7 +2,7 @@ # ===========LICENSE_START==================================================== # Copyright (C) 2020-2021 AT&T Intellectual Property. All rights reserved. # Modifications Copyright (C) 2021 Bell Canada. All rights reserved. -# Modification Copyright 2021-2023 Nordix Foundation. +# Modification Copyright 2021-2024 Nordix Foundation. # ============================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -58,7 +58,6 @@ services: networks: default: aliases: - - message-router - aai-sim - grpc-sim - sdnc-sim @@ -72,7 +71,6 @@ services: - 6668 - 6669 - 6670 - - 3904 - 6680 ports: - ${DMAAP_PORT}:3904 @@ -99,7 +97,7 @@ services: container_name: policy-pap depends_on: - mariadb - - simulator + - kafka - api hostname: policy-pap ports: @@ -113,7 +111,7 @@ services: command: [ '-c', './policy-pap.sh', 'mariadb', '3306', - 'message-router', '3904', + 'kafka', '9092', 'api', '6969' ] xacml-pdp: @@ -121,7 +119,7 @@ services: container_name: policy-xacml-pdp depends_on: - mariadb - - simulator + - kafka - pap hostname: policy-xacml-pdp ports: @@ -135,7 +133,7 @@ services: command: [ '-c', './policy-pdpx.sh', 'mariadb', '3306', - 'message-router', '3904', + 'kafka', '9092', 'pap', '6969' ] drools-pdp: @@ -143,7 +141,7 @@ services: container_name: drools depends_on: - mariadb - - simulator + - kafka - pap hostname: drools ports: @@ -158,7 +156,7 @@ services: command: [ '-c', '/opt/app/policy/bin/pdpd-entrypoint.sh boot', 'mariadb', '3306', - 'message-router', '3904' + 'kafka', '9092' ] drools-applications: image: ${CONTAINER_LOCATION}onap/policy-pdpd-cl:${POLICY_DROOLS_APPS_VERSION} @@ -166,6 +164,7 @@ services: depends_on: - mariadb - simulator + - kafka - pap - xacml-pdp hostname: drools-apps @@ -183,7 +182,7 @@ services: command: [ '-c', '/opt/app/policy/bin/pdpd-cl-entrypoint.sh boot', 'mariadb', '3306', - 'message-router', '3904', + 'kafka', '9092', 'pap', '6969', 'aai-sim', '6666', 'sdnc-sim', '6668', @@ -196,6 +195,7 @@ services: depends_on: - mariadb - simulator + - kafka - pap hostname: policy-apex-pdp ports: @@ -209,7 +209,7 @@ services: command: [ '-c', '/opt/app/policy/apex-pdp/bin/apexOnapPf.sh -c /opt/app/policy/apex-pdp/etc/onappf/config/OnapPfConfig.json', 'mariadb', '3306', - 'message-router', '3904', + 'kafka', '9092', 'pap', '6969' ] distribution: @@ -232,7 +232,7 @@ services: command: [ '-c', './policy-dist.sh', 'mariadb', '3306', - 'message-router', '3904', + 'kafka', '9092', 'pap', '6969', 'apex-pdp', '6969' ] diff --git a/compose/export-ports.sh b/compose/export-ports.sh index 701449e0..a55f755e 100755 --- a/compose/export-ports.sh +++ b/compose/export-ports.sh @@ -1,6 +1,6 @@ #!/bin/bash # ============LICENSE_START======================================================= -# Copyright 2023 Nordix Foundation. +# Copyright 2023-2024 Nordix Foundation. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -44,5 +44,6 @@ export SIM_PARTICIPANT_PORT=30011 export DROOLS_APPS_PORT=30009 export DROOLS_APPS_TELEMETRY_PORT=30219 export DMAAP_PORT=30904 +export KAFKA_PORT=9092 export PROMETHEUS_PORT=30259 export GRAFANA_PORT=30269 diff --git a/csit/resources/scripts/kafka_consumer.py b/csit/resources/scripts/kafka_consumer.py deleted file mode 100755 index 80b6167a..00000000 --- a/csit/resources/scripts/kafka_consumer.py +++ /dev/null @@ -1,71 +0,0 @@ -#!/usr/bin/env python3 -# -# ============LICENSE_START==================================================== -# Copyright (C) 2023 Nordix Foundation. -# ============================================================================= -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# SPDX-License-Identifier: Apache-2.0 -# ============LICENSE_END====================================================== - -# Python utility to fetch kafka topic and look for required messages. -# Accepts the arguments {topic_name} and {list of expected values} and {timeout} to verify the kafka topic. - - -from confluent_kafka import Consumer, KafkaException -import sys -import time - -def consume_kafka_topic(topic, expected_values, timeout): - config = { - 'bootstrap.servers': 'localhost:29092', - 'group.id': 'testgrp', - 'auto.offset.reset': 'earliest' - } - consumer = Consumer(config) - consumer.subscribe([topic]) - try: - start_time = time.time() - while time.time() - start_time < timeout: - msg = consumer.poll(1.0) - if msg is None: - continue - if msg.error(): - if msg.error().code() == KafkaException._PARTITION_EOF: - sys.stderr.write(f"Reached end of topic {msg.topic()} / partition {msg.partition()}\n") - print('ERROR') - sys.exit(404) - else: - # Error - raise KafkaException(msg.error()) - else: - # Message received - message = msg.value().decode('utf-8') - if verify_msg(expected_values, message): - print(message) - sys.exit(200) - finally: - consumer.close() - -def verify_msg(expected_values, message): - for item in expected_values: - if item not in message: - return False - return True - - -if __name__ == '__main__': - topic_name = sys.argv[1] - timeout = sys.argv[2] # timeout in seconds for verifying the kafka topic - expected_values = sys.argv[3:] - consume_kafka_topic(topic_name, expected_values, timeout) diff --git a/csit/resources/scripts/kafka_producer.py b/csit/resources/scripts/kafka_producer.py deleted file mode 100755 index ff129872..00000000 --- a/csit/resources/scripts/kafka_producer.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python3 -# -# ============LICENSE_START==================================================== -# Copyright (C) 2023 Nordix Foundation. -# ============================================================================= -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# SPDX-License-Identifier: Apache-2.0 -# ============LICENSE_END====================================================== - -# Python utility to produce a message on a kafka topic -# Accepts the arguments {topic_name} and {message} - -from confluent_kafka import Producer -import sys - -def post_to_kafka(topic, message): - conf = {'bootstrap.servers': 'localhost:29092'} - - producer = Producer(conf) - try: - producer.produce(topic, value=message.encode('utf-8')) - producer.flush() - print('Message posted to Kafka topic: {}'.format(topic)) - except Exception as e: - print('Failed to post message: {}'.format(str(e))) - finally: - producer.flush() - -if __name__ == '__main__': - post_to_kafka(sys.argv[1], sys.argv[2]) diff --git a/csit/resources/scripts/make_topics.py b/csit/resources/scripts/make_topics.py deleted file mode 100755 index daee4341..00000000 --- a/csit/resources/scripts/make_topics.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python3 -# -# ============LICENSE_START==================================================== -# Copyright (C) 2023 Nordix Foundation. -# ============================================================================= -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# SPDX-License-Identifier: Apache-2.0 -# ============LICENSE_END====================================================== - -# Python utility to create a new kafka topic -# Accepts the argument {topic_name} - -from confluent_kafka.admin import AdminClient, NewTopic -import sys - -def create_topic(bootstrap_servers, topic_name, num_partitions=2, replication_factor=2): - admin_client = AdminClient({'bootstrap.servers': bootstrap_servers}) - - # Define the topic configuration - topic = NewTopic(topic_name, num_partitions=num_partitions, replication_factor=replication_factor) - - # Create the topic - admin_client.create_topics([topic]) - - -if __name__ == '__main__': - topic_name = sys.argv[1] - bootstrap_servers = 'localhost:29092' - - create_topic(bootstrap_servers, topic_name) diff --git a/csit/resources/scripts/run-test.sh b/csit/resources/scripts/run-test.sh index 02f06ff4..6bd7c07b 100755 --- a/csit/resources/scripts/run-test.sh +++ b/csit/resources/scripts/run-test.sh @@ -1,7 +1,7 @@ #!/bin/bash # # ============LICENSE_START==================================================== -# Copyright (C) 2023 Nordix Foundation. +# Copyright (C) 2023-2024 Nordix Foundation. # ============================================================================= # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -34,6 +34,7 @@ POLICY_PDPX_IP=policy-xacml-pdp:${DEFAULT_PORT} POLICY_DROOLS_IP=policy-drools-pdp:9696 DISTRIBUTION_IP=policy-distribution:6969 DMAAP_IP=message-router:3904 +KAFKA_IP=kafka:9092 APEX_EVENTS_IP=policy-apex-pdp:23324 PROMETHEUS_IP=prometheus:9090 CLAMP_K8S_TEST=true @@ -43,7 +44,7 @@ DIST_TEMP_FOLDER=/tmp/distribution export ROBOT_VARIABLES= ROBOT_VARIABLES="-v DATA:$DATA -v NODETEMPLATES:$NODETEMPLATES -v POLICY_API_IP:$POLICY_API_IP -v POLICY_RUNTIME_ACM_IP:$POLICY_RUNTIME_ACM_IP -v POLICY_PAP_IP:$POLICY_PAP_IP -v APEX_IP:$APEX_IP --v APEX_EVENTS_IP:$APEX_EVENTS_IP -v DMAAP_IP:$DMAAP_IP -v PROMETHEUS_IP:${PROMETHEUS_IP} +-v APEX_EVENTS_IP:$APEX_EVENTS_IP -v DMAAP_IP:$DMAAP_IP -v KAFKA_IP:$KAFKA_IP -v PROMETHEUS_IP:${PROMETHEUS_IP} -v POLICY_PDPX_IP:$POLICY_PDPX_IP -v POLICY_DROOLS_IP:$POLICY_DROOLS_IP -v TEMP_FOLDER:${DIST_TEMP_FOLDER} -v DISTRIBUTION_IP:$DISTRIBUTION_IP -v CLAMP_K8S_TEST:$CLAMP_K8S_TEST" diff --git a/csit/resources/scripts/setup-apex-pdp-postgres.sh b/csit/resources/scripts/setup-apex-pdp-postgres.sh index f088da3c..150ec8f6 100755 --- a/csit/resources/scripts/setup-apex-pdp-postgres.sh +++ b/csit/resources/scripts/setup-apex-pdp-postgres.sh @@ -44,10 +44,10 @@ do sleep 10s done -export DMAAP_IP="localhost:${DMAAP_PORT}" +export KAFKA_IP="localhost:${KAFKA_PORT}" export SUITES="apex-pdp-test.robot" ROBOT_VARIABLES="-v POLICY_PAP_IP:localhost:${PAP_PORT} -v POLICY_API_IP:localhost:${API_PORT} -v PROMETHEUS_IP:localhost:${PROMETHEUS_PORT} -v DATA:${DATA} -v NODETEMPLATES:${NODETEMPLATES} --v APEX_IP:localhost:${APEX_PORT} -v DMAAP_IP:${DMAAP_IP} +-v APEX_IP:localhost:${APEX_PORT} -v KAFKA_IP:${KAFKA_IP} -v APEX_EVENTS_IP:localhost:${APEX_EVENTS_PORT}" diff --git a/csit/resources/scripts/setup-apex-pdp.sh b/csit/resources/scripts/setup-apex-pdp.sh index b9b1a78a..198a6017a 100755 --- a/csit/resources/scripts/setup-apex-pdp.sh +++ b/csit/resources/scripts/setup-apex-pdp.sh @@ -2,7 +2,7 @@ # ============LICENSE_START======================================================= # Copyright (C) 2018 Ericsson. All rights reserved. # -# Modifications Copyright (c) 2019-2023 Nordix Foundation. +# Modifications Copyright (c) 2019-2024 Nordix Foundation. # Modifications Copyright (C) 2020-2021 AT&T Intellectual Property. # Modifications Copyright (C) 2021 Bell Canada. All rights reserved. # ================================================================================ @@ -26,9 +26,9 @@ source "${SCRIPTS}"/setup-pap.sh # wait for the app to start up bash "${SCRIPTS}"/wait_for_rest.sh localhost ${APEX_PORT} -export DMAAP_IP="localhost:${DMAAP_PORT}" +export KAFKA_IP="kafka:${KAFKA_PORT}" export SUITES="apex-pdp-test.robot apex-slas.robot" -ROBOT_VARIABLES="${ROBOT_VARIABLES} -v APEX_IP:localhost:${APEX_PORT} -v DMAAP_IP:${DMAAP_IP} +ROBOT_VARIABLES="${ROBOT_VARIABLES} -v APEX_IP:localhost:${APEX_PORT} -v KAFKA_IP:${KAFKA_IP} -v APEX_EVENTS_IP:localhost:${APEX_EVENTS_PORT}" diff --git a/csit/resources/scripts/setup-drools-applications.sh b/csit/resources/scripts/setup-drools-applications.sh index d8542bd1..369874b6 100755 --- a/csit/resources/scripts/setup-drools-applications.sh +++ b/csit/resources/scripts/setup-drools-applications.sh @@ -2,7 +2,7 @@ # # ===========LICENSE_START==================================================== # Copyright (C) 2019-2021 AT&T Intellectual Property. All rights reserved. -# Modifications Copyright 2021-2023 Nordix Foundation. +# Modifications Copyright 2021-2024 Nordix Foundation. # ============================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -35,4 +35,4 @@ sleep 15 ROBOT_VARIABLES="-v DATA:${DATA} -v DROOLS_IP:localhost:${DROOLS_APPS_PORT} -v DROOLS_IP_2:localhost:${DROOLS_APPS_TELEMETRY_PORT} -v POLICY_API_IP:localhost:${API_PORT} --v POLICY_PAP_IP:localhost:${PAP_PORT} -v DMAAP_IP:localhost:${DMAAP_PORT}" +-v POLICY_PAP_IP:localhost:${PAP_PORT} -v KAFKA_IP:localhost:${KAFKA_PORT}" diff --git a/csit/resources/scripts/setup-xacml-pdp.sh b/csit/resources/scripts/setup-xacml-pdp.sh index 251cb29c..4511d91e 100755 --- a/csit/resources/scripts/setup-xacml-pdp.sh +++ b/csit/resources/scripts/setup-xacml-pdp.sh @@ -1,7 +1,7 @@ #!/bin/bash # ============LICENSE_START======================================================= # Copyright (C) 2020-2021 AT&T Intellectual Property. All rights reserved. -# Modifications Copyright 2021-2023 Nordix Foundation. +# Modifications Copyright 2021-2024 Nordix Foundation. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -32,4 +32,4 @@ export SUITES="xacml-pdp-test.robot" ROBOT_VARIABLES="-v DATA:${DATA} -v POLICY_PDPX_IP:localhost:${XACML_PORT} -v POLICY_API_IP:localhost:${API_PORT} -v POLICY_PAP_IP:localhost:${PAP_PORT} --v DMAAP_IP:localhost:${DMAAP_PORT}" +-v KAFKA_IP:localhost:${KAFKA_PORT}" diff --git a/csit/resources/tests/apex-pdp-common.robot b/csit/resources/tests/apex-pdp-common.robot index 8ae63af6..e6458318 100644 --- a/csit/resources/tests/apex-pdp-common.robot +++ b/csit/resources/tests/apex-pdp-common.robot @@ -27,8 +27,8 @@ RunEventOnApexEngine CheckLogMessage [Documentation] Read log messages received and check for expected content. - [Arguments] ${status} ${expectedMsg} - ${result}= CheckTopic APEX-CL-MGT ${status} + [Arguments] ${topic} ${status} ${expectedMsg} + ${result}= CheckKafkaTopic ${topic} ${status} Should Contain ${result} ${expectedMsg} ValidatePolicyExecution diff --git a/csit/resources/tests/apex-pdp-test.robot b/csit/resources/tests/apex-pdp-test.robot index b0232263..5e4ea34b 100644 --- a/csit/resources/tests/apex-pdp-test.robot +++ b/csit/resources/tests/apex-pdp-test.robot @@ -31,17 +31,17 @@ ExecuteApexTestPnfPolicy CreatePolicy /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies 200 ${postjson} ${policyName} 1.0.0 DeployPolicy Wait Until Keyword Succeeds 2 min 5 sec QueryPolicyStatus ${policyName} defaultGroup apex ${pdpName} onap.policies.native.Apex - GetTopic APEX-CL-MGT - Wait Until Keyword Succeeds 2 min 5 sec TriggerAndVerifyTestPnfPolicy + GetKafkaTopic apex-cl-mgt + Wait Until Keyword Succeeds 2 min 5 sec TriggerAndVerifyTestPnfPolicy apex-cl-mgt -ExecuteApexTestVnfPolicy - Set Test Variable ${policyName} onap.policies.apex.vnf.Test - ${postjson}= Get File ${CURDIR}/data/${policyName}.json - CreatePolicy /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies 200 ${postjson} ${policyName} 1.0.0 - DeployPolicy - Wait Until Keyword Succeeds 2 min 5 sec QueryPolicyStatus ${policyName} defaultGroup apex ${pdpName} onap.policies.native.Apex - GetTopic APEX-CL-MGT - Wait Until Keyword Succeeds 2 min 5 sec TriggerAndVerifyTestVnfPolicy +#ExecuteApexTestVnfPolicy +# Set Test Variable ${policyName} onap.policies.apex.vnf.Test +# ${postjson}= Get File ${CURDIR}/data/${policyName}.json +# CreatePolicy /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies 200 ${postjson} ${policyName} 1.0.0 +# DeployPolicy +# Wait Until Keyword Succeeds 2 min 5 sec QueryPolicyStatus ${policyName} defaultGroup apex ${pdpName} onap.policies.native.Apex +# GetTopic apex-cl-mgt +# Wait Until Keyword Succeeds 2 min 5 sec TriggerAndVerifyTestVnfPolicy ExecuteApexTestPnfPolicyWithMetadataSet Set Test Variable ${policyName} onap.policies.apex.pnf.metadataSet.Test @@ -51,17 +51,17 @@ ExecuteApexTestPnfPolicyWithMetadataSet CreateNodeTemplate /policy/api/v1/nodetemplates 200 ${postjson} 1 DeployPolicy Wait Until Keyword Succeeds 2 min 5 sec QueryPolicyStatus ${policyName} defaultGroup apex ${pdpName} onap.policies.native.Apex - GetTopic APEX-CL-MGT2 - Wait Until Keyword Succeeds 2 min 5 sec TriggerAndVerifyTestPnfPolicy + GetKafkaTopic apex-cl-mgt2 + Wait Until Keyword Succeeds 2 min 5 sec TriggerAndVerifyTestPnfPolicy apex-cl-mgt2 Metrics [Documentation] Verify policy-apex-pdp is exporting prometheus metrics ${auth}= PolicyAdminAuth ${resp}= PerformGetRequest ${APEX_IP} /metrics 200 null ${auth} - Should Contain ${resp.text} pdpa_policy_deployments_total{operation="deploy",status="TOTAL",} 4.0 - Should Contain ${resp.text} pdpa_policy_deployments_total{operation="deploy",status="SUCCESS",} 4.0 - Should Contain ${resp.text} pdpa_policy_executions_total{status="SUCCESS",} 3.0 - Should Contain ${resp.text} pdpa_policy_executions_total{status="TOTAL",} 3.0 + Should Contain ${resp.text} pdpa_policy_deployments_total{operation="deploy",status="TOTAL",} 3.0 + Should Contain ${resp.text} pdpa_policy_deployments_total{operation="deploy",status="SUCCESS",} 3.0 + Should Contain ${resp.text} pdpa_policy_executions_total{status="SUCCESS",} 6.0 + Should Contain ${resp.text} pdpa_policy_executions_total{status="TOTAL",} 6.0 Should Match ${resp.text} *pdpa_engine_event_executions{engine_instance_id="NSOApexEngine-*:0.0.1",}* Should Match ${resp.text} *pdpa_engine_event_executions{engine_instance_id="MyApexEngine-*:0.0.1",}* Should Match ${resp.text} *pdpa_engine_state{engine_instance_id=*,} 2.0* @@ -78,18 +78,16 @@ Metrics TriggerAndVerifyTestPnfPolicy [Documentation] Send TestPnf policy trigger event to DMaaP and read notifications to verify policy execution - Create Session apexSession http://${DMAAP_IP} max_retries=1 + [Arguments] ${topic} ${data}= Get Binary File ${CURDIR}/data/VesEventForPnfPolicy.json - &{headers}= Create Dictionary Content-Type=application/json Accept=application/json - ${resp}= POST On Session apexSession /events/unauthenticated.DCAE_CL_OUTPUT data=${data} headers=${headers} - Should Be Equal As Strings ${resp.status_code} 200 - Run Keyword CheckLogMessage ACTIVE VES event has been received. Going to fetch details from AAI. - Run Keyword CheckLogMessage SUCCESS Received response from AAI successfully. Hostname in AAI matches with the one in Ves event. Going to make the update-config request to CDS. - Run Keyword CheckLogMessage FINAL_SUCCESS Successfully processed the VES event. Hostname is updated. + ${resp}= Run Process ${CURDIR}/kafka_producer.py unauthenticated.dcae_cl_output ${data} + Run Keyword CheckLogMessage ${topic} ACTIVE VES event has been received. Going to fetch details from AAI. + Run Keyword CheckLogMessage ${topic} SUCCESS Received response from AAI successfully. Hostname in AAI matches with the one in Ves event. Going to make the update-config request to CDS. + Run Keyword CheckLogMessage ${topic} FINAL_SUCCESS Successfully processed the VES event. Hostname is updated. TriggerAndVerifyTestVnfPolicy [Documentation] Send TestVnf policy trigger event to DMaaP and read notifications to verify policy execution - Create Session apexSession http://${DMAAP_IP} max_retries=1 + Create Session apexSession http://${KAFKA_IP} max_retries=1 ${data}= Get Binary File ${CURDIR}/data/VesEventForVnfPolicy.json &{headers}= Create Dictionary Content-Type=application/json Accept=application/json ${resp}= POST On Session apexSession /events/unauthenticated.DCAE_POLICY_EXAMPLE_OUTPUT data=${data} headers=${headers} diff --git a/csit/resources/tests/apex-slas.robot b/csit/resources/tests/apex-slas.robot index 408b0add..4191bb2a 100644 --- a/csit/resources/tests/apex-slas.robot +++ b/csit/resources/tests/apex-slas.robot @@ -23,13 +23,10 @@ ValidatePolicyExecutionAndEventRateLowComplexity CreatePolicy /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies 200 ${postjson} ${policyName} 1.0.0 DeployPolicy Wait Until Keyword Succeeds 2 min 5 sec QueryPolicyStatus ${policyName} defaultGroup apex ${pdpName} onap.policies.native.Apex - GetTopic APEX-CL-MGT - Create Session apexSession http://${DMAAP_IP} max_retries=1 + GetKafkaTopic apex-cl-mgt ${data}= Get Binary File ${CURDIR}/data/VesEventForPnfPolicy.json - &{headers}= Create Dictionary Content-Type=application/json Accept=application/json ${eventStartTime}= Get Current Date - ${resp}= POST On Session apexSession /events/unauthenticated.DCAE_CL_OUTPUT data=${data} headers=${headers} - Should Be Equal As Strings ${resp.status_code} 200 + ${resp}= Run Process ${CURDIR}/kafka_producer.py unauthenticated.dcae_cl_output ${data} ${eventEndTime}= Get Current Date ValidateEventExecution ${eventStartTime} ${eventEndTime} 10 @@ -53,13 +50,10 @@ ValidatePolicyExecutionAndEventRateHighComplexity CreateNodeTemplate /policy/api/v1/nodetemplates 200 ${postjson} 1 DeployPolicy Wait Until Keyword Succeeds 2 min 5 sec QueryPolicyStatus ${policyName} defaultGroup apex ${pdpName} onap.policies.native.Apex - GetTopic APEX-CL-MGT2 - Create Session apexSession http://${DMAAP_IP} max_retries=1 + GetKafkaTopic apex-cl-mgt2 ${data}= Get Binary File ${CURDIR}/data/VesEventForVnfPolicy.json - &{headers}= Create Dictionary Content-Type=application/json Accept=application/json ${eventStartTime}= Get Current Date - ${resp}= POST On Session apexSession /events/unauthenticated.DCAE_POLICY_EXAMPLE_OUTPUT data=${data} headers=${headers} - Should Be Equal As Strings ${resp.status_code} 200 + ${resp}= Run Process ${CURDIR}/kafka_producer.py unauthenticated.dcae_policy_example_output ${data} ${eventEndTime}= Get Current Date ValidateEventExecution ${eventStartTime} ${eventEndTime} 0.2 diff --git a/csit/resources/tests/common-library.robot b/csit/resources/tests/common-library.robot index f5db8e0e..531572d2 100644 --- a/csit/resources/tests/common-library.robot +++ b/csit/resources/tests/common-library.robot @@ -150,9 +150,9 @@ CheckTopic CheckKafkaTopic [Arguments] ${topic} ${expected_status} - ${resp}= Run Process ${CURDIR}/kafka_consumer.py ${topic} 30 ${expected_status} - Log to console Received response from kafka ${resp.stdout} - Should Contain ${resp.text} ${expected_status} + ${resp}= Run Process ${CURDIR}/kafka_consumer.py ${topic} 60 ${expected_status} + Should Contain ${resp.stdout} ${expected_status} + [Return] ${resp.stdout} GetKafkaTopic [Arguments] ${topic} diff --git a/csit/resources/tests/data/onap.policies.apex.pnf.Test.json b/csit/resources/tests/data/onap.policies.apex.pnf.Test.json index 05522936..fc9b521c 100644 --- a/csit/resources/tests/data/onap.policies.apex.pnf.Test.json +++ b/csit/resources/tests/data/onap.policies.apex.pnf.Test.json @@ -32,7 +32,7 @@ "taskParameters": [ { "key": "logUrl", - "value": "http://message-router:3904/events/APEX-CL-MGT" + "value": "http://localhost:8082/topics/apex-cl-mgt" } ] }, @@ -2775,10 +2775,23 @@ "eventInputParameters": { "SimpleCL_DCAEConsumer": { "carrierTechnologyParameters": { - "carrierTechnology": "RESTCLIENT", - "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.restclient.RestClientCarrierTechnologyParameters", + "carrierTechnology": "KAFKA", + "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.kafka.KafkaCarrierTechnologyParameters", "parameters": { - "url": "http://message-router:3904/events/unauthenticated.DCAE_CL_OUTPUT/cl/apex?timeout=30000" + "bootstrapServers": "kafka:9092", + "groupId": "apex-grp", + "enableAutoCommit": true, + "autoCommitTime": 1000, + "sessionTimeout": 30000, + "consumerPollTime": 100, + "consumerTopicList": [ + "unauthenticated.dcae_cl_output" + ], + "keyDeserializer": "org.apache.kafka.common.serialization.StringDeserializer", + "valueDeserializer": "org.apache.kafka.common.serialization.StringDeserializer", + "kafkaProperties": [ + + ] } }, "eventProtocolParameters": { @@ -2876,10 +2889,21 @@ }, "SimpleCL_logOutputter": { "carrierTechnologyParameters": { - "carrierTechnology": "RESTCLIENT", - "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.restclient.RestClientCarrierTechnologyParameters", + "carrierTechnology": "KAFKA", + "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.kafka.KafkaCarrierTechnologyParameters", "parameters": { - "url": "http://message-router:3904/events/APEX-CL-MGT" + "bootstrapServers": "kafka:9092", + "acks": "all", + "retries": 0, + "batchSize": 16384, + "lingerTime": 1, + "bufferMemory": 33554432, + "producerTopic": "apex-cl-mgt", + "keySerializer": "org.apache.kafka.common.serialization.StringSerializer", + "valueSerializer": "org.apache.kafka.common.serialization.StringSerializer", + "kafkaProperties": [ + + ] } }, "eventProtocolParameters": { diff --git a/csit/resources/tests/data/onap.policies.apex.pnf.metadataSet.Test.json b/csit/resources/tests/data/onap.policies.apex.pnf.metadataSet.Test.json index 20338fff..d9cd87e2 100644 --- a/csit/resources/tests/data/onap.policies.apex.pnf.metadataSet.Test.json +++ b/csit/resources/tests/data/onap.policies.apex.pnf.metadataSet.Test.json @@ -36,7 +36,7 @@ "taskParameters": [ { "key": "logUrl", - "value": "http://message-router:3904/events/APEX-CL-MGT2" + "value": "http://kafka:9092/topics/apex-cl-mgt2" } ] } @@ -44,10 +44,23 @@ "eventInputParameters": { "SimpleCL_DCAEConsumer2": { "carrierTechnologyParameters": { - "carrierTechnology": "RESTCLIENT", - "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.restclient.RestClientCarrierTechnologyParameters", + "carrierTechnology": "KAFKA", + "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.kafka.KafkaCarrierTechnologyParameters", "parameters": { - "url": "http://message-router:3904/events/unauthenticated.DCAE_CL_OUTPUT/cl/apex?timeout=30000" + "bootstrapServers": "kafka:9092", + "groupId": "apex-grp2", + "enableAutoCommit": true, + "autoCommitTime": 1000, + "sessionTimeout": 30000, + "consumerPollTime": 100, + "consumerTopicList": [ + "unauthenticated.dcae_cl_output" + ], + "keyDeserializer": "org.apache.kafka.common.serialization.StringDeserializer", + "valueDeserializer": "org.apache.kafka.common.serialization.StringDeserializer", + "kafkaProperties": [ + + ] } }, "eventProtocolParameters": { @@ -145,10 +158,21 @@ }, "SimpleCL_logOutputter2": { "carrierTechnologyParameters": { - "carrierTechnology": "RESTCLIENT", - "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.restclient.RestClientCarrierTechnologyParameters", + "carrierTechnology": "KAFKA", + "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.kafka.KafkaCarrierTechnologyParameters", "parameters": { - "url": "http://message-router:3904/events/APEX-CL-MGT2" + "bootstrapServers": "kafka:9092", + "acks": "all", + "retries": 0, + "batchSize": 16384, + "lingerTime": 1, + "bufferMemory": 33554432, + "producerTopic": "apex-cl-mgt2", + "keySerializer": "org.apache.kafka.common.serialization.StringSerializer", + "valueSerializer": "org.apache.kafka.common.serialization.StringSerializer", + "kafkaProperties": [ + + ] } }, "eventProtocolParameters": { diff --git a/csit/resources/tests/distribution-test.robot b/csit/resources/tests/distribution-test.robot index b8b49248..c85be4fd 100644 --- a/csit/resources/tests/distribution-test.robot +++ b/csit/resources/tests/distribution-test.robot @@ -20,11 +20,11 @@ MetricsAfterExecution [Documentation] Verify policy-distribution is exporting prometheus metrics after execution ${hcauth}= PolicyAdminAuth ${resp}= PerformGetRequest ${DISTRIBUTION_IP} /metrics 200 null ${hcauth} - Should Contain ${resp.text} total_distribution_received_count_total 1.0 - Should Contain ${resp.text} distribution_success_count_total 1.0 + Should Contain ${resp.text} total_distribution_received_count_total 2.0 + Should Contain ${resp.text} distribution_success_count_total 2.0 Should Contain ${resp.text} distribution_failure_count_total 0.0 - Should Contain ${resp.text} total_download_received_count_total 1.0 - Should Contain ${resp.text} download_success_count_total 1.0 + Should Contain ${resp.text} total_download_received_count_total 2.0 + Should Contain ${resp.text} download_success_count_total 2.0 Should Contain ${resp.text} download_failure_count_total 0.0 *** Keywords *** diff --git a/csit/resources/tests/drools-applications-test.robot b/csit/resources/tests/drools-applications-test.robot index 81ec65e0..5c6a1b5c 100644 --- a/csit/resources/tests/drools-applications-test.robot +++ b/csit/resources/tests/drools-applications-test.robot @@ -29,8 +29,8 @@ Healthcheck MakeTopics [Documentation] Creates the Policy topics - GetTopic POLICY-PDP-PAP - GetTopic POLICY-CL-MGT + GetKafkaTopic policy-pdp-pap + GetKafkaTopic policy-cl-mgt CreateVcpeXacmlPolicy [Documentation] Create VCPE Policy for Xacml @@ -59,27 +59,20 @@ CreateVfwDroolsPolicy DeployXacmlPolicies [Documentation] Deploys the Policies to Xacml PerformPostRequest /policy/pap/v1/pdps/deployments/batch null ${POLICY_PAP_IP} deploy.xacml.policies.json ${CURDIR}/data json 202 - ${result}= CheckTopic POLICY-PDP-PAP PDP_UPDATE - Sleep 5s - ${result}= CheckTopic POLICY-PDP-PAP ACTIVE - Should Contain ${result} responseTo - Should Contain ${result} xacml - Should Contain ${result} restart - Should Contain ${result} onap.restart.tca + Sleep 5s + ${result}= CheckKafkaTopic policy-notification onap.vfirewall.tca + Should Contain ${result} deployed-policies Should Contain ${result} onap.scaleout.tca - Should Contain ${result} onap.vfirewall.tca + Should Contain ${result} onap.restart.tca DeployDroolsPolicies [Documentation] Deploys the Policies to Drools PerformPostRequest /policy/pap/v1/pdps/deployments/batch null ${POLICY_PAP_IP} deploy.drools.policies.json ${CURDIR}/data json 202 - ${result}= CheckTopic POLICY-PDP-PAP PDP_UPDATE - Sleep 5s - ${result}= CheckTopic POLICY-PDP-PAP ACTIVE - Should Contain ${result} responseTo - Should Contain ${result} drools - Should Contain ${result} operational.restart + Sleep 5s + ${result}= CheckKafkaTopic policy-notification operational.modifyconfig + Should Contain ${result} deployed-policies Should Contain ${result} operational.scaleout - Should Contain ${result} operational.modifyconfig + Should Contain ${result} operational.restart #VcpeExecute # [Documentation] Executes VCPE Policy @@ -179,7 +172,7 @@ OnSet ${data}= Get File ${file} Create Session session http://${DMAAP_IP} max_retries=1 ${headers}= Create Dictionary Content-Type=application/json - ${resp}= POST On Session session /events/unauthenticated.DCAE_CL_OUTPUT headers=${headers} data=${data} + ${resp}= POST On Session session /events/unauthenticateddcae_cl_output headers=${headers} data=${data} Log Response from dmaap ${resp.text} Status Should Be OK [Return] ${resp.text} diff --git a/csit/resources/tests/kafka_consumer.py b/csit/resources/tests/kafka_consumer.py new file mode 100755 index 00000000..595e3db7 --- /dev/null +++ b/csit/resources/tests/kafka_consumer.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python3 +# +# ============LICENSE_START==================================================== +# Copyright (C) 2023-2024 Nordix Foundation. +# ============================================================================= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 +# ============LICENSE_END====================================================== + +# Python utility to fetch kafka topic and look for required messages. +# Accepts the arguments {topic_name} and {list of expected values} and {timeout} to verify the kafka topic. + + +from confluent_kafka import Consumer, KafkaException +import sys +import time + + +def consume_kafka_topic(topic, expected_values, timeout): + config = { + 'bootstrap.servers': 'localhost:29092', + 'group.id': 'testgrp', + 'auto.offset.reset': 'earliest' + } + consumer = Consumer(config) + consumer.subscribe([topic]) + try: + start_time = time.time() + while time.time() - start_time < timeout: + msg = consumer.poll(1.0) + if msg is None: + continue + if msg.error(): + if msg.error().code() == KafkaException._PARTITION_EOF: + sys.stderr.write(f"Reached end of topic {msg.topic()} / partition {msg.partition()}\n") + print('ERROR') + sys.exit(404) + else: + # Error + raise KafkaException(msg.error()) + else: + # Message received + message = msg.value().decode('utf-8') + if expected_values in message: + print(message) + sys.exit(200) + finally: + consumer.close() + + +if __name__ == '__main__': + topic_name = sys.argv[1] + timeout = int(sys.argv[2]) # timeout in seconds for verifying the kafka topic + expected_values = sys.argv[3] + consume_kafka_topic(topic_name, expected_values, timeout) diff --git a/csit/resources/tests/kafka_producer.py b/csit/resources/tests/kafka_producer.py new file mode 100755 index 00000000..e6f01c21 --- /dev/null +++ b/csit/resources/tests/kafka_producer.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python3 +# +# ============LICENSE_START==================================================== +# Copyright (C) 2023-2024 Nordix Foundation. +# ============================================================================= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 +# ============LICENSE_END====================================================== + +# Python utility to produce a message on a kafka topic +# Accepts the arguments {topic_name} and {message} + +from confluent_kafka import Producer +import sys + +def post_to_kafka(topic, message): + conf = {'bootstrap.servers': 'localhost:29092'} + + producer = Producer(conf) + try: + producer.produce(topic, value=message.encode('utf-8')) + producer.flush() + print('Message posted to Kafka topic: {}'.format(topic)) + except Exception as e: + print('Failed to post message: {}'.format(str(e))) + finally: + producer.flush() + +if __name__ == '__main__': + post_to_kafka(sys.argv[1], sys.argv[2]) diff --git a/csit/resources/tests/make_topics.py b/csit/resources/tests/make_topics.py new file mode 100755 index 00000000..64a230eb --- /dev/null +++ b/csit/resources/tests/make_topics.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python3 +# +# ============LICENSE_START==================================================== +# Copyright (C) 2023-2024 Nordix Foundation. +# ============================================================================= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 +# ============LICENSE_END====================================================== + +# Python utility to create a new kafka topic +# Accepts the argument {topic_name} + +from confluent_kafka.admin import AdminClient, NewTopic +import sys + +def create_topic(bootstrap_servers, topic_name, num_partitions=2, replication_factor=2): + admin_client = AdminClient({'bootstrap.servers': bootstrap_servers}) + + # Define the topic configuration + topic = NewTopic(topic_name, num_partitions=num_partitions, replication_factor=replication_factor) + + # Create the topic + admin_client.create_topics([topic]) + + +if __name__ == '__main__': + topic_name = sys.argv[1] + bootstrap_servers = 'localhost:29092' + + create_topic(bootstrap_servers, topic_name) diff --git a/csit/resources/tests/pap-test.robot b/csit/resources/tests/pap-test.robot index 82328437..74d299a5 100644 --- a/csit/resources/tests/pap-test.robot +++ b/csit/resources/tests/pap-test.robot @@ -47,6 +47,7 @@ Healthcheck Consolidated Healthcheck [Documentation] Verify policy consolidated health check + sleep 20 ${resp}= GetReq /policy/pap/v1/components/healthcheck Should Be Equal As Strings ${resp.json()['healthy']} True diff --git a/csit/resources/tests/policy-clamp-test.robot b/csit/resources/tests/policy-clamp-test.robot index d5fec492..10f93893 100644 --- a/csit/resources/tests/policy-clamp-test.robot +++ b/csit/resources/tests/policy-clamp-test.robot @@ -97,6 +97,7 @@ QueryPolicies QueryPolicyTypes [Documentation] Verify the new policy types created ${auth}= Create List policyadmin zb!XztG34 + sleep 10 Log Creating session http://${POLICY_API_IP}}:6969 ${session}= Create Session policy http://${POLICY_API_IP} auth=${auth} ${headers}= Create Dictionary Accept=application/json Content-Type=application/json diff --git a/csit/resources/tests/xacml-pdp-test.robot b/csit/resources/tests/xacml-pdp-test.robot index 331a7fa5..e500a04c 100644 --- a/csit/resources/tests/xacml-pdp-test.robot +++ b/csit/resources/tests/xacml-pdp-test.robot @@ -19,7 +19,7 @@ Metrics MakeTopics [Documentation] Creates the Policy topics - GetTopic POLICY-PDP-PAP + GetKafkaTopic policy-pdp-pap ExecuteXacmlPolicy CreateMonitorPolicy @@ -56,12 +56,9 @@ DeployPolicies ${postjson}= Get file ${CURDIR}/data/vCPE.policy.input.tosca.deploy.json ${policyadmin}= PolicyAdminAuth PerformPostRequest ${POLICY_PAP_IP} /policy/pap/v1/pdps/policies 202 ${postjson} null ${policyadmin} - ${result}= CheckTopic POLICY-PDP-PAP PDP_UPDATE - Sleep 5s - ${result}= CheckTopic POLICY-PDP-PAP ACTIVE - Should Contain ${result} responseTo - Should Contain ${result} xacml - Should Contain ${result} onap.restart.tca + sleep 20s + ${result}= CheckKafkaTopic policy-notification onap.restart.tca + Should Contain ${result} deployed-policies GetAbbreviatedDecisionResult [Documentation] Get Decision with abbreviated results from Policy Xacml PDP diff --git a/csit/run-k8s-csit.sh b/csit/run-k8s-csit.sh index 9fd6159c..5a3ac3bb 100755 --- a/csit/run-k8s-csit.sh +++ b/csit/run-k8s-csit.sh @@ -1,7 +1,7 @@ #!/bin/bash # # ============LICENSE_START==================================================== -# Copyright (C) 2022-2023 Nordix Foundation. +# Copyright (C) 2022-2024 Nordix Foundation. # ============================================================================= # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -253,7 +253,7 @@ function set_project_config() { $POLICY_CLAMP_CONTAINER,$POLICY_PF_PPNT_CONTAINER,$POLICY_K8S_PPNT_CONTAINER, $POLICY_HTTP_PPNT_CONTAINER) export SET_VALUES="--set $POLICY_APEX_CONTAINER.enabled=true --set $POLICY_XACML_CONTAINER.enabled=true - --set $POLICY_DISTRIBUTION_CONTAINER.enabled=true --set $POLICY_POLICY_DROOLS_CONTAINER.enabled=true + --set $POLICY_DISTRIBUTION_CONTAINER.enabled=true --set $POLICY_DROOLS_CONTAINER.enabled=true --set $POLICY_CLAMP_CONTAINER.enabled=true --set $POLICY_PF_PPNT_CONTAINER.enabled=true --set $POLICY_K8S_PPNT_CONTAINER.enabled=true --set $POLICY_HTTP_PPNT_CONTAINER.enabled=true" ;; -- cgit 1.2.3-korg