From fbe47369f95b3154c879026a196d5b5259e5f130 Mon Sep 17 00:00:00 2001 From: waynedunican Date: Mon, 27 May 2024 16:29:18 +0100 Subject: Update to Apex jmx files Issue-ID: POLICY-4977 Change-Id: If2aad259007ed3cf84aea4f7ad99ea7fbe379a69 Signed-off-by: waynedunican --- .../main/resources/apexPdpPerformanceTestPlan.jmx | 1784 ++++++-------------- 1 file changed, 543 insertions(+), 1241 deletions(-) (limited to 'testsuites/performance/performance-benchmark-test') diff --git a/testsuites/performance/performance-benchmark-test/src/main/resources/apexPdpPerformanceTestPlan.jmx b/testsuites/performance/performance-benchmark-test/src/main/resources/apexPdpPerformanceTestPlan.jmx index bb6870d17..681becf1e 100644 --- a/testsuites/performance/performance-benchmark-test/src/main/resources/apexPdpPerformanceTestPlan.jmx +++ b/testsuites/performance/performance-benchmark-test/src/main/resources/apexPdpPerformanceTestPlan.jmx @@ -22,15 +22,13 @@ --> - - - false - true + true - + true + - + false @@ -46,7 +44,7 @@ - + 23324 http @@ -55,7 +53,7 @@ - + threads @@ -90,7 +88,7 @@ KAFKA_PORT - ${__P(KAFKA_PORT,29092)} + ${__P(KAFKA_PORT,9092)} = @@ -105,7 +103,7 @@ LOG_PATH - ${__P(LOG_PATH,/jmeter_results/apex_perf)} + ${__P(LOG_PATH,/jmeter_results/apex_stability)} = @@ -116,7 +114,7 @@ - + http://${HOSTNAME}:${API_PORT} @@ -157,7 +155,7 @@ false - + 1 1 true @@ -168,7 +166,7 @@ - + 6 ${HOSTNAME} ${API_PORT} @@ -3234,146 +3232,42 @@ "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.kafka.KafkaCarrierTechnologyParameters", "parameters": { "bootstrapServers": "kafka:9092", - "groupId": "apex-grp2", + "groupId": "apex-group", "enableAutoCommit": true, - "autoCommitTime": 1000, - "sessionTimeout": 30000, - "consumerPollTime": 100, - "consumerTopicList": [ - "unauthenticated.dcae_cl_output" - ], - "keyDeserializer": "org.apache.kafka.common.serialization.StringDeserializer", - "valueDeserializer": "org.apache.kafka.common.serialization.StringDeserializer", - "kafkaProperties": [ - - ] + "autoCommitTime": 1000, + "sessionTimeout": 30000, + "consumerPollTime": 100, + "consumerTopicList": ["unauthenticated.dcae_policy_example_output"], + "keyDeserializer": "org.apache.kafka.common.serialization.StringDeserializer", + "valueDeserializer": "org.apache.kafka.common.serialization.StringDeserializer" } }, "eventProtocolParameters": { "eventProtocol": "JSON" }, "eventName": "ApexExample_VesEvent" - }, - "ApexExample_AAIResponseConsumer": { - "carrierTechnologyParameters": { - "carrierTechnology": "RESTREQUESTOR", - "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.restrequestor.RestRequestorCarrierTechnologyParameters", - "parameters": { - "url": "https://aai:8443/aai/v14/network/generic-vnfs/generic-vnf?vnf-name={vnf-name}", - "httpMethod": "GET", - "httpCodeFilter": "[245][0-9][0-9]", - "httpHeaders": [ - [ - "Accept", - "application/json" - ], - [ - "Content-Type", - "application/json" - ], - [ - "X-FromAppId", - "dcae-curl" - ], - [ - "x-transactionId", - "9998" - ], - [ - "Authorization", - "Basic QUFJOkFBSQ==" - ] - ] - } - }, - "eventProtocolParameters": { - "eventProtocol": "JSON" - }, - "eventName": "ApexExample_AAISuccessResponseEvent|ApexExample_AAIFailureResponseEvent", - "requestorMode": true, - "requestorPeer": "ApexExample_AAIProducer", - "requestorTimeout": 2000 - }, - "ApexExample_CDSConfigModifyResponseConsumer": { - "carrierTechnologyParameters": { - "carrierTechnology": "GRPC", - "parameterClassName": 
"org.onap.policy.apex.plugins.event.carrier.grpc.GrpcCarrierTechnologyParameters" - }, - "eventProtocolParameters": { - "eventProtocol": "JSON" - }, - "eventName": "ApexExample_CDSConfigModifySuccessResponseEvent|ApexExample_CDSConfigModifyFailureResponseEvent", - "requestorMode": true, - "requestorPeer": "ApexExample_CDSConfigModifyRequestProducer", - "requestorTimeout": 500 - }, - "ApexExample_CDSRestartResponseConsumer": { - "carrierTechnologyParameters": { - "carrierTechnology": "GRPC", - "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.grpc.GrpcCarrierTechnologyParameters" - }, - "eventProtocolParameters": { - "eventProtocol": "JSON" - }, - "eventName": "ApexExample_CDSRestartSuccessResponseEvent|ApexExample_CDSRestartFailureResponseEvent", - "requestorMode": true, - "requestorPeer": "ApexExample_CDSRestartRequestProducer", - "requestorTimeout": 500 } }, "eventOutputParameters": { "ApexExample_AAIProducer": { "carrierTechnologyParameters": { - "carrierTechnology": "RESTREQUESTOR", - "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.restrequestor.RestRequestorCarrierTechnologyParameters" - }, - "eventProtocolParameters": { - "eventProtocol": "JSON" - }, - "eventNameFilter": "ApexExample_AAIRequestEvent", - "requestorMode": true, - "requestorPeer": "ApexExample_AAIResponseConsumer", - "requestorTimeout": 2000 - }, - "ApexExample_CDSConfigModifyRequestProducer": { - "carrierTechnologyParameters": { - "carrierTechnology": "GRPC", - "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.grpc.GrpcCarrierTechnologyParameters", - "parameters": { - "host": "${KAFKA_HOST}", - "port": ${CDS_PORT}, - "username": "ccsdkapps", - "password": "ccsdkapps", - "timeout": 10 - } - }, - "eventProtocolParameters": { - "eventProtocol": "JSON" - }, - "eventNameFilter": "ApexExample_CDSConfigModifyRequestEvent", - "requestorMode": true, - "requestorPeer": "ApexExample_CDSConfigModifyResponseConsumer", - "requestorTimeout": 500 - }, - "ApexExample_CDSRestartRequestProducer": { - "carrierTechnologyParameters": { - "carrierTechnology": "GRPC", - "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.grpc.GrpcCarrierTechnologyParameters", + "carrierTechnology": "KAFKA", + "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.kafka.KafkaCarrierTechnologyParameters", "parameters": { - "host": "${KAFKA_HOST}", - "port": ${CDS_PORT}, - "username": "ccsdkapps", - "password": "ccsdkapps", - "timeout": 10 + "bootstrapServers": "kafka:9092", + "acks": "all", + "retries": 0, + "batchSize": 16384, + "lingerTime": 1, + "bufferMemory": 33554432, + "producerTopic": "apex-cl-mgt", + "keySerializer": "org.apache.kafka.common.serialization.StringSerializer", + "valueSerializer": "org.apache.kafka.common.serialization.StringSerializer" } }, "eventProtocolParameters": { "eventProtocol": "JSON" - }, - "eventNameFilter": "ApexExample_CDSRestartRequestEvent", - "requestorMode": true, - "requestorPeer": "ApexExample_CDSRestartResponseConsumer", - "requestorTimeout": 500 + } } } } @@ -3399,7 +3293,7 @@ - + 6 ${HOSTNAME} ${API_PORT} @@ -6190,114 +6084,23 @@ "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.kafka.KafkaCarrierTechnologyParameters", "parameters": { "bootstrapServers": "kafka:9092", - "groupId": "apex-grp2", + "groupId": "apex-group", "enableAutoCommit": true, - "autoCommitTime": 1000, - "sessionTimeout": 30000, - "consumerPollTime": 100, - "consumerTopicList": [ - "unauthenticated.dcae_cl_output" - ], - "keyDeserializer": 
"org.apache.kafka.common.serialization.StringDeserializer", - "valueDeserializer": "org.apache.kafka.common.serialization.StringDeserializer", - "kafkaProperties": [ - - ] + "autoCommitTime": 1000, + "sessionTimeout": 30000, + "consumerPollTime": 100, + "consumerTopicList": ["unauthenticated.dcae_cl_output"], + "keyDeserializer": "org.apache.kafka.common.serialization.StringDeserializer", + "valueDeserializer": "org.apache.kafka.common.serialization.StringDeserializer" } }, "eventProtocolParameters": { "eventProtocol": "JSON" }, "eventName": "SimpleCL_VesEvent" - }, - "SimpleCL_AAIResponseConsumer": { - "carrierTechnologyParameters": { - "carrierTechnology": "RESTREQUESTOR", - "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.restrequestor.RestRequestorCarrierTechnologyParameters", - "parameters": { - "url": "https://aai:8443/aai/v14/network/pnfs/pnf/{pnfName}", - "httpMethod": "GET", - "httpCodeFilter": "[2-5][0-1][0-9]", - "httpHeaders": [ - [ - "Accept", - "application/json" - ], - [ - "Content-Type", - "application/json" - ], - [ - "X-FromAppId", - "dcae-curl" - ], - [ - "x-transactionId", - "9998" - ], - [ - "Authorization", - "Basic QUFJOkFBSQ==" - ] - ] - } - }, - "eventProtocolParameters": { - "eventProtocol": "JSON" - }, - "eventName": "SimpleCL_AAISuccessResponseEvent|SimpleCL_AAIFailureResponseEvent", - "requestorMode": true, - "requestorPeer": "SimpleCL_AAIProducer", - "requestorTimeout": 2000 - }, - "SimpleCL_CDSConfigModifyResponseConsumer": { - "carrierTechnologyParameters": { - "carrierTechnology": "GRPC", - "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.grpc.GrpcCarrierTechnologyParameters" - }, - "eventProtocolParameters": { - "eventProtocol": "JSON" - }, - "eventName": "SimpleCL_CDSConfigModifySuccessResponseEvent|SimpleCL_CDSConfigModifyFailureResponseEvent", - "requestorMode": true, - "requestorPeer": "SimpleCL_CDSConfigModifyRequestProducer", - "requestorTimeout": 500 } }, "eventOutputParameters": { - "SimpleCL_AAIProducer": { - "carrierTechnologyParameters": { - "carrierTechnology": "RESTREQUESTOR", - "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.restrequestor.RestRequestorCarrierTechnologyParameters" - }, - "eventProtocolParameters": { - "eventProtocol": "JSON" - }, - "eventNameFilter": "SimpleCL_AAIRequestEvent", - "requestorMode": true, - "requestorPeer": "SimpleCL_AAIResponseConsumer", - "requestorTimeout": 2000 - }, - "SimpleCL_CDSConfigModifyRequestProducer": { - "carrierTechnologyParameters": { - "carrierTechnology": "GRPC", - "parameterClassName": "org.onap.policy.apex.plugins.event.carrier.grpc.GrpcCarrierTechnologyParameters", - "parameters": { - "host": "${KAFKA_HOST}", - "port": ${CDS_PORT}, - "username": "ccsdkapps", - "password": "ccsdkapps", - "timeout": 60 - } - }, - "eventProtocolParameters": { - "eventProtocol": "JSON" - }, - "eventNameFilter": "SimpleCL_CDSConfigModifyRequestEvent", - "requestorMode": true, - "requestorPeer": "SimpleCL_CDSConfigModifyResponseConsumer", - "requestorTimeout": 500 - }, "SimpleCL_LogEventProducer": { "carrierTechnologyParameters": { "carrierTechnology": "KAFKA", @@ -6309,18 +6112,14 @@ "batchSize": 16384, "lingerTime": 1, "bufferMemory": 33554432, - "producerTopic": "apex-cl-mgt2", + "producerTopic": "apex-cl-mgt", "keySerializer": "org.apache.kafka.common.serialization.StringSerializer", - "valueSerializer": "org.apache.kafka.common.serialization.StringSerializer", - "kafkaProperties": [ - - ] + "valueSerializer": 
"org.apache.kafka.common.serialization.StringSerializer" } }, "eventProtocolParameters": { "eventProtocol": "JSON" - }, - "eventNameFilter": "SimpleCL_LogEvent" + } } } } @@ -6404,7 +6203,7 @@ - + ${threads} 1 7200 @@ -6417,7 +6216,7 @@ - + 6 ${HOSTNAME} ${APEX_PORT} @@ -6432,25 +6231,6 @@ - - true - - - import groovy.json.JsonSlurper; - -def res = []; -if (prev.getResponseCode() == '200') { - def jsonSlurper = new JsonSlurper(); - res = jsonSlurper.parseText(prev.getResponseDataAsString()); - - assert res instanceof Map; - assert res.healthy == true; - assert res.message == 'alive'; -} - - groovy - - 200 @@ -6462,7 +6242,7 @@ if (prev.getResponseCode() == '200') { - + 6 ${HOSTNAME} ${APEX_PORT} @@ -6489,662 +6269,527 @@ if (prev.getResponseCode() == '200') { Assert response coming back from apex-pdp - + + + + true - import groovy.json.JsonSlurper; + import org.apache.kafka.clients.producer.Callback +import org.apache.kafka.clients.producer.RecordMetadata +import org.apache.kafka.clients.producer.KafkaProducer +import org.apache.kafka.clients.producer.ProducerRecord +import org.apache.kafka.clients.producer.ProducerConfig +import org.apache.kafka.common.serialization.StringSerializer -def res = []; -if (prev.getResponseCode() == '200') { - def jsonSlurper = new JsonSlurper(); - res = jsonSlurper.parseText(prev.getResponseDataAsString()); +def props = new Properties() +props.put("bootstrap.servers", "kafka:9092") +props.put("group.id", "new-group") +props.put("acks", "all") +props.put("retries", 0) +props.put("batch.size", 16384) +props.put("linger.ms", 1) +props.put("buffer.memory", 33554432) +props.put("key.serializer", StringSerializer.class.getName()) +props.put("value.serializer", StringSerializer.class.getName()) - assert res instanceof Map; - assert res.healthy == true; - assert res.message == 'alive'; -} +def producer = new KafkaProducer<>(props) + +def jsonString = '''{ + "event": { + "commonEventHeader": { + "startEpochMicrosec": 1597953057126, + "sourceId": "927b2580-36d9-4f13-8421-3c9d43b7a57${__threadNum}", + "eventId": "8c7935a4-79d8-4ec0-b661-dcca3cd6800${__threadNum}", + "lastEpochMicrosec": 1597952499468, + "eventName": "Report hostname & IP", + "sourceName": "PNF101", + "reportingEntityName": "DCAE", + "vesEventListenerVersion": "7.1", + "internalHeaderFields": {}, + "version": "4.0.1", + "priority": "Normal", + "sequence": 1, + "domain": "other" + }, + "otherFields": { + "hashMap": { + "hostname": "UNDEFINED", + "pnfId": "927b2580-36d9-4f13-8421-3c9d43b7a57e", + "pnfName": "PNF101", + "ip": "10.101.200.10" + }, + "otherFieldsVersion": "3.0" + } + } +}''' + +def record = new ProducerRecord<>("unauthenticated.dcae_cl_output", jsonString) + +producer.send(record, new Callback() { + @Override + void onCompletion(RecordMetadata metadata, Exception exception) { + if (exception != null) { + exception.printStackTrace() + } else { + println("Sent message to topic ${metadata.topic()} partition ${metadata.partition()} with offset ${metadata.offset()}") + } + } +}) + +producer.close() groovy - + - - - - - Clean up DMaaP notification topic - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/apex-cl-mgt/cg${__threadNum}/c1?limit=300&timeout=500 - true - GET - true - false - - - - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back from apex-pdp - - - - - - - - ${threads} - ${threadsTimeOutInMs} - - - - Send Trigger Event - simplecl success - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/unauthenticated.dcae_cl_output - true - 
POST - true - true - - - - false - { - "event": { - "commonEventHeader": { - "startEpochMicrosec": 1597953057126, - "sourceId": "927b2580-36d9-4f13-8421-3c9d43b7a57${__threadNum}", - "eventId": "8c7935a4-79d8-4ec0-b661-dcca3cd6800${__threadNum}", - "lastEpochMicrosec": 1597952499468, - "eventName": "Report hostname & IP", - "sourceName": "PNF101", - "reportingEntityName": "DCAE", - "vesEventListenerVersion": "7.1", - "internalHeaderFields": {}, - "version": "4.0.1", - "priority": "Normal", - "sequence": 1, - "domain": "other" - }, - "otherFields": { - "hashMap": { - "hostname": "UNDEFINED", - "pnfId": "927b2580-36d9-4f13-8421-3c9d43b7a57e", - "pnfName": "PNF101", - "ip": "10.101.200.10" - }, - "otherFieldsVersion": "3.0" - } - } -} - = - - - - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back from apex-pdp - - - - - ${wait} - - - - Read notification events - simplecl success - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/apex-cl-mgt/cg${__threadNum}/c1?limit=100&timeout=30000 - true - GET - true - false - - - - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back from apex-pdp - - - - true - - - import groovy.json.JsonSlurper; + + groovy + + + true + import org.apache.kafka.clients.consumer.ConsumerConfig +import org.apache.kafka.clients.consumer.KafkaConsumer +import org.apache.kafka.clients.consumer.ConsumerRecords +import org.apache.kafka.clients.consumer.ConsumerRecord +import org.apache.kafka.common.serialization.StringDeserializer +import groovy.json.JsonSlurper + +// Consumer properties +def props = new Properties() +props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka:9092") // Adjust to your Kafka bootstrap server +props.put(ConsumerConfig.GROUP_ID_CONFIG, "apex-group") +props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()) +props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()) +props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") + +// Create the Kafka consumer +def consumer = new KafkaConsumer<>(props) + +// Subscribe to the topic +consumer.subscribe(["apex-cl-mgt"]) -def res = []; -if (prev.getResponseCode() == '200') { - def jsonSlurper = new JsonSlurper(); - def responseString = prev.getResponseDataAsString(); - res = jsonSlurper.parseText(responseString); - def expectedNumberOfNotifications = ${threads} * 3; - assert res instanceof List; - assert res.size() == expectedNumberOfNotifications; - assert responseString.contains("ACTIVE") - assert responseString.contains("SUCCESS") - assert responseString.contains("FINAL_SUCCESS") - assert !responseString.contains("FAILURE") +// Poll for messages and assert the value +def found = false +def timeout = 10000 // timeout in milliseconds +def startTime = System.currentTimeMillis() + +while (!found && (System.currentTimeMillis() - startTime) < timeout) { + println("Inside while loop") + ConsumerRecords<String, String> records = consumer.poll(1000) + for (ConsumerRecord<String, String> record : records) { + println("Received message: Key=${record.key()}, Value=${record.value()}, Partition=${record.partition()}, Offset=${record.offset()}") + + // Parse the JSON value + def jsonSlurper = new JsonSlurper() + def message = jsonSlurper.parseText(record.value()) + + // Perform assertion on specific fields in the JSON + if ((message?.source == "APEX") || (message?.component?.status == "ACTIVE")) { + found = true + println("Assertion passed: 
source is 'APEX' or status is 'ACTIVE'") + break + } + } } + +// Close the consumer +consumer.close() - groovy - - - - - - - - Clean up DMaaP notification topic - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/apex-cl-mgt/cg${__threadNum}/c1?limit=300&timeout=500 - true - GET - true - false - - - - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back from apex-pdp - - - + + - + - - ${threads} - ${threadsTimeOutInMs} - - - - Send Trigger Event - simplecl failure - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/unauthenticated.dcae_cl_output - true - POST - true - true - - - - false - { - "event": { - "commonEventHeader": { - "startEpochMicrosec": 1597953057126, - "sourceId": "927b2580-36d9-4f13-8421-3c9d43b7a57${__threadNum}", - "eventId": "8c7935a4-79d8-4ec0-b661-dcca3cd6800${__threadNum}", - "lastEpochMicrosec": 1597952499468, - "eventName": "Report hostname & IP", - "sourceName": "PNF101", - "reportingEntityName": "DCAE", - "vesEventListenerVersion": "7.1", - "internalHeaderFields": {}, - "version": "4.0.1", - "priority": "Normal", - "sequence": 1, - "domain": "other" - }, - "otherFields": { - "hashMap": { - "hostname": "UNDEFINED", - "pnfId": "927b2580-36d9-4f13-8421-3c9d43b7a57e", - "pnfName": "PNF_INVALID", - "ip": "10.101.200.10" - }, - "otherFieldsVersion": "3.0" - } - } -} - = - - - - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back from apex-pdp - - - - - ${wait} - + + groovy + + + true + import org.apache.kafka.clients.producer.Callback +import org.apache.kafka.clients.producer.RecordMetadata +import org.apache.kafka.clients.producer.KafkaProducer +import org.apache.kafka.clients.producer.ProducerRecord +import org.apache.kafka.clients.producer.ProducerConfig +import org.apache.kafka.common.serialization.StringSerializer + +def props = new Properties() +props.put("bootstrap.servers", "kafka:9092") +props.put("group.id", "new-group") +props.put("acks", "all") +props.put("retries", 0) +props.put("batch.size", 16384) +props.put("linger.ms", 1) +props.put("buffer.memory", 33554432) +props.put("key.serializer", StringSerializer.class.getName()) +props.put("value.serializer", StringSerializer.class.getName()) + +def producer = new KafkaProducer<>(props) + +def jsonString = '''{ + "event": { + "commonEventHeader": { + "domain": "fault", + "eventId": "fault0000245", + "eventName": "Fault_Vscf:PilotNumberPoolExhaustion", + "lastEpochMicrosec": 1413378172000000, + "priority": "High", + "reportingEntityName": "ibcx0001vm002oam001", + "sequence": 1, + "sourceId": "de305d54-75b4-431b-adb2-eb6b9e546014${__threadNum}", + "sourceName": "vfw-cnf-cds-test-1-vnf", + "startEpochMicrosec": 1413378172000000, + "timeZoneOffset": "UTC-05:30", + "version": "4.0.1", + "vesEventListenerVersion": "7.0.1" + }, + "faultFields": { + "alarmAdditionalInformation": { + "PilotNumberPoolSize": "1000" + }, + "alarmCondition": "PilotNumberPoolExhaustion", + "eventSeverity": "CRITICAL", + "eventSourceType": "other", + "faultFieldsVersion": 4, + "specificProblem": "Calls cannot complete - pilot numbers are unavailable", + "vfStatus": "Active" + } + } +}''' + +def record = new ProducerRecord<>("unauthenticated.dcae_policy_example_output", jsonString) + +producer.send(record, new Callback() { + @Override + void onCompletion(RecordMetadata metadata, Exception exception) { + if (exception != null) { + exception.printStackTrace() + } else { + println("Sent message to topic ${metadata.topic()} partition 
${metadata.partition()} with offset ${metadata.offset()}") + } + } +}) + +producer.close() + + - - Read notification events - simplecl failure - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/apex-cl-mgt/cg${__threadNum}/c1?limit=100&timeout=30000 - true - GET - true - false - - - - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back from apex-pdp - - - - true - - - import groovy.json.JsonSlurper; + + groovy + + + true + import org.apache.kafka.clients.consumer.ConsumerConfig +import org.apache.kafka.clients.consumer.KafkaConsumer +import org.apache.kafka.clients.consumer.ConsumerRecords +import org.apache.kafka.clients.consumer.ConsumerRecord +import org.apache.kafka.common.serialization.StringDeserializer +import groovy.json.JsonSlurper + +// Consumer properties +def props = new Properties() +props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka:9092") // Adjust to your Kafka bootstrap server +props.put(ConsumerConfig.GROUP_ID_CONFIG, "apex-group") +props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()) +props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()) +props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") + +// Create the Kafka consumer +def consumer = new KafkaConsumer<>(props) -def res = []; -if (prev.getResponseCode() == '200') { - def jsonSlurper = new JsonSlurper(); - def responseString = prev.getResponseDataAsString(); - res = jsonSlurper.parseText(responseString); - def expectedNumberOfNotifications = ${threads} * 2; - assert res instanceof List; - assert res.size() == expectedNumberOfNotifications; - assert responseString.contains("ACTIVE") - assert responseString.contains("FINAL_FAILURE") - assert !responseString.contains("FINAL_SUCCESS") +// Subscribe to the topic +consumer.subscribe(["apex-cl-mgt"]) + +// Poll for messages and assert the value +def found = false +def timeout = 10000 // timeout in milliseconds +def startTime = System.currentTimeMillis() + +while (!found && (System.currentTimeMillis() - startTime) < timeout) { + println("Inside while loop") + ConsumerRecords<String, String> records = consumer.poll(1000) + for (ConsumerRecord<String, String> record : records) { + println("Received message: Key=${record.key()}, Value=${record.value()}, Partition=${record.partition()}, Offset=${record.offset()}") + + // Parse the JSON value + def jsonSlurper = new JsonSlurper() + def message = jsonSlurper.parseText(record.value()) + + // Perform assertion on specific fields in the JSON + if ((message?.source == "APEX") || (message?.component?.status == "ACTIVE")) { + found = true + println("Assertion passed: source is 'APEX' or status is 'ACTIVE'") + break + } + } } + +// Close the consumer +consumer.close() - groovy - - - - - - - - Clean up DMaaP notification topic - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/apex-cl-mgt/cg${__threadNum}/c1?limit=300&timeout=500 - true - GET - true - false - - - - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back from apex-pdp - - - + + - + - - ${threads} - ${threadsTimeOutInMs} - - - - Send Trigger Event - example success - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/unauthenticated.dcae_policy_example_output - true - POST - true - true - - - - false - { - "event": { - "commonEventHeader": { - "domain": "fault", - "eventId": "fault0000245", - "eventName": "Fault_Vscf:PilotNumberPoolExhaustion", - "lastEpochMicrosec": 
1413378172000000, - "priority": "High", - "reportingEntityName": "ibcx0001vm002oam001", - "sequence": 1, - "sourceId": "de305d54-75b4-431b-adb2-eb6b9e546014${__threadNum}", - "sourceName": "vfw-cnf-cds-test-1-vnf", - "startEpochMicrosec": 1413378172000000, - "timeZoneOffset": "UTC-05:30", - "version": "4.0.1", - "vesEventListenerVersion": "7.0.1" - }, - "faultFields": { - "alarmAdditionalInformation": { - "PilotNumberPoolSize": "1000" - }, - "alarmCondition": "PilotNumberPoolExhaustion", - "eventSeverity": "CRITICAL", - "eventSourceType": "other", - "faultFieldsVersion": 4, - "specificProblem": "Calls cannot complete - pilot numbers are unavailable", - "vfStatus": "Active" - } - } -} - = - - - - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back from apex-pdp - - - - - ${wait} - + + true + + + import org.apache.kafka.clients.producer.Callback +import org.apache.kafka.clients.producer.RecordMetadata +import org.apache.kafka.clients.producer.KafkaProducer +import org.apache.kafka.clients.producer.ProducerRecord +import org.apache.kafka.clients.producer.ProducerConfig +import org.apache.kafka.common.serialization.StringSerializer + +def props = new Properties() +props.put("bootstrap.servers", "kafka:9092") +props.put("group.id", "new-group") +props.put("acks", "all") +props.put("retries", 0) +props.put("batch.size", 16384) +props.put("linger.ms", 1) +props.put("buffer.memory", 33554432) +props.put("key.serializer", StringSerializer.class.getName()) +props.put("value.serializer", StringSerializer.class.getName()) + +def producer = new KafkaProducer<>(props) + +def jsonString = '''{ + "event": { + "commonEventHeader": { + "startEpochMicrosec": 1597953057126, + "sourceId": "927b2580-36d9-4f13-8421-3c9d43b7a57${__threadNum}", + "eventId": "8c7935a4-79d8-4ec0-b661-dcca3cd6800${__threadNum}", + "lastEpochMicrosec": 1597952499468, + "eventName": "Report hostname & IP", + "sourceName": "PNF101", + "reportingEntityName": "DCAE", + "vesEventListenerVersion": "7.1", + "internalHeaderFields": {}, + "version": "4.0.1", + "priority": "Normal", + "sequence": 1, + "domain": "other" + }, + "otherFields": { + "hashMap": { + "hostname": "UNDEFINED", + "pnfId": "927b2580-36d9-4f13-8421-3c9d43b7a57e", + "pnfName": "PNF_INVALID", + "ip": "10.101.200.10" + }, + "otherFieldsVersion": "3.0" + } + } +}''' + +def record = new ProducerRecord<>("unauthenticated.dcae_cl_output", jsonString) + +producer.send(record, new Callback() { + @Override + void onCompletion(RecordMetadata metadata, Exception exception) { + if (exception != null) { + exception.printStackTrace() + } else { + println("Sent message to topic ${metadata.topic()} partition ${metadata.partition()} with offset ${metadata.offset()}") + } + } +}) + +producer.close() + + groovy + - - Read notification events - example success - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/apex-cl-mgt/cg${__threadNum}/c1?limit=100&timeout=30000 - true - GET - true - false - - - - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back from apex-pdp - - - - true - - - import groovy.json.JsonSlurper; + + groovy + + + true + import org.apache.kafka.clients.consumer.ConsumerConfig +import org.apache.kafka.clients.consumer.KafkaConsumer +import org.apache.kafka.clients.consumer.ConsumerRecords +import org.apache.kafka.clients.consumer.ConsumerRecord +import org.apache.kafka.common.serialization.StringDeserializer +import groovy.json.JsonSlurper + +// Consumer properties +def 
props = new Properties() +props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka:9092") // Adjust to your Kafka bootstrap server +props.put(ConsumerConfig.GROUP_ID_CONFIG, "apex-group") +props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()) +props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()) +props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") + +// Create the Kafka consumer +def consumer = new KafkaConsumer<>(props) -def res = []; -if (prev.getResponseCode() == '200') { - def jsonSlurper = new JsonSlurper(); - def responseString = prev.getResponseDataAsString(); - res = jsonSlurper.parseText(responseString); - def expectedNumberOfNotifications = ${threads} * 4; - assert res instanceof List; - assert res.size() == expectedNumberOfNotifications; - assert responseString.contains("ACTIVE") - assert responseString.contains("SUCCESS") - assert responseString.contains("FINAL_SUCCESS") - assert !responseString.contains("FAILURE") +// Subscribe to the topic +consumer.subscribe(["apex-cl-mgt"]) + +// Poll for messages and assert the value +def found = false +def timeout = 10000 // timeout in milliseconds +def startTime = System.currentTimeMillis() + +while (!found && (System.currentTimeMillis() - startTime) < timeout) { + println("Inside while loop") + ConsumerRecords<String, String> records = consumer.poll(1000) + for (ConsumerRecord<String, String> record : records) { + println("Received message: Key=${record.key()}, Value=${record.value()}, Partition=${record.partition()}, Offset=${record.offset()}") + + // Parse the JSON value + def jsonSlurper = new JsonSlurper() + def message = jsonSlurper.parseText(record.value()) + + // Perform assertion on specific fields in the JSON + if (!(message?.name != "SUCCESS")) { + found = true + println("Assertion passed") + break + } + } } + +// Close the consumer +consumer.close() - groovy - - - - - - - - Clean up DMaaP notification topic - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/apex-cl-mgt/cg${__threadNum}/c1?limit=300&timeout=500 - true - GET - true - false - - - - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back from apex-pdp - - - + + - + - - ${threads} - ${threadsTimeOutInMs} - - - - Send Trigger Event - example failure - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/unauthenticated.dcae_policy_example_output - true - POST - true - true - - - - false - { - "event": { - "commonEventHeader": { - "domain": "fault", - "eventId": "fault0000245", - "eventName": "Fault_Vscf:PilotNumberPoolExhaustion", - "lastEpochMicrosec": 1413378172000000, - "priority": "High", - "reportingEntityName": "ibcx0001vm002oam001", - "sequence": 1, - "sourceId": "de305d54-75b4-431b-adb2-eb6b9e546014${__threadNum}", - "sourceName": "vfw-cnf-cds-test-2-vnf", - "startEpochMicrosec": 1413378172000000, - "timeZoneOffset": "UTC-05:30", - "version": "4.0.1", - "vesEventListenerVersion": "7.0.1" - }, - "faultFields": { - "alarmAdditionalInformation": { - "PilotNumberPoolSize": "1000" - }, - "alarmCondition": "PilotNumberPoolExhaustion", - "eventSeverity": "CRITICAL", - "eventSourceType": "other", - "faultFieldsVersion": 4, - "specificProblem": "Calls cannot complete - pilot numbers are unavailable", - "vfStatus": "InActive" - } - } -} - = - - - - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back from apex-pdp - - - - - ${wait} - + + true + + + import 
org.apache.kafka.clients.producer.Callback +import org.apache.kafka.clients.producer.RecordMetadata +import org.apache.kafka.clients.producer.KafkaProducer +import org.apache.kafka.clients.producer.ProducerRecord +import org.apache.kafka.clients.producer.ProducerConfig +import org.apache.kafka.common.serialization.StringSerializer + +def props = new Properties() +props.put("bootstrap.servers", "kafka:9092") +props.put("group.id", "new-group") +props.put("acks", "all") +props.put("retries", 0) +props.put("batch.size", 16384) +props.put("linger.ms", 1) +props.put("buffer.memory", 33554432) +props.put("key.serializer", StringSerializer.class.getName()) +props.put("value.serializer", StringSerializer.class.getName()) + +def producer = new KafkaProducer<>(props) + +def jsonString = '''{ + "event": { + "commonEventHeader": { + "startEpochMicrosec": 1597953057126, + "sourceId": "927b2580-36d9-4f13-8421-3c9d43b7a57${__threadNum}", + "eventId": "8c7935a4-79d8-4ec0-b661-dcca3cd6800${__threadNum}", + "lastEpochMicrosec": 1597952499468, + "eventName": "Report hostname & IP", + "sourceName": "PNF101", + "reportingEntityName": "DCAE", + "vesEventListenerVersion": "7.1", + "internalHeaderFields": {}, + "version": "4.0.1", + "priority": "Normal", + "sequence": 1, + "domain": "other" + }, + "otherFields": { + "hashMap": { + "hostname": "UNDEFINED", + "pnfId": "927b2580-36d9-4f13-8421-3c9d43b7a57e", + "pnfName": "PNF101", + "ip": "10.101.200.10" + }, + "otherFieldsVersion": "3.0" + } + } +}''' + +def record = new ProducerRecord<>("unauthenticated.dcae_policy_example_output", jsonString) + +producer.send(record, new Callback() { + @Override + void onCompletion(RecordMetadata metadata, Exception exception) { + if (exception != null) { + exception.printStackTrace() + } else { + println("Sent message to topic ${metadata.topic()} partition ${metadata.partition()} with offset ${metadata.offset()}") + } + } +}) + +producer.close() + + groovy + - - Read notification events - example failure - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/apex-cl-mgt/cg${__threadNum}/c1?limit=100&timeout=30000 - true - GET - true - false - - - - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back from apex-pdp - - - - true - - - import groovy.json.JsonSlurper; + + groovy + + + true + import org.apache.kafka.clients.consumer.ConsumerConfig +import org.apache.kafka.clients.consumer.KafkaConsumer +import org.apache.kafka.clients.consumer.ConsumerRecords +import org.apache.kafka.clients.consumer.ConsumerRecord +import org.apache.kafka.common.serialization.StringDeserializer +import groovy.json.JsonSlurper + +// Consumer properties +def props = new Properties() +props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka:9092") // Adjust to your Kafka bootstrap server +props.put(ConsumerConfig.GROUP_ID_CONFIG, "apex-group") +props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()) +props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()) +props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") + +// Create the Kafka consumer +def consumer = new KafkaConsumer<>(props) + +// Subscribe to the topic +consumer.subscribe(["apex-cl-mgt"]) -def res = []; -if (prev.getResponseCode() == '200') { - def jsonSlurper = new JsonSlurper(); - def responseString = prev.getResponseDataAsString(); - res = jsonSlurper.parseText(responseString); - def expectedNumberOfNotifications = ${threads} * 2; - assert res instanceof 
List; - assert res.size() == expectedNumberOfNotifications; - assert responseString.contains("ACTIVE") - assert responseString.contains("FINAL_FAILURE") - assert !responseString.contains("FINAL_SUCCESS") +// Poll for messages and assert the value +def found = false +def timeout = 10000 // timeout in milliseconds +def startTime = System.currentTimeMillis() + +while (!found && (System.currentTimeMillis() - startTime) < timeout) { + println("Inside while loop") + ConsumerRecords<String, String> records = consumer.poll(1000) + for (ConsumerRecord<String, String> record : records) { + println("Received message: Key=${record.key()}, Value=${record.value()}, Partition=${record.partition()}, Offset=${record.offset()}") + + // Parse the JSON value + def jsonSlurper = new JsonSlurper() + def message = jsonSlurper.parseText(record.value()) + + // Perform assertion on specific fields in the JSON + if (!(message?.name == "SUCCESS")) { + found = true + println("Assertion passed: source is 'APEX' or status is 'ACTIVE'") + break + } + } } + +// Close the consumer +consumer.close() - groovy - - - + + - + 1 1 true @@ -7159,7 +6804,7 @@ if (prev.getResponseCode() == '200') { 6 ${HOSTNAME} ${PAP_PORT} - https + http /policy/pap/v1/pdps/deployments/batch true POST @@ -7197,7 +6842,7 @@ if (prev.getResponseCode() == '200') { - + 202 @@ -7212,7 +6857,7 @@ if (prev.getResponseCode() == '200') { 6 ${HOSTNAME} ${API_PORT} - https + http /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies/onap.policies.apex.Simplecontrolloop/versions/1.0.0 true DELETE @@ -7223,7 +6868,7 @@ if (prev.getResponseCode() == '200') { - + 200 @@ -7238,7 +6883,7 @@ if (prev.getResponseCode() == '200') { 6 ${HOSTNAME} ${API_PORT} - https + http /policy/api/v1/policytypes/onap.policies.native.Apex/versions/1.0.0/policies/onap.policies.apex.Example/versions/1.0.0 true DELETE @@ -7249,7 +6894,7 @@ if (prev.getResponseCode() == '200') { - + 200 @@ -7261,349 +6906,6 @@ if (prev.getResponseCode() == '200') { - - ${threads} - 1 - 259200 - true - true - continue - - -1 - false - - - - - 6 - ${HOSTNAME} - ${APEX_PORT} - https - /policy/apex-pdp/v1/healthcheck - true - GET - true - false - - - - - - - true - - - import groovy.json.JsonSlurper; - -def res = []; -if (prev.getResponseCode() == '200') { - def jsonSlurper = new JsonSlurper(); - res = jsonSlurper.parseText(prev.getResponseDataAsString()); - - assert res instanceof Map; - assert res.healthy == true; - assert res.message == 'alive'; -} - - groovy - - - - - - - ${threads} - ${threadsTimeOutInMs} - - - - Send input event to apex-pdp - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/unauthenticated.dcae_cl_output - true - POST - true - true - - - - false - { - "event": { - "commonEventHeader": { - "startEpochMicrosec": 1597953057126, - "sourceId": "927b2580-36d9-4f13-8421-3c9d43b7a57${__threadNum}", - "eventId": "8c7935a4-79d8-4ec0-b661-dcca3cd6800${__threadNum}", - "lastEpochMicrosec": 1597952499468, - "eventName": "Report hostname & IP", - "sourceName": "PNF101", - "reportingEntityName": "DCAE", - "vesEventListenerVersion": "7.1", - "internalHeaderFields": {}, - "version": "4.0.1", - "priority": "Normal", - "sequence": 1, - "domain": "other" - }, - "otherFields": { - "hashMap": { - "hostname": "UNDEFINED", - "pnfId": "927b2580-36d9-4f13-8421-3c9d43b7a57e", - "pnfName": "PNF101", - "ip": "10.101.200.10" - }, - "otherFieldsVersion": "3.0" - } - } -} - = - - - - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back 
from apex-pdp - - - - 1000 - - - - - Check output event - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/apex-cl-mgt/cg${__threadNum}/c1?timeout=30000 - true - GET - true - false - - - - - - - 1000 - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back from apex-pdp - - - - - ACTIVE - VES event has been received. Going to fetch details from AAI. - - - Assertion.response_data - false - 2 - Response code - Assert response message coming back from apex-pdp - - - - ${wait} - - - - - Check output event - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/apex-cl-mgt/cg${__threadNum}/c1?limit=1&timeout=30000 - true - GET - true - false - - - - - - - 1000 - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back from apex-pdp - - - - - SUCCESS - - - Assertion.response_data - false - 2 - Response code - Assert response message coming back from apex-pdp - - - - ${wait} - - - - - Check output event - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/apex-cl-mgt/cg${__threadNum}/c1?limit=1&timeout=30000 - true - GET - true - false - - - - - - - 1000 - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back from apex-pdp - - - - - FINAL_SUCCESS - - - Assertion.response_data - false - 2 - Response code - Assert response message coming back from apex-pdp - - - - ${wait} - - - - - ${__javaScript(("${response}".indexOf("VES event has been received. Going to fetch details from AAI") == -1 && ${counter} < 5),)} - - - - 0 - 1 - 1 - - - false - - - - Check output event - 6 - ${KAFKA_HOST} - ${KAFKA_PORT} - http - /events/apex-cl-mgt/cg${__threadNum}/c1?timeout=30000 - true - GET - true - false - - - - - - - 1000 - - - - - 200 - - - Assertion.response_code - false - 8 - Response code - Assert response code coming back from apex-pdp - - - - - ACTIVE - VES event has been received. Going to fetch details from AAI. - - - Assertion.response_data - false - 2 - Response code - Assert response message coming back from apex-pdp - - - - ${wait} - - - - true - - - vars.put("response", prev.getResponseDataAsString()); - groovy - - - - - - false @@ -7638,7 +6940,7 @@ if (prev.getResponseCode() == '200') { true - /home/dhanrahan/jmeter_results/apex_perf/summary.log + /home/wayne/dev/s3ps/apex-pdp/testsuites/apex-pdp-stability/src/main/resources/s3pTestResults.jtl @@ -7675,7 +6977,7 @@ if (prev.getResponseCode() == '200') { true - /home/dhanrahan/jmeter_results/apex_perf/resultTree.log + /home/wayne/dev/s3ps/apex-pdp/testsuites/apex-pdp-stability/src/main/resources/s3pTestResults.jtl @@ -7712,7 +7014,7 @@ if (prev.getResponseCode() == '200') { true - /home/dhanrahan/jmeter_results/apex_perf/resultTable.log + /home/wayne/dev/s3ps/apex-pdp/testsuites/apex-pdp-stability/src/main/resources/s3pTestResults.jtl -- cgit 1.2.3-korg
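
Note on the new JSR223 producer samplers: the Groovy scripts added above hardcode "kafka:9092" even though the plan defines KAFKA_HOST/KAFKA_PORT user-defined variables, and they embed ${__threadNum} inside the Groovy source, which JMeter may substitute only once when script caching is enabled. The following is a minimal sketch of the same send, not part of the patch, assuming the standard JSR223 bindings (vars, ctx), that KAFKA_HOST/KAFKA_PORT remain defined in this plan, and a deliberately trimmed event payload for illustration only:

    import org.apache.kafka.clients.producer.KafkaProducer
    import org.apache.kafka.clients.producer.ProducerRecord
    import org.apache.kafka.common.serialization.StringSerializer
    import groovy.json.JsonOutput

    // Build the broker address from the plan's own variables instead of hardcoding it.
    def bootstrap = vars.get("KAFKA_HOST") + ":" + vars.get("KAFKA_PORT")
    // Per-thread suffix computed in Groovy at run time, so it is unaffected by script caching.
    def threadId = ctx.getThreadNum()

    def props = new Properties()
    props.put("bootstrap.servers", bootstrap)
    props.put("acks", "all")
    props.put("key.serializer", StringSerializer.class.getName())
    props.put("value.serializer", StringSerializer.class.getName())

    // Abbreviated VES trigger event; the real payloads carry the full commonEventHeader fields.
    def event = [
        event: [
            commonEventHeader: [
                sourceId  : "927b2580-36d9-4f13-8421-3c9d43b7a57" + threadId,
                eventId   : "8c7935a4-79d8-4ec0-b661-dcca3cd6800" + threadId,
                eventName : "Report hostname & IP",
                sourceName: "PNF101",
                domain    : "other",
                version   : "4.0.1",
                priority  : "Normal",
                sequence  : 1
            ],
            otherFields: [
                hashMap           : [pnfName: "PNF101", ip: "10.101.200.10"],
                otherFieldsVersion: "3.0"
            ]
        ]
    ]

    def producer = new KafkaProducer<>(props)
    try {
        // Block until the broker acknowledges the send so failures surface in the sampler.
        producer.send(new ProducerRecord<>("unauthenticated.dcae_cl_output", JsonOutput.toJson(event))).get()
    } finally {
        producer.close()
    }

The same substitution would apply to the second trigger payload the patch sends to unauthenticated.dcae_policy_example_output.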
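
Note on the new JSR223 consumer samplers: they poll apex-cl-mgt and print a message when a matching notification arrives, but the sampler still passes when nothing is received within the timeout, and the "source is 'APEX' or status is 'ACTIVE'" message is also printed by the script whose actual check is message?.name != "SUCCESS". Below is a minimal sketch of the same poll loop, not part of the patch, that reads the broker address from KAFKA_HOST/KAFKA_PORT and marks the sampler failed on timeout, assuming the standard JSR223 sampler bindings (vars, SampleResult) and the topic/group names used above:

    import org.apache.kafka.clients.consumer.ConsumerConfig
    import org.apache.kafka.clients.consumer.KafkaConsumer
    import org.apache.kafka.common.serialization.StringDeserializer
    import groovy.json.JsonSlurper
    import java.time.Duration

    def bootstrap = vars.get("KAFKA_HOST") + ":" + vars.get("KAFKA_PORT")

    def props = new Properties()
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrap)
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "apex-group")
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName())
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName())
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

    def consumer = new KafkaConsumer<>(props)
    consumer.subscribe(["apex-cl-mgt"])

    def found = false
    def deadline = System.currentTimeMillis() + 10000
    try {
        while (!found && System.currentTimeMillis() < deadline) {
            def records = consumer.poll(Duration.ofSeconds(1))
            for (record in records) {
                // Same field check as the scripts in the patch.
                def message = new JsonSlurper().parseText(record.value())
                if (message?.source == "APEX" || message?.component?.status == "ACTIVE") {
                    found = true
                    break
                }
            }
        }
    } finally {
        consumer.close()
    }

    if (!found) {
        // Fail the sampler explicitly so missed notifications are counted as errors.
        SampleResult.setSuccessful(false)
        SampleResult.setResponseMessage("No matching apex-cl-mgt notification within 10s from " + bootstrap)
    }

Failing the result here lets the summary and result listeners already configured in this plan count missed notifications as errors instead of silent passes.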