diff options
author | Daniel Hanrahan <daniel.hanrahan@est.tech> | 2024-10-09 11:22:46 +0000 |
---|---|---|
committer | Gerrit Code Review <gerrit@onap.org> | 2024-10-09 11:22:46 +0000 |
commit | 110528232b5704b86d609550224994dded24788d (patch) | |
tree | 965d3fac9a42ab2639a2ce91c8d26381323b0031 | |
parent | fe5bbade7c7e1711b6d9ca626d8505d9828eda1b (diff) | |
parent | 6041e3f4b9af9387added1c5d093150a247a24a3 (diff) |
Merge "Added K6 script test to check the throughput of the CPS kafka consumer"
-rw-r--r-- | k6-tests/once-off-test/kafka/produce-avc-event.js | 99 | ||||
-rw-r--r-- | k6-tests/resources/sampleAvcInputEvent.json | 38 |
2 files changed, 137 insertions, 0 deletions
diff --git a/k6-tests/once-off-test/kafka/produce-avc-event.js b/k6-tests/once-off-test/kafka/produce-avc-event.js new file mode 100644 index 0000000000..981a21af65 --- /dev/null +++ b/k6-tests/once-off-test/kafka/produce-avc-event.js @@ -0,0 +1,99 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2024 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +import { crypto } from 'k6/experimental/webcrypto'; +import { check } from 'k6'; +import { Writer, SchemaRegistry, SCHEMA_TYPE_STRING } from 'k6/x/kafka'; + +const testEventPayload = JSON.stringify(JSON.parse(open('../../resources/sampleAvcInputEvent.json'))); +const schemaRegistry = new SchemaRegistry(); +const kafkaProducer = new Writer({ + brokers: ['localhost:9092'], + topic: 'dmi-cm-events', + autoCreateTopic: true, + batchSize: 5000, + compression: 'gzip', + requestTimeout: 30000 +}); + +const TOTAL_MESSAGES = 100000; +const VIRTUAL_USERS = 1000; + +export const options = { + setupTimeout: '1m', + teardownTimeout: '1m', + scenarios: { + produceKafkaMessages: { + executor: 'shared-iterations', + exec: 'sendKafkaMessages', + vus: VIRTUAL_USERS, + iterations: TOTAL_MESSAGES, + maxDuration: '10m', + } + } +}; + +function getCloudEventHeaders() { + return { + ce_type: 'org.onap.cps.ncmp.events.avc1_0_0.AvcEvent', + ce_source: 'DMI', + ce_destination: 'dmi-cm-events', + ce_specversion: '1.0', + ce_time: new Date().toISOString(), + ce_id: crypto.randomUUID(), + ce_dataschema: 'urn:cps:org.onap.cps.ncmp.events.avc1_0_0.AvcEvent:1.0.0', + ce_correlationid: crypto.randomUUID() + }; +} + +export function sendKafkaMessages() { + const cloudEventHeaders = getCloudEventHeaders(); + + const avcCloudEvent = { + key: schemaRegistry.serialize({ + data: cloudEventHeaders.ce_correlationid, + schemaType: SCHEMA_TYPE_STRING, + }), + value: schemaRegistry.serialize({ + data: testEventPayload, + schemaType: SCHEMA_TYPE_STRING + }), + headers: cloudEventHeaders + }; + + try { + kafkaProducer.produce({ messages: [avcCloudEvent] }); + + const isMessageSent = check(kafkaProducer, { + 'Message sent successfully': (producer) => producer != null, + }); + + if (!isMessageSent) { + console.error('Failed to send message:', avcCloudEvent); + } + + } catch 
(error) { + console.error('Error during message production:', error, avcCloudEvent); + } +} + +export function teardown() { + kafkaProducer.close(); +} diff --git a/k6-tests/resources/sampleAvcInputEvent.json b/k6-tests/resources/sampleAvcInputEvent.json new file mode 100644 index 0000000000..4c9cd721df --- /dev/null +++ b/k6-tests/resources/sampleAvcInputEvent.json @@ -0,0 +1,38 @@ +{ + "data": { + "push-change-update": { + "datastore-changes": { + "ietf-yang-patch:yang-patch": { + "patch-id": "34534ffd98", + "edit": [ + { + "edit-id": "ded43434-1", + "operation": "replace", + "target": "ancestor:ancestor/parent[@id='parent1']/child[@id='child1']/grandchild[@id='grandchild1']/relation[@id='relation1']", + "value": { + "attributes": [] + } + }, + { + "edit-id": "ded43434-2", + "operation": "create", + "target": "ancestor:ancestor/parent[@id='parent1']/child[@id='child1']/grandchild[@id='grandchild1']/relation[@id='relation1']", + "value": { + "attributes": [ + { + "isHoAllowed": false + } + ] + } + }, + { + "edit-id": "ded43434-3", + "operation": "delete", + "target": "ancestor:ancestor/parent[@id='parent1']/child[@id='child1']/grandchild[@id='grandchild1']/relation[@id='relation1']" + } + ] + } + } + } + } +}
\ No newline at end of file |