author     Daniel Hanrahan <daniel.hanrahan@est.tech>    2024-07-31 12:30:21 +0000
committer  Gerrit Code Review <gerrit@onap.org>          2024-07-31 12:30:21 +0000
commit     b66c993cc2ab670db0f62afe22bb0b5020e5dc40 (patch)
tree       348d380b7e451db8273c9a302cfff8e0961dfb2a /k6-tests
parent     9d8e6852b2ebfffca8204557a005aba27c1fc581 (diff)
parent     65b870baf15ceaa336c1290a8bb920b48875a3a0 (diff)
Merge "Add k6 for legacy async batch passthrough read operation"
Diffstat (limited to 'k6-tests')
-rw-r--r--  k6-tests/README.md                        |  3
-rw-r--r--  k6-tests/ncmp/common/passthrough-crud.js  | 25
-rw-r--r--  k6-tests/ncmp/common/utils.js             |  4
-rw-r--r--  k6-tests/ncmp/ncmp-kpi.js                 | 53
4 files changed, 79 insertions(+), 6 deletions(-)
diff --git a/k6-tests/README.md b/k6-tests/README.md
index 0fdebcfe9d..9a385e100a 100644
--- a/k6-tests/README.md
+++ b/k6-tests/README.md
@@ -4,8 +4,7 @@ k6 tests are written in JavaScript.
 
 ## k6 installation
-Follow the instructions in the [k6 installation guide](https://grafana.com/docs/k6/latest/set-up/install-k6/)
-to get started.
+Follow the instructions in the [build from source guide](https://github.com/mostafa/xk6-kafka) to get started.
 
 ## Running the k6 test suites
 Simply run the main script. (The script assumes k6 and docker-compose have been installed).
diff --git a/k6-tests/ncmp/common/passthrough-crud.js b/k6-tests/ncmp/common/passthrough-crud.js
index 43a215fdf8..76bda4e1bd 100644
--- a/k6-tests/ncmp/common/passthrough-crud.js
+++ b/k6-tests/ncmp/common/passthrough-crud.js
@@ -19,7 +19,12 @@
  */
 
 import http from 'k6/http';
-import { NCMP_BASE_URL, CONTENT_TYPE_JSON_PARAM, getRandomCmHandleId } from './utils.js';
+import {
+    CONTENT_TYPE_JSON_PARAM,
+    getRandomCmHandleId,
+    NCMP_BASE_URL,
+    TOPIC_DATA_OPERATIONS_BATCH_READ
+} from './utils.js';
 
 export function passthroughRead() {
     const cmHandleId = getRandomCmHandleId();
@@ -40,3 +45,21 @@ export function passthroughWrite() {
     const response = http.post(url, JSON.stringify(body), CONTENT_TYPE_JSON_PARAM);
     return response;
 }
+
+export function batchRead(cmHandleIds) {
+    const url = `${NCMP_BASE_URL}/ncmp/v1/data?topic=${TOPIC_DATA_OPERATIONS_BATCH_READ}`
+    const payload = {
+        "operations": [
+            {
+                "resourceIdentifier": "parent/child",
+                "targetIds": cmHandleIds,
+                "datastore": "ncmp-datastore:passthrough-operational",
+                "options": "(fields=schemas/schema)",
+                "operationId": "12",
+                "operation": "read"
+            }
+        ]
+    };
+    const response = http.post(url, JSON.stringify(payload), CONTENT_TYPE_JSON_PARAM);
+    return response;
+}
\ No newline at end of file
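
The new batchRead helper sends one asynchronous data-operation request covering a whole batch of CM-handles; NCMP acknowledges the HTTP request and delivers the actual responses on the Kafka topic named in the request's topic query parameter. A minimal sketch of driving it in isolation (not part of this commit), assuming a running NCMP at NCMP_BASE_URL and the makeBatchOfCmHandleIds helper that ncmp-kpi.js imports from utils.js further down:

    // smoke-batch-read.js: illustrative sketch only, not part of the commit.
    import { check } from 'k6';
    import { batchRead } from './common/passthrough-crud.js';
    import { makeBatchOfCmHandleIds, DATA_OPERATION_READ_BATCH_SIZE } from './common/utils.js';

    export default function () {
        // Build one full batch of CM-handle ids and fire a single batch read.
        const cmHandleIds = makeBatchOfCmHandleIds(DATA_OPERATION_READ_BATCH_SIZE, 1);
        const response = batchRead(cmHandleIds);
        // Only the request is acknowledged here; the data itself arrives on Kafka.
        check(response, { 'data operation batch read status equals 200': (r) => r.status === 200 });
    }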
diff --git a/k6-tests/ncmp/common/utils.js b/k6-tests/ncmp/common/utils.js
index 0f3b8d9c96..f24edc50d6 100644
--- a/k6-tests/ncmp/common/utils.js
+++ b/k6-tests/ncmp/common/utils.js
@@ -25,6 +25,9 @@ export const REGISTRATION_BATCH_SIZE = 100;
 export const READ_DATA_FOR_CM_HANDLE_DELAY_MS = 300; // must have same value as in docker-compose.yml
 export const WRITE_DATA_FOR_CM_HANDLE_DELAY_MS = 670; // must have same value as in docker-compose.yml
 export const CONTENT_TYPE_JSON_PARAM = { headers: {'Content-Type': 'application/json'} };
+export const DATA_OPERATION_READ_BATCH_SIZE = 200;
+export const TOPIC_DATA_OPERATIONS_BATCH_READ = 'topic-data-operations-batch-read';
+export const KAFKA_BOOTSTRAP_SERVERS = ['localhost:9092'];
 
 export function recordTimeInSeconds(functionToExecute) {
     const startTimeInMillis = Date.now();
@@ -65,6 +68,7 @@ export function makeCustomSummaryReport(data, options) {
         makeSummaryCsvLine('5b', 'NCMP overhead for Synchronous single CM-handle pass-through read', 'milliseconds', 'ncmp_overhead_passthrough_read', data, options),
         makeSummaryCsvLine('6a', 'Synchronous single CM-handle pass-through write', 'requests/second', 'http_reqs{scenario:passthrough_write}', data, options),
         makeSummaryCsvLine('6b', 'NCMP overhead for Synchronous single CM-handle pass-through write', 'milliseconds', 'ncmp_overhead_passthrough_write', data, options),
+        makeSummaryCsvLine('7', 'Data operations batch read', 'events/second', 'data_operations_batch_read_cmhandles_per_second', data, options),
     ];
     return summaryCsvLines.join('\n') + '\n';
 }
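
Note that ncmp-kpi.js below imports makeBatchOfCmHandleIds from utils.js, but that helper predates this change and is not shown in the diff. Purely as an illustration of its contract (batch size and batch number in, a list of CM-handle ids out), a hypothetical shape could be:

    // Hypothetical sketch only: the real implementation lives in utils.js and is
    // not shown in this diff. The 'ch-' prefix and offset arithmetic are assumptions.
    export function makeBatchOfCmHandleIds(batchSize, batchNumber) {
        const startIndex = 1 + batchNumber * batchSize;
        return Array.from({ length: batchSize }, (_, index) => `ch-${startIndex + index}`);
    }

Under these assumptions, the call makeBatchOfCmHandleIds(DATA_OPERATION_READ_BATCH_SIZE, 1) used below would yield ch-201 through ch-400 for the batch size of 200 added above.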
diff --git a/k6-tests/ncmp/ncmp-kpi.js b/k6-tests/ncmp/ncmp-kpi.js
index b4c476ecb2..8ff9ec50b4 100644
--- a/k6-tests/ncmp/ncmp-kpi.js
+++ b/k6-tests/ncmp/ncmp-kpi.js
@@ -20,16 +20,28 @@
 
 import { check } from 'k6';
 import { Gauge, Trend } from 'k6/metrics';
-import { TOTAL_CM_HANDLES, READ_DATA_FOR_CM_HANDLE_DELAY_MS, WRITE_DATA_FOR_CM_HANDLE_DELAY_MS,
-    makeCustomSummaryReport, recordTimeInSeconds } from './common/utils.js';
+import {
+    TOTAL_CM_HANDLES, READ_DATA_FOR_CM_HANDLE_DELAY_MS, WRITE_DATA_FOR_CM_HANDLE_DELAY_MS,
+    makeCustomSummaryReport, recordTimeInSeconds, makeBatchOfCmHandleIds, DATA_OPERATION_READ_BATCH_SIZE,
+    TOPIC_DATA_OPERATIONS_BATCH_READ, KAFKA_BOOTSTRAP_SERVERS
+} from './common/utils.js';
 import { registerAllCmHandles, deregisterAllCmHandles } from './common/cmhandle-crud.js';
 import { executeCmHandleSearch, executeCmHandleIdSearch } from './common/search-base.js';
-import { passthroughRead, passthroughWrite } from './common/passthrough-crud.js';
+import { passthroughRead, passthroughWrite, batchRead } from './common/passthrough-crud.js';
+import {
+    Reader,
+} from 'k6/x/kafka';
 
 let cmHandlesCreatedPerSecondGauge = new Gauge('cmhandles_created_per_second');
 let cmHandlesDeletedPerSecondGauge = new Gauge('cmhandles_deleted_per_second');
 let passthroughReadNcmpOverheadTrend = new Trend('ncmp_overhead_passthrough_read');
 let passthroughWriteNcmpOverheadTrend = new Trend('ncmp_overhead_passthrough_write');
+let dataOperationsBatchReadCmHandlePerSecondTrend = new Trend('data_operations_batch_read_cmhandles_per_second');
+
+const reader = new Reader({
+    brokers: KAFKA_BOOTSTRAP_SERVERS,
+    topic: TOPIC_DATA_OPERATIONS_BATCH_READ,
+});
 
 const DURATION = '15m';
 
@@ -61,6 +73,22 @@ export const options = {
             vus: 3,
             duration: DURATION,
         },
+        data_operation_send_async_http_request: {
+            executor: 'constant-arrival-rate',
+            exec: 'data_operation_send_async_http_request',
+            duration: DURATION,
+            rate: 1,
+            timeUnit: '1s',
+            preAllocatedVUs: 1,
+        },
+        data_operation_async_batch_read: {
+            executor: 'constant-arrival-rate',
+            exec: 'data_operation_async_batch_read',
+            duration: DURATION,
+            rate: 1,
+            timeUnit: '1s',
+            preAllocatedVUs: 1,
+        }
     },
     thresholds: {
         'cmhandles_created_per_second': ['value >= 22'],
@@ -75,6 +103,9 @@
         'http_req_failed{scenario:cm_search_module}': ['rate == 0'],
         'http_req_failed{scenario:passthrough_read}': ['rate == 0'],
         'http_req_failed{scenario:passthrough_write}': ['rate == 0'],
+        'http_req_failed{scenario:data_operation_send_async_http_request}': ['rate == 0'],
+        'kafka_reader_error_count{scenario:data_operation_consume_kafka_responses}': ['count == 0'],
+        'data_operations_batch_read_cmhandles_per_second': ['avg >= 150'],
     },
 };
 
@@ -114,6 +145,22 @@ export function cm_search_module() {
     check(JSON.parse(response.body), { 'module search returned expected CM-handles': (arr) => arr.length === TOTAL_CM_HANDLES });
 }
 
+export function data_operation_send_async_http_request() {
+    const nextBatchOfCmHandleIds = makeBatchOfCmHandleIds(DATA_OPERATION_READ_BATCH_SIZE,1);
+    const response = batchRead(nextBatchOfCmHandleIds)
+    check(response, { 'data operation batch read status equals 200': (r) => r.status === 200 });
+}
+
+export function data_operation_async_batch_read() {
+    try {
+        let messages = reader.consume({ limit: DATA_OPERATION_READ_BATCH_SIZE });
+        dataOperationsBatchReadCmHandlePerSecondTrend.add(messages.length);
+    } catch (error) {
+        dataOperationsBatchReadCmHandlePerSecondTrend.add(0);
+        console.error(error);
+    }
+}
+
 export function handleSummary(data) {
     return {
         stdout: makeCustomSummaryReport(data, options),
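
Since the consumer side needs a k6 binary built with the xk6-kafka extension (the reason for the README change above), it can be useful to verify broker connectivity before starting the 15-minute KPI run. A standalone sketch, assuming only the constants and Reader API already used in this diff:

    // kafka-smoke.js: illustrative sketch only, not part of the commit.
    import { Reader } from 'k6/x/kafka';
    import { KAFKA_BOOTSTRAP_SERVERS, TOPIC_DATA_OPERATIONS_BATCH_READ } from './common/utils.js';

    const reader = new Reader({
        brokers: KAFKA_BOOTSTRAP_SERVERS,
        topic: TOPIC_DATA_OPERATIONS_BATCH_READ,
    });

    export default function () {
        // consume() blocks until the limit is reached, so ask for one message only.
        const messages = reader.consume({ limit: 1 });
        console.log(`consumed ${messages.length} message(s) from ${TOPIC_DATA_OPERATIONS_BATCH_READ}`);
    }

    export function teardown() {
        reader.close();
    }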