diff options
author | halil.cakal <halil.cakal@est.tech> | 2024-07-25 11:12:29 +0100 |
---|---|---|
committer | halil.cakal <halil.cakal@est.tech> | 2024-07-31 10:14:25 +0100 |
commit | 65b870baf15ceaa336c1290a8bb920b48875a3a0 (patch) | |
tree | 6640f54681d1853d07e9a6a3da501d6e139ee0c1 /k6-tests/ncmp/common | |
parent | b33ea92a29a2724750554ebb412cf841fbe8c2a8 (diff) |
Add k6 for legacy async batch passthrough read operation
- add kafka support to k6 codebase
- add two new tests: data operation batch passthrough read
and data operation async batch read (from kafka)
- calculate the events/second via Trend
- add kafka ui support to docker-compose
Note: Before this commit is merged, the k6 executable
should have been compiled with the kafka extension of xk6-kafka
Issue-ID: CPS-2268
Change-Id: Ib7777b7bc9f15b210ea36d3541cba0e0c943f883
Signed-off-by: halil.cakal <halil.cakal@est.tech>
Diffstat (limited to 'k6-tests/ncmp/common')
-rw-r--r-- | k6-tests/ncmp/common/passthrough-crud.js | 25 | ||||
-rw-r--r-- | k6-tests/ncmp/common/utils.js | 4 |
2 files changed, 28 insertions, 1 deletion
/**
 * Triggers a legacy async batch (data operation) read across multiple CM handles.
 * The read results are published to the Kafka topic named by
 * TOPIC_DATA_OPERATIONS_BATCH_READ; the returned HTTP response only acknowledges
 * acceptance of the request, it does not carry the data itself.
 *
 * @param {string[]} cmHandleIds - IDs of the CM handles to read from.
 * @returns {Object} k6 HTTP response object for the batch request.
 */
export function batchRead(cmHandleIds) {
    // Fixed: the url declaration previously relied on ASI (missing semicolon),
    // unlike every other statement in this file.
    const url = `${NCMP_BASE_URL}/ncmp/v1/data?topic=${TOPIC_DATA_OPERATIONS_BATCH_READ}`;
    const payload = {
        "operations": [
            {
                "resourceIdentifier": "parent/child",
                "targetIds": cmHandleIds,
                "datastore": "ncmp-datastore:passthrough-operational",
                "options": "(fields=schemas/schema)",
                "operationId": "12",
                "operation": "read"
            }
        ]
    };
    const response = http.post(url, JSON.stringify(payload), CONTENT_TYPE_JSON_PARAM);
    return response;
}
\ No newline at end of file diff --git a/k6-tests/ncmp/common/utils.js b/k6-tests/ncmp/common/utils.js index 0f3b8d9c96..f24edc50d6 100644 --- a/k6-tests/ncmp/common/utils.js +++ b/k6-tests/ncmp/common/utils.js @@ -25,6 +25,9 @@ export const REGISTRATION_BATCH_SIZE = 100; export const READ_DATA_FOR_CM_HANDLE_DELAY_MS = 300; // must have same value as in docker-compose.yml export const WRITE_DATA_FOR_CM_HANDLE_DELAY_MS = 670; // must have same value as in docker-compose.yml export const CONTENT_TYPE_JSON_PARAM = { headers: {'Content-Type': 'application/json'} }; +export const DATA_OPERATION_READ_BATCH_SIZE = 200; +export const TOPIC_DATA_OPERATIONS_BATCH_READ = 'topic-data-operations-batch-read'; +export const KAFKA_BOOTSTRAP_SERVERS = ['localhost:9092']; export function recordTimeInSeconds(functionToExecute) { const startTimeInMillis = Date.now(); @@ -65,6 +68,7 @@ export function makeCustomSummaryReport(data, options) { makeSummaryCsvLine('5b', 'NCMP overhead for Synchronous single CM-handle pass-through read', 'milliseconds', 'ncmp_overhead_passthrough_read', data, options), makeSummaryCsvLine('6a', 'Synchronous single CM-handle pass-through write', 'requests/second', 'http_reqs{scenario:passthrough_write}', data, options), makeSummaryCsvLine('6b', 'NCMP overhead for Synchronous single CM-handle pass-through write', 'milliseconds', 'ncmp_overhead_passthrough_write', data, options), + makeSummaryCsvLine('7', 'Data operations batch read', 'events/second', 'data_operations_batch_read_cmhandles_per_second', data, options), ]; return summaryCsvLines.join('\n') + '\n'; } |