Diffstat (limited to 'k6-tests')
-rwxr-xr-x | k6-tests/install-deps.sh                 |  47
-rw-r--r-- | k6-tests/ncmp/common/cmhandle-crud.js    |  67
-rw-r--r-- | k6-tests/ncmp/common/passthrough-crud.js |  59
-rw-r--r-- | k6-tests/ncmp/common/search-base.js      |  48
-rw-r--r-- | k6-tests/ncmp/common/utils.js            |  62
-rw-r--r-- | k6-tests/ncmp/ncmp-kpi.js                | 172
-rwxr-xr-x | k6-tests/ncmp/run-all-tests.sh           |   4
-rwxr-xr-x | k6-tests/run-k6-tests.sh                 |   4
-rwxr-xr-x | k6-tests/teardown.sh                     |   8
9 files changed, 293 insertions, 178 deletions
diff --git a/k6-tests/install-deps.sh b/k6-tests/install-deps.sh
new file mode 100755
index 0000000000..bb5deb93dd
--- /dev/null
+++ b/k6-tests/install-deps.sh
@@ -0,0 +1,47 @@
+#!/bin/bash
+#
+# Copyright 2024 Nordix Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+echo "---> install-deps.sh"
+echo "Installing dependencies"
+
+# Create directory for downloaded binaries.
+mkdir -p bin
+touch bin/.gitignore
+
+# Add it to the PATH, so downloaded versions will be used.
+export PATH="$(pwd)/bin:$PATH"
+
+# Download docker-compose.
+if [ ! -x bin/docker-compose ]; then
+    echo " Downloading docker-compose"
+    curl -s -L https://github.com/docker/compose/releases/download/v2.29.2/docker-compose-linux-x86_64 > bin/docker-compose
+    chmod +x bin/docker-compose
+else
+    echo " docker-compose already installed"
+fi
+docker-compose version
+
+# Download k6 with kafka extension.
+if [ ! -x bin/k6 ]; then
+    echo " Downloading k6 with kafka extension"
+    curl -s -L https://github.com/mostafa/xk6-kafka/releases/download/v0.26.0/xk6-kafka_v0.26.0_linux_amd64.tar.gz | tar -xz
+    mv dist/xk6-kafka_v0.26.0_linux_amd64 bin/k6 && rmdir dist
+    chmod +x bin/k6
+else
+    echo " k6 already installed"
+fi
+k6 --version
diff --git a/k6-tests/ncmp/common/cmhandle-crud.js b/k6-tests/ncmp/common/cmhandle-crud.js
index 88ecdb45b8..7fab62abd8 100644
--- a/k6-tests/ncmp/common/cmhandle-crud.js
+++ b/k6-tests/ncmp/common/cmhandle-crud.js
@@ -18,35 +18,42 @@
  * ============LICENSE_END=========================================================
  */
 
-import http from 'k6/http';
-import { check, sleep } from 'k6';
-import { NCMP_BASE_URL, DMI_PLUGIN_URL, TOTAL_CM_HANDLES, MODULE_SET_TAGS, REGISTRATION_BATCH_SIZE, CONTENT_TYPE_JSON_PARAM, makeBatchOfCmHandleIds } from './utils.js';
+import { sleep } from 'k6';
+import { performPostRequest, NCMP_BASE_URL, DMI_PLUGIN_URL, TOTAL_CM_HANDLES, MODULE_SET_TAGS
+} from './utils.js';
 import { executeCmHandleIdSearch } from './search-base.js';
 
-export function registerAllCmHandles() {
-    forEachBatchOfCmHandles(createCmHandles);
-    waitForAllCmHandlesToBeReady();
+export function createCmHandles(cmHandleIds) {
+    const url = `${NCMP_BASE_URL}/ncmpInventory/v1/ch`;
+    const payload = JSON.stringify(createCmHandlePayload(cmHandleIds));
+    return performPostRequest(url, payload, 'createCmHandles');
 }
 
-export function deregisterAllCmHandles() {
-    forEachBatchOfCmHandles(deleteCmHandles);
+export function deleteCmHandles(cmHandleIds) {
+    const url = `${NCMP_BASE_URL}/ncmpInventory/v1/ch`;
+    const payload = JSON.stringify({
+        "dmiPlugin": DMI_PLUGIN_URL,
+        "removedCmHandles": cmHandleIds,
+    });
+    return performPostRequest(url, payload, 'deleteCmHandles');
 }
 
-function forEachBatchOfCmHandles(functionToExecute) {
-    const TOTAL_BATCHES = Math.ceil(TOTAL_CM_HANDLES / REGISTRATION_BATCH_SIZE);
-    for (let batchNumber = 0; batchNumber < TOTAL_BATCHES; batchNumber++) {
-        const nextBatchOfCmHandleIds = makeBatchOfCmHandleIds(REGISTRATION_BATCH_SIZE, batchNumber);
-        functionToExecute(nextBatchOfCmHandleIds);
-    }
+export function waitForAllCmHandlesToBeReady() {
+    const POLLING_INTERVAL_SECONDS = 5;
+    let cmHandlesReady = 0;
+    do {
+        sleep(POLLING_INTERVAL_SECONDS);
+        cmHandlesReady = getNumberOfReadyCmHandles();
+        console.log(`${cmHandlesReady}/${TOTAL_CM_HANDLES} CM handles are READY`);
+    } while (cmHandlesReady < TOTAL_CM_HANDLES);
 }
 
-function createCmHandles(cmHandleIds) {
-    const url = `${NCMP_BASE_URL}/ncmpInventory/v1/ch`;
-    const payload = {
+function createCmHandlePayload(cmHandleIds) {
+    return {
         "dmiPlugin": DMI_PLUGIN_URL,
         "createdCmHandles": cmHandleIds.map((cmHandleId, index) => ({
             "cmHandle": cmHandleId,
-            "alternateId": `alt-${cmHandleId}`,
+            "alternateId": cmHandleId.replace('ch-', 'alt-'),
             "moduleSetTag": MODULE_SET_TAGS[index % MODULE_SET_TAGS.length],
             "cmHandleProperties": {"neType": "RadioNode"},
             "publicCmHandleProperties": {
@@ -56,30 +63,6 @@ function createCmHandles(cmHandleIds) {
             }
         })),
     };
-    const response = http.post(url, JSON.stringify(payload), CONTENT_TYPE_JSON_PARAM);
-    check(response, { 'create CM-handles status equals 200': (r) => r.status === 200 });
-    return response;
-}
-
-function deleteCmHandles(cmHandleIds) {
-    const url = `${NCMP_BASE_URL}/ncmpInventory/v1/ch`;
-    const payload = {
-        "dmiPlugin": DMI_PLUGIN_URL,
-        "removedCmHandles": cmHandleIds,
-    };
-    const response = http.post(url, JSON.stringify(payload), CONTENT_TYPE_JSON_PARAM);
-    check(response, { 'delete CM-handles status equals 200': (r) => r.status === 200 });
-    return response;
-}
-
-function waitForAllCmHandlesToBeReady() {
-    const POLLING_INTERVAL_SECONDS = 5;
-    let cmHandlesReady = 0;
-    do {
-        sleep(POLLING_INTERVAL_SECONDS);
-        cmHandlesReady = getNumberOfReadyCmHandles();
-        console.log(`${cmHandlesReady}/${TOTAL_CM_HANDLES} CM handles are READY`);
-    } while (cmHandlesReady < TOTAL_CM_HANDLES);
 }
 
 function getNumberOfReadyCmHandles() {
diff --git a/k6-tests/ncmp/common/passthrough-crud.js b/k6-tests/ncmp/common/passthrough-crud.js
index 5617f9d093..0cd96ad64d 100644
--- a/k6-tests/ncmp/common/passthrough-crud.js
+++ b/k6-tests/ncmp/common/passthrough-crud.js
@@ -18,47 +18,37 @@
  * ============LICENSE_END=========================================================
  */
 
-import http from 'k6/http';
+import { randomIntBetween } from 'https://jslib.k6.io/k6-utils/1.2.0/index.js';
 import {
-    CONTENT_TYPE_JSON_PARAM,
-    getRandomCmHandleId,
+    performPostRequest,
+    performGetRequest,
     NCMP_BASE_URL,
-    TOPIC_DATA_OPERATIONS_BATCH_READ
+    TOPIC_DATA_OPERATIONS_BATCH_READ,
+    TOTAL_CM_HANDLES
 } from './utils.js';
 
-export function passthroughRead() {
-    const cmHandleId = getRandomCmHandleId();
+export function passthroughRead(useAlternateId) {
+    const cmHandleReference = getRandomCmHandleReference(useAlternateId);
     const resourceIdentifier = 'my-resource-identifier';
-    const includeDescendants = true;
     const datastoreName = 'ncmp-datastore:passthrough-operational';
-    const url = `${NCMP_BASE_URL}/ncmp/v1/ch/${cmHandleId}/data/ds/${datastoreName}?resourceIdentifier=${resourceIdentifier}&include-descendants=${includeDescendants}`
-    const response = http.get(url);
-    return response;
-}
-
-export function passthroughReadWithAltId() {
-    const cmHandleId = getRandomCmHandleId();
-    const resourceIdentifier = 'my-resource-identifier';
     const includeDescendants = true;
-    const datastoreName = 'ncmp-datastore:passthrough-operational';
-    const url = `${NCMP_BASE_URL}/ncmp/v1/ch/alt-${cmHandleId}/data/ds/${datastoreName}?resourceIdentifier=${resourceIdentifier}&include-descendants=${includeDescendants}`
-    const response = http.get(url);
-    return response;
+    const url = generatePassthroughUrl(cmHandleReference, datastoreName, resourceIdentifier, includeDescendants);
+    return performGetRequest(url, 'passthroughRead');
 }
 
-export function passthroughWrite() {
-    const cmHandleId = getRandomCmHandleId();
+export function passthroughWrite(useAlternateId) {
+    const cmHandleReference = getRandomCmHandleReference(useAlternateId);
     const resourceIdentifier = 'my-resource-identifier';
     const datastoreName = 'ncmp-datastore:passthrough-running';
-    const url = `${NCMP_BASE_URL}/ncmp/v1/ch/${cmHandleId}/data/ds/${datastoreName}?resourceIdentifier=${resourceIdentifier}`
-    const body = `{"neType": "BaseStation"}`
-    const response = http.post(url, JSON.stringify(body), CONTENT_TYPE_JSON_PARAM);
-    return response;
+    const includeDescendants = false;
+    const url = generatePassthroughUrl(cmHandleReference, datastoreName, resourceIdentifier, includeDescendants);
+    const payload = JSON.stringify({"neType": "BaseStation"});
+    return performPostRequest(url, payload, 'passthroughWrite');
 }
 
 export function batchRead(cmHandleIds) {
-    const url = `${NCMP_BASE_URL}/ncmp/v1/data?topic=${TOPIC_DATA_OPERATIONS_BATCH_READ}`
-    const payload = {
+    const url = `${NCMP_BASE_URL}/ncmp/v1/data?topic=${TOPIC_DATA_OPERATIONS_BATCH_READ}`;
+    const payload = JSON.stringify({
         "operations": [
             {
                 "resourceIdentifier": "parent/child",
@@ -69,7 +59,16 @@ export function batchRead(cmHandleIds) {
                 "operation": "read"
             }
         ]
-    };
-    const response = http.post(url, JSON.stringify(payload), CONTENT_TYPE_JSON_PARAM);
-    return response;
+    });
+    return performPostRequest(url, payload, 'batchRead');
+}
+
+function getRandomCmHandleReference(useAlternateId) {
+    const prefix = useAlternateId ? 'alt' : 'ch';
+    return `${prefix}-${randomIntBetween(1, TOTAL_CM_HANDLES)}`;
+}
+
+function generatePassthroughUrl(cmHandleReference, datastoreName, resourceIdentifier, includeDescendants) {
+    const descendantsParam = includeDescendants ? `&include-descendants=${includeDescendants}` : '';
+    return `${NCMP_BASE_URL}/ncmp/v1/ch/${cmHandleReference}/data/ds/${datastoreName}?resourceIdentifier=${resourceIdentifier}${descendantsParam}`;
 }
\ No newline at end of file
diff --git a/k6-tests/ncmp/common/search-base.js b/k6-tests/ncmp/common/search-base.js
index bc964856af..a6424fe5d0 100644
--- a/k6-tests/ncmp/common/search-base.js
+++ b/k6-tests/ncmp/common/search-base.js
@@ -18,27 +18,7 @@
  * ============LICENSE_END=========================================================
  */
 
-import http from 'k6/http';
-import { NCMP_BASE_URL, CONTENT_TYPE_JSON_PARAM } from './utils.js';
-
-const SEARCH_PARAMETERS_PER_SCENARIO = {
-    'module': {
-        'cmHandleQueryParameters': [
-            {
-                'conditionName': 'hasAllModules',
-                'conditionParameters': [{'moduleName': 'ietf-yang-types-1'}]
-            }
-        ]
-    },
-    'readyCmHandles': {
-        'cmHandleQueryParameters': [
-            {
-                'conditionName': 'cmHandleWithCpsPath',
-                'conditionParameters': [{'cpsPath': '//state[@cm-handle-state="READY"]'}]
-            }
-        ]
-    }
-};
+import {performPostRequest, NCMP_BASE_URL} from './utils.js';
 
 export function executeCmHandleSearch(scenario) {
     return executeSearchRequest('searches', scenario);
@@ -52,6 +32,28 @@ function executeSearchRequest(searchType, scenario) {
     const searchParameters = SEARCH_PARAMETERS_PER_SCENARIO[scenario];
     const payload = JSON.stringify(searchParameters);
     const url = `${NCMP_BASE_URL}/ncmp/v1/ch/${searchType}`;
-    const response = http.post(url, payload, CONTENT_TYPE_JSON_PARAM);
-    return response;
+    return performPostRequest(url, payload, searchType);
 }
+
+const SEARCH_PARAMETERS_PER_SCENARIO = {
+    "module-and-properties": {
+        "cmHandleQueryParameters": [
+            {
+                "conditionName": "hasAllModules",
+                "conditionParameters": [{"moduleName": "ietf-yang-types"}]
+            },
+            {
+                "conditionName": "hasAllProperties",
+                "conditionParameters": [{"Color": "yellow"}]
+            }
+        ]
+    },
+    "readyCmHandles": {
+        "cmHandleQueryParameters": [
+            {
+                "conditionName": "cmHandleWithCpsPath",
+                "conditionParameters": [{"cpsPath": "//state[@cm-handle-state='READY']"}]
+            }
+        ]
+    }
+};
diff --git a/k6-tests/ncmp/common/utils.js b/k6-tests/ncmp/common/utils.js
index 294789f940..e6d9c92ee6 100644
--- a/k6-tests/ncmp/common/utils.js
+++ b/k6-tests/ncmp/common/utils.js
@@ -18,25 +18,19 @@
  * ============LICENSE_END=========================================================
  */
 
+import http from 'k6/http';
 export const NCMP_BASE_URL = 'http://localhost:8883';
 export const DMI_PLUGIN_URL = 'http://ncmp-dmi-plugin-demo-and-csit-stub:8092';
 export const TOTAL_CM_HANDLES = 20000;
 export const REGISTRATION_BATCH_SIZE = 100;
 export const READ_DATA_FOR_CM_HANDLE_DELAY_MS = 300; // must have same value as in docker-compose.yml
 export const WRITE_DATA_FOR_CM_HANDLE_DELAY_MS = 670; // must have same value as in docker-compose.yml
-export const CONTENT_TYPE_JSON_PARAM = { headers: {'Content-Type': 'application/json'} };
+export const CONTENT_TYPE_JSON_PARAM = {'Content-Type': 'application/json'};
 export const DATA_OPERATION_READ_BATCH_SIZE = 200;
 export const TOPIC_DATA_OPERATIONS_BATCH_READ = 'topic-data-operations-batch-read';
 export const KAFKA_BOOTSTRAP_SERVERS = ['localhost:9092'];
 export const MODULE_SET_TAGS = ['tagA','tagB','tagC',' tagD']
 
-export function recordTimeInSeconds(functionToExecute) {
-    const startTimeInMillis = Date.now();
-    functionToExecute();
-    const endTimeInMillis = Date.now();
-    const totalTimeInSeconds = (endTimeInMillis - startTimeInMillis) / 1000.0;
-    return totalTimeInSeconds;
-}
 
 /**
  * Generates a batch of CM-handle IDs based on batch size and number.
@@ -45,29 +39,59 @@ export function recordTimeInSeconds(functionToExecute) {
  * @returns {string[]} Array of CM-handle IDs, for example ['ch-201', 'ch-202' ... 'ch-300']
 */
 export function makeBatchOfCmHandleIds(batchSize, batchNumber) {
-    const batchOfIds = [];
     const startIndex = 1 + batchNumber * batchSize;
-    for (let i = 0; i < batchSize; i++) {
-        let cmHandleId = `ch-${startIndex + i}`;
-        batchOfIds.push(cmHandleId);
-    }
-    return batchOfIds;
+    return Array.from({ length: batchSize }, (_, i) => `ch-${startIndex + i}`);
+}
+
+/**
+ * Helper function to perform POST requests with JSON payload and content type.
+ * @param {string} url - The URL to send the POST request to.
+ * @param {Object} payload - The JSON payload to send in the POST request.
+ * @param {string} metricTag - A tag for the metric endpoint.
+ * @returns {Object} The response from the HTTP POST request.
+ */
+export function performPostRequest(url, payload, metricTag) {
+    const metricTags = {
+        endpoint: metricTag
+    };
+
+    return http.post(url, payload, {
+        headers: CONTENT_TYPE_JSON_PARAM,
+        tags: metricTags
+    });
 }
 
-export function getRandomCmHandleId() {
-    return `ch-${Math.floor(Math.random() * TOTAL_CM_HANDLES) + 1}`;
+/**
+ * Helper function to perform GET requests with metric tags.
+ *
+ * This function sends an HTTP GET request to the specified URL and attaches
+ * a metric tag to the request, which is useful for monitoring and analytics.
+ *
+ * @param {string} url - The URL to which the GET request will be sent.
+ * @param {string} metricTag - A string representing the metric tag to associate with the request.
+ *                             This tag is used for monitoring and tracking the request.
+ * @returns {Object} The response from the HTTP GET request. The response includes the status code,
+ *                   headers, body, and other related information.
+ */
+export function performGetRequest(url, metricTag) {
+    const metricTags = {
+        endpoint: metricTag
+    };
+    return http.get(url, {tags: metricTags});
 }
 
 export function makeCustomSummaryReport(data, options) {
     const summaryCsvLines = [
         '#,Test Name,Unit,Limit,Actual',
+        makeSummaryCsvLine('0', 'HTTP request failures for all tests', 'rate of failed requests', 'http_req_failed', data, options),
         makeSummaryCsvLine('1', 'Registration of CM-handles', 'CM-handles/second', 'cmhandles_created_per_second', data, options),
         makeSummaryCsvLine('2', 'De-registration of CM-handles', 'CM-handles/second', 'cmhandles_deleted_per_second', data, options),
-        makeSummaryCsvLine('3', 'CM-handle ID search with Module filter', 'milliseconds', 'http_req_duration{scenario:id_search_module}', data, options),
-        makeSummaryCsvLine('4', 'CM-handle search with Module filter', 'milliseconds', 'http_req_duration{scenario:cm_search_module}', data, options),
+        makeSummaryCsvLine('3', 'CM-handle ID search with Module and Property filter', 'milliseconds', 'id_search_duration', data, options),
+        makeSummaryCsvLine('4', 'CM-handle search with Module and Property filter', 'milliseconds', 'cm_search_duration', data, options),
         makeSummaryCsvLine('5a', 'NCMP overhead for Synchronous single CM-handle pass-through read', 'milliseconds', 'ncmp_overhead_passthrough_read', data, options),
         makeSummaryCsvLine('5b', 'NCMP overhead for Synchronous single CM-handle pass-through read with alternate id', 'milliseconds', 'ncmp_overhead_passthrough_read_alt_id', data, options),
-        makeSummaryCsvLine('6', 'NCMP overhead for Synchronous single CM-handle pass-through write', 'milliseconds', 'ncmp_overhead_passthrough_write', data, options),
+        makeSummaryCsvLine('6a', 'NCMP overhead for Synchronous single CM-handle pass-through write', 'milliseconds', 'ncmp_overhead_passthrough_write', data, options),
+        makeSummaryCsvLine('6b', 'NCMP overhead for Synchronous single CM-handle pass-through write with alternate id', 'milliseconds', 'ncmp_overhead_passthrough_write_alt_id', data, options),
         makeSummaryCsvLine('7', 'Data operations batch read', 'events/second', 'data_operations_batch_read_cmhandles_per_second', data, options),
     ];
     return summaryCsvLines.join('\n') + '\n';
diff --git a/k6-tests/ncmp/ncmp-kpi.js b/k6-tests/ncmp/ncmp-kpi.js
index f4a44dba68..1d084f21e3 100644
--- a/k6-tests/ncmp/ncmp-kpi.js
+++ b/k6-tests/ncmp/ncmp-kpi.js
@@ -19,23 +19,26 @@
  */
 
 import { check } from 'k6';
-import { Gauge, Trend } from 'k6/metrics';
+import { Trend } from 'k6/metrics';
 import { Reader } from 'k6/x/kafka';
 import {
     TOTAL_CM_HANDLES, READ_DATA_FOR_CM_HANDLE_DELAY_MS, WRITE_DATA_FOR_CM_HANDLE_DELAY_MS,
-    makeCustomSummaryReport, recordTimeInSeconds, makeBatchOfCmHandleIds, DATA_OPERATION_READ_BATCH_SIZE,
-    TOPIC_DATA_OPERATIONS_BATCH_READ, KAFKA_BOOTSTRAP_SERVERS
+    makeCustomSummaryReport, makeBatchOfCmHandleIds, DATA_OPERATION_READ_BATCH_SIZE,
+    TOPIC_DATA_OPERATIONS_BATCH_READ, KAFKA_BOOTSTRAP_SERVERS, REGISTRATION_BATCH_SIZE
 } from './common/utils.js';
-import { registerAllCmHandles, deregisterAllCmHandles } from './common/cmhandle-crud.js';
+import { createCmHandles, deleteCmHandles, waitForAllCmHandlesToBeReady } from './common/cmhandle-crud.js';
 import { executeCmHandleSearch, executeCmHandleIdSearch } from './common/search-base.js';
 import { passthroughRead, passthroughReadWithAltId, passthroughWrite, batchRead } from './common/passthrough-crud.js';
 
-let cmHandlesCreatedPerSecondGauge = new Gauge('cmhandles_created_per_second');
-let cmHandlesDeletedPerSecondGauge = new Gauge('cmhandles_deleted_per_second');
+let cmHandlesCreatedPerSecondTrend = new Trend('cmhandles_created_per_second', false);
+let cmHandlesDeletedPerSecondTrend = new Trend('cmhandles_deleted_per_second', false);
 let passthroughReadNcmpOverheadTrend = new Trend('ncmp_overhead_passthrough_read', true);
 let passthroughReadNcmpOverheadTrendWithAlternateId = new Trend('ncmp_overhead_passthrough_read_alt_id', true);
 let passthroughWriteNcmpOverheadTrend = new Trend('ncmp_overhead_passthrough_write', true);
-let dataOperationsBatchReadCmHandlePerSecondTrend = new Trend('data_operations_batch_read_cmhandles_per_second');
+let passthroughWriteNcmpOverheadTrendWithAlternateId = new Trend('ncmp_overhead_passthrough_write_alt_id', true);
+let idSearchDurationTrend = new Trend('id_search_duration', true);
+let cmSearchDurationTrend = new Trend('cm_search_duration', true);
+let dataOperationsBatchReadCmHandlePerSecondTrend = new Trend('data_operations_batch_read_cmhandles_per_second', false);
 
 const reader = new Reader({
     brokers: KAFKA_BOOTSTRAP_SERVERS,
@@ -45,37 +48,43 @@ const reader = new Reader({
 const DURATION = '15m';
 
 export const options = {
-    setupTimeout: '6m',
+    setupTimeout: '8m',
     teardownTimeout: '6m',
     scenarios: {
         passthrough_read: {
             executor: 'constant-vus',
-            exec: 'passthrough_read',
-            vus: 9,
+            exec: 'executePassthroughReadScenario',
+            vus: 4,
             duration: DURATION,
         },
         passthrough_read_alt_id: {
             executor: 'constant-vus',
-            exec: 'passthrough_read_alt_id',
-            vus: 1,
+            exec: 'executePassthroughReadAltIdScenario',
+            vus: 4,
             duration: DURATION,
         },
         passthrough_write: {
             executor: 'constant-vus',
-            exec: 'passthrough_write',
-            vus: 10,
+            exec: 'executePassthroughWriteScenario',
+            vus: 4,
             duration: DURATION,
         },
-        id_search_module: {
+        passthrough_write_alt_id: {
             executor: 'constant-vus',
-            exec: 'id_search_module',
-            vus: 3,
+            exec: 'executePassthroughWriteAltIdScenario',
+            vus: 4,
             duration: DURATION,
         },
-        cm_search_module: {
+        cm_handle_id_search: {
             executor: 'constant-vus',
-            exec: 'cm_search_module',
-            vus: 3,
+            exec: 'executeCmHandleIdSearchScenario',
+            vus: 5,
+            duration: DURATION,
+        },
+        cm_handle_search: {
+            executor: 'constant-vus',
+            exec: 'executeCmHandleSearchScenario',
+            vus: 5,
             duration: DURATION,
         },
         data_operation_send_async_http_request: {
@@ -96,69 +105,108 @@ export const options = {
         }
     },
     thresholds: {
-        'cmhandles_created_per_second': ['value >= 22'],
-        'cmhandles_deleted_per_second': ['value >= 22'],
-        'ncmp_overhead_passthrough_read': ['avg <= 100'],
-        'ncmp_overhead_passthrough_read_alt_id': ['avg <= 100'],
-        'ncmp_overhead_passthrough_write': ['avg <= 100'],
-        'http_req_duration{scenario:id_search_module}': ['avg <= 625'],
-        'http_req_duration{scenario:cm_search_module}': ['avg <= 13000'],
-        'http_req_failed{scenario:id_search_module}': ['rate == 0'],
-        'http_req_failed{scenario:cm_search_module}': ['rate == 0'],
-        'http_req_failed{scenario:passthrough_read}': ['rate == 0'],
-        'http_req_failed{scenario:passthrough_write}': ['rate == 0'],
-        'http_req_failed{scenario:data_operation_send_async_http_request}': ['rate == 0'],
-        'kafka_reader_error_count{scenario:data_operation_consume_kafka_responses}': ['count == 0'],
+        'http_req_failed': ['rate == 0'],
+        'cmhandles_created_per_second': ['avg >= 22'],
+        'cmhandles_deleted_per_second': ['avg >= 22'],
+        'ncmp_overhead_passthrough_read': ['avg <= 40'],
+        'ncmp_overhead_passthrough_write': ['avg <= 40'],
+        'ncmp_overhead_passthrough_read_alt_id': ['avg <= 40'],
+        'ncmp_overhead_passthrough_write_alt_id': ['avg <= 40'],
+        'id_search_duration': ['avg <= 2000'],
+        'cm_search_duration': ['avg <= 15000'],
         'data_operations_batch_read_cmhandles_per_second': ['avg >= 150'],
     },
 };
 
 export function setup() {
-    const totalRegistrationTimeInSeconds = recordTimeInSeconds(registerAllCmHandles);
-    cmHandlesCreatedPerSecondGauge.add(TOTAL_CM_HANDLES / totalRegistrationTimeInSeconds);
+    const startTimeInMillis = Date.now();
+
+    const TOTAL_BATCHES = Math.ceil(TOTAL_CM_HANDLES / REGISTRATION_BATCH_SIZE);
+    for (let batchNumber = 0; batchNumber < TOTAL_BATCHES; batchNumber++) {
+        const nextBatchOfCmHandleIds = makeBatchOfCmHandleIds(REGISTRATION_BATCH_SIZE, batchNumber);
+        const response = createCmHandles(nextBatchOfCmHandleIds);
+        check(response, { 'create CM-handles status equals 200': (r) => r.status === 200 });
+    }
+
+    waitForAllCmHandlesToBeReady();
+
+    const endTimeInMillis = Date.now();
+    const totalRegistrationTimeInSeconds = (endTimeInMillis - startTimeInMillis) / 1000.0;
+
+    cmHandlesCreatedPerSecondTrend.add(TOTAL_CM_HANDLES / totalRegistrationTimeInSeconds);
 }
 
 export function teardown() {
-    const totalDeregistrationTimeInSeconds = recordTimeInSeconds(deregisterAllCmHandles);
-    cmHandlesDeletedPerSecondGauge.add(TOTAL_CM_HANDLES / totalDeregistrationTimeInSeconds);
+    const startTimeInMillis = Date.now();
+
+    let DEREGISTERED_CM_HANDLES = 0
+    const TOTAL_BATCHES = Math.ceil(TOTAL_CM_HANDLES / REGISTRATION_BATCH_SIZE);
+    for (let batchNumber = 0; batchNumber < TOTAL_BATCHES; batchNumber++) {
+        const nextBatchOfCmHandleIds = makeBatchOfCmHandleIds(REGISTRATION_BATCH_SIZE, batchNumber);
+        const response = deleteCmHandles(nextBatchOfCmHandleIds);
+        if (response.error_code === 0) {
+            DEREGISTERED_CM_HANDLES += REGISTRATION_BATCH_SIZE
+        }
+        check(response, { 'delete CM-handles status equals 200': (r) => r.status === 200 });
+    }
+
+    const endTimeInMillis = Date.now();
+    const totalDeregistrationTimeInSeconds = (endTimeInMillis - startTimeInMillis) / 1000.0;
+
+    cmHandlesDeletedPerSecondTrend.add(DEREGISTERED_CM_HANDLES / totalDeregistrationTimeInSeconds);
+}
+
+export function executePassthroughReadScenario() {
+    const response = passthroughRead(false);
+    if (check(response, { 'passthrough read status equals 200': (r) => r.status === 200 })) {
+        const overhead = response.timings.duration - READ_DATA_FOR_CM_HANDLE_DELAY_MS;
+        passthroughReadNcmpOverheadTrend.add(overhead);
+    }
 }
 
-export function passthrough_read() {
-    const response = passthroughRead();
-    check(response, { 'passthrough read status equals 200': (r) => r.status === 200 });
-    const overhead = response.timings.duration - READ_DATA_FOR_CM_HANDLE_DELAY_MS;
-    passthroughReadNcmpOverheadTrend.add(overhead);
+export function executePassthroughReadAltIdScenario() {
+    const response = passthroughRead(true);
+    if (check(response, { 'passthrough read with alternate Id status equals 200': (r) => r.status === 200 })) {
+        const overhead = response.timings.duration - READ_DATA_FOR_CM_HANDLE_DELAY_MS;
+        passthroughReadNcmpOverheadTrendWithAlternateId.add(overhead);
+    }
 }
 
-export function passthrough_read_alt_id() {
-    const response = passthroughReadWithAltId();
-    check(response, { 'passthrough read with alternate Id status equals 200': (r) => r.status === 200 });
-    const overhead = response.timings.duration - READ_DATA_FOR_CM_HANDLE_DELAY_MS;
-    passthroughReadNcmpOverheadTrendWithAlternateId.add(overhead);
+export function executePassthroughWriteScenario() {
+    const response = passthroughWrite(false);
+    if (check(response, { 'passthrough write status equals 201': (r) => r.status === 201 })) {
+        const overhead = response.timings.duration - WRITE_DATA_FOR_CM_HANDLE_DELAY_MS;
+        passthroughWriteNcmpOverheadTrend.add(overhead);
+    }
 }
 
-export function passthrough_write() {
-    const response = passthroughWrite();
-    check(response, { 'passthrough write status equals 201': (r) => r.status === 201 });
-    const overhead = response.timings.duration - WRITE_DATA_FOR_CM_HANDLE_DELAY_MS;
-    passthroughWriteNcmpOverheadTrend.add(overhead);
+export function executePassthroughWriteAltIdScenario() {
+    const response = passthroughWrite(true);
+    if (check(response, { 'passthrough write with alternate Id status equals 201': (r) => r.status === 201 })) {
+        const overhead = response.timings.duration - WRITE_DATA_FOR_CM_HANDLE_DELAY_MS;
+        passthroughWriteNcmpOverheadTrendWithAlternateId.add(overhead);
+    }
 }
 
-export function id_search_module() {
-    const response = executeCmHandleIdSearch('module');
-    check(response, { 'module ID search status equals 200': (r) => r.status === 200 });
-    check(JSON.parse(response.body), { 'module ID search returned expected CM-handles': (arr) => arr.length === TOTAL_CM_HANDLES });
+export function executeCmHandleIdSearchScenario() {
+    const response = executeCmHandleIdSearch('module-and-properties');
+    if (check(response, { 'CM handle ID search status equals 200': (r) => r.status === 200 })
+        && check(response, { 'CM handle ID search returned expected CM-handles': (r) => r.json('#') === TOTAL_CM_HANDLES })) {
+        idSearchDurationTrend.add(response.timings.duration);
+    }
 }
 
-export function cm_search_module() {
-    const response = executeCmHandleSearch('module');
-    check(response, { 'module search status equals 200': (r) => r.status === 200 });
-    check(JSON.parse(response.body), { 'module search returned expected CM-handles': (arr) => arr.length === TOTAL_CM_HANDLES });
+export function executeCmHandleSearchScenario() {
+    const response = executeCmHandleSearch('module-and-properties');
+    if (check(response, { 'CM handle search status equals 200': (r) => r.status === 200 })
+        && check(response, { 'CM handle search returned expected CM-handles': (r) => r.json('#') === TOTAL_CM_HANDLES })) {
+        cmSearchDurationTrend.add(response.timings.duration);
+    }
 }
 
 export function data_operation_send_async_http_request() {
-    const nextBatchOfCmHandleIds = makeBatchOfCmHandleIds(DATA_OPERATION_READ_BATCH_SIZE,1);
-    const response = batchRead(nextBatchOfCmHandleIds)
+    const nextBatchOfCmHandleIds = makeBatchOfCmHandleIds(DATA_OPERATION_READ_BATCH_SIZE, 0);
+    const response = batchRead(nextBatchOfCmHandleIds);
     check(response, { 'data operation batch read status equals 200': (r) => r.status === 200 });
 }
diff --git a/k6-tests/ncmp/run-all-tests.sh b/k6-tests/ncmp/run-all-tests.sh
index 2db32ecd76..1fa661a472 100755
--- a/k6-tests/ncmp/run-all-tests.sh
+++ b/k6-tests/ncmp/run-all-tests.sh
@@ -19,7 +19,9 @@ pushd "$(dirname "$0")" >/dev/null || exit 1
 number_of_failures=0
 echo "Running K6 performance tests..."
-k6 --quiet run ncmp-kpi.js > summary.csv || ((number_of_failures++))
+
+# Redirecting stderr to /dev/null to prevent large log files
+k6 --quiet run ncmp-kpi.js > summary.csv 2>/dev/null || ((number_of_failures++))
 
 if [ -f summary.csv ]; then
diff --git a/k6-tests/run-k6-tests.sh b/k6-tests/run-k6-tests.sh
index 9b8747b1ff..b1ad38911a 100755
--- a/k6-tests/run-k6-tests.sh
+++ b/k6-tests/run-k6-tests.sh
@@ -31,6 +31,10 @@ trap on_exit EXIT
 
 pushd "$(dirname "$0")" || exit 1
 
+# Install needed dependencies.
+source install-deps.sh
+
+# Run k6 test suite.
 ./setup.sh
 ./ncmp/run-all-tests.sh
 NCMP_RESULT=$?
diff --git a/k6-tests/teardown.sh b/k6-tests/teardown.sh
index 1b4d721a23..7693dc03a4 100755
--- a/k6-tests/teardown.sh
+++ b/k6-tests/teardown.sh
@@ -19,4 +19,10 @@ echo '================================== docker info =========================='
 docker ps -a
 
 echo 'Stopping, Removing containers and volumes...'
-docker-compose -f ../docker-compose/docker-compose.yml --profile dmi-stub down --volumes
+docker_compose_cmd="docker-compose -f ../docker-compose/docker-compose.yml --profile dmi-stub down --volumes"
+# Set an environment variable CLEAN_DOCKER_IMAGES=1 to also remove docker images when done (used on jenkins job)
+if [ "${CLEAN_DOCKER_IMAGES:-0}" -eq 1 ]; then
+    $docker_compose_cmd --rmi all
+else
+    $docker_compose_cmd
+fi