-rw-r--r-- | cps-dependencies/pom.xml | 4
-rw-r--r-- | cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/NetworkCmProxyInventoryFacade.java | 28
-rw-r--r-- | integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/YangModulesSpec.groovy | 101
-rw-r--r-- | k6-tests/README.md | 6
-rw-r--r-- | k6-tests/ncmp/common/utils.js | 12
-rw-r--r-- | k6-tests/ncmp/config/endurance.json | 13
-rw-r--r-- | k6-tests/ncmp/config/kpi.json | 14
-rw-r--r-- | k6-tests/ncmp/ncmp-kpi.js | 11
-rwxr-xr-x | k6-tests/ncmp/run-all-tests.sh | 31
-rwxr-xr-x | k6-tests/run-k6-tests.sh | 13
-rwxr-xr-x | k6-tests/setup.sh | 14
-rwxr-xr-x | k6-tests/teardown.sh | 10
12 files changed, 210 insertions, 47 deletions
diff --git a/cps-dependencies/pom.xml b/cps-dependencies/pom.xml
index ad1828ec5c..6c034c2d7a 100644
--- a/cps-dependencies/pom.xml
+++ b/cps-dependencies/pom.xml
@@ -219,7 +219,7 @@
             <dependency>
                 <groupId>org.apache.commons</groupId>
                 <artifactId>commons-lang3</artifactId>
-                <version>3.11</version>
+                <version>3.17.0</version>
             </dependency>
             <dependency>
                 <groupId>org.apache.maven.plugins</groupId>
@@ -249,7 +249,7 @@
             <dependency>
                 <groupId>org.liquibase</groupId>
                 <artifactId>liquibase-core</artifactId>
-                <version>4.29.0</version>
+                <version>4.30.0</version>
             </dependency>
             <dependency>
                 <groupId>org.mapstruct</groupId>
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/NetworkCmProxyInventoryFacade.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/NetworkCmProxyInventoryFacade.java
index f0547d3d24..ec440f4905 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/NetworkCmProxyInventoryFacade.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/NetworkCmProxyInventoryFacade.java
@@ -27,8 +27,10 @@ package org.onap.cps.ncmp.api.inventory;
 import static org.onap.cps.ncmp.impl.inventory.CmHandleQueryParametersValidator.validateCmHandleQueryParameters;
 
 import java.util.Collection;
+import java.util.Collections;
 import java.util.Map;
 import lombok.RequiredArgsConstructor;
+import org.onap.cps.ncmp.api.exceptions.CmHandleNotFoundException;
 import org.onap.cps.ncmp.api.inventory.models.CmHandleQueryApiParameters;
 import org.onap.cps.ncmp.api.inventory.models.CmHandleQueryServiceParameters;
 import org.onap.cps.ncmp.api.inventory.models.CompositeState;
@@ -111,8 +113,12 @@ public class NetworkCmProxyInventoryFacade {
      * @return a collection of modules names and revisions
      */
     public Collection<ModuleReference> getYangResourcesModuleReferences(final String cmHandleReference) {
-        final String cmHandleId = alternateIdMatcher.getCmHandleId(cmHandleReference);
-        return inventoryPersistence.getYangResourcesModuleReferences(cmHandleId);
+        try {
+            final String cmHandleId = alternateIdMatcher.getCmHandleId(cmHandleReference);
+            return inventoryPersistence.getYangResourcesModuleReferences(cmHandleId);
+        } catch (final CmHandleNotFoundException cmHandleNotFoundException) {
+            return Collections.emptyList();
+        }
     }
 
     /**
@@ -122,8 +128,12 @@ public class NetworkCmProxyInventoryFacade {
      * @return a collection of module definition (moduleName, revision and yang resource content)
     */
     public Collection<ModuleDefinition> getModuleDefinitionsByCmHandleReference(final String cmHandleReference) {
-        final String cmHandleId = alternateIdMatcher.getCmHandleId(cmHandleReference);
-        return inventoryPersistence.getModuleDefinitionsByCmHandleId(cmHandleId);
+        try {
+            final String cmHandleId = alternateIdMatcher.getCmHandleId(cmHandleReference);
+            return inventoryPersistence.getModuleDefinitionsByCmHandleId(cmHandleId);
+        } catch (final CmHandleNotFoundException cmHandleNotFoundException) {
+            return Collections.emptyList();
+        }
     }
 
     /**
@@ -137,8 +147,12 @@ public class NetworkCmProxyInventoryFacade {
     public Collection<ModuleDefinition> getModuleDefinitionsByCmHandleAndModule(final String cmHandleReference,
                                                                                 final String moduleName,
                                                                                 final String moduleRevision) {
-        final String cmHandleId = alternateIdMatcher.getCmHandleId(cmHandleReference);
-        return inventoryPersistence.getModuleDefinitionsByCmHandleAndModule(cmHandleId, moduleName, moduleRevision);
+        try {
+            final String cmHandleId = alternateIdMatcher.getCmHandleId(cmHandleReference);
+            return inventoryPersistence.getModuleDefinitionsByCmHandleAndModule(cmHandleId, moduleName, moduleRevision);
+        } catch (final CmHandleNotFoundException cmHandleNotFoundException) {
+            return Collections.emptyList();
+        }
     }
 
     /**
@@ -227,4 +241,4 @@ public class NetworkCmProxyInventoryFacade {
                 .getEffectiveTrustLevel(ncmpServiceCmHandle.getCmHandleId()));
     }
 
-}
\ No newline at end of file
+}
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/YangModulesSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/YangModulesSpec.groovy
new file mode 100644
index 0000000000..4492e3d183
--- /dev/null
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/YangModulesSpec.groovy
@@ -0,0 +1,101 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2024 Nordix Foundation
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the 'License');
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an 'AS IS' BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.integration.functional.ncmp
+
+import org.onap.cps.integration.base.CpsIntegrationSpecBase
+
+import static org.hamcrest.Matchers.containsInAnyOrder
+import static org.hamcrest.Matchers.emptyString
+import static org.hamcrest.Matchers.everyItem
+import static org.hamcrest.Matchers.equalTo
+import static org.hamcrest.Matchers.hasSize
+import static org.hamcrest.Matchers.is
+import static org.hamcrest.Matchers.not
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status
+
+class YangModulesSpec extends CpsIntegrationSpecBase {
+
+    def setup() {
+        dmiDispatcher1.moduleNamesPerCmHandleId['ch-1'] = ['M1', 'M2']
+        dmiDispatcher1.moduleNamesPerCmHandleId['ch-2'] = ['M1', 'M3']
+        registerCmHandle(DMI1_URL, 'ch-1', NO_MODULE_SET_TAG, 'alt-1')
+        registerCmHandle(DMI1_URL, 'ch-2', NO_MODULE_SET_TAG, 'alt-2')
+        // Note DMI dispatcher is not configured to return modules for this handle, so module sync will fail
+        registerCmHandleWithoutWaitForReady(DMI1_URL, 'not-ready-id', NO_MODULE_SET_TAG, NO_ALTERNATE_ID)
+    }
+
+    def cleanup() {
+        deregisterCmHandles(DMI1_URL, ['ch-1', 'ch-2', 'not-ready-id'])
+    }
+
+    def 'Get yang module references returns expected modules with #scenario.'() {
+        expect: 'get module references API to return expected modules'
+            mvc.perform(get("/ncmp/v1/ch/${cmHandleReference}/modules"))
+                .andExpect(status().is2xxSuccessful())
+                .andExpect(jsonPath('$', hasSize(expectedModuleNames.size())))
+                .andExpect(jsonPath('$[*].moduleName', containsInAnyOrder(expectedModuleNames.toArray())))
+                .andExpect(jsonPath('$[*].revision', everyItem(equalTo('2024-01-01'))))
+        where: 'following scenarios are applied'
+            scenario                 | cmHandleReference || expectedModuleNames
+            'cm-handle id'           | 'ch-1'            || ['M1', 'M2']
+            'alternate id'           | 'alt-2'           || ['M1', 'M3']
+            'not ready CM handle'    | 'not-ready-id'    || []
+            'non-existing CM handle' | 'non-existing'    || []
+    }
+
+    def 'Get yang module definitions returns expected modules with #scenario.'() {
+        expect: 'get module definitions API to return expected module definitions'
+            mvc.perform(get("/ncmp/v1/ch/${cmHandleReference}/modules/definitions"))
+                .andExpect(status().is2xxSuccessful())
+                .andExpect(jsonPath('$', hasSize(expectedModuleNames.size())))
+                .andExpect(jsonPath('$[*].moduleName', containsInAnyOrder(expectedModuleNames.toArray())))
+                .andExpect(jsonPath('$[*].revision', everyItem(equalTo('2024-01-01'))))
+                .andExpect(jsonPath('$[*].content', not(is(emptyString()))))
+        where: 'following scenarios are applied'
+            scenario                 | cmHandleReference || expectedModuleNames
+            'cm-handle id'           | 'ch-1'            || ['M1', 'M2']
+            'alternate id'           | 'alt-2'           || ['M1', 'M3']
+            'not ready CM handle'    | 'not-ready-id'    || []
+            'non-existing CM handle' | 'non-existing'    || []
+    }
+
+    def 'Get yang module definition for specific module with #scenario.'() {
+        expect: 'get module definition API to return definition of requested module name and revision'
+            mvc.perform(get("/ncmp/v1/ch/${cmHandleReference}/modules/definitions")
+                .queryParam('module-name', requestedModuleName)
+                .queryParam('revision', '2024-01-01'))
+                .andExpect(status().is2xxSuccessful())
+                .andExpect(jsonPath('$', hasSize(expectedModuleNames.size())))
+                .andExpect(jsonPath('$[*].moduleName', containsInAnyOrder(expectedModuleNames.toArray())))
+                .andExpect(jsonPath('$[*].revision', everyItem(equalTo('2024-01-01'))))
+                .andExpect(jsonPath('$[*].content', not(is(emptyString()))))
+        where: 'following scenarios are applied'
+            scenario                 | cmHandleReference | requestedModuleName || expectedModuleNames
+            'cm-handle id'           | 'ch-1'            | 'M1'                || ['M1']
+            'alternate id'           | 'alt-2'           | 'M1'                || ['M1']
+            'non-existing module'    | 'ch-1'            | 'non-existing'      || []
+            'not ready CM handle'    | 'not-ready-id'    | 'not-relevant'      || []
+            'non-existing CM handle' | 'non-existing'    | 'not-relevant'      || []
+    }
+
+}
diff --git a/k6-tests/README.md b/k6-tests/README.md
index 9a385e100a..f74c9d4f7b 100644
--- a/k6-tests/README.md
+++ b/k6-tests/README.md
@@ -7,9 +7,11 @@ k6 tests are written in JavaScript.
 Follow the instructions in the [build from source guide](https://github.com/mostafa/xk6-kafka) to get started.
 
 ## Running the k6 test suites
-Simply run the main script. (The script assumes k6 and docker-compose have been installed).
+These tests measure the system capabilities as per requirements.
+There are two test profiles can be run with either: kpi or endurance.
+Simply run the main script. (The script assumes k6 and the relevant docker-compose have been installed).
 ```shell
-./run-k6-tests.sh
+./run-k6-tests.sh kpi
 ```
 
 ## Running k6 tests manually
diff --git a/k6-tests/ncmp/common/utils.js b/k6-tests/ncmp/common/utils.js
index a2467edf6f..45f6e96050 100644
--- a/k6-tests/ncmp/common/utils.js
+++ b/k6-tests/ncmp/common/utils.js
@@ -19,8 +19,14 @@
  */
 
 import http from 'k6/http';
-export const NCMP_BASE_URL = 'http://localhost:8883';
-export const DMI_PLUGIN_URL = 'http://ncmp-dmi-plugin-demo-and-csit-stub:8092';
+
+const testConfig = JSON.parse(open(`../config/${__ENV.TEST_PROFILE}.json`));
+export const KAFKA_BOOTSTRAP_SERVERS = testConfig.hosts.kafkaBootstrapServer;
+export const LEGACY_BATCH_TOPIC_NAME = testConfig.kafka.legacyBatchTopic;
+export const DURATION = testConfig.timingConfig.testDuration;
+export const LEGACY_BATCH_THROUGHPUT_TEST_START_TIME = testConfig.timingConfig.legacyBatchThroughputTestStartTime;
+export const NCMP_BASE_URL = testConfig.hosts.ncmpBaseUrl;
+export const DMI_PLUGIN_URL = testConfig.hosts.dmiStubUrl;
 export const TOTAL_CM_HANDLES = 20000;
 export const REGISTRATION_BATCH_SIZE = 100;
 export const READ_DATA_FOR_CM_HANDLE_DELAY_MS = 300; // must have same value as in docker-compose.yml
@@ -28,8 +34,6 @@ export const WRITE_DATA_FOR_CM_HANDLE_DELAY_MS = 670; // must have same value as
 export const CONTENT_TYPE_JSON_PARAM = {'Content-Type': 'application/json'};
 export const LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE = 200;
 export const LEGACY_BATCH_THROUGHPUT_TEST_NUMBER_OF_REQUESTS = 100;
-export const LEGACY_BATCH_TOPIC_NAME = 'legacy_batch_topic';
-export const KAFKA_BOOTSTRAP_SERVERS = ['localhost:9092'];
 export const MODULE_SET_TAGS = ['tagA', 'tagB', 'tagC', 'tagD', 'tagE'];
 
diff --git a/k6-tests/ncmp/config/endurance.json b/k6-tests/ncmp/config/endurance.json
new file mode 100644
index 0000000000..c9def6ce39
--- /dev/null
+++ b/k6-tests/ncmp/config/endurance.json
@@ -0,0 +1,13 @@
+{
+  "hosts": {
+    "ncmpBaseUrl": "http://localhost:8884",
+    "dmiStubUrl": "http://ncmp-dmi-plugin-demo-and-csit-stub:8092",
+    "kafkaBootstrapServer": "localhost:9093"
+  },
+  "timingConfig": {
+    "testDuration": "2h"
+  },
+  "kafka": {
+    "legacyBatchTopic": "legacy_batch_topic"
+  }
+}
diff --git a/k6-tests/ncmp/config/kpi.json b/k6-tests/ncmp/config/kpi.json
new file mode 100644
index 0000000000..ad79f92d4d
--- /dev/null
+++ b/k6-tests/ncmp/config/kpi.json
@@ -0,0 +1,14 @@
+{
+  "hosts": {
+    "ncmpBaseUrl": "http://localhost:8883",
+    "dmiStubUrl": "http://ncmp-dmi-plugin-demo-and-csit-stub:8092",
+    "kafkaBootstrapServer": "localhost:9092"
+  },
+  "timingConfig": {
+    "testDuration": "15m",
+    "legacyBatchThroughputTestStartTime": "15m30s"
+  },
+  "kafka": {
+    "legacyBatchTopic": "legacy_batch_topic"
+  }
+}
\ No newline at end of file
diff --git a/k6-tests/ncmp/ncmp-kpi.js b/k6-tests/ncmp/ncmp-kpi.js
index e46c547c30..20fb1e86a9 100644
--- a/k6-tests/ncmp/ncmp-kpi.js
+++ b/k6-tests/ncmp/ncmp-kpi.js
@@ -24,8 +24,8 @@ import { Reader } from 'k6/x/kafka';
 import {
     TOTAL_CM_HANDLES, READ_DATA_FOR_CM_HANDLE_DELAY_MS, WRITE_DATA_FOR_CM_HANDLE_DELAY_MS,
     makeCustomSummaryReport, makeBatchOfCmHandleIds, LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE,
-    LEGACY_BATCH_TOPIC_NAME, KAFKA_BOOTSTRAP_SERVERS, REGISTRATION_BATCH_SIZE,
-    LEGACY_BATCH_THROUGHPUT_TEST_NUMBER_OF_REQUESTS
+    REGISTRATION_BATCH_SIZE, LEGACY_BATCH_THROUGHPUT_TEST_NUMBER_OF_REQUESTS, DURATION,
+    LEGACY_BATCH_THROUGHPUT_TEST_START_TIME, KAFKA_BOOTSTRAP_SERVERS, LEGACY_BATCH_TOPIC_NAME
 } from './common/utils.js';
 import { createCmHandles, deleteCmHandles, waitForAllCmHandlesToBeReady } from './common/cmhandle-crud.js';
 import { executeCmHandleSearch, executeCmHandleIdSearch } from './common/search-base.js';
@@ -49,14 +49,11 @@ let cmSearchCpsPathDurationTrend = new Trend('cm_search_cpspath_duration', true)
 let cmSearchTrustLevelDurationTrend = new Trend('cm_search_trustlevel_duration', true);
 let legacyBatchReadCmHandlesPerSecondTrend = new Trend('legacy_batch_read_cmhandles_per_second', false);
 
-const legacyBatchEventReader = new Reader({
-    brokers: KAFKA_BOOTSTRAP_SERVERS,
+export const legacyBatchEventReader = new Reader({
+    brokers: [KAFKA_BOOTSTRAP_SERVERS],
     topic: LEGACY_BATCH_TOPIC_NAME,
 });
 
-const DURATION = '15m';
-const LEGACY_BATCH_THROUGHPUT_TEST_START_TIME = '15m30s';
-
 export const options = {
     setupTimeout: '20m',
     teardownTimeout: '20m',
diff --git a/k6-tests/ncmp/run-all-tests.sh b/k6-tests/ncmp/run-all-tests.sh
index 1fa661a472..3457903c01 100755
--- a/k6-tests/ncmp/run-all-tests.sh
+++ b/k6-tests/ncmp/run-all-tests.sh
@@ -18,31 +18,36 @@ pushd "$(dirname "$0")" >/dev/null || exit 1
 
 number_of_failures=0
-echo "Running K6 performance tests..."
+testProfile=$1
+summaryFile="${testProfile}Summary.csv"
 
-# Redirecting stderr to /dev/null to prevent large log files
-k6 --quiet run ncmp-kpi.js > summary.csv 2>/dev/null || ((number_of_failures++))
+echo "Running $testProfile performance tests..."
+k6 run ncmp-kpi.js --quiet -e TEST_PROFILE="$testProfile" > "$summaryFile" 2>/dev/null || ((number_of_failures++))
 
-if [ -f summary.csv ]; then
+if [ -f "$summaryFile" ]; then
     # Output raw CSV for plotting job
-    echo '-- BEGIN CSV REPORT'
-    cat summary.csv
-    echo '-- END CSV REPORT'
+    echo "-- BEGIN CSV REPORT"
+    cat "$summaryFile"
+    echo "-- END CSV REPORT"
     echo
 
     # Output human-readable report
-    echo '####################################################################################################'
-    echo '##             K 6   P E R F O R M A N C E   T E S T   R E S U L T S                             ##'
-    echo '####################################################################################################'
-    column -t -s, summary.csv
+    echo "####################################################################################################"
+    if [ "$testProfile" = "kpi" ]; then
+      echo "##             K 6   K P I   P E R F O R M A N C E   T E S T   R E S U L T S                    ##"
+    else
+      echo "##             K 6   E N D U R A N C E   P E R F O R M A N C E   T E S T   R E S U L T S        ##"
+    fi
+    echo "####################################################################################################"
+    column -t -s, "$summaryFile"
     echo
 
     # Clean up
-    rm -f summary.csv
+    rm -f "$summaryFile"
 else
-    echo "Error: Failed to generate summary.csv" >&2
+    echo "Error: Failed to generate $summaryFile" >&2
     ((number_of_failures++))
 fi
diff --git a/k6-tests/run-k6-tests.sh b/k6-tests/run-k6-tests.sh
index b1ad38911a..8c4048ba8c 100755
--- a/k6-tests/run-k6-tests.sh
+++ b/k6-tests/run-k6-tests.sh
@@ -20,9 +20,12 @@ set -o nounset   # Disallow expansion of unset variables
 set -o pipefail  # Use last non-zero exit code in a pipeline
 #set -o xtrace   # Uncomment for debugging
 
+# default is empty string, which means performance tests
+testProfile=${1:-kpi}
+
 on_exit() {
     rc=$?
-    ./teardown.sh
+    ./teardown.sh "$testProfile"
     popd
     echo "TEST FAILURES: $rc"
     exit $rc
 }
@@ -34,10 +37,12 @@ pushd "$(dirname "$0")" || exit 1
 # Install needed dependencies.
 source install-deps.sh
 
+echo "Test profile provided: $testProfile"
+
 # Run k6 test suite.
-./setup.sh
-./ncmp/run-all-tests.sh
+./setup.sh "$testProfile"
+./ncmp/run-all-tests.sh "$testProfile"
 NCMP_RESULT=$?
 
 # Note that the final steps are done in on_exit function after this exit!
-exit $NCMP_RESULT
+exit $NCMP_RESULT
\ No newline at end of file
diff --git a/k6-tests/setup.sh b/k6-tests/setup.sh
index a4508e180d..c794c64dd0 100755
--- a/k6-tests/setup.sh
+++ b/k6-tests/setup.sh
@@ -15,14 +15,20 @@
 #  limitations under the License.
 #
 
-docker-compose -f ../docker-compose/docker-compose.yml --profile dmi-stub up --quiet-pull -d
+testProfile=$1
+echo "Spinning off the CPS and NCMP containers for $testProfile testing..."
+
+if [[ "$testProfile" == "endurance" ]]; then
+  docker-compose -f ../docker-compose/docker-compose.yml --profile dmi-stub --project-name "$testProfile" --env-file ../docker-compose/config/endurance.env up --quiet-pull -d
+  CONTAINER_IDS=$(docker ps --filter "name=endurance-cps-and-ncmp" --format "{{.ID}}")
+else
+  docker-compose -f ../docker-compose/docker-compose.yml --profile dmi-stub --project-name "$testProfile" up --quiet-pull -d
+  CONTAINER_IDS=$(docker ps --filter "name=kpi-cps-and-ncmp" --format "{{.ID}}")
+fi
 
 echo "Waiting for CPS to start..."
 READY_MESSAGE="Inventory Model updated successfully"
 
-# Get the container IDs of the cps-and-ncmp replicas
-CONTAINER_IDS=$(docker ps --filter "name=cps-and-ncmp" --format "{{.ID}}")
-
 # Check the logs for each container
 for CONTAINER_ID in $CONTAINER_IDS; do
     echo "Checking logs for container: $CONTAINER_ID"
diff --git a/k6-tests/teardown.sh b/k6-tests/teardown.sh
index 7693dc03a4..c3233919dc 100755
--- a/k6-tests/teardown.sh
+++ b/k6-tests/teardown.sh
@@ -18,11 +18,13 @@
 echo '================================== docker info ==========================='
 docker ps -a
 
-echo 'Stopping, Removing containers and volumes...'
-docker_compose_cmd="docker-compose -f ../docker-compose/docker-compose.yml --profile dmi-stub down --volumes"
+testProfile=$1
+docker_compose_shutdown_cmd="docker-compose -f ../docker-compose/docker-compose.yml --profile dmi-stub --project-name $testProfile down --volumes"
+
 # Set an environment variable CLEAN_DOCKER_IMAGES=1 to also remove docker images when done (used on jenkins job)
+echo "Stopping, Removing containers and volumes for $testProfile tests..."
 if [ "${CLEAN_DOCKER_IMAGES:-0}" -eq 1 ]; then
-    $docker_compose_cmd --rmi all
+    $docker_compose_shutdown_cmd --rmi all
 else
-    $docker_compose_cmd
+    $docker_compose_shutdown_cmd
 fi
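For reference, a minimal usage sketch of the test-profile flow introduced by these changes, based only on the scripts and configs shown in the diff above (commands run from the k6-tests directory unless noted; the endurance ports 8884/9093 come from ncmp/config/endurance.json):

```shell
# Run the full KPI suite: run-k6-tests.sh defaults the profile to 'kpi', setup.sh starts
# the docker-compose project named after the profile, and run-all-tests.sh forwards it to
# k6 via -e TEST_PROFILE so utils.js can load ncmp/config/kpi.json.
./run-k6-tests.sh kpi

# Run the long-running endurance suite (2h test duration, endurance project with its own env file).
./run-k6-tests.sh endurance

# Run a single scenario manually with an explicit profile, mirroring run-all-tests.sh
# (executed from the k6-tests/ncmp directory so the relative config path resolves).
k6 run ncmp-kpi.js -e TEST_PROFILE=kpi
```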