379 files changed, 16983 insertions, 8190 deletions
diff --git a/.gitignore b/.gitignore
index a721cb4489..f7e2bf7aa8 100755
--- a/.gitignore
+++ b/.gitignore
@@ -8,6 +8,9 @@ cps-ncmp-rest-stub/dependency-reduced-pom.xml
 cps-application/archunit_store
 cps-ri/src/main/resources/changelog/db/changes/data/dmi/generated-csv/generated_yang_resource_*
+checkstyle/src/main/__pycache__/
+docs/venv/
+docs/__pycache__/
 target/
 log/
diff --git a/checkstyle/pom.xml b/checkstyle/pom.xml
index d6fbcd98d5..b54f2290c4 100644
--- a/checkstyle/pom.xml
+++ b/checkstyle/pom.xml
@@ -2,7 +2,7 @@
 <!--
   ============LICENSE_START=======================================================
   Copyright (C) 2020 Pantheon.tech
-  Modifications Copyright (C) 2023 Nordix Foundation
+  Modifications Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved.
   ================================================================================
   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
@@ -26,7 +26,7 @@
     <modelVersion>4.0.0</modelVersion>
     <groupId>org.onap.cps</groupId>
     <artifactId>checkstyle</artifactId>
-    <version>3.6.0-SNAPSHOT</version>
+    <version>3.6.3-SNAPSHOT</version>

     <profiles>
         <profile>
@@ -53,13 +53,6 @@
         </profile>
     </profiles>

-    <properties>
-        <onap.nexus.url>https://nexus.onap.org</onap.nexus.url>
-        <releaseNexusPath>/content/repositories/releases/</releaseNexusPath>
-        <sonar.skip>true</sonar.skip>
-        <snapshotNexusPath>/content/repositories/snapshots/</snapshotNexusPath>
-    </properties>
-
     <build>
         <pluginManagement>
             <plugins>
@@ -101,17 +94,4 @@
             </plugin>
         </plugins>
     </build>
-
-    <distributionManagement>
-        <repository>
-            <id>ecomp-releases</id>
-            <name>ECOMP Release Repository</name>
-            <url>${onap.nexus.url}${releaseNexusPath}</url>
-        </repository>
-        <snapshotRepository>
-            <id>ecomp-snapshots</id>
-            <name>ECOMP Snapshot Repository</name>
-            <url>${onap.nexus.url}${snapshotNexusPath}</url>
-        </snapshotRepository>
-    </distributionManagement>
 </project>
\ No newline at end of file
diff --git a/checkstyle/src/main/resources/project-committers-config.csv b/checkstyle/src/main/resources/project-committers-config.csv
index 85ee43bdab..134975f4d5 100644
--- a/checkstyle/src/main/resources/project-committers-config.csv
+++ b/checkstyle/src/main/resources/project-committers-config.csv
@@ -1,3 +1,3 @@
 email,signature
-@est.tech,Nordix Foundation
+@est.tech,OpenInfra Foundation Europe
 @bell.ca,Bell Canada
\ No newline at end of file
diff --git a/cps-application/pom.xml b/cps-application/pom.xml
index 5ac2202e85..76a2da95d3 100644
--- a/cps-application/pom.xml
+++ b/cps-application/pom.xml
@@ -3,7 +3,7 @@
   ============LICENSE_START=======================================================
   Copyright (c) 2021 Pantheon.tech.
   Modifications Copyright (C) 2021 Bell Canada.
-  Modifications Copyright (C) 2021-2024 Nordix Foundation
+  Modifications Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved.
   Modifications Copyright (C) 2022 Deutsche Telekom AG
   ================================================================================
   Licensed under the Apache License, Version 2.0 (the "License");
@@ -28,7 +28,7 @@
     <parent>
         <groupId>org.onap.cps</groupId>
         <artifactId>cps-parent</artifactId>
-        <version>3.6.0-SNAPSHOT</version>
+        <version>3.6.3-SNAPSHOT</version>
         <relativePath>../cps-parent/pom.xml</relativePath>
     </parent>

@@ -37,7 +37,6 @@
     <properties>
        <app>org.onap.cps.Application</app>
        <maven.build.timestamp.format>yyyyMMdd'T'HHmmss'Z'</maven.build.timestamp.format>
-       <minimum-coverage>0.68</minimum-coverage>
        <base.image>${docker.pull.registry}/onap/integration-java17:12.0.0</base.image>
        <image.tag>${project.version}-${maven.build.timestamp}</image.tag>
     </properties>
@@ -78,6 +77,10 @@
             <artifactId>micrometer-tracing-bridge-otel</artifactId>
         </dependency>
         <dependency>
+            <groupId>io.github.mweirauch</groupId>
+            <artifactId>micrometer-jvm-extras</artifactId>
+        </dependency>
+        <dependency>
             <groupId>com.fasterxml.jackson.dataformat</groupId>
             <artifactId>jackson-dataformat-xml</artifactId>
         </dependency>
diff --git a/cps-application/src/main/java/org/onap/cps/Application.java b/cps-application/src/main/java/org/onap/cps/Application.java
index 053139fcc8..d1c99bcc31 100644
--- a/cps-application/src/main/java/org/onap/cps/Application.java
+++ b/cps-application/src/main/java/org/onap/cps/Application.java
@@ -1,6 +1,6 @@
 /*
* ============LICENSE_START=======================================================
- * Copyright (C) 2020 Nordix Foundation.
+ * Copyright (C) 2020-2025 OpenInfra Foundation Europe. All rights reserved.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,14 +20,29 @@ package org.onap.cps;
+import lombok.extern.slf4j.Slf4j;
+import org.onap.cps.startup.InstanceStartupDelayManager;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
-import org.springframework.retry.annotation.EnableRetry;
-@EnableRetry
@SpringBootApplication
+@Slf4j
public class Application {
+
+ static InstanceStartupDelayManager instanceStartupDelayManager = new InstanceStartupDelayManager();
+
+ /**
+ * The main method which serves as the entry point to the Spring Boot application.
+ * It first applies a hostname-based startup delay to avoid potential race conditions
+ * during schema migration in distributed environments. After applying the delay,
+ * it initializes the Spring Application context and logs the application startup status.
+ *
+ * @param args Command-line arguments passed to the application (not used in this implementation).
+ */
public static void main(final String[] args) {
+ instanceStartupDelayManager.applyHostnameBasedStartupDelay();
+ log.info("Initializing Spring Application context...");
SpringApplication.run(Application.class, args);
+ log.info("🚀 APPLICATION STARTED");
}
}
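The Javadoc above describes the new hostname-based startup delay. The sketch below illustrates that calculation in isolation; the real logic is added in InstanceStartupDelayManager further down in this diff, and the class name DelayDemo here is illustrative only, not part of the change set.

import java.net.InetAddress;
import java.util.concurrent.TimeUnit;

public class DelayDemo {
    public static void main(final String[] args) throws Exception {
        // Each instance derives a delay of under 5000 ms from its own hostname, so replicas that
        // start simultaneously are unlikely to run the Liquibase schema migration at the same moment.
        final String hostname = InetAddress.getLocalHost().getHostName();
        final long startupDelayInMillis = Math.abs(hostname.hashCode() % 5_000L);
        System.out.println("Hostname: " + hostname + " -> startup delay: " + startupDelayInMillis + " ms");
        TimeUnit.MILLISECONDS.sleep(startupDelayInMillis);
    }
}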
diff --git a/cps-application/src/main/java/org/onap/cps/config/MicroMeterConfig.java b/cps-application/src/main/java/org/onap/cps/config/MicroMeterConfig.java index de981164f5..6bf3f87d17 100644 --- a/cps-application/src/main/java/org/onap/cps/config/MicroMeterConfig.java +++ b/cps-application/src/main/java/org/onap/cps/config/MicroMeterConfig.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2025 Nordix Foundation. + * Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,11 +20,13 @@ package org.onap.cps.config; -import com.hazelcast.map.IMap; +import io.github.mweirauch.micrometer.jvm.extras.ProcessMemoryMetrics; +import io.github.mweirauch.micrometer.jvm.extras.ProcessThreadMetrics; import io.micrometer.core.aop.TimedAspect; -import io.micrometer.core.instrument.Gauge; import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.binder.MeterBinder; import lombok.RequiredArgsConstructor; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -32,73 +34,21 @@ import org.springframework.context.annotation.Configuration; @RequiredArgsConstructor public class MicroMeterConfig { - private static final String STATE_TAG = "state"; - private static final String CM_HANDLE_STATE_GAUGE = "cmHandlesByState"; - final IMap<String, Integer> cmHandlesByState; - @Bean public TimedAspect timedAspect(final MeterRegistry meterRegistry) { return new TimedAspect(meterRegistry); } - /** - * Register gauge metric for cm handles with state 'advised'. - * - * @param meterRegistry meter registry - * @return cm handle state gauge - */ - @Bean - public Gauge advisedCmHandles(final MeterRegistry meterRegistry) { - return Gauge.builder(CM_HANDLE_STATE_GAUGE, cmHandlesByState, - value -> cmHandlesByState.get("advisedCmHandlesCount")) - .tag(STATE_TAG, "ADVISED") - .description("Current number of cmhandles in advised state") - .register(meterRegistry); - } - - /** - * Register gauge metric for cm handles with state 'ready'. - * - * @param meterRegistry meter registry - * @return cm handle state gauge - */ - @Bean - public Gauge readyCmHandles(final MeterRegistry meterRegistry) { - return Gauge.builder(CM_HANDLE_STATE_GAUGE, cmHandlesByState, - value -> cmHandlesByState.get("readyCmHandlesCount")) - .tag(STATE_TAG, "READY") - .description("Current number of cmhandles in ready state") - .register(meterRegistry); - } - - /** - * Register gauge metric for cm handles with state 'locked'. - * - * @param meterRegistry meter registry - * @return cm handle state gauge - */ @Bean - public Gauge lockedCmHandles(final MeterRegistry meterRegistry) { - return Gauge.builder(CM_HANDLE_STATE_GAUGE, cmHandlesByState, - value -> cmHandlesByState.get("lockedCmHandlesCount")) - .tag(STATE_TAG, "LOCKED") - .description("Current number of cmhandles in locked state") - .register(meterRegistry); + @ConditionalOnProperty("cps.monitoring.micrometer-jvm-extras") + public MeterBinder processMemoryMetrics() { + return new ProcessMemoryMetrics(); } - /** - * Register gauge metric for cm handles with state 'deleting'. 
- * - * @param meterRegistry meter registry - * @return cm handle state gauge - */ @Bean - public Gauge deletingCmHandles(final MeterRegistry meterRegistry) { - return Gauge.builder(CM_HANDLE_STATE_GAUGE, cmHandlesByState, - value -> cmHandlesByState.get("deletingCmHandlesCount")) - .tag(STATE_TAG, "DELETING") - .description("Current number of cmhandles in deleting state") - .register(meterRegistry); + @ConditionalOnProperty("cps.monitoring.micrometer-jvm-extras") + public MeterBinder processThreadMetrics() { + return new ProcessThreadMetrics(); } } diff --git a/cps-application/src/main/java/org/onap/cps/startup/InstanceStartupDelayManager.java b/cps-application/src/main/java/org/onap/cps/startup/InstanceStartupDelayManager.java new file mode 100644 index 0000000000..927c59f75d --- /dev/null +++ b/cps-application/src/main/java/org/onap/cps/startup/InstanceStartupDelayManager.java @@ -0,0 +1,63 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.startup; + +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.concurrent.TimeUnit; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class InstanceStartupDelayManager { + + /** + * Applies a consistent hash-based startup delay based on the host's name + * to avoid race conditions during schema migration. + * This method is useful in environments with multiple instances + * (e.g., Docker Compose, Kubernetes), where simultaneous Liquibase executions + * might result in conflicts. + * Delay logic: + * - A hash of the hostname is calculated. + * - The result is used to derive a delay up to 5000 milliseconds. + * - This provides a reasonably distributed delay across instances. + */ + public void applyHostnameBasedStartupDelay() { + try { + final String hostname = getHostName(); + final long startupDelayInMillis = Math.abs(hostname.hashCode() % 5_000L); + log.info("Startup delay applied for Hostname: {} | Delay: {} ms", hostname, startupDelayInMillis); + haveALittleSleepInMs(startupDelayInMillis); + } catch (final InterruptedException e) { + log.warn("Sleep interrupted, re-interrupting the thread"); + Thread.currentThread().interrupt(); + } catch (final Exception e) { + log.info("Exception during startup delay ignored. {}", e.getMessage()); + } + } + + protected String getHostName() throws UnknownHostException { + return InetAddress.getLocalHost().getHostName(); + } + + protected void haveALittleSleepInMs(final long timeInMs) throws InterruptedException { + TimeUnit.MILLISECONDS.sleep(timeInMs); + } +}
\ No newline at end of file diff --git a/cps-application/src/main/resources/application.yml b/cps-application/src/main/resources/application.yml index 27a15b6b51..26cc9e034a 100644 --- a/cps-application/src/main/resources/application.yml +++ b/cps-application/src/main/resources/application.yml @@ -2,7 +2,7 @@ # Copyright (C) 2021 Pantheon.tech # Modifications Copyright (C) 2021-2022 Bell Canada # Modifications Copyright (C) 2024 TechMahindra Ltd -# Modifications Copyright (C) 2021-2025 Nordix Foundation +# Modifications Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -45,7 +45,7 @@ spring: hibernate.jdbc.batch_size: 100 datasource: - url: jdbc:postgresql://${DB_HOST}:${DB_PORT:5432}/cpsdb + url: jdbc:postgresql://${DB_HOST:localhost}:${DB_PORT:5432}/cpsdb username: ${DB_USERNAME} password: ${DB_PASSWORD} driverClassName: org.postgresql.Driver @@ -106,6 +106,7 @@ app: cm-subscription-dmi-out: ${CM_SUBSCRIPTION_DMI_OUT_TOPIC:dmi-ncmp-cm-avc-subscription} cm-subscription-ncmp-out: ${CM_SUBSCRIPTION_NCMP_OUT_TOPIC:subscription-response} cm-events-topic: ${NCMP_CM_EVENTS_TOPIC:cm-events} + inventory-events-topic: ncmp-inventory-events lcm: events: topic: ${LCM_EVENTS_TOPIC:ncmp-events} @@ -116,11 +117,9 @@ app: topic: ${DMI_DEVICE_HEARTBEAT_TOPIC:dmi-device-heartbeat} cps: data-updated: - change-event-notifications-enabled: ${CPS_CHANGE_EVENT_NOTIFICATIONS_ENABLED:true} + change-event-notifications-enabled: ${CPS_CHANGE_EVENT_NOTIFICATIONS_ENABLED:false} topic: ${CPS_CHANGE_EVENT_TOPIC:cps-data-updated-events} - - notification: enabled: true async: @@ -144,14 +143,9 @@ springdoc: - name: cps-ncmp-inventory url: /api-docs/cps-ncmp/openapi-inventory.yaml -security: - # comma-separated uri patterns which do not require authorization - permit-uri: /actuator/**,/swagger-ui.html,/swagger-ui/**,/swagger-resources/**,/api-docs/**,/v3/api-docs/** - auth: - username: ${CPS_USERNAME:cpsuser} - password: ${CPS_PASSWORD:cpsr0cks!} - cps: + monitoring: + micrometer-jvm-extras: false tracing: sampler: jaeger_remote: @@ -172,7 +166,7 @@ management: endpoints: web: exposure: - include: info,health,loggers,prometheus,metrics + include: info,health,loggers,prometheus,metrics,heapdump,threaddump endpoint: health: show-details: always @@ -235,8 +229,10 @@ ncmp: timers: advised-modules-sync: + initial-delay-ms: 40000 sleep-time-ms: 5000 cm-handle-data-sync: + initial-delay-ms: 40000 sleep-time-ms: 30000 subscription-forwarding: dmi-response-timeout-ms: 30000 @@ -245,10 +241,6 @@ ncmp: trust-level: dmi-availability-watchdog-ms: 30000 - modules-sync-watchdog: - async-executor: - parallelism-level: 10 - model-loader: maximum-attempt-count: 20 diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceNativeRepository.java b/cps-application/src/test/groovy/org/onap/cps/ApplicationSpec.groovy index 2875511c1e..3ad5fadfd3 100644 --- a/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceNativeRepository.java +++ b/cps-application/src/test/groovy/org/onap/cps/ApplicationSpec.groovy @@ -1,13 +1,12 @@ -/*- +/* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation. + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. 
* ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 - * + * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -18,14 +17,18 @@ * ============LICENSE_END========================================================= */ -package org.onap.cps.ri.repository; +package org.onap.cps + -import java.util.Collection; -import java.util.List; -import org.onap.cps.api.model.ModuleReference; +import spock.lang.Specification -public interface YangResourceNativeRepository { +class ApplicationSpec extends Specification { - List<Integer> getResourceIdsByModuleReferences(Collection<ModuleReference> moduleReferences); + def 'Starting CPS application.'() { + when: 'start the application' + Application.main() + then: 'no exception is thrown' + noExceptionThrown() + } } diff --git a/cps-application/src/test/groovy/org/onap/cps/config/MicroMeterConfigSpec.groovy b/cps-application/src/test/groovy/org/onap/cps/config/MicroMeterConfigSpec.groovy index da3afc6f2c..29cb65cfbb 100644 --- a/cps-application/src/test/groovy/org/onap/cps/config/MicroMeterConfigSpec.groovy +++ b/cps-application/src/test/groovy/org/onap/cps/config/MicroMeterConfigSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2025 Nordix Foundation. + * Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,35 +20,24 @@ package org.onap.cps.config -import com.hazelcast.map.IMap import io.micrometer.core.instrument.simple.SimpleMeterRegistry import spock.lang.Specification class MicroMeterConfigSpec extends Specification { - def cmHandlesByState = Mock(IMap) - def objectUnderTest = new MicroMeterConfig(cmHandlesByState) + def objectUnderTest = new MicroMeterConfig() def simpleMeterRegistry = new SimpleMeterRegistry() def 'Creating a timed aspect.'() { - expect: ' a timed aspect can be created' + expect: 'a timed aspect can be created' assert objectUnderTest.timedAspect(simpleMeterRegistry) != null } - def 'Creating gauges for cm handle states.'() { - given: 'cache returns value for each state' - cmHandlesByState.get(_) >> 1 - when: 'gauges for each state are created' - objectUnderTest.advisedCmHandles(simpleMeterRegistry) - objectUnderTest.readyCmHandles(simpleMeterRegistry) - objectUnderTest.lockedCmHandles(simpleMeterRegistry) - objectUnderTest.deletingCmHandles(simpleMeterRegistry) - then: 'each state has the correct value when queried' - def states = ["ADVISED", "READY", "LOCKED", "DELETING"] - states.each { state -> - def gaugeValue = simpleMeterRegistry.get("cmHandlesByState").tag("state",state).gauge().value() - assert gaugeValue == 1 - } + def 'Creating JVM process metrics.'() { + expect: 'process memory metrics can be created' + assert objectUnderTest.processMemoryMetrics() != null + and: 'process thread metrics can be created' + assert objectUnderTest.processThreadMetrics() != null } } diff --git a/cps-application/src/test/groovy/org/onap/cps/rest/controller/ControllerSecuritySpec.groovy b/cps-application/src/test/groovy/org/onap/cps/rest/controller/ControllerSecuritySpec.groovy deleted file mode 100755 index b86f824888..0000000000 --- a/cps-application/src/test/groovy/org/onap/cps/rest/controller/ControllerSecuritySpec.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2020 Pantheon.tech - * Modifications Copyright (C) 2023 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.rest.controller - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get - -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest -import org.springframework.http.HttpStatus -import org.springframework.test.web.servlet.MockMvc -import spock.lang.Ignore -import spock.lang.Specification - -@WebMvcTest(TestController) -class ControllerSecuritySpec extends Specification { - - @Autowired - MockMvc mvc - - def testEndpoint = '/test' - - def 'Get request with authentication'() { - when: 'request is sent with authentication' - def response = mvc.perform( - get(testEndpoint).header("Authorization", 'Basic Y3BzdXNlcjpjcHNyMGNrcyE=') - ).andReturn().response - then: 'HTTP OK status code is returned' - assert response.status == HttpStatus.OK.value() - } - - @Ignore // CPS-2126 - def 'Get request without authentication is not authorized'() { - when: 'request is sent without authentication' - def response = mvc.perform(get(testEndpoint)).andReturn().response - then: 'HTTP Unauthorized status code is returned' - assert response.status == HttpStatus.UNAUTHORIZED.value() - } - - @Ignore // CPS-2126 - def 'Get request with invalid authentication is not authorized'() { - when: 'request is sent with invalid authentication' - def response = mvc.perform( - get(testEndpoint).header("Authorization", 'Basic invalid auth') - ).andReturn().response - then: 'HTTP Unauthorized status code is returned' - assert response.status == HttpStatus.UNAUTHORIZED.value() - } -} diff --git a/cps-application/src/test/groovy/org/onap/cps/startup/InstanceStartupDelayManagerSpec.groovy b/cps-application/src/test/groovy/org/onap/cps/startup/InstanceStartupDelayManagerSpec.groovy new file mode 100644 index 0000000000..0ad02f4551 --- /dev/null +++ b/cps-application/src/test/groovy/org/onap/cps/startup/InstanceStartupDelayManagerSpec.groovy @@ -0,0 +1,58 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.startup + +import spock.lang.Specification + +class InstanceStartupDelayManagerSpec extends Specification { + + def objectUnderTest = Spy(InstanceStartupDelayManager) + + def 'Startup delay with real hostname.'() { + given: 'a hostname is resolved' + objectUnderTest.getHostName() >> 'hostX' + and: 'the expected delay is based on hash code with max of 5,000 ms' + def expectedDelay = Math.abs('hostX'.hashCode() % 5_000) + when: 'startup delay is called' + objectUnderTest.applyHostnameBasedStartupDelay() + then: 'the system will sleep for expected time' + 1 * objectUnderTest.haveALittleSleepInMs(expectedDelay) + } + + def 'Startup delay when hostname cannot be resolved.'() { + given: 'an exception is thrown while getting the hostname' + objectUnderTest.getHostName() >> { throw new Exception('some message') } + when: 'startup delay is called' + objectUnderTest.applyHostnameBasedStartupDelay() + then: 'system will not sleep' + 0 * objectUnderTest.haveALittleSleepInMs(_) + } + + def 'Startup delay when sleep is interrupted'() { + given: 'sleep method throws InterruptedException' + objectUnderTest.haveALittleSleepInMs(_) >> { throw new InterruptedException('some message') } + when: 'startup delay is called' + objectUnderTest.applyHostnameBasedStartupDelay() + then: 'interrupt exception is ignored' + noExceptionThrown() + } + +} diff --git a/cps-application/src/test/java/org/onap/cps/architecture/ArchitectureTestBase.java b/cps-application/src/test/java/org/onap/cps/architecture/ArchitectureTestBase.java index 1d39060024..28ff7c307c 100644 --- a/cps-application/src/test/java/org/onap/cps/architecture/ArchitectureTestBase.java +++ b/cps-application/src/test/java/org/onap/cps/architecture/ArchitectureTestBase.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -35,7 +35,9 @@ public class ArchitectureTestBase { "java..", "lombok..", "org.apache..", + "org.aspectj..", "org.mapstruct..", + "org.opendaylight..", "org.slf4j..", "org.springframework..", "reactor.." diff --git a/cps-application/src/test/resources/application.yml b/cps-application/src/test/resources/application.yml index 69e4febcff..e20aa0c4ed 100644 --- a/cps-application/src/test/resources/application.yml +++ b/cps-application/src/test/resources/application.yml @@ -1,11 +1,12 @@ -# ============LICENSE_START======================================================= -# Copyright (C) 2021 Pantheon.tech -# ================================================================================ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at +# ============LICENSE_START======================================================= +# Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -15,7 +16,242 @@ # SPDX-License-Identifier: Apache-2.0 # ============LICENSE_END========================================================= -security: - auth: - username: cpsuser - password: cpsr0cks! +# This is a full copy of the real application.yml except that liquibase.enabled is set to false! +# This is needed te be able to test Application.main () method (coverage and context loading test). +# Cannot use a custom profile. That would require the test to be a springboot test which is even harder to achieve + +rest: + api: + cps-base-path: /cps/api + ncmp-base-path: /ncmp + ncmp-inventory-base-path: /ncmpInventory + +spring: + main: + banner-mode: "off" + application: + name: "cps-application" + jpa: + show-sql: false + ddl-auto: create + open-in-view: false + properties: + hibernate.enable_lazy_load_no_trans: true + hibernate.dialect: org.hibernate.dialect.PostgreSQLDialect + # Please ensure these values match those used in integration-test/src/test/resources/application.yml + hibernate.id.new_generator_mappings: true + hibernate.jdbc.batch_size: 100 + + datasource: + url: jdbc:postgresql://${DB_HOST:localhost}:${DB_PORT:5432}/cpsdb + username: ${DB_USERNAME} + password: ${DB_PASSWORD} + driverClassName: org.postgresql.Driver + hikari: + minimumIdle: 5 + maximumPoolSize: 80 + idleTimeout: 60000 + connectionTimeout: 30000 + leakDetectionThreshold: 30000 + pool-name: CpsDatabasePool + + cache: + type: caffeine + cache-names: yangSchema + caffeine: + spec: maximumSize=10000,expireAfterAccess=10m + + liquibase: + enabled: false + + servlet: + multipart: + enabled: true + max-file-size: 100MB + max-request-size: 100MB + + kafka: + bootstrap-servers: ${KAFKA_BOOTSTRAP_SERVER:localhost:9092} + security: + protocol: PLAINTEXT + producer: + value-serializer: io.cloudevents.kafka.CloudEventSerializer + client-id: cps-core + consumer: + group-id: ${NCMP_CONSUMER_GROUP_ID:ncmp-group} + key-deserializer: org.springframework.kafka.support.serializer.ErrorHandlingDeserializer + value-deserializer: org.springframework.kafka.support.serializer.ErrorHandlingDeserializer + properties: + spring.deserializer.key.delegate.class: org.apache.kafka.common.serialization.StringDeserializer + spring.deserializer.value.delegate.class: io.cloudevents.kafka.CloudEventDeserializer + spring.json.use.type.headers: false + + jackson: + default-property-inclusion: NON_NULL + serialization: + FAIL_ON_EMPTY_BEANS: false + sql: + init: + mode: ALWAYS +app: + ncmp: + async-m2m: + topic: ${NCMP_ASYNC_M2M_TOPIC:ncmp-async-m2m} + avc: + cm-subscription-ncmp-in: ${CM_SUBSCRIPTION_NCMP_IN_TOPIC:subscription} + cm-subscription-dmi-in: ${CM_SUBSCRIPTION_DMI_IN_TOPIC:ncmp-dmi-cm-avc-subscription} + cm-subscription-dmi-out: ${CM_SUBSCRIPTION_DMI_OUT_TOPIC:dmi-ncmp-cm-avc-subscription} + cm-subscription-ncmp-out: ${CM_SUBSCRIPTION_NCMP_OUT_TOPIC:subscription-response} + cm-events-topic: ${NCMP_CM_EVENTS_TOPIC:cm-events} + inventory-events-topic: ncmp-inventory-events + lcm: + events: + topic: ${LCM_EVENTS_TOPIC:ncmp-events} + dmi: + cm-events: + topic: ${DMI_CM_EVENTS_TOPIC:dmi-cm-events} + device-heartbeat: + topic: ${DMI_DEVICE_HEARTBEAT_TOPIC:dmi-device-heartbeat} + cps: + data-updated: + change-event-notifications-enabled: 
${CPS_CHANGE_EVENT_NOTIFICATIONS_ENABLED:false} + topic: ${CPS_CHANGE_EVENT_TOPIC:cps-data-updated-events} + +notification: + enabled: true + async: + executor: + core-pool-size: 2 + max-pool-size: 10 + queue-capacity: 500 + wait-for-tasks-to-complete-on-shutdown: true + thread-name-prefix: Async- + time-out-value-in-ms: 60000 + +springdoc: + swagger-ui: + disable-swagger-default-url: true + urlsPrimaryName: cps-core + urls: + - name: cps-core + url: /api-docs/cps-core/openapi.yaml + - name: cps-ncmp + url: /api-docs/cps-ncmp/openapi.yaml + - name: cps-ncmp-inventory + url: /api-docs/cps-ncmp/openapi-inventory.yaml + +cps: + monitoring: + micrometer-jvm-extras: false + tracing: + sampler: + jaeger_remote: + endpoint: ${ONAP_OTEL_SAMPLER_JAEGER_REMOTE_ENDPOINT:http://onap-otel-collector:14250} + exporter: + endpoint: ${ONAP_OTEL_EXPORTER_ENDPOINT:http://onap-otel-collector:4317} + protocol: ${ONAP_OTEL_EXPORTER_PROTOCOL:grpc} + enabled: ${ONAP_TRACING_ENABLED:false} + excluded-observation-names: ${ONAP_EXCLUDED_OBSERVATION_NAMES:tasks.scheduled.execution} + +# Actuator +management: + tracing: + propagation: + produce: ${ONAP_PROPAGATOR_PRODUCE:[W3C]} + sampling: + probability: 1.0 + endpoints: + web: + exposure: + include: info,health,loggers,prometheus,metrics,heapdump,threaddump + endpoint: + health: + show-details: always + # kubernetes probes: liveness and readiness + probes: + enabled: true + + info: + git: + enabled: true + mode: full + +logging: + format: json + level: + org: + springframework: INFO + onap: + cps: INFO +ncmp: + policy-executor: + enabled: ${POLICY_SERVICE_ENABLED:false} + defaultDecision: ${POLICY_SERVICE_DEFAULT_DECISION:"allow"} + server: + address: ${POLICY_SERVICE_URL:http://policy-executor-stub} + port: ${POLICY_SERVICE_PORT:8093} + httpclient: + all-services: + maximumInMemorySizeInMegabytes: 16 + maximumConnectionsTotal: 100 + pendingAcquireMaxCount: 50 + connectionTimeoutInSeconds: 30 + readTimeoutInSeconds: 30 + writeTimeoutInSeconds: 30 + responseTimeoutInSeconds: 60 + dmi: + httpclient: + data-services: + maximumInMemorySizeInMegabytes: 16 + maximumConnectionsTotal: 100 + pendingAcquireMaxCount: 50 + connectionTimeoutInSeconds: 30 + readTimeoutInSeconds: 30 + writeTimeoutInSeconds: 30 + responseTimeoutInSeconds: 60 + model-services: + maximumInMemorySizeInMegabytes: 16 + maximumConnectionsTotal: 100 + pendingAcquireMaxCount: 50 + connectionTimeoutInSeconds: 30 + readTimeoutInSeconds: 30 + writeTimeoutInSeconds: 30 + responseTimeoutInSeconds: 60 + auth: + username: ${DMI_USERNAME:cpsuser} + password: ${DMI_PASSWORD:cpsr0cks!} + enabled: ${DMI_AUTH_ENABLED:true} + api: + base-path: dmi + + timers: + advised-modules-sync: + initial-delay-ms: 40000 + sleep-time-ms: 5000 + cm-handle-data-sync: + initial-delay-ms: 40000 + sleep-time-ms: 30000 + subscription-forwarding: + dmi-response-timeout-ms: 30000 + model-loader: + retry-time-ms: 1000 + trust-level: + dmi-availability-watchdog-ms: 30000 + + model-loader: + maximum-attempt-count: 20 + +# Custom Hazelcast Config. 
+hazelcast: + cluster-name: ${CPS_NCMP_CACHES_CLUSTER_NAME:"cps-and-ncmp-common-cache-cluster"} + instance-config-name: ${CPS_NCMP_INSTANCE_CONFIG_NAME:"cps-and-ncmp-hazelcast-instance-config"} + mode: + kubernetes: + enabled: ${HAZELCAST_MODE_KUBERNETES_ENABLED:false} + service-name: ${CPS_NCMP_SERVICE_NAME:"cps-and-ncmp-service"} + +otel: + exporter: + otlp: + traces: + protocol: ${ONAP_OTEL_EXPORTER_OTLP_TRACES_PROTOCOL:grpc} diff --git a/cps-bom/pom.xml b/cps-bom/pom.xml index 1bb8308e07..208004b21e 100644 --- a/cps-bom/pom.xml +++ b/cps-bom/pom.xml @@ -25,7 +25,7 @@ <modelVersion>4.0.0</modelVersion> <groupId>org.onap.cps</groupId> <artifactId>cps-bom</artifactId> - <version>3.6.0-SNAPSHOT</version> + <version>3.6.3-SNAPSHOT</version> <packaging>pom</packaging> <description>This artifact contains dependencyManagement declarations of all published CPS components.</description> diff --git a/cps-dependencies/pom.xml b/cps-dependencies/pom.xml index 5783ef7fcd..f7df9e1b28 100644 --- a/cps-dependencies/pom.xml +++ b/cps-dependencies/pom.xml @@ -27,7 +27,7 @@ <modelVersion>4.0.0</modelVersion> <groupId>org.onap.cps</groupId> <artifactId>cps-dependencies</artifactId> - <version>3.6.0-SNAPSHOT</version> + <version>3.6.3-SNAPSHOT</version> <packaging>pom</packaging> <name>${project.groupId}:${project.artifactId}</name> @@ -86,14 +86,14 @@ <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-dependencies</artifactId> - <version>3.4.1</version> + <version>3.4.4</version> <type>pom</type> <scope>import</scope> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-cache</artifactId> - <version>3.4.1</version> + <version>3.4.4</version> </dependency> <dependency> <groupId>org.springframework.cloud</groupId> @@ -146,7 +146,7 @@ <dependency> <groupId>com.hazelcast</groupId> <artifactId>hazelcast-spring</artifactId> - <version>5.3.7</version> + <version>5.5.0</version> </dependency> <dependency> <groupId>com.squareup.okhttp3</groupId> @@ -173,6 +173,11 @@ <scope>import</scope> </dependency> <dependency> + <groupId>io.github.mweirauch</groupId> + <artifactId>micrometer-jvm-extras</artifactId> + <version>0.2.2</version> + </dependency> + <dependency> <groupId>io.gsonfire</groupId> <artifactId>gson-fire</artifactId> <version>1.9.0</version> diff --git a/cps-events/pom.xml b/cps-events/pom.xml index 9d49f181fb..9067e55b7e 100644 --- a/cps-events/pom.xml +++ b/cps-events/pom.xml @@ -24,7 +24,7 @@ <parent> <groupId>org.onap.cps</groupId> <artifactId>cps-parent</artifactId> - <version>3.6.0-SNAPSHOT</version> + <version>3.6.3-SNAPSHOT</version> <relativePath>../cps-parent/pom.xml</relativePath> </parent> diff --git a/cps-events/src/main/resources/schemas/updatenode/cps-data-updated-event-schema-1.0.0.json b/cps-events/src/main/resources/schemas/cps.dataupdated/cps-data-updated-event-schema-1.0.0.json index a3eaf63fa4..a3eaf63fa4 100644 --- a/cps-events/src/main/resources/schemas/updatenode/cps-data-updated-event-schema-1.0.0.json +++ b/cps-events/src/main/resources/schemas/cps.dataupdated/cps-data-updated-event-schema-1.0.0.json diff --git a/cps-ncmp-events/pom.xml b/cps-ncmp-events/pom.xml index bb45493554..04a699e4df 100644 --- a/cps-ncmp-events/pom.xml +++ b/cps-ncmp-events/pom.xml @@ -1,7 +1,7 @@ <?xml version="1.0" encoding="UTF-8"?> <!-- ============LICENSE_START======================================================= - Copyright (c) 2022-2023 Nordix Foundation. + Copyright (c) 2022-2025 Nordix Foundation. 
================================================================================ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -23,7 +23,7 @@ <parent> <groupId>org.onap.cps</groupId> <artifactId>cps-parent</artifactId> - <version>3.6.0-SNAPSHOT</version> + <version>3.6.3-SNAPSHOT</version> <relativePath>../cps-parent/pom.xml</relativePath> </parent> @@ -39,6 +39,10 @@ <groupId>jakarta.validation</groupId> <artifactId>jakarta.validation-api</artifactId> </dependency> + <dependency> + <groupId>org.projectlombok</groupId> + <artifactId>lombok</artifactId> + </dependency> </dependencies> <build> diff --git a/cps-ncmp-events/src/main/java/org/onap/cps/ncmp/events/NcmpEventDataSchema.java b/cps-ncmp-events/src/main/java/org/onap/cps/ncmp/events/NcmpEventDataSchema.java new file mode 100644 index 0000000000..9cdb6d76ee --- /dev/null +++ b/cps-ncmp-events/src/main/java/org/onap/cps/ncmp/events/NcmpEventDataSchema.java @@ -0,0 +1,41 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.events; + +import lombok.Getter; + +@Getter +public enum NcmpEventDataSchema { + + BATCH_RESPONSE_V1("urn:cps:org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent:1.0.0"), + SUBSCRIPTIONS_V1("urn:cps:org.onap.ncmp.events.subscription:1.0.0"), + MOI_CHANGES_V1("urn:cps:org.onap.cps.ncmp.events.moi-changes:1.0.0"), + INVENTORY_EVENTS_V1("urn:cps:org.onap.cps.ncmp.events:inventory-event:1.0.0"), + CM_HANDLE_TRUST_LEVEL_V1("urn:cps:org.onap.cps.ncmp.dmi.events:cm-handle-trust-level:1.0.0"); + + private final String dataSchema; + + NcmpEventDataSchema(final String dataSchema) { + this.dataSchema = dataSchema; + } + + +} diff --git a/cps-ncmp-events/src/main/resources/schemas/dmidataavc/avc-event-schema-1.0.0.json b/cps-ncmp-events/src/main/resources/schemas/dmi/cm-events/avc-event-schema-1.0.0.json index 474520d142..474520d142 100644 --- a/cps-ncmp-events/src/main/resources/schemas/dmidataavc/avc-event-schema-1.0.0.json +++ b/cps-ncmp-events/src/main/resources/schemas/dmi/cm-events/avc-event-schema-1.0.0.json diff --git a/cps-ncmp-events/src/main/resources/schemas/trustlevel/device-trust-level-event-schema-1.0.0.json b/cps-ncmp-events/src/main/resources/schemas/dmi/device-heartbeat/device-trust-level-event-schema-1.0.0.json index e1796fbc73..e1796fbc73 100644 --- a/cps-ncmp-events/src/main/resources/schemas/trustlevel/device-trust-level-event-schema-1.0.0.json +++ b/cps-ncmp-events/src/main/resources/schemas/dmi/device-heartbeat/device-trust-level-event-schema-1.0.0.json diff --git a/cps-ncmp-events/src/main/resources/schemas/dmi/ves-events/ves-event-schema-30.2.1.json b/cps-ncmp-events/src/main/resources/schemas/dmi/ves-events/ves-event-schema-30.2.1.json new file mode 100644 index 0000000000..67324b7a57 --- /dev/null +++ b/cps-ncmp-events/src/main/resources/schemas/dmi/ves-events/ves-event-schema-30.2.1.json @@ -0,0 +1,3092 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "VES Event Listener Common Event Format", + "type": "object", + "javaType": "org.onap.cps.ncmp.events.ves30_2_1.VesEventSchema", + "properties": { + "event": { + "$ref": "#/definitions/event" + }, + "eventList": { + "$ref": "#/definitions/eventList" + } + }, + "definitions": { + "schemaHeaderBlock": { + "description": "schema date, version, author and associated API", + "type": "object", + "properties": { + "associatedApi": { + "description": "VES Event Listener", + "type": "string" + }, + "lastUpdatedBy": { + "description": "damian.nowak@nokia.com", + "type": "string" + }, + "schemaDate": { + "description": "Jan 04, 2021", + "type": "string" + }, + "schemaVersion": { + "description": "30.2.1", + "type": "number" + } + } + }, + "schemaLicenseAndCopyrightNotice": { + "description": "Copyright (c) 2020, AT&T Intellectual Property. All rights reserved. Modification Copyright (c) 2021, Nokia Solutions and Networks.", + "type": "object", + "properties": { + "apacheLicense2.0": { + "description": "Licensed under the Apache License, Version 2.0 (the 'License'); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at:", + "type": "string" + }, + "licenseUrl": { + "description": "http://www.apache.org/licenses/LICENSE-2.0", + "type": "string" + }, + "asIsClause": { + "description": "Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", + "type": "string" + }, + "permissionsAndLimitations": { + "description": "See the License for the specific language governing permissions and limitations under the License.", + "type": "string" + } + } + }, + "arrayOfJsonObject": { + "description": "array of json objects described by name, schema and other meta-information", + "type": "array", + "items": { + "$ref": "#/definitions/jsonObject" + } + }, + "arrayOfNamedHashMap": { + "description": "array of named hashMaps", + "type": "array", + "items": { + "$ref": "#/definitions/namedHashMap" + } + }, + "codecsInUse": { + "description": "number of times an identified codec was used over the measurementInterval", + "type": "object", + "properties": { + "codecIdentifier": { + "type": "string" + }, + "numberInUse": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "codecIdentifier", + "numberInUse" + ] + }, + "commonEventHeader": { + "description": "fields common to all events", + "type": "object", + "properties": { + "domain": { + "description": "the eventing domain associated with the event", + "type": "string", + "enum": [ + "fault", + "heartbeat", + "measurement", + "mobileFlow", + "notification", + "other", + "perf3gpp", + "pnfRegistration", + "sipSignaling", + "stateChange", + "stndDefined", + "syslog", + "thresholdCrossingAlert", + "voiceQuality" + ] + }, + "eventId": { + "description": "event key that is unique to the event source", + "type": "string" + }, + "eventName": { + "description": "unique event name", + "type": "string" + }, + "eventType": { + "description": "for example - applicationNf, guestOS, hostOS, platform", + "type": "string" + }, + "internalHeaderFields": { + "$ref": "#/definitions/internalHeaderFields" + }, + "lastEpochMicrosec": { + "description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "nfcNamingCode": { + "description": "3 character network function component type, aligned with vfc naming standards", + "type": "string" + }, + "nfNamingCode": { + "description": "4 character network function type, aligned with nf naming standards", + "type": "string" + }, + "nfVendorName": { + "description": "network function vendor name", + "type": "string" + }, + "priority": { + "description": "processing priority", + "type": "string", + "enum": [ + "High", + "Medium", + "Normal", + "Low" + ] + }, + "reportingEntityId": { + "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process", + "type": "string" + }, + "reportingEntityName": { + "description": "name of the entity reporting the event, for example, an EMS name; may be the same as sourceName", + "type": "string" + }, + "sequence": { + "description": "ordering of events communicated by an event source instance or 0 if not needed", + "type": "integer" + }, + "sourceId": { + "description": "UUID identifying the entity experiencing the event issue; must be populated by the ATT enrichment process", + "type": "string" + }, + 
"sourceName": { + "description": "name of the entity experiencing the event issue", + "type": "string" + }, + "startEpochMicrosec": { + "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "stndDefinedNamespace": { + "description": "Standards organization defined event namespace; expected usage includes event routing by the event listener", + "type": "string" + }, + "timeZoneOffset": { + "description": "UTC offset for the local time zone of the device as UTC+/-hh.mm", + "type": "string" + }, + "version": { + "description": "version of the event header", + "type": "string", + "enum": [ + "4.0", + "4.0.1", + "4.1" + ] + }, + "vesEventListenerVersion": { + "description": "version of the VES Event Listener API", + "type": "string", + "enum": [ + "7.0", + "7.0.1", + "7.1", + "7.1.1", + "7.2", + "7.2.1" + ] + } + }, + "additionalProperties": false, + "required": [ + "domain", + "eventId", + "eventName", + "lastEpochMicrosec", + "priority", + "reportingEntityName", + "sequence", + "sourceName", + "startEpochMicrosec", + "version", + "vesEventListenerVersion" + ] + }, + "counter": { + "description": "performance counter", + "type": "object", + "properties": { + "criticality": { + "type": "string", + "enum": [ + "CRIT", + "MAJ" + ] + }, + "hashMap": { + "$ref": "#/definitions/hashMap" + }, + "thresholdCrossed": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "criticality", + "hashMap", + "thresholdCrossed" + ] + }, + "cpuUsage": { + "description": "usage of an identified CPU", + "type": "object", + "properties": { + "cpuCapacityContention": { + "description": "the amount of time the CPU cannot run due to contention, in milliseconds over the measurementInterval", + "type": "number" + }, + "cpuDemandAvg": { + "description": "the total CPU time that the NF/NFC/VM could use if there was no contention, in milliseconds over the measurementInterval", + "type": "number" + }, + "cpuDemandMhz": { + "description": "CPU demand in megahertz", + "type": "number" + }, + "cpuDemandPct": { + "description": "CPU demand as a percentage of the provisioned capacity", + "type": "number" + }, + "cpuIdentifier": { + "description": "cpu identifer", + "type": "string" + }, + "cpuIdle": { + "description": "percentage of CPU time spent in the idle task", + "type": "number" + }, + "cpuLatencyAvg": { + "description": "percentage of time the VM is unable to run because it is contending for access to the physical CPUs", + "type": "number" + }, + "cpuOverheadAvg": { + "description": "the overhead demand above available allocations and reservations, in milliseconds over the measurementInterval", + "type": "number" + }, + "cpuSwapWaitTime": { + "description": "swap wait time. 
in milliseconds over the measurementInterval", + "type": "number" + }, + "cpuUsageInterrupt": { + "description": "percentage of time spent servicing interrupts", + "type": "number" + }, + "cpuUsageNice": { + "description": "percentage of time spent running user space processes that have been niced", + "type": "number" + }, + "cpuUsageSoftIrq": { + "description": "percentage of time spent handling soft irq interrupts", + "type": "number" + }, + "cpuUsageSteal": { + "description": "percentage of time spent in involuntary wait which is neither user, system or idle time and is effectively time that went missing", + "type": "number" + }, + "cpuUsageSystem": { + "description": "percentage of time spent on system tasks running the kernel", + "type": "number" + }, + "cpuUsageUser": { + "description": "percentage of time spent running un-niced user space processes", + "type": "number" + }, + "cpuWait": { + "description": "percentage of CPU time spent waiting for I/O operations to complete", + "type": "number" + }, + "percentUsage": { + "description": "aggregate cpu usage of the virtual machine on which the xNFC reporting the event is running", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "cpuIdentifier", + "percentUsage" + ] + }, + "diskUsage": { + "description": "usage of an identified disk", + "type": "object", + "properties": { + "diskBusResets": { + "description": "number of bus resets over the measurementInterval", + "type": "number" + }, + "diskCommandsAborted": { + "description": "number of disk commands aborted over the measurementInterval", + "type": "number" + }, + "diskCommandsAvg": { + "description": "average number of commands per second over the measurementInterval", + "type": "number" + }, + "diskFlushRequests": { + "description": "total flush requests of the disk cache over the measurementInterval", + "type": "number" + }, + "diskFlushTime": { + "description": "milliseconds spent on disk cache flushing over the measurementInterval", + "type": "number" + }, + "diskIdentifier": { + "description": "disk identifier", + "type": "string" + }, + "diskIoTimeAvg": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the average over the measurement interval", + "type": "number" + }, + "diskIoTimeLast": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the last value measurement within the measurement interval", + "type": "number" + }, + "diskIoTimeMax": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the maximum value measurement within the measurement interval", + "type": "number" + }, + "diskIoTimeMin": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the minimum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadAvg": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadLast": { + "description": "number of logical read operations that 
were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the last value measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadMax": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the maximum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadMin": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the minimum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteAvg": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteLast": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the last value measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteMax": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the maximum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteMin": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the minimum value measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadAvg": { + "description": "number of octets per second read from a disk or partition; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadLast": { + "description": "number of octets per second read from a disk or partition; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadMax": { + "description": "number of octets per second read from a disk or partition; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadMin": { + "description": "number of octets per second read from a disk or partition; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteAvg": { + "description": "number of octets per second written to a disk or partition; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteLast": { + "description": "number of octets per second written to a disk or partition; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteMax": { + "description": "number of octets per second written to a disk or partition; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteMin": { + "description": "number of octets per second written to a disk or partition; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadAvg": { + "description": "number of read operations per second issued to 
the disk; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadLast": { + "description": "number of read operations per second issued to the disk; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadMax": { + "description": "number of read operations per second issued to the disk; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadMin": { + "description": "number of read operations per second issued to the disk; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteAvg": { + "description": "number of write operations per second issued to the disk; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteLast": { + "description": "number of write operations per second issued to the disk; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteMax": { + "description": "number of write operations per second issued to the disk; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteMin": { + "description": "number of write operations per second issued to the disk; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsAvg": { + "description": "queue size of pending I/O operations per second; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsLast": { + "description": "queue size of pending I/O operations per second; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsMax": { + "description": "queue size of pending I/O operations per second; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsMin": { + "description": "queue size of pending I/O operations per second; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskReadCommandsAvg": { + "description": "average number of read commands issued per second to the disk over the measurementInterval", + "type": "number" + }, + "diskTime": { + "description": "nanoseconds spent on disk cache reads/writes within the measurement interval", + "type": "number" + }, + "diskTimeReadAvg": { + "description": "milliseconds a read operation took to complete; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadLast": { + "description": "milliseconds a read operation took to complete; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadMax": { + "description": "milliseconds a read operation took to complete; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadMin": { + "description": "milliseconds a read operation took to complete; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteAvg": { + "description": "milliseconds a write operation took to complete; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteLast": { + "description": "milliseconds a write operation took to complete; provide the last measurement within the measurement interval", + "type": "number" + }, + 
"diskTimeWriteMax": { + "description": "milliseconds a write operation took to complete; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteMin": { + "description": "milliseconds a write operation took to complete; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskTotalReadLatencyAvg": { + "description": "average read time from the perspective of a Guest OS: sum of the Kernel Read Latency and Physical Device Read Latency in milliseconds over the measurement interval", + "type": "number" + }, + "diskTotalWriteLatencyAvg": { + "description": "average write time from the perspective of a Guest OS: sum of the Kernel Write Latency and Physical Device Write Latency in milliseconds over the measurement interval", + "type": "number" + }, + "diskWeightedIoTimeAvg": { + "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the average within the collection interval", + "type": "number" + }, + "diskWeightedIoTimeLast": { + "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the last within the collection interval", + "type": "number" + }, + "diskWeightedIoTimeMax": { + "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the maximum within the collection interval", + "type": "number" + }, + "diskWeightedIoTimeMin": { + "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the minimum within the collection interval", + "type": "number" + }, + "diskWriteCommandsAvg": { + "description": "average number of write commands issued per second to the disk over the measurementInterval", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "diskIdentifier" + ] + }, + "endOfCallVqmSummaries": { + "description": "provides end of call voice quality metrics", + "type": "object", + "properties": { + "adjacencyName": { + "description": " adjacency name", + "type": "string" + }, + "endpointAverageJitter": { + "description": "endpoint average jitter", + "type": "number" + }, + "endpointDescription": { + "description": "either Caller or Callee", + "type": "string", + "enum": [ + "Caller", + "Callee" + ] + }, + "endpointMaxJitter": { + "description": "endpoint maximum jitter", + "type": "number" + }, + "endpointRtpOctetsDiscarded": { + "description": "", + "type": "number" + }, + "endpointRtpOctetsLost": { + "description": "endpoint RTP octets lost", + "type": "number" + }, + "endpointRtpOctetsReceived": { + "description": "", + "type": "number" + }, + "endpointRtpOctetsSent": { + "description": "", + "type": "number" + }, + "endpointRtpPacketsDiscarded": { + "description": "", + "type": "number" + }, + "endpointRtpPacketsLost": { + "description": "endpoint RTP packets lost", + "type": "number" + }, + "endpointRtpPacketsReceived": { + "description": "", + "type": "number" + }, + "endpointRtpPacketsSent": { + "description": "", + "type": "number" + }, + "localAverageJitter": { + "description": "Local average jitter", + "type": "number" + }, + "localAverageJitterBufferDelay": { + "description": "Local average jitter delay", + "type": "number" + }, + "localMaxJitter": { + "description": "Local maximum jitter", + "type": "number" + }, + "localMaxJitterBufferDelay": { + "description": "Local maximum jitter delay", + "type": "number" + }, 
+ "localRtpOctetsDiscarded": { + "description": "", + "type": "number" + }, + "localRtpOctetsLost": { + "description": "Local RTP octets lost", + "type": "number" + }, + "localRtpOctetsReceived": { + "description": "", + "type": "number" + }, + "localRtpOctetsSent": { + "description": "", + "type": "number" + }, + "localRtpPacketsDiscarded": { + "description": "", + "type": "number" + }, + "localRtpPacketsLost": { + "description": "Local RTP packets lost", + "type": "number" + }, + "localRtpPacketsReceived": { + "description": "", + "type": "number" + }, + "localRtpPacketsSent": { + "description": "", + "type": "number" + }, + "mosCqe": { + "description": "1-5 1dp", + "type": "number" + }, + "oneWayDelay": { + "description": "one-way path delay in milliseconds", + "type": "number" + }, + "packetLossPercent": { + "description": "Calculated percentage packet loss based on Endpoint RTP packets lost (as reported in RTCP) and Local RTP packets sent. Direction is based on Endpoint description (Caller, Callee). Decimal (2 dp)", + "type": "number" + }, + "rFactor": { + "description": "0-100", + "type": "number" + }, + "roundTripDelay": { + "description": "millisecs", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "adjacencyName", + "endpointDescription" + ] + }, + "event": { + "description": "the root level of the common event format", + "type": "object", + "properties": { + "commonEventHeader": { + "$ref": "#/definitions/commonEventHeader" + }, + "faultFields": { + "$ref": "#/definitions/faultFields" + }, + "heartbeatFields": { + "$ref": "#/definitions/heartbeatFields" + }, + "measurementFields": { + "$ref": "#/definitions/measurementFields" + }, + "mobileFlowFields": { + "$ref": "#/definitions/mobileFlowFields" + }, + "notificationFields": { + "$ref": "#/definitions/notificationFields" + }, + "otherFields": { + "$ref": "#/definitions/otherFields" + }, + "perf3gppFields": { + "$ref": "#/definitions/perf3gppFields" + }, + "pnfRegistrationFields": { + "$ref": "#/definitions/pnfRegistrationFields" + }, + "sipSignalingFields": { + "$ref": "#/definitions/sipSignalingFields" + }, + "stateChangeFields": { + "$ref": "#/definitions/stateChangeFields" + }, + "stndDefinedFields": { + "$ref": "#/definitions/stndDefinedFields" + }, + "syslogFields": { + "$ref": "#/definitions/syslogFields" + }, + "thresholdCrossingAlertFields": { + "$ref": "#/definitions/thresholdCrossingAlertFields" + }, + "voiceQualityFields": { + "$ref": "#/definitions/voiceQualityFields" + } + }, + "additionalProperties": false, + "required": [ + "commonEventHeader" + ] + }, + "eventList": { + "description": "array of events", + "type": "array", + "items": { + "$ref": "#/definitions/event" + } + }, + "faultFields": { + "description": "fields specific to fault events", + "type": "object", + "properties": { + "alarmAdditionalInformation": { + "$ref": "#/definitions/hashMap" + }, + "alarmCondition": { + "description": "alarm condition reported by the device", + "type": "string" + }, + "alarmInterfaceA": { + "description": "card, port, channel or interface name of the device generating the alarm", + "type": "string" + }, + "eventCategory": { + "description": "Event category, for example: license, link, routing, security, signaling", + "type": "string" + }, + "eventSeverity": { + "description": "event severity", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventSourceType": { + "description": "type of event source; examples: card, host, other, port, 
portThreshold, router, slotThreshold, switch, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "faultFieldsVersion": { + "description": "version of the faultFields block", + "type": "string", + "enum": [ + "4.0" + ] + }, + "specificProblem": { + "description": "short description of the alarm or problem", + "type": "string" + }, + "vfStatus": { + "description": "virtual function status enumeration", + "type": "string", + "enum": [ + "Active", + "Idle", + "Preparing to terminate", + "Ready to terminate", + "Requesting termination" + ] + } + }, + "additionalProperties": false, + "required": [ + "alarmCondition", + "eventSeverity", + "eventSourceType", + "faultFieldsVersion", + "specificProblem", + "vfStatus" + ] + }, + "filesystemUsage": { + "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second", + "type": "object", + "properties": { + "blockConfigured": { + "type": "number" + }, + "blockIops": { + "type": "number" + }, + "blockUsed": { + "type": "number" + }, + "ephemeralConfigured": { + "type": "number" + }, + "ephemeralIops": { + "type": "number" + }, + "ephemeralUsed": { + "type": "number" + }, + "filesystemName": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "blockConfigured", + "blockIops", + "blockUsed", + "ephemeralConfigured", + "ephemeralIops", + "ephemeralUsed", + "filesystemName" + ] + }, + "gtpPerFlowMetrics": { + "description": "Mobility GTP Protocol per flow metrics", + "type": "object", + "properties": { + "avgBitErrorRate": { + "description": "average bit error rate", + "type": "number" + }, + "avgPacketDelayVariation": { + "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "avgPacketLatency": { + "description": "average delivery latency", + "type": "number" + }, + "avgReceiveThroughput": { + "description": "average receive throughput", + "type": "number" + }, + "avgTransmitThroughput": { + "description": "average transmit throughput", + "type": "number" + }, + "durConnectionFailedStatus": { + "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval", + "type": "number" + }, + "durTunnelFailedStatus": { + "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval", + "type": "number" + }, + "flowActivatedBy": { + "description": "Endpoint activating the flow", + "type": "string" + }, + "flowActivationEpoch": { + "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available", + "type": "number" + }, + "flowActivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowActivationTime": { + "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowDeactivatedBy": { + "description": "Endpoint deactivating the flow", + "type": "string" + }, + "flowDeactivationEpoch": { + "description": 
"Time for the start of the flow connection, in integer UTC epoch time aka UNIX time", + "type": "number" + }, + "flowDeactivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowDeactivationTime": { + "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowStatus": { + "description": "connection status at reporting time as a working / inactive / failed indicator value", + "type": "string" + }, + "gtpConnectionStatus": { + "description": "Current connection state at reporting time", + "type": "string" + }, + "gtpTunnelStatus": { + "description": "Current tunnel state at reporting time", + "type": "string" + }, + "ipTosCountList": { + "$ref": "#/definitions/hashMap" + }, + "ipTosList": { + "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'", + "type": "array", + "items": { + "type": "string" + } + }, + "largePacketRtt": { + "description": "large packet round trip time", + "type": "number" + }, + "largePacketThreshold": { + "description": "large packet threshold being applied", + "type": "number" + }, + "maxPacketDelayVariation": { + "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "maxReceiveBitRate": { + "description": "maximum receive bit rate", + "type": "number" + }, + "maxTransmitBitRate": { + "description": "maximum transmit bit rate", + "type": "number" + }, + "mobileQciCosCountList": { + "$ref": "#/definitions/hashMap" + }, + "mobileQciCosList": { + "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "numActivationFailures": { + "description": "Number of failed activation requests, as observed by the reporting node", + "type": "number" + }, + "numBitErrors": { + "description": "number of errored bits", + "type": "number" + }, + "numBytesReceived": { + "description": "number of bytes received, including retransmissions", + "type": "number" + }, + "numBytesTransmitted": { + "description": "number of bytes transmitted, including retransmissions", + "type": "number" + }, + "numDroppedPackets": { + "description": "number of received packets dropped due to errors per virtual interface", + "type": "number" + }, + "numGtpEchoFailures": { + "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2", + "type": "number" + }, + "numGtpTunnelErrors": { + "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 
11.1", + "type": "number" + }, + "numHttpErrors": { + "description": "Http error count", + "type": "number" + }, + "numL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, including retransmissions", + "type": "number" + }, + "numL7BytesTransmitted": { + "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions", + "type": "number" + }, + "numLostPackets": { + "description": "number of lost packets", + "type": "number" + }, + "numOutOfOrderPackets": { + "description": "number of out-of-order packets", + "type": "number" + }, + "numPacketErrors": { + "description": "number of errored packets", + "type": "number" + }, + "numPacketsReceivedExclRetrans": { + "description": "number of packets received, excluding retransmission", + "type": "number" + }, + "numPacketsReceivedInclRetrans": { + "description": "number of packets received, including retransmission", + "type": "number" + }, + "numPacketsTransmittedInclRetrans": { + "description": "number of packets transmitted, including retransmissions", + "type": "number" + }, + "numRetries": { + "description": "number of packet retries", + "type": "number" + }, + "numTimeouts": { + "description": "number of packet timeouts", + "type": "number" + }, + "numTunneledL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, excluding retransmissions", + "type": "number" + }, + "roundTripTime": { + "description": "round trip time", + "type": "number" + }, + "tcpFlagCountList": { + "$ref": "#/definitions/hashMap" + }, + "tcpFlagList": { + "description": "Array of unique TCP Flags observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "timeToFirstByte": { + "description": "Time in milliseconds between the connection activation and first byte received", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "avgBitErrorRate", + "avgPacketDelayVariation", + "avgPacketLatency", + "avgReceiveThroughput", + "avgTransmitThroughput", + "flowActivationEpoch", + "flowActivationMicrosec", + "flowDeactivationEpoch", + "flowDeactivationMicrosec", + "flowDeactivationTime", + "flowStatus", + "maxPacketDelayVariation", + "numActivationFailures", + "numBitErrors", + "numBytesReceived", + "numBytesTransmitted", + "numDroppedPackets", + "numL7BytesReceived", + "numL7BytesTransmitted", + "numLostPackets", + "numOutOfOrderPackets", + "numPacketErrors", + "numPacketsReceivedExclRetrans", + "numPacketsReceivedInclRetrans", + "numPacketsTransmittedInclRetrans", + "numRetries", + "numTimeouts", + "numTunneledL7BytesReceived", + "roundTripTime", + "timeToFirstByte" + ] + }, + "hashMap": { + "description": "an associative array which is an array of key:value pairs", + "type": "object", + "additionalProperties": { + "type": "string" + }, + "default": {} + }, + "heartbeatFields": { + "description": "optional field block for fields specific to heartbeat events", + "type": "object", + "properties": { + "additionalFields": { + "$ref": "#/definitions/hashMap" + }, + "heartbeatFieldsVersion": { + "description": "version of the heartbeatFields block", + "type": "string", + "enum": [ + "3.0" + ] + }, + "heartbeatInterval": { + "description": "current heartbeat interval in seconds", + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "heartbeatFieldsVersion", + "heartbeatInterval" + ] + }, + "hugePages": { + "description": "metrics on system hugepages", + "type": "object", + "properties": { + "bytesFree": { + "description": "number 
of free hugepages in bytes", + "type": "number" + }, + "bytesUsed": { + "description": "number of used hugepages in bytes", + "type": "number" + }, + "hugePagesIdentifier": { + "description": "hugePages identifier", + "type": "string" + }, + "percentFree": { + "description": "number of free hugepages in percent", + "type": "number" + }, + "percentUsed": { + "description": "number of used hugepages in percent", + "type": "number" + }, + "vmPageNumberFree": { + "description": "number of free vmPages in numbers", + "type": "number" + }, + "vmPageNumberUsed": { + "description": "number of used vmPages in numbers", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "hugePagesIdentifier" + ] + }, + "internalHeaderFields": { + "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources", + "type": "object" + }, + "ipmi": { + "description": "intelligent platform management interface metrics", + "type": "object", + "properties": { + "exitAirTemperature": { + "description": "system fan exit air flow temperature in celsius", + "type": "number" + }, + "frontPanelTemperature": { + "description": "front panel temperature in celsius", + "type": "number" + }, + "ioModuleTemperature": { + "description": "io module temperature in celsius", + "type": "number" + }, + "ipmiBaseboardTemperatureArray": { + "description": "array of ipmiBaseboardTemperature objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiBaseboardTemperature" + } + }, + "ipmiBaseboardVoltageRegulatorArray": { + "description": "array of ipmiBaseboardVoltageRegulator objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiBaseboardVoltageRegulator" + } + }, + "ipmiBatteryArray": { + "description": "array of ipmiBattery objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiBattery" + } + }, + "ipmiFanArray": { + "description": "array of ipmiFan objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiFan" + } + }, + "ipmiHsbpArray": { + "description": "array of ipmiHsbp objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiHsbp" + } + }, + "ipmiGlobalAggregateTemperatureMarginArray": { + "description": "array of ipmiGlobalAggregateTemperatureMargin objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiGlobalAggregateTemperatureMargin" + } + }, + "ipmiNicArray": { + "description": "array of ipmiNic objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiNic" + } + }, + "ipmiPowerSupplyArray": { + "description": "array of ipmiPowerSupply objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiPowerSupply" + } + }, + "ipmiProcessorArray": { + "description": "array of ipmiProcessor objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiProcessor" + } + }, + "systemAirflow": { + "description": "airflow in cubic feet per minute (cfm)", + "type": "number" + } + }, + "additionalProperties": false + }, + "ipmiBaseboardTemperature": { + "description": "intelligent platform management interface (ipmi) baseboard temperature metrics", + "type": "object", + "properties": { + "baseboardTemperatureIdentifier": { + "description": "identifier for the location where the temperature is taken", + "type": "string" + }, + "baseboardTemperature": { + "description": "baseboard temperature in celsius", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "baseboardTemperatureIdentifier" + ] + }, + 
"ipmiBaseboardVoltageRegulator": { + "description": "intelligent platform management interface (ipmi) baseboard voltage regulator metrics", + "type": "object", + "properties": { + "baseboardVoltageRegulatorIdentifier": { + "description": "identifier for the baseboard voltage regulator", + "type": "string" + }, + "voltageRegulatorTemperature": { + "description": "voltage regulator temperature in celsius", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "baseboardVoltageRegulatorIdentifier" + ] + }, + "ipmiBattery": { + "description": "intelligent platform management interface (ipmi) battery metrics", + "type": "object", + "properties": { + "batteryIdentifier": { + "description": "identifier for the battery", + "type": "string" + }, + "batteryType": { + "description": "type of battery", + "type": "string" + }, + "batteryVoltageLevel": { + "description": "battery voltage level", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "batteryIdentifier" + ] + }, + "ipmiFan": { + "description": "intelligent platform management interface (ipmi) fan metrics", + "type": "object", + "properties": { + "fanIdentifier": { + "description": "identifier for the fan", + "type": "string" + }, + "fanSpeed": { + "description": "fan speed in revolutions per minute (rpm)", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "fanIdentifier" + ] + }, + "ipmiGlobalAggregateTemperatureMargin": { + "description": "intelligent platform management interface (ipmi) global aggregate temperature margin", + "type": "object", + "properties": { + "globalAggregateTemperatureMarginIdentifier": { + "description": "identifier for the ipmi global aggregate temperature margin metrics", + "type": "string" + }, + "globalAggregateTemperatureMargin": { + "description": "the difference between the current global aggregate temperature, in celsius, and the global aggregate throttling thermal trip point", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "globalAggregateTemperatureMarginIdentifier", + "globalAggregateTemperatureMargin" + ] + }, + "ipmiHsbp": { + "description": "intelligent platform management interface (ipmi) hot swap backplane power metrics", + "type": "object", + "properties": { + "hsbpIdentifier": { + "description": "identifier for the hot swap backplane power unit", + "type": "string" + }, + "hsbpTemperature": { + "description": "hot swap backplane power temperature in celsius", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "hsbpIdentifier" + ] + }, + "ipmiNic": { + "description": "intelligent platform management interface (ipmi) network interface control card (nic) metrics", + "type": "object", + "properties": { + "nicIdentifier": { + "description": "identifier for the network interface control card", + "type": "string" + }, + "nicTemperature": { + "description": "nic temperature in celsius", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "nicIdentifier" + ] + }, + "ipmiPowerSupply": { + "description": "intelligent platform management interface (ipmi) power supply metrics", + "type": "object", + "properties": { + "powerSupplyIdentifier": { + "description": "identifier for the power supply", + "type": "string" + }, + "powerSupplyInputPower": { + "description": "input power in watts", + "type": "number" + }, + "powerSupplyCurrentOutputPercent": { + "description": "current output voltage as a percentage of the design specified level", + "type": 
"number" + }, + "powerSupplyTemperature": { + "description": "power supply temperature in celsius", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "powerSupplyIdentifier" + ] + }, + "ipmiProcessor": { + "description": "intelligent platform management interface processor metrics", + "type": "object", + "properties": { + "processorIdentifier": { + "description": "identifier for an ipmi processor", + "type": "string" + }, + "processorThermalControlPercent": { + "description": "io module temperature in celsius", + "type": "number" + }, + "processorDtsThermalMargin": { + "description": "front panel temperature in celsius", + "type": "number" + }, + "processorDimmAggregateThermalMarginArray": { + "description": "array of processorDimmAggregateThermalMargin objects", + "type": "array", + "items": { + "$ref": "#/definitions/processorDimmAggregateThermalMargin" + } + } + }, + "additionalProperties": false, + "required": [ + "processorIdentifier" + ] + }, + "jsonObject": { + "description": "json object schema, name and other meta-information along with one or more object instances", + "type": "object", + "properties": { + "objectInstances": { + "description": "one or more instances of the jsonObject", + "type": "array", + "items": { + "$ref": "#/definitions/jsonObjectInstance" + } + }, + "objectName": { + "description": "name of the JSON Object", + "type": "string" + }, + "objectSchema": { + "description": "json schema for the object", + "type": "string" + }, + "objectSchemaUrl": { + "description": "Url to the json schema for the object", + "type": "string" + }, + "nfSubscribedObjectName": { + "description": "name of the object associated with the nfSubscriptonId", + "type": "string" + }, + "nfSubscriptionId": { + "description": "identifies an openConfig telemetry subscription on a network function, which configures the network function to send complex object data associated with the jsonObject", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "objectInstances", + "objectName" + ] + }, + "jsonObjectInstance": { + "description": "meta-information about an instance of a jsonObject along with the actual object instance", + "type": "object", + "properties": { + "jsonObject": { + "$ref": "#/definitions/jsonObject" + }, + "objectInstance": { + "description": "an instance conforming to the jsonObject objectSchema", + "type": "object" + }, + "objectInstanceEpochMicrosec": { + "description": "the unix time aka epoch time associated with this objectInstance--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "objectKeys": { + "description": "an ordered set of keys that identifies this particular instance of jsonObject", + "type": "array", + "items": { + "$ref": "#/definitions/key" + } + } + }, + "additionalProperties": false + }, + "key": { + "description": "tuple which provides the name of a key along with its value and relative order", + "type": "object", + "properties": { + "keyName": { + "description": "name of the key", + "type": "string" + }, + "keyOrder": { + "description": "relative sequence or order of the key with respect to other keys", + "type": "integer" + }, + "keyValue": { + "description": "value of the key", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "keyName" + ] + }, + "latencyBucketMeasure": { + "description": "number of counts falling within a defined latency bucket", + "type": "object", + "properties": { + "countsInTheBucket": { + "type": "number" 
+ }, + "highEndOfLatencyBucket": { + "type": "number" + }, + "lowEndOfLatencyBucket": { + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "countsInTheBucket" + ] + }, + "load": { + "description": "/proc/loadavg cpu utilization and io utilization metrics", + "type": "object", + "properties": { + "longTerm": { + "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 15 minutes using /proc/loadavg", + "type": "number" + }, + "midTerm": { + "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 5 minutes using /proc/loadavg", + "type": "number" + }, + "shortTerm": { + "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 1 minute using /proc/loadavg", + "type": "number" + } + }, + "additionalProperties": false + }, + "machineCheckException": { + "description": "metrics on vm machine check exceptions", + "type": "object", + "properties": { + "correctedMemoryErrors": { + "description": "total hardware errors that were corrected by the hardware (e.g. data corruption corrected via  ECC) over the measurementInterval", + "type": "number" + }, + "correctedMemoryErrorsIn1Hr": { + "description": "total hardware errors that were corrected by the hardware over the last one hour", + "type": "number" + }, + "uncorrectedMemoryErrors": { + "description": "total uncorrected hardware errors that were detected by the hardware (e.g., causing data corruption) over the measurementInterval", + "type": "number" + }, + "uncorrectedMemoryErrorsIn1Hr": { + "description": "total uncorrected hardware errors that were detected by the hardware over the last one hour", + "type": "number" + }, + "vmIdentifier": { + "description": "virtual machine identifier associated with the machine check exception", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "vmIdentifier" + ] + }, + "measDataCollection": { + "description": "3GPP measurement collection structure aligned with 3GPP PM format", + "type": "object", + "properties": { + "formatVersion": { + "description": "3gpp PM reporting file format version from pre-standard TS 28.550 v2.0.0", + "type": "string" + }, + "granularityPeriod": { + "description": "granularity period for the PM report in seconds", + "type": "number" + }, + "measInfoList": { + "description": "array of measurements", + "type": "array", + "items": { + "$ref": "#/definitions/measInfo" + } + }, + "measObjInstIdList": { + "description": "array of monitored object local distinguished name ids per 3GPP TS 32.300", + "type": "array", + "items": { + "type": "string" + } + }, + "measuredEntityDn": { + "description": "distinguished name per 3GPP TS 28.550", + "type": "string" + }, + "measuredEntitySoftwareVersion": { + "description": "software version for the NF providing the PM data as specified in 3GPP TS 28.550", + "type": "string" + }, + "measuredEntityUserName": { + "description": "user definable name for the measured object per 3GPP TS 28.550", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "granularityPeriod", + "measInfoList", + "measuredEntityDn" + ] + }, + "measInfo": { + "description": "measurement information.", + "type": "object", + "properties": { + "jobId": { + "description": "name of the measurement job", + "type": "string" + }, + "measInfoId": { + "description": 
"measurement group identifier", + "oneOf": [ + { + "$ref": "#/definitions/measInfoIdInteger" + }, + { + "$ref": "#/definitions/measInfoIdString" + } + ] + }, + "measTypes": { + "oneOf": [ + { + "$ref": "#/definitions/measTypesInteger" + }, + { + "$ref": "#/definitions/measTypesString" + } + ] + }, + "measValuesList": { + "description": "an array of measurement values", + "type": "array", + "items": { + "$ref": "#/definitions/measValues" + } + } + }, + "additionalProperties": false, + "required": [ + "measTypes", + "measValuesList" + ] + }, + "measInfoIdInteger": { + "description": "integer measurement group identifier", + "type": "object", + "properties": { + "iMeasInfoId": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "iMeasInfoId" + ] + }, + "measInfoIdString": { + "description": "string measurement group identifier", + "type": "object", + "properties": { + "sMeasInfoId": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "sMeasInfoId" + ] + }, + "measResultInteger": { + "description": "integer 3GPP PM measurement result", + "type": "object", + "properties": { + "p": { + "description": "integer reference to the counter", + "type": "integer" + }, + "iValue": { + "description": "integer counter value", + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "p", + "iValue" + ] + }, + "measResultNull": { + "description": "null 3GPP PM measurement result", + "type": "object", + "properties": { + "p": { + "description": "integer reference to the counter", + "type": "integer" + }, + "isNull": { + "description": "true if the counter has no value", + "type": "string", + "enum": [ + "true", + "false" + ] + } + }, + "additionalProperties": false, + "required": [ + "p", + "isNull" + ] + }, + "measResultNumber": { + "description": "number 3GPP PM measurement result", + "type": "object", + "properties": { + "p": { + "description": "integer reference to the counter", + "type": "integer" + }, + "rValue": { + "description": "numeric counter value", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "p", + "rValue" + ] + }, + "measResultString": { + "description": "string 3GPP PM measurement result", + "type": "object", + "properties": { + "p": { + "description": "integer reference to the counter", + "type": "integer" + }, + "sValue": { + "description": "string counter value", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "p", + "sValue" + ] + }, + "measTypesInteger": { + "description": "object containing an array of integer measurement identifiers associated with the measurement results", + "type": "object", + "properties": { + "iMeasTypesList": { + "type": "array", + "items": { + "type": "integer" + } + } + }, + "additionalProperties": false, + "required": [ + "iMeasTypesList" + ] + }, + "measTypesString": { + "description": "object containing an array of string measurement identifiers associated with the measurement results", + "type": "object", + "properties": { + "sMeasTypesList": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "sMeasTypesList" + ] + }, + "measurementFields": { + "description": "measurement fields", + "type": "object", + "properties": { + "additionalFields": { + "$ref": "#/definitions/hashMap" + }, + "additionalMeasurements": { + "$ref": "#/definitions/arrayOfNamedHashMap" + }, + "additionalObjects": { + "$ref": "#/definitions/arrayOfJsonObject" + }, + 
"codecUsageArray": { + "description": "array of codecs in use", + "type": "array", + "items": { + "$ref": "#/definitions/codecsInUse" + } + }, + "concurrentSessions": { + "description": "peak concurrent sessions for the VM or xNF over the measurementInterval", + "type": "integer" + }, + "configuredEntities": { + "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the xNF", + "type": "integer" + }, + "cpuUsageArray": { + "description": "usage of an array of CPUs", + "type": "array", + "items": { + "$ref": "#/definitions/cpuUsage" + } + }, + "diskUsageArray": { + "description": "usage of an array of disks", + "type": "array", + "items": { + "$ref": "#/definitions/diskUsage" + } + }, + "featureUsageArray": { + "$ref": "#/definitions/hashMap" + }, + "filesystemUsageArray": { + "description": "filesystem usage of the VM on which the xNFC reporting the event is running", + "type": "array", + "items": { + "$ref": "#/definitions/filesystemUsage" + } + }, + "hugePagesArray": { + "description": "array of metrics on hugepPages", + "type": "array", + "items": { + "$ref": "#/definitions/hugePages" + } + }, + "ipmi": { + "$ref": "#/definitions/ipmi" + }, + "latencyDistribution": { + "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-xNF configured ranges", + "type": "array", + "items": { + "$ref": "#/definitions/latencyBucketMeasure" + } + }, + "loadArray": { + "description": "array of system load metrics", + "type": "array", + "items": { + "$ref": "#/definitions/load" + } + }, + "machineCheckExceptionArray": { + "description": "array of machine check exceptions", + "type": "array", + "items": { + "$ref": "#/definitions/machineCheckException" + } + }, + "meanRequestLatency": { + "description": "mean seconds required to respond to each request for the VM on which the xNFC reporting the event is running", + "type": "number" + }, + "measurementInterval": { + "description": "interval over which measurements are being reported in seconds", + "type": "number" + }, + "measurementFieldsVersion": { + "description": "version of the measurementFields block", + "type": "string", + "enum": [ + "4.0" + ] + }, + "memoryUsageArray": { + "description": "memory usage of an array of VMs", + "type": "array", + "items": { + "$ref": "#/definitions/memoryUsage" + } + }, + "numberOfMediaPortsInUse": { + "description": "number of media ports in use", + "type": "integer" + }, + "requestRate": { + "description": "peak rate of service requests per second to the xNF over the measurementInterval", + "type": "number" + }, + "nfcScalingMetric": { + "description": "represents busy-ness of the network function from 0 to 100 as reported by the xNFC", + "type": "integer" + }, + "nicPerformanceArray": { + "description": "usage of an array of network interface cards", + "type": "array", + "items": { + "$ref": "#/definitions/nicPerformance" + } + }, + "processStatsArray": { + "description": "array of metrics on system processes", + "type": "array", + "items": { + "$ref": "#/definitions/processStats" + } + } + }, + "additionalProperties": false, + "required": [ + "measurementInterval", + "measurementFieldsVersion" + ] + }, + "measValues": { + "description": "3GPP measurement values", + "type": "object", + "properties": { + "measObjAddlFlds": { + "$ref": "#/definitions/hashMap" + }, + "measObjInstId": { + "description": "monitored object local distinguished name per 3GPP TS 
32.300 and 3GPP TS 32.432", + "type": "string" + }, + "measResults": { + "description": "array of results", + "type": "array", + "items": { + "oneOf": [ + { + "$ref": "#/definitions/measResultInteger" + }, + { + "$ref": "#/definitions/measResultNull" + }, + { + "$ref": "#/definitions/measResultNumber" + }, + { + "$ref": "#/definitions/measResultString" + } + ] + } + }, + "suspectFlag": { + "description": "indicates if the values are suspect", + "type": "string", + "enum": [ + "true", + "false" + ] + } + }, + "additionalProperties": false, + "required": [ + "measObjInstId", + "measResults" + ] + }, + "memoryUsage": { + "description": "memory usage of an identified virtual machine", + "type": "object", + "properties": { + "memoryBuffered": { + "description": "kibibytes of temporary storage for raw disk blocks", + "type": "number" + }, + "memoryCached": { + "description": "kibibytes of memory used for cache", + "type": "number" + }, + "memoryConfigured": { + "description": "kibibytes of memory configured in the virtual machine on which the xNFC reporting the event is running", + "type": "number" + }, + "memoryDemand": { + "description": "host demand in kibibytes", + "type": "number" + }, + "memoryFree": { + "description": "kibibytes of physical RAM left unused by the system", + "type": "number" + }, + "memoryLatencyAvg": { + "description": "Percentage of time the VM is waiting to access swapped or compressed memory", + "type": "number" + }, + "memorySharedAvg": { + "description": "shared memory in kilobytes", + "type": "number" + }, + "memorySlabRecl": { + "description": "the part of the slab that can be reclaimed such as caches measured in kibibytes", + "type": "number" + }, + "memorySlabUnrecl": { + "description": "the part of the slab that cannot be reclaimed even when lacking memory measured in kibibytes", + "type": "number" + }, + "memorySwapInAvg": { + "description": "Amount of memory swapped-in from host cache in kibibytes", + "type": "number" + }, + "memorySwapInRateAvg": { + "description": "rate at which memory is swapped from disk into active memory during the interval in kilobytes per second", + "type": "number" + }, + "memorySwapOutAvg": { + "description": "Amount of memory swapped-out to host cache in kibibytes", + "type": "number" + }, + "memorySwapOutRateAvg": { + "description": "rate at which memory is being swapped from active memory to disk during the current interval in kilobytes per second", + "type": "number" + }, + "memorySwapUsedAvg": { + "description": "space used for caching swapped pages in the host cache in kibibytes", + "type": "number" + }, + "memoryUsed": { + "description": "total memory minus the sum of free, buffered, cached and slab memory measured in kibibytes", + "type": "number" + }, + "percentMemoryUsage": { + "description": "Percentage of memory usage; value = (memoryUsed / (memoryUsed + memoryFree) x 100 if denomintor is nonzero, or 0, if otherwise", + "type": "number" + }, + "vmIdentifier": { + "description": "virtual machine identifier associated with the memory metrics", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "memoryFree", + "memoryUsed", + "vmIdentifier" + ] + }, + "mobileFlowFields": { + "description": "mobileFlow fields", + "type": "object", + "properties": { + "additionalFields": { + "$ref": "#/definitions/hashMap" + }, + "applicationType": { + "description": "Application type inferred", + "type": "string" + }, + "appProtocolType": { + "description": "application protocol", + "type": "string" + }, + 
"appProtocolVersion": { + "description": "application protocol version", + "type": "string" + }, + "cid": { + "description": "cell id", + "type": "string" + }, + "connectionType": { + "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc", + "type": "string" + }, + "ecgi": { + "description": "Evolved Cell Global Id", + "type": "string" + }, + "flowDirection": { + "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow", + "type": "string" + }, + "gtpPerFlowMetrics": { + "$ref": "#/definitions/gtpPerFlowMetrics" + }, + "gtpProtocolType": { + "description": "GTP protocol", + "type": "string" + }, + "gtpVersion": { + "description": "GTP protocol version", + "type": "string" + }, + "httpHeader": { + "description": "HTTP request header, if the flow connects to a node referenced by HTTP", + "type": "string" + }, + "imei": { + "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "imsi": { + "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "ipProtocolType": { + "description": "IP protocol type e.g., TCP, UDP, RTP...", + "type": "string" + }, + "ipVersion": { + "description": "IP protocol version e.g., IPv4, IPv6", + "type": "string" + }, + "lac": { + "description": "location area code", + "type": "string" + }, + "mcc": { + "description": "mobile country code", + "type": "string" + }, + "mnc": { + "description": "mobile network code", + "type": "string" + }, + "mobileFlowFieldsVersion": { + "description": "version of the mobileFlowFields block", + "type": "string", + "enum": [ + "4.0" + ] + }, + "msisdn": { + "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device", + "type": "string" + }, + "otherEndpointIpAddress": { + "description": "IP address for the other endpoint, as used for the flow being reported on", + "type": "string" + }, + "otherEndpointPort": { + "description": "IP Port for the reporting entity, as used for the flow being reported on", + "type": "integer" + }, + "otherFunctionalRole": { + "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...", + "type": "string" + }, + "rac": { + "description": "routing area code", + "type": "string" + }, + "radioAccessTechnology": { + "description": "Radio Access Technology e.g., 2G, 3G, LTE", + "type": "string" + }, + "reportingEndpointIpAddr": { + "description": "IP address for the reporting entity, as used for the flow being reported on", + "type": "string" + }, + "reportingEndpointPort": { + "description": "IP port for the reporting entity, as used for the flow being reported on", + "type": "integer" + }, + "sac": { + "description": "service area code", + "type": "string" + }, + "samplingAlgorithm": { + "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied", + "type": "integer" + }, + "tac": { + "description": "transport area code", + "type": "string" + }, + "tunnelId": { + "description": "tunnel identifier", + "type": "string" + }, + "vlanId": { + "description": "VLAN identifier used by this flow", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "flowDirection", + "gtpPerFlowMetrics", + 
"ipProtocolType", + "ipVersion", + "mobileFlowFieldsVersion", + "otherEndpointIpAddress", + "otherEndpointPort", + "reportingEndpointIpAddr", + "reportingEndpointPort" + ] + }, + "namedHashMap": { + "description": "a hashMap which is associated with and described by a name", + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "hashMap": { + "$ref": "#/definitions/hashMap" + } + }, + "additionalProperties": false, + "required": [ + "name", + "hashMap" + ] + }, + "nicPerformance": { + "description": "describes the performance and errors of an identified network interface card", + "type": "object", + "properties": { + "administrativeState": { + "description": "administrative state", + "type": "string", + "enum": [ + "inService", + "outOfService" + ] + }, + "nicIdentifier": { + "description": "nic identification", + "type": "string" + }, + "operationalState": { + "description": "operational state", + "type": "string", + "enum": [ + "inService", + "outOfService" + ] + }, + "receivedBroadcastPacketsAccumulated": { + "description": "Cumulative count of broadcast packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedBroadcastPacketsDelta": { + "description": "Count of broadcast packets received within the measurement interval", + "type": "number" + }, + "receivedDiscardedPacketsAccumulated": { + "description": "Cumulative count of discarded packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedDiscardedPacketsDelta": { + "description": "Count of discarded packets received within the measurement interval", + "type": "number" + }, + "receivedErrorPacketsAccumulated": { + "description": "Cumulative count of error packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedErrorPacketsDelta": { + "description": "Count of error packets received within the measurement interval", + "type": "number" + }, + "receivedMulticastPacketsAccumulated": { + "description": "Cumulative count of multicast packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedMulticastPacketsDelta": { + "description": "Count of multicast packets received within the measurement interval", + "type": "number" + }, + "receivedOctetsAccumulated": { + "description": "Cumulative count of octets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedOctetsDelta": { + "description": "Count of octets received within the measurement interval", + "type": "number" + }, + "receivedTotalPacketsAccumulated": { + "description": "Cumulative count of all packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedPercentDiscard": { + "description": "Percentage of discarded packets received; value = (receivedDiscardedPacketsDelta / receivedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise", + "type": "number" + }, + "receivedPercentError": { + "description": "Percentage of error packets received; value = (receivedErrorPacketsDelta / receivedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise.", + "type": "number" + }, + "receivedTotalPacketsDelta": { + "description": "Count of all packets received within the measurement interval", + "type": "number" + }, + "receivedUnicastPacketsAccumulated": { + "description": "Cumulative count of unicast packets received as read at the end of the measurement interval", + "type": "number" + }, + 
"receivedUnicastPacketsDelta": { + "description": "Count of unicast packets received within the measurement interval", + "type": "number" + }, + "receivedUtilization": { + "description": "Percentage of utilization received; value = (receivedOctetsDelta / (speed x (lastEpochMicrosec - startEpochMicrosec))) x 100, if denominator is nonzero, or 0, if otherwise", + "type": "number" + }, + "speed": { + "description": "Speed configured in mbps", + "type": "number" + }, + "transmittedBroadcastPacketsAccumulated": { + "description": "Cumulative count of broadcast packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedBroadcastPacketsDelta": { + "description": "Count of broadcast packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedDiscardedPacketsAccumulated": { + "description": "Cumulative count of discarded packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedDiscardedPacketsDelta": { + "description": "Count of discarded packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedErrorPacketsAccumulated": { + "description": "Cumulative count of error packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedErrorPacketsDelta": { + "description": "Count of error packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedMulticastPacketsAccumulated": { + "description": "Cumulative count of multicast packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedMulticastPacketsDelta": { + "description": "Count of multicast packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedOctetsAccumulated": { + "description": "Cumulative count of octets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedOctetsDelta": { + "description": "Count of octets transmitted within the measurement interval", + "type": "number" + }, + "transmittedTotalPacketsAccumulated": { + "description": "Cumulative count of all packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedTotalPacketsDelta": { + "description": "Count of all packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedUnicastPacketsAccumulated": { + "description": "Cumulative count of unicast packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedUnicastPacketsDelta": { + "description": "Count of unicast packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedPercentDiscard": { + "description": "Percentage of discarded packets transmitted; value = (transmittedDiscardedPacketsDelta / transmittedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise", + "type": "number" + }, + "transmittedPercentError": { + "description": "Percentage of error packets received; value = (transmittedErrorPacketsDelta / transmittedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise", + "type": "number" + }, + "transmittedUtilization": { + "description": "Percentage of utilization transmitted; value = (transmittedOctetsDelta / (speed x (lastEpochMicrosec - startEpochMicrosec))) x 100, if denominator is nonzero, or 0, if otherwise.", + "type": "number" + }, + "valuesAreSuspect": { + "description": 
"Indicates whether vNicPerformance values are likely inaccurate due to counter overflow or other condtions", + "type": "string", + "enum": [ + "true", + "false" + ] + } + }, + "additionalProperties": false, + "required": [ + "nicIdentifier", + "valuesAreSuspect" + ] + }, + "notificationFields": { + "description": "notification fields", + "type": "object", + "properties": { + "additionalFields": { + "$ref": "#/definitions/hashMap" + }, + "arrayOfNamedHashMap": { + "$ref": "#/definitions/arrayOfNamedHashMap" + }, + "changeContact": { + "description": "identifier for a contact related to the change", + "type": "string" + }, + "changeIdentifier": { + "description": "system or session identifier associated with the change", + "type": "string" + }, + "changeType": { + "description": "describes what has changed for the entity", + "type": "string" + }, + "newState": { + "description": "new state of the entity", + "type": "string" + }, + "oldState": { + "description": "previous state of the entity", + "type": "string" + }, + "notificationFieldsVersion": { + "description": "version of the notificationFields block", + "type": "string", + "enum": [ + "2.0" + ] + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "changeIdentifier", + "changeType", + "notificationFieldsVersion" + ] + }, + "otherFields": { + "description": "fields for events belonging to the 'other' domain of the commonEventHeader domain enumeration", + "type": "object", + "properties": { + "arrayOfNamedHashMap": { + "$ref": "#/definitions/arrayOfNamedHashMap" + }, + "hashMap": { + "$ref": "#/definitions/hashMap" + }, + "jsonObjects": { + "$ref": "#/definitions/arrayOfJsonObject" + }, + "otherFieldsVersion": { + "description": "version of the otherFields block", + "type": "string", + "enum": [ + "3.0" + ] + } + }, + "additionalProperties": false, + "required": [ + "otherFieldsVersion" + ] + }, + "perf3gppFields": { + "description": "fields for 3GPP PM format events, based on 3GPP TS 28.550, belonging to the 'perf3gpp' domain of the commonEventHeader domain enumeration", + "type": "object", + "properties": { + "eventAddlFields": { + "$ref": "#/definitions/hashMap" + }, + "measDataCollection": { + "$ref": "#/definitions/measDataCollection" + }, + "perf3gppFieldsVersion": { + "description": "version of the perf3gppFields block", + "type": "string", + "enum": [ + "1.0", + "1.0.1" + ] + } + }, + "additionalProperties": false, + "required": [ + "measDataCollection", + "perf3gppFieldsVersion" + ] + }, + "pnfRegistrationFields": { + "description": "hardware device registration fields", + "type": "object", + "properties": { + "additionalFields": { + "$ref": "#/definitions/hashMap" + }, + "lastServiceDate": { + "description": "TS 32.692 dateOfLastService = date of last service; e.g. 15022017", + "type": "string" + }, + "macAddress": { + "description": "MAC address of OAM interface of the unit", + "type": "string" + }, + "manufactureDate": { + "description": "TS 32.692 dateOfManufacture = manufacture date of the unit; 24032016", + "type": "string" + }, + "modelNumber": { + "description": "TS 32.692 versionNumber = version of the unit from vendor; e.g. AJ02. 
Maps to AAI equip-model", + "type": "string" + }, + "oamV4IpAddress": { + "description": "IPv4 m-plane IP address to be used by the manager to contact the PNF", + "type": "string", + "format":"ipv4" + }, + "oamV6IpAddress": { + "description": "IPv6 m-plane IP address to be used by the manager to contact the PNF", + "type": "string", + "format":"ipv6" + }, + "pnfRegistrationFieldsVersion": { + "description": "version of the pnfRegistrationFields block", + "type": "string", + "enum": [ + "2.0", + "2.1" + ] + }, + "serialNumber": { + "description": "TS 32.692 serialNumber = serial number of the unit; e.g. 6061ZW3", + "type": "string" + }, + "softwareVersion": { + "description": "TS 32.692 swName = active SW running on the unit; e.g. 5gDUv18.05.201", + "type": "string" + }, + "unitFamily": { + "description": "TS 32.692 vendorUnitFamilyType = general type of HW unit; e.g. BBU", + "type": "string" + }, + "unitType": { + "description": "TS 32.692 vendorUnitTypeNumber = vendor name for the unit; e.g. Airscale", + "type": "string" + }, + "vendorName": { + "description": "TS 32.692 vendorName = name of manufacturer; e.g. Nokia. Maps to AAI equip-vendor", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "pnfRegistrationFieldsVersion" + ] + }, + "processorDimmAggregateThermalMargin": { + "description": "intelligent platform management interface (ipmi) processor dual inline memory module aggregate thermal margin metrics", + "type": "object", + "properties": { + "processorDimmAggregateThermalMarginIdentifier": { + "description": "identifier for the aggregate thermal margin metrics from the processor dual inline memory module", + "type": "string" + }, + "thermalMargin": { + "description": "the difference between the DIMM's current temperature, in celsius, and the DIMM's throttling thermal trip point", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "processorDimmAggregateThermalMarginIdentifier", + "thermalMargin" + ] + }, + "processStats": { + "description": "metrics on system processes", + "type": "object", + "properties": { + "forkRate": { + "description": "the number of threads created since the last reboot", + "type": "number" + }, + "processIdentifier": { + "description": "processIdentifier", + "type": "string" + }, + "psStateBlocked": { + "description": "the number of processes in a blocked state", + "type": "number" + }, + "psStatePaging": { + "description": "the number of processes in a paging state", + "type": "number" + }, + "psStateRunning": { + "description": "the number of processes in a running state", + "type": "number" + }, + "psStateSleeping": { + "description": "the number of processes in a sleeping state", + "type": "number" + }, + "psStateStopped": { + "description": "the number of processes in a stopped state", + "type": "number" + }, + "psStateZombie": { + "description": "the number of processes in a zombie state", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "processIdentifier" + ] + }, + "requestError": { + "description": "standard request error data structure", + "type": "object", + "properties": { + "messageId": { + "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception", + "type": "string" + }, + "text": { + "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1", + "type": "string" + }, + "url": { + 
"description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents", + "type": "string" + }, + "variables": { + "description": "List of zero or more strings that represent the contents of the variables used by the message text", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "messageId", + "text" + ] + }, + "sipSignalingFields": { + "description": "sip signaling fields", + "type": "object", + "properties": { + "additionalInformation": { + "$ref": "#/definitions/hashMap" + }, + "compressedSip": { + "description": "the full SIP request/response including headers and bodies", + "type": "string" + }, + "correlator": { + "description": "this is the same for all events on this call", + "type": "string" + }, + "localIpAddress": { + "description": "IP address on xNF", + "type": "string" + }, + "localPort": { + "description": "port on xNF", + "type": "string" + }, + "remoteIpAddress": { + "description": "IP address of peer endpoint", + "type": "string" + }, + "remotePort": { + "description": "port of peer endpoint", + "type": "string" + }, + "sipSignalingFieldsVersion": { + "description": "version of the sipSignalingFields block", + "type": "string", + "enum": [ + "3.0" + ] + }, + "summarySip": { + "description": "the SIP Method or Response ('INVITE', '200 OK', 'BYE', etc)", + "type": "string" + }, + "vendorNfNameFields": { + "$ref": "#/definitions/vendorNfNameFields" + } + }, + "additionalProperties": false, + "required": [ + "correlator", + "localIpAddress", + "localPort", + "remoteIpAddress", + "remotePort", + "sipSignalingFieldsVersion", + "vendorNfNameFields" + ] + }, + "stateChangeFields": { + "description": "stateChange fields", + "type": "object", + "properties": { + "additionalFields": { + "$ref": "#/definitions/hashMap" + }, + "newState": { + "description": "new state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "oldState": { + "description": "previous state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "stateChangeFieldsVersion": { + "description": "version of the stateChangeFields block", + "type": "string", + "enum": [ + "4.0" + ] + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "newState", + "oldState", + "stateChangeFieldsVersion", + "stateInterface" + ] + }, + "stndDefinedFields": { + "description": "stndDefined fields", + "type": "object", + "properties": { + "schemaReference": { + "description": "a uri of a standards-defined JSON object schema; used to valide the stndDefinedFields.data property contents", + "type": "string", + "format": "uri" + }, + "data": { + "description": "a native standards-defined JSON notification", + "type": "object" + }, + "stndDefinedFieldsVersion": { + "description": "version of stndDefinedFields block", + "type": "string", + "enum": [ + "1.0" + ] + } + }, + "additionalProperties": false, + "required": [ + "data", + "stndDefinedFieldsVersion" + ] + }, + "syslogFields": { + "description": "sysLog fields", + "type": "object", + "properties": { + "additionalFields": { + "$ref": "#/definitions/hashMap" + }, + "eventSourceHost": { + "description": "hostname of the device", + "type": "string" + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, 
virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "syslogFacility": { + "description": "numeric code from 0 to 23 for facility--see table in documentation", + "type": "integer" + }, + "syslogFieldsVersion": { + "description": "version of the syslogFields block", + "type": "string", + "enum": [ + "4.0" + ] + }, + "syslogMsg": { + "description": "syslog message", + "type": "string" + }, + "syslogMsgHost": { + "description": "hostname parsed from non-VES syslog message", + "type": "string" + }, + "syslogPri": { + "description": "0-192 combined severity and facility", + "type": "integer" + }, + "syslogProc": { + "description": "identifies the application that originated the message", + "type": "string" + }, + "syslogProcId": { + "description": "a change in the value of this field indicates a discontinuity in syslog reporting", + "type": "number" + }, + "syslogSData": { + "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs", + "type": "string" + }, + "syslogSdId": { + "description": "0-32 char in format name@number for example ourSDID@32473", + "type": "string" + }, + "syslogSev": { + "description": "numerical Code for severity derived from syslogPri as remaider of syslogPri / 8", + "type": "string", + "enum": [ + "Alert", + "Critical", + "Debug", + "Emergency", + "Error", + "Info", + "Notice", + "Warning" + ] + }, + "syslogTag": { + "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided", + "type": "string" + }, + "syslogTs": { + "description": "timestamp parsed from non-VES syslog message", + "type": "string" + }, + "syslogVer": { + "description": "IANA assigned version of the syslog protocol specification - typically 1", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "eventSourceType", + "syslogFieldsVersion", + "syslogMsg", + "syslogTag" + ] + }, + "thresholdCrossingAlertFields": { + "description": "fields specific to threshold crossing alert events", + "type": "object", + "properties": { + "additionalFields": { + "$ref": "#/definitions/hashMap" + }, + "additionalParameters": { + "description": "performance counters", + "type": "array", + "items": { + "$ref": "#/definitions/counter" + } + }, + "alertAction": { + "description": "Event action", + "type": "string", + "enum": [ + "CLEAR", + "CONT", + "SET" + ] + }, + "alertDescription": { + "description": "Unique short alert description such as IF-SHUB-ERRDROP", + "type": "string" + }, + "alertType": { + "description": "Event type", + "type": "string", + "enum": [ + "CARD-ANOMALY", + "ELEMENT-ANOMALY", + "INTERFACE-ANOMALY", + "SERVICE-ANOMALY" + ] + }, + "alertValue": { + "description": "Calculated API value (if applicable)", + "type": "string" + }, + "associatedAlertIdList": { + "description": "List of eventIds associated with the event being reported", + "type": "array", + "items": { + "type": "string" + } + }, + "collectionTimestamp": { + "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "dataCollector": { + "description": "Specific performance collector instance used", + "type": "string" + }, + "elementType": { + "description": "type of network element - internal ATT field", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + 
"WARNING", + "NORMAL" + ] + }, + "eventStartTimestamp": { + "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "interfaceName": { + "description": "Physical or logical port or card (if applicable)", + "type": "string" + }, + "networkService": { + "description": "network name - internal ATT field", + "type": "string" + }, + "possibleRootCause": { + "description": "Reserved for future use", + "type": "string" + }, + "thresholdCrossingFieldsVersion": { + "description": "version of the thresholdCrossingAlertFields block", + "type": "string", + "enum": [ + "4.0" + ] + } + }, + "additionalProperties": false, + "required": [ + "additionalParameters", + "alertAction", + "alertDescription", + "alertType", + "collectionTimestamp", + "eventSeverity", + "eventStartTimestamp", + "thresholdCrossingFieldsVersion" + ] + }, + "vendorNfNameFields": { + "description": "provides vendor, nf and nfModule identifying information", + "type": "object", + "properties": { + "vendorName": { + "description": "network function vendor name", + "type": "string" + }, + "nfModuleName": { + "description": "name of the nfModule generating the event", + "type": "string" + }, + "nfName": { + "description": "name of the network function generating the event", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "vendorName" + ] + }, + "voiceQualityFields": { + "description": "provides statistics related to customer facing voice products", + "type": "object", + "properties": { + "additionalInformation": { + "$ref": "#/definitions/hashMap" + }, + "calleeSideCodec": { + "description": "callee codec for the call", + "type": "string" + }, + "callerSideCodec": { + "description": "caller codec for the call", + "type": "string" + }, + "correlator": { + "description": "this is the same for all events on this call", + "type": "string" + }, + "endOfCallVqmSummaries": { + "$ref": "#/definitions/endOfCallVqmSummaries" + }, + "phoneNumber": { + "description": "phone number associated with the correlator", + "type": "string" + }, + "midCallRtcp": { + "description": "Base64 encoding of the binary RTCP data excluding Eth/IP/UDP headers", + "type": "string" + }, + "vendorNfNameFields": { + "$ref": "#/definitions/vendorNfNameFields" + }, + "voiceQualityFieldsVersion": { + "description": "version of the voiceQualityFields block", + "type": "string", + "enum": [ + "4.0" + ] + } + }, + "additionalProperties": false, + "required": [ + "calleeSideCodec", + "callerSideCodec", + "correlator", + "midCallRtcp", + "vendorNfNameFields", + "voiceQualityFieldsVersion" + ] + } + } +} diff --git a/cps-ncmp-events/src/main/resources/schemas/async/data-operation-event-schema-1.0.0.json b/cps-ncmp-events/src/main/resources/schemas/ncmp/async-m2m/data-operation-event-schema-1.0.0.json index c2915187c7..c2915187c7 100644 --- a/cps-ncmp-events/src/main/resources/schemas/async/data-operation-event-schema-1.0.0.json +++ b/cps-ncmp-events/src/main/resources/schemas/ncmp/async-m2m/data-operation-event-schema-1.0.0.json diff --git a/cps-ncmp-events/src/main/resources/schemas/dmi-async-request-response-event-schema-v1.json b/cps-ncmp-events/src/main/resources/schemas/ncmp/async-m2m/dmi-async-request-response-event-schema-v1.json index 2340a4bc4c..4db07bec95 100644 --- a/cps-ncmp-events/src/main/resources/schemas/dmi-async-request-response-event-schema-v1.json +++ 
b/cps-ncmp-events/src/main/resources/schemas/ncmp/async-m2m/dmi-async-request-response-event-schema-v1.json @@ -6,6 +6,7 @@ "DmiAsyncRequestResponseEvent": { "description": "The payload for NCMP async request response event.", "type": "object", + "javaType" : "org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent", "properties": { "eventId": { "description": "The unique id identifying the event generated by DMI.", diff --git a/docs/schemas/ncmp-async-request-response-event-schema-v1.json b/cps-ncmp-events/src/main/resources/schemas/ncmp/async-m2m/ncmp-async-request-response-event-schema-v1.json index 51c2cf4d40..32b7becd05 100644 --- a/docs/schemas/ncmp-async-request-response-event-schema-v1.json +++ b/cps-ncmp-events/src/main/resources/schemas/ncmp/async-m2m/ncmp-async-request-response-event-schema-v1.json @@ -6,6 +6,7 @@ "NcmpAsyncRequestResponseEvent": { "description": "The payload for CPS async request response event.", "type": "object", + "javaType" : "org.onap.cps.ncmp.event.model.NcmpAsyncRequestResponseEvent", "properties": { "eventId": { "description": "The unique id identifying the event generated by DMI.", diff --git a/cps-ncmp-events/src/main/resources/schemas/ncmpdataavc/avc-event-schema-1.0.0.json b/cps-ncmp-events/src/main/resources/schemas/ncmp/avc/avc-event-schema-1.0.0.json index d24ec2c737..5094e1808f 100644 --- a/cps-ncmp-events/src/main/resources/schemas/ncmpdataavc/avc-event-schema-1.0.0.json +++ b/cps-ncmp-events/src/main/resources/schemas/ncmp/avc/avc-event-schema-1.0.0.json @@ -1,6 +1,6 @@ { "$schema": "https://json-schema.org/draft/2019-09/schema", - "$id": "urn:cps:org.onap.cps.ncmp.events:avc-event-schema:1.0.0", + "$id": "urn:cps:org.onap.cps.ncmp.events.inventory-event-schema:1.0.0", "$ref": "#/definitions/AvcEvent", "definitions": { "AvcEvent": { diff --git a/cps-ncmp-events/src/main/resources/schemas/cmnotificationsubscription/dmi-in-event-schema-1.0.0.json b/cps-ncmp-events/src/main/resources/schemas/ncmp/avc/dmi-in-event-schema-1.0.0.json index 93ec216e3d..fb86c5ddb4 100644 --- a/cps-ncmp-events/src/main/resources/schemas/cmnotificationsubscription/dmi-in-event-schema-1.0.0.json +++ b/cps-ncmp-events/src/main/resources/schemas/ncmp/avc/dmi-in-event-schema-1.0.0.json @@ -1,6 +1,6 @@ { "$schema": "https://json-schema.org/draft/2019-09/schema", - "$id": "urn:cps:org.onap.cps.ncmp.events:cm-notification-subscription-dmi-in-event-schema:1.0.0", + "$id": "urn:cps:org.onap.ncmp.events.subscription:1.0.0", "$ref": "#/definitions/DmiInEvent", "definitions": { "DmiInEvent": { diff --git a/cps-ncmp-events/src/main/resources/schemas/cmnotificationsubscription/dmi-out-event-schema-1.0.0.json b/cps-ncmp-events/src/main/resources/schemas/ncmp/avc/dmi-out-event-schema-1.0.0.json index 0910de1529..b47a70cd90 100644 --- a/cps-ncmp-events/src/main/resources/schemas/cmnotificationsubscription/dmi-out-event-schema-1.0.0.json +++ b/cps-ncmp-events/src/main/resources/schemas/ncmp/avc/dmi-out-event-schema-1.0.0.json @@ -1,6 +1,6 @@ { "$schema": "https://json-schema.org/draft/2019-09/schema", - "$id": "urn:cps:org.onap.cps.ncmp.events:cm-notification-subscription-dmi-out-event-schema:1.0.0", + "$id": "urn:cps:org.onap.ncmp.events.subscription:1.0.0", "$ref": "#/definitions/DmiOutEvent", "definitions": { "DmiOutEvent": { diff --git a/docs/schemas/ncmp-in-event-schema-1.0.0.json b/cps-ncmp-events/src/main/resources/schemas/ncmp/avc/ncmp-in-event-schema-1.0.0.json index f8b6c2e680..d4e8519956 100644 --- a/docs/schemas/ncmp-in-event-schema-1.0.0.json +++ 
b/cps-ncmp-events/src/main/resources/schemas/ncmp/avc/ncmp-in-event-schema-1.0.0.json @@ -1,5 +1,5 @@ { - "$id": "urn:cps:org.onap.cps.ncmp.events:cm-notification-subscription-ncmp-in-event:1.0.0", + "$id": "urn:cps:org.onap.ncmp.events.subscription:1.0.0", "$ref": "#/definitions/NcmpInEvent", "$schema": "https://json-schema.org/draft/2019-09/schema", "definitions": { diff --git a/cps-ncmp-events/src/main/resources/schemas/cmnotificationsubscription/ncmp-out-event-schema-1.0.0.json b/cps-ncmp-events/src/main/resources/schemas/ncmp/avc/ncmp-out-event-schema-1.0.0.json index 11dc4e1114..f1dabc17a3 100644 --- a/cps-ncmp-events/src/main/resources/schemas/cmnotificationsubscription/ncmp-out-event-schema-1.0.0.json +++ b/cps-ncmp-events/src/main/resources/schemas/ncmp/avc/ncmp-out-event-schema-1.0.0.json @@ -1,6 +1,6 @@ { "$schema": "https://json-schema.org/draft/2019-09/schema", - "$id": "urn:cps:org.onap.cps.ncmp.events:cm-notification-subscription-ncmp-out-event-schema:1.0.0", + "$id": "urn:cps:org.onap.ncmp.events.subscription:1.0.0", "$ref": "#/definitions/NcmpOutEvent", "definitions": { "NcmpOutEvent": { @@ -30,17 +30,17 @@ "acceptedTargets": { "type": "object", "existingJavaType": "java.util.Collection<String>", - "description": "Unique Collection of accepted targets" + "description": "Collection of accepted targets" }, "rejectedTargets": { "type": "object", "existingJavaType": "java.util.Collection<String>", - "description": "Unique Collection of rejected targets" + "description": "Collection of rejected targets" }, "pendingTargets": { "type": "object", "existingJavaType": "java.util.Collection<String>", - "description": "Unique Collection of pending targets" + "description": "Collection of pending targets" } }, "required": [ diff --git a/cps-ncmp-rest-stub/cps-ncmp-rest-stub-app/pom.xml b/cps-ncmp-rest-stub/cps-ncmp-rest-stub-app/pom.xml index 690ec01b8f..754b24476d 100644 --- a/cps-ncmp-rest-stub/cps-ncmp-rest-stub-app/pom.xml +++ b/cps-ncmp-rest-stub/cps-ncmp-rest-stub-app/pom.xml @@ -22,7 +22,7 @@ <parent> <groupId>org.onap.cps</groupId> <artifactId>cps-ncmp-rest-stub</artifactId> - <version>3.6.0-SNAPSHOT</version> + <version>3.6.3-SNAPSHOT</version> </parent> <artifactId>cps-ncmp-rest-stub-app</artifactId> diff --git a/cps-ncmp-rest-stub/cps-ncmp-rest-stub-service/pom.xml b/cps-ncmp-rest-stub/cps-ncmp-rest-stub-service/pom.xml index bba80cdcf1..712f1d922a 100644 --- a/cps-ncmp-rest-stub/cps-ncmp-rest-stub-service/pom.xml +++ b/cps-ncmp-rest-stub/cps-ncmp-rest-stub-service/pom.xml @@ -21,7 +21,7 @@ <parent> <groupId>org.onap.cps</groupId> <artifactId>cps-ncmp-rest-stub</artifactId> - <version>3.6.0-SNAPSHOT</version> + <version>3.6.3-SNAPSHOT</version> </parent> <artifactId>cps-ncmp-rest-stub-service</artifactId> diff --git a/cps-ncmp-rest-stub/pom.xml b/cps-ncmp-rest-stub/pom.xml index 056e52a4f0..e9f503ca1a 100644 --- a/cps-ncmp-rest-stub/pom.xml +++ b/cps-ncmp-rest-stub/pom.xml @@ -22,7 +22,7 @@ <parent> <groupId>org.onap.cps</groupId> <artifactId>cps-parent</artifactId> - <version>3.6.0-SNAPSHOT</version> + <version>3.6.3-SNAPSHOT</version> <relativePath>../cps-parent/pom.xml</relativePath> </parent> diff --git a/cps-ncmp-rest/docs/openapi/components.yaml b/cps-ncmp-rest/docs/openapi/components.yaml index a449c633c8..e4283d9404 100644 --- a/cps-ncmp-rest/docs/openapi/components.yaml +++ b/cps-ncmp-rest/docs/openapi/components.yaml @@ -364,7 +364,7 @@ components: example: '(fields=NRCellDU/attributes/cellLocalId)' resourceIdentifier: type: string - example: 
'NRCellDU/attributes/cellLocalId' + example: '/ManagedElement=NRNode1/GNBDUFunction=1' targetIds: type: array items: @@ -378,10 +378,12 @@ components: description: Sample request body for GNBDUFunction configuration value: gnbdu3gpp:GNBDUFunction: - gNBId: 12345 - gNBIdLength: 32 - gNBDUId: 67890 - gNBDUName: "DU-1" + id: 12345 + attributes: + gNBId: 5 + gNBIdLength: 32 + gNBDUId: 67890 + gNBDUName: "DU-1" rimRSReportConf: reportIndicator: "enabled" reportInterval: 1000 @@ -407,10 +409,12 @@ components: target: /gnbdu3gpp:GNBDUFunction value: gnbdu3gpp:GNBDUFunction: - gNBId: 54321 - gNBIdLength: 32 - gNBDUId: 98765 - gNBDUName: "DU-2" + id: 54321 + attributes: + gNBId: 5 + gNBIdLength: 32 + gNBDUId: 98765 + gNBDUName: "DU-2" rimRSReportConf: reportIndicator: "disabled" reportInterval: 2000 @@ -429,10 +433,12 @@ components: description: Sample response with GNBDUFunction configuration value: gnbdu3gpp:GNBDUFunction: - gNBId: 12345 - gNBIdLength: 32 - gNBDUId: 67890 - gNBDUName: "DU-1" + id: 12345 + attributes: + gNBId: 5 + gNBIdLength: 32 + gNBDUId: 67890 + gNBDUName: "DU-1" rimRSReportConf: reportIndicator: "enabled" reportInterval: 1000 @@ -568,25 +574,16 @@ components: cpsPathInQuery: name: cps-path in: query - description: | - The `cps-path` parameter allows referencing elements in the GNBDUFunctionConfig data model. - For more details on cps path, please refer to: - [CPS Path Documentation](https://docs.onap.org/projects/onap-cps/en/latest/cps-path.html). - Example paths: - - Root GNBDUFunction: `/GNBDUFunction` - - Specific gNB ID: `/GNBDUFunction[@gNBId='1001']` - - RIM-RS Reporting Config: `/GNBDUFunction[@gNBId='1001']/rimRSReportConf` + description: For more details on cps path, please refer https://docs.onap.org/projects/onap-cps/en/latest/cps-path.html required: false schema: type: string - default: "/GNBDUFunction" + default: / examples: - GNBDUFunction Root: - value: "//GNBDUFunction" - Specific gNB ID: - value: "//GNBDUFunction[@gNBId='1001']" - RIM-RS Reporting Config: - value: "//GNBDUFunction[@gNBId='1001']/rimRSReportConf" + container cps path: + value: '//GNBDUFunction' + list attributes cps path: + value: "//GNBDUFunction[@id='1001']" dmiPluginIdentifierInQuery: name: dmi-plugin-identifier in: query @@ -601,25 +598,14 @@ components: description: | The `resourceIdentifier` parameter specifies the target resource in the GNBDUFunctionConfig model. For ONAP DMI Plugin, the format will follow RESTConf paths. Examples: - - All GNBDUFunctions: `/GNBDUFunction` - - Specific GNBDUFunction by gNBId: `/GNBDUFunction[@gNBId='1001']` - - RIM-RS Reporting Configuration: `/GNBDUFunction[@gNBId='1001']/rimRSReportConf` + - All GNBDUFunctions: `/ManagedElement=node1/GNBDUFunction=1` required: true schema: type: string examples: sample 1: value: - resourceIdentifier: \GNBDUFunction - sample 2: - value: - resourceIdentifier: \GNBDUFunction[@gNBId='1001'] - sample 3: - value: - resourceIdentifier: \GNBDUFunction[@gNBId='1001']\rimRSReportConf - sample 4: - value: - resourceIdentifier: parent=GNBDUFunction,child=gNBId:1001 + resourceIdentifier: '/ManagedElement=node1/GNBDUFunction=1' optionsParamInQuery: name: options in: query @@ -627,8 +613,8 @@ components: The `options` parameter specifies additional query options. It is mandatory to wrap key(s)=value(s) in parentheses `()`. 
Examples for GNBDUFunctionConfig queries: - Limit depth of returned sub-tree: `(depth=2)` - - Select specific fields: `(fields=gNBId,gNBDUName)` - - Combine options: `(depth=3,fields=gNBId,gNBDUName)` + - Select specific fields: `(fields=attributes(gNBId;gNBDUName))` + - Combine options: `(depth=3,fields=attributes(gNBId;gNBDUName))` required: false schema: type: string @@ -638,10 +624,10 @@ components: options: (depth=2) Select Specific Fields: value: - options: (fields=gNBId,gNBDUName) + options: (fields=attributes(gNBId;gNBDUName)) Combine Depth and Fields: value: - options: (depth=3,fields=gNBId,gNBDUName) + options: (depth=3,fields=attributes(gNBId;gNBDUName)) topicParamInQuery: name: topic in: query diff --git a/cps-ncmp-rest/docs/openapi/ncmp.yml b/cps-ncmp-rest/docs/openapi/ncmp.yml index 15b8b37231..6dd91136bb 100755 --- a/cps-ncmp-rest/docs/openapi/ncmp.yml +++ b/cps-ncmp-rest/docs/openapi/ncmp.yml @@ -1,5 +1,5 @@ # ============LICENSE_START======================================================= -# Copyright (C) 2021-2024 Nordix Foundation +# Copyright (C) 2021-2024 OpenInfra Foundation Europe. All rights reserved. # Modifications Copyright (C) 2021 Pantheon.tech # Modifications Copyright (C) 2021-2022 Bell Canada # ================================================================================ @@ -86,6 +86,8 @@ resourceDataForCmHandle: $ref: 'components.yaml#/components/responses/BadRequest' 403: $ref: 'components.yaml#/components/responses/Forbidden' + 409: + $ref: 'components.yaml#/components/responses/Conflict' 500: $ref: 'components.yaml#/components/responses/InternalServerError' 502: @@ -125,6 +127,8 @@ resourceDataForCmHandle: $ref: 'components.yaml#/components/responses/BadRequest' 403: $ref: 'components.yaml#/components/responses/Forbidden' + 409: + $ref: 'components.yaml#/components/responses/Conflict' 500: $ref: 'components.yaml#/components/responses/InternalServerError' 502: @@ -158,6 +162,8 @@ resourceDataForCmHandle: $ref: 'components.yaml#/components/responses/BadRequest' 403: $ref: 'components.yaml#/components/responses/Forbidden' + 409: + $ref: 'components.yaml#/components/responses/Conflict' 500: $ref: 'components.yaml#/components/responses/InternalServerError' 502: @@ -184,6 +190,8 @@ resourceDataForCmHandle: $ref: 'components.yaml#/components/responses/Forbidden' 404: $ref: 'components.yaml#/components/responses/NotFound' + 409: + $ref: 'components.yaml#/components/responses/Conflict' 500: $ref: 'components.yaml#/components/responses/InternalServerError' 502: diff --git a/cps-ncmp-rest/docs/openapi/openapi-inventory.yml b/cps-ncmp-rest/docs/openapi/openapi-inventory.yml index b52a7fbfa7..7658075043 100755 --- a/cps-ncmp-rest/docs/openapi/openapi-inventory.yml +++ b/cps-ncmp-rest/docs/openapi/openapi-inventory.yml @@ -1,7 +1,7 @@ -# ============LICENSE_START======================================================= +# ============LICENSE_START============================================================== # Copyright (C) 2021 Bell Canada -# Modifications Copyright (C) 2022-2024 Nordix Foundation -# ================================================================================ +# Modifications Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. +# ======================================================================================= # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -15,20 +15,15 @@ # limitations under the License. # # SPDX-License-Identifier: Apache-2.0 -# ============LICENSE_END========================================================= +# ============LICENSE_END================================================================ openapi: 3.0.3 info: title: NCMP Inventory API description: NCMP Inventory API - version: "3.6.0" + version: "3.6.2" servers: - url: /ncmpInventory -components: - securitySchemes: - basicAuth: - type: http - scheme: basic paths: /v1/ch: $ref: 'ncmp-inventory.yml#/updateDmiRegistration' @@ -38,6 +33,3 @@ paths: /v1/ch/searches: $ref: 'ncmp-inventory.yml#/searchCmHandleIds' - -security: - - basicAuth: [] diff --git a/cps-ncmp-rest/docs/openapi/openapi.yml b/cps-ncmp-rest/docs/openapi/openapi.yml index 3faf3c181e..1f69cc681f 100755 --- a/cps-ncmp-rest/docs/openapi/openapi.yml +++ b/cps-ncmp-rest/docs/openapi/openapi.yml @@ -1,5 +1,5 @@ # ============LICENSE_START======================================================= -# Copyright (C) 2021-2024 Nordix Foundation +# Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. # Modifications Copyright (C) 2021 Pantheon.tech # Modifications Copyright (C) 2021 Bell Canada # ================================================================================ @@ -22,14 +22,9 @@ openapi: 3.0.3 info: title: NCMP to CPS Proxy API description: NCMP to CPS Proxy API - version: "3.6.0" + version: "3.6.2" servers: - url: /ncmp -components: - securitySchemes: - basicAuth: - type: http - scheme: basic paths: /v1/ch/{cm-handle}/data/ds/{datastore-name}: $ref: 'ncmp.yml#/resourceDataForCmHandle' @@ -63,5 +58,3 @@ paths: /v1/ch/{cm-handle}/data-sync: $ref: 'ncmp.yml#/setDataSyncEnabledFlag' -security: - - basicAuth: [] diff --git a/cps-ncmp-rest/pom.xml b/cps-ncmp-rest/pom.xml index 4e2c48dd6f..4084f9b21b 100644 --- a/cps-ncmp-rest/pom.xml +++ b/cps-ncmp-rest/pom.xml @@ -27,7 +27,7 @@ <parent> <groupId>org.onap.cps</groupId> <artifactId>cps-parent</artifactId> - <version>3.6.0-SNAPSHOT</version> + <version>3.6.3-SNAPSHOT</version> <relativePath>../cps-parent/pom.xml</relativePath> </parent> diff --git a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/DataJobControllerForTest.java b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/DataJobControllerForTest.java new file mode 100644 index 0000000000..d259d91796 --- /dev/null +++ b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/DataJobControllerForTest.java @@ -0,0 +1,79 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.rest.controller; + +import io.swagger.v3.oas.annotations.Hidden; +import java.util.List; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.onap.cps.ncmp.api.datajobs.DataJobService; +import org.onap.cps.ncmp.api.datajobs.models.DataJobMetadata; +import org.onap.cps.ncmp.api.datajobs.models.DataJobRequest; +import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest; +import org.onap.cps.ncmp.api.datajobs.models.SubJobWriteResponse; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestHeader; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +/** + * Controller responsible for handling data job write operations. + * This class exposes an API endpoint that accepts a write request for a data job and processes it. + */ +@Slf4j +@RestController +@RequestMapping("/do-not-use/dataJobs") +@RequiredArgsConstructor +public class DataJobControllerForTest { + + private final DataJobService dataJobService; + + /** + * Handles POST requests to write a data job. This endpoint is unsupported and intended for testing purposes only. + * This internal endpoint processes a data job write request by extracting necessary metadata and data + * from the request body and delegating the operation to the {@link DataJobService}. + * <p><b>Note:</b> The {@link DataJobRequest} parameter is created and used for testing purposes only. + * In a production environment, data job write operations are not triggered through internal workflows.</p> + * + * @param authorization The optional authorization token sent in the request header. + * @param dataJobId The unique identifier for the data job, extracted from the URL path. + * @param dataJobRequest The request payload containing metadata and data for the data job write operation. + * @return A {@link ResponseEntity} containing a list of {@link SubJobWriteResponse} objects representing the + * status of each sub-job within the data job, or an error response with an appropriate HTTP status code. 
+ */ + @PostMapping("/{dataJobId}/write") + @Hidden + public ResponseEntity<List<SubJobWriteResponse>> writeDataJob(@RequestHeader(value = "Authorization", + required = false) final String authorization, + @PathVariable("dataJobId") final String dataJobId, + @RequestBody final DataJobRequest dataJobRequest) { + log.info("Internal API: writeDataJob invoked for {}", dataJobId); + final DataJobMetadata dataJobMetadata = dataJobRequest.dataJobMetadata(); + final DataJobWriteRequest dataJobWriteRequest = dataJobRequest.dataJobWriteRequest(); + final List<SubJobWriteResponse> subJobWriteResponses = dataJobService.writeDataJob(authorization, dataJobId, + dataJobMetadata, dataJobWriteRequest); + return ResponseEntity.ok(subJobWriteResponses); + } +} + diff --git a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/NetworkCmProxyController.java b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/NetworkCmProxyController.java index 317f6b70e1..387f48acb5 100755..100644 --- a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/NetworkCmProxyController.java +++ b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/NetworkCmProxyController.java @@ -1,7 +1,7 @@ /* * ============LICENSE_START======================================================= * Copyright (C) 2021 Pantheon.tech - * Modifications Copyright (C) 2021-2024 Nordix Foundation + * Modifications Copyright (C) 2021-2025 OpenInfra Foundation Europe * Modifications Copyright (C) 2021 highstreet technologies GmbH * Modifications Copyright (C) 2021-2022 Bell Canada * ================================================================================ @@ -57,6 +57,7 @@ import org.onap.cps.ncmp.rest.model.RestOutputCmHandle; import org.onap.cps.ncmp.rest.model.RestOutputCmHandleCompositeState; import org.onap.cps.ncmp.rest.model.RestOutputCmHandlePublicProperties; import org.onap.cps.ncmp.rest.util.CmHandleStateMapper; +import org.onap.cps.ncmp.rest.util.CountCmHandleSearchExecution; import org.onap.cps.ncmp.rest.util.DataOperationRequestMapper; import org.onap.cps.ncmp.rest.util.DeprecationHelper; import org.onap.cps.ncmp.rest.util.NcmpRestInputMapper; @@ -256,14 +257,15 @@ public class NetworkCmProxyController implements NetworkCmProxyApi { */ @Override @SuppressWarnings("deprecation") // mapOldConditionProperties method will be removed in Release 12 + @CountCmHandleSearchExecution(methodName = "searchCmHandles", interfaceName = "CPS-E-05", + description = "Search for cm handles within CPS-E-05 interface") public ResponseEntity<List<RestOutputCmHandle>> searchCmHandles( final CmHandleQueryParameters cmHandleQueryParameters) { final CmHandleQueryApiParameters cmHandleQueryApiParameters = deprecationHelper.mapOldConditionProperties(cmHandleQueryParameters); - final Collection<NcmpServiceCmHandle> cmHandles = networkCmProxyInventoryFacade - .executeCmHandleSearch(cmHandleQueryApiParameters); final List<RestOutputCmHandle> restOutputCmHandles = - cmHandles.stream().map(this::toRestOutputCmHandle).collect(Collectors.toList()); + networkCmProxyInventoryFacade.executeCmHandleSearch(cmHandleQueryApiParameters) + .map(this::toRestOutputCmHandle).collectList().block(); return ResponseEntity.ok(restOutputCmHandles); } @@ -276,6 +278,8 @@ public class NetworkCmProxyController implements NetworkCmProxyApi { * @return collection of cm handle ids */ @Override + @CountCmHandleSearchExecution(methodName = "searchCmHandleIds", interfaceName = "CPS-E-05", + description = "Search for cm handles within CPS-E-05 interface") public 
ResponseEntity<List<String>> searchCmHandleIds(final CmHandleQueryParameters cmHandleQueryParameters, final Boolean outputAlternateId) { final CmHandleQueryApiParameters cmHandleQueryApiParameters = diff --git a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/NetworkCmProxyInventoryController.java b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/NetworkCmProxyInventoryController.java index 0e27ba9355..5de8c12044 100755..100644 --- a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/NetworkCmProxyInventoryController.java +++ b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/NetworkCmProxyInventoryController.java @@ -1,7 +1,7 @@ /* * ============LICENSE_START======================================================= * Copyright (C) 2021-2022 Bell Canada - * Modifications Copyright (C) 2022-2024 Nordix Foundation + * Modifications Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -37,6 +37,7 @@ import org.onap.cps.ncmp.rest.model.CmHandleQueryParameters; import org.onap.cps.ncmp.rest.model.CmHandlerRegistrationErrorResponse; import org.onap.cps.ncmp.rest.model.DmiPluginRegistrationErrorResponse; import org.onap.cps.ncmp.rest.model.RestDmiPluginRegistration; +import org.onap.cps.ncmp.rest.util.CountCmHandleSearchExecution; import org.onap.cps.ncmp.rest.util.NcmpRestInputMapper; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; @@ -60,6 +61,8 @@ public class NetworkCmProxyInventoryController implements NetworkCmProxyInventor * @return list of cm handle IDs */ @Override + @CountCmHandleSearchExecution(methodName = "searchCmHandleIds", interfaceName = "CPS-NCMP-I-01", + description = "Search for cm handle ids within CPS-NCMP-I-01 interface") public ResponseEntity<List<String>> searchCmHandleIds(final CmHandleQueryParameters cmHandleQueryParameters, final Boolean outputAlternateId) { final CmHandleQueryServiceParameters cmHandleQueryServiceParameters = ncmpRestInputMapper diff --git a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/util/CmHandleSearchExecutionCounter.java b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/util/CmHandleSearchExecutionCounter.java new file mode 100644 index 0000000000..ecd248d89f --- /dev/null +++ b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/util/CmHandleSearchExecutionCounter.java @@ -0,0 +1,84 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.rest.util; + +import io.micrometer.core.instrument.Counter; +import io.micrometer.core.instrument.MeterRegistry; +import jakarta.validation.Valid; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.aspectj.lang.JoinPoint; +import org.aspectj.lang.annotation.Aspect; +import org.aspectj.lang.annotation.Before; +import org.onap.cps.ncmp.rest.model.CmHandleQueryParameters; +import org.onap.cps.ncmp.rest.model.ConditionProperties; +import org.springframework.stereotype.Component; + +@Aspect +@Component +@RequiredArgsConstructor +@Slf4j +public class CmHandleSearchExecutionCounter { + + private static final String NO_CONDITION = "NONE"; + + private final MeterRegistry meterRegistry; + + /** + * Counts the number of invocations of the methods annotated with @CountCmHandleSearchExecution based on the search + * conditions dynamically added. If search is executed without condition then it would be tagged as NONE, otherwise + * the conditions are concatenated with _ as separator. + * + * @param joinPoint join point + * @param countCmHandleSearchExecution count the cm handle search conditions + */ + @Before("@annotation(countCmHandleSearchExecution)") + public void cmHandleSearchExecutionCounter(final JoinPoint joinPoint, + final CountCmHandleSearchExecution countCmHandleSearchExecution) { + final Object[] args = joinPoint.getArgs(); + + if (args.length == 0 || !(args[0] instanceof CmHandleQueryParameters cmHandleQueryParameters)) { + log.warn("Method {} is missing required CmHandleQueryParameters argument", joinPoint.getSignature()); + return; + } + + final String conditionTag = Optional.ofNullable(cmHandleQueryParameters.getCmHandleQueryParameters()) + .filter(conditionTypes -> !conditionTypes.isEmpty()) + .map(CmHandleSearchExecutionCounter::conditionTag) + .orElse(NO_CONDITION); + + Counter.builder("cm_handle_search_invocations") + .tag("method", countCmHandleSearchExecution.methodName()) + .tag("cps-interface", countCmHandleSearchExecution.interfaceName()) + .tag("conditions", conditionTag) + .description("Number of invocations of search methods based on condition types") + .register(meterRegistry) + .increment(); + } + + private static String conditionTag(final List<@Valid ConditionProperties> conditionTypes) { + return conditionTypes.stream().map(ConditionProperties::getConditionName).sorted() + .collect(Collectors.joining("_")); + } +} diff --git a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/util/CountCmHandleSearchExecution.java b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/util/CountCmHandleSearchExecution.java new file mode 100644 index 0000000000..d04a2ea515 --- /dev/null +++ b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/util/CountCmHandleSearchExecution.java @@ -0,0 +1,52 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.rest.util; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +public @interface CountCmHandleSearchExecution { + + /** + * Capture the method name for which the number of invocations needs to be tracked. + * + * @return the search method name + */ + String methodName(); + + /** + * Capture the CPS and NCMP interface name of the called method. + * + * @return the CPS and NCMP interface name + */ + String interfaceName(); + + /** + * Capture the description to facilitate metric scraping. + * + * @return the description of the metric. + */ + String description(); +} diff --git a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/DataJobControllerForTestSpec.groovy b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/DataJobControllerForTestSpec.groovy new file mode 100644 index 0000000000..6fc4a699e5 --- /dev/null +++ b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/DataJobControllerForTestSpec.groovy @@ -0,0 +1,50 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.rest.controller + +import org.onap.cps.ncmp.api.datajobs.DataJobService +import org.onap.cps.ncmp.api.datajobs.models.DataJobMetadata +import org.onap.cps.ncmp.api.datajobs.models.DataJobRequest +import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest +import org.onap.cps.ncmp.api.datajobs.models.WriteOperation +import org.springframework.http.HttpStatus +import spock.lang.Specification + +class DataJobControllerForTestSpec extends Specification { + + DataJobService mockDataJobService = Mock() + + def objectUnderTest = new DataJobControllerForTest(mockDataJobService) + + def 'Write Data Job request'() { + given: 'a valid datajob write request' + def dataJobMetadata = new DataJobMetadata('some destination', 'some accept type', 'some content type') + def writeOperations = [ new WriteOperation('/path/to/node', 'create', 'op123', 'value1') ] + def dataJobWriteRequest = new DataJobWriteRequest(writeOperations) + def dataJobRequest = new DataJobRequest(dataJobMetadata, dataJobWriteRequest) + when: 'write data job is called' + def result = objectUnderTest.writeDataJob('my authorization', 'my job', dataJobRequest) + then: 'response is 200 OK' + assert result.statusCode == HttpStatus.OK + and: 'the service method is called once with expected parameters' + 1 * mockDataJobService.writeDataJob('my authorization', 'my job', dataJobMetadata, dataJobWriteRequest) + } +} diff --git a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyControllerSpec.groovy b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyControllerSpec.groovy index c3aca5a99b..e934530d6f 100644 --- a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyControllerSpec.groovy +++ b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyControllerSpec.groovy @@ -2,7 +2,7 @@ * ============LICENSE_START======================================================= * Copyright (C) 2021 Pantheon.tech * Modifications Copyright (C) 2021 highstreet technologies GmbH - * Modifications Copyright (C) 2021-2024 Nordix Foundation + * Modifications Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2021-2022 Bell Canada. 
* ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -31,7 +31,7 @@ import com.fasterxml.jackson.databind.ObjectMapper import groovy.json.JsonSlurper import org.mapstruct.factory.Mappers import org.onap.cps.TestUtils -import org.onap.cps.events.EventsPublisher +import org.onap.cps.events.EventsProducer import org.onap.cps.ncmp.impl.NetworkCmProxyInventoryFacadeImpl import org.onap.cps.ncmp.api.inventory.models.CompositeState import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle @@ -59,6 +59,7 @@ import org.springframework.http.HttpStatus import org.springframework.http.MediaType import org.springframework.http.ResponseEntity import org.springframework.test.web.servlet.MockMvc +import reactor.core.publisher.Flux import reactor.core.publisher.Mono import spock.lang.Shared import spock.lang.Specification @@ -133,7 +134,7 @@ class NetworkCmProxyControllerSpec extends Specification { } def cleanup() { - ((Logger) LoggerFactory.getLogger(EventsPublisher.class)).detachAndStopAllAppenders() + ((Logger) LoggerFactory.getLogger(EventsProducer.class)).detachAndStopAllAppenders() } def 'Get Resource Data from pass-through operational.'() { @@ -275,7 +276,7 @@ class NetworkCmProxyControllerSpec extends Specification { cmHandle2.alternateId = 'someAlternateId' cmHandle2.moduleSetTag = 'someModuleSetTag' cmHandle2.dataProducerIdentifier = 'someDataProducerIdentifier' - mockNetworkCmProxyInventoryFacade.executeCmHandleSearch(_) >> [cmHandle1, cmHandle2] + mockNetworkCmProxyInventoryFacade.executeCmHandleSearch(_) >> Flux.fromIterable([cmHandle1, cmHandle2]) when: 'the searches api is invoked' def response = mvc.perform(post(searchesEndpoint).contentType(MediaType.APPLICATION_JSON).content(jsonString)).andReturn().response then: 'response status returns OK' @@ -352,7 +353,7 @@ class NetworkCmProxyControllerSpec extends Specification { cmHandle2.cmHandleId = 'ch-2' cmHandle2.publicProperties = [color: 'green'] cmHandle2.currentTrustLevel = TrustLevel.NONE - mockNetworkCmProxyInventoryFacade.executeCmHandleSearch(_) >> [cmHandle1, cmHandle2] + mockNetworkCmProxyInventoryFacade.executeCmHandleSearch(_) >> Flux.fromIterable([cmHandle1, cmHandle2]) when: 'the searches api is invoked' def response = mvc.perform(post(searchesEndpoint).contentType(MediaType.APPLICATION_JSON).content(jsonString)).andReturn().response then: 'an empty cm handle identifier is returned' diff --git a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyRestExceptionHandlerSpec.groovy b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyRestExceptionHandlerSpec.groovy index aad04a18ae..3a9a0bb09c 100644 --- a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyRestExceptionHandlerSpec.groovy +++ b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyRestExceptionHandlerSpec.groovy @@ -1,7 +1,7 @@ /* * ============LICENSE_START======================================================= * Copyright (C) 2021 highstreet technologies GmbH - * Modifications Copyright (C) 2021-2024 Nordix Foundation + * Modifications Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -102,6 +102,9 @@ class NetworkCmProxyRestExceptionHandlerSpec extends Specification { @SpringBean NcmpPassthroughResourceRequestHandler StubbedNcmpPassthroughResourceRequestHandler = Stub() + @SpringBean + DataJobControllerForTest stubbedDataJobControllerForTest = Stub() + @Value('${rest.api.ncmp-base-path}') def basePathNcmp diff --git a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/util/CmHandleSearchExecutionCounterSpec.groovy b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/util/CmHandleSearchExecutionCounterSpec.groovy new file mode 100644 index 0000000000..bdadfc8689 --- /dev/null +++ b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/util/CmHandleSearchExecutionCounterSpec.groovy @@ -0,0 +1,128 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.rest.util + +import io.micrometer.core.instrument.simple.SimpleMeterRegistry +import org.aspectj.lang.JoinPoint +import org.aspectj.lang.Signature +import org.onap.cps.ncmp.rest.model.CmHandleQueryParameters +import org.onap.cps.ncmp.rest.model.ConditionProperties +import spock.lang.Specification + +class CmHandleSearchExecutionCounterSpec extends Specification { + + def meterRegistry = new SimpleMeterRegistry() + def mockJoinPoint = Mock(JoinPoint) + def mockCountCmHandleSearchExecutionAnnotation = Mock(CountCmHandleSearchExecution) + def mockSignature = Mock(Signature) + + def objectUnderTest = new CmHandleSearchExecutionCounter(meterRegistry) + + def setup() { + mockCountCmHandleSearchExecutionAnnotation.methodName() >> 'testMethod' + mockCountCmHandleSearchExecutionAnnotation.interfaceName() >> 'testInterface' + mockSignature.toString() >> 'testSignature' + mockJoinPoint.getSignature() >> mockSignature + } + + def 'should track search with conditions'() { + given: 'CmHandleQueryParameters with conditions' + def cmHandleQueryParameters = new CmHandleQueryParameters() + def condition1 = new ConditionProperties(conditionName: 'condition1') + def condition2 = new ConditionProperties(conditionName: 'condition2') + cmHandleQueryParameters.addCmHandleQueryParametersItem(condition1).addCmHandleQueryParametersItem(condition2) + and: 'joinPoint returns the parameters' + mockJoinPoint.getArgs() >> [cmHandleQueryParameters] + when: 'the annotated method is called' + objectUnderTest.cmHandleSearchExecutionCounter(mockJoinPoint, mockCountCmHandleSearchExecutionAnnotation) + then: 'the counter should be registered' + def counter = findCounter('cm_handle_search_invocations', [ + 'method' : 'testMethod', + 'cps-interface': 'testInterface', + 'conditions' : 'condition1_condition2' + ]) + and: 'is incremented once' + assert counter.count() == 1 + } + + def 
'should track search with no conditions as NONE'() { + given: 'empty CmHandleQueryParameters' + def cmHandleQueryParameters = new CmHandleQueryParameters() + and: 'joinPoint returns the parameters' + mockJoinPoint.getArgs() >> [cmHandleQueryParameters] + when: 'the annotated method is called' + objectUnderTest.cmHandleSearchExecutionCounter(mockJoinPoint, mockCountCmHandleSearchExecutionAnnotation) + then: 'the counter should be registered with NONE tag' + def counter = findCounter('cm_handle_search_invocations', [ + method : 'testMethod', + 'cps-interface': 'testInterface', + conditions : 'NONE' + ]) + and: 'is incremented once' + assert counter.count() == 1 + } + + def 'should not create counter when args are empty'() { + given: 'joinPoint with empty args' + mockJoinPoint.getArgs() >> [] + when: 'the aspect method is called' + objectUnderTest.cmHandleSearchExecutionCounter(mockJoinPoint, mockCountCmHandleSearchExecutionAnnotation) + then: 'no counter should be registered' + assert meterRegistry.find('cm_handle_search_invocations').counters().isEmpty() + } + + def 'should not create counter when first arg is not CmHandleQueryParameters'() { + given: 'joinPoint with non-CmHandleQueryParameters arg' + mockJoinPoint.getArgs() >> ['not a CmHandleQueryParameters'] + when: 'the aspect method is called' + objectUnderTest.cmHandleSearchExecutionCounter(mockJoinPoint, mockCountCmHandleSearchExecutionAnnotation) + then: 'no counter should be registered' + assert meterRegistry.find('cm_handle_search_invocations').counters().isEmpty() + } + + def 'should sort condition names alphabetically'() { + given: 'CmHandleQueryParameters with unsorted conditions' + def cmHandleQueryParameters = new CmHandleQueryParameters() + def condition1 = new ConditionProperties(conditionName: 'zCondition') + def condition2 = new ConditionProperties(conditionName: 'aCondition') + cmHandleQueryParameters.addCmHandleQueryParametersItem(condition1).addCmHandleQueryParametersItem(condition2) + and: 'joinPoint returns our parameters' + mockJoinPoint.getArgs() >> [cmHandleQueryParameters] + when: 'the aspect method is called' + objectUnderTest.cmHandleSearchExecutionCounter(mockJoinPoint, mockCountCmHandleSearchExecutionAnnotation) + then: 'the counter should be registered with alphabetically sorted tags' + def counter = findCounter('cm_handle_search_invocations', [ + 'method' : 'testMethod', + 'cps-interface': 'testInterface', + 'conditions' : 'aCondition_zCondition' + ]) + and: 'counter is incremented once' + assert counter.count() == 1 + } + + def findCounter(name, tags) { + def counterSearch = meterRegistry.find(name) + tags.each { key, value -> + counterSearch = counterSearch.tag(key, value) + } + return counterSearch.counter() + } +}
\ No newline at end of file diff --git a/cps-ncmp-service/pom.xml b/cps-ncmp-service/pom.xml index eb0aed1e7c..fbc30eba04 100644 --- a/cps-ncmp-service/pom.xml +++ b/cps-ncmp-service/pom.xml @@ -27,7 +27,7 @@ <parent> <groupId>org.onap.cps</groupId> <artifactId>cps-parent</artifactId> - <version>3.6.0-SNAPSHOT</version> + <version>3.6.3-SNAPSHOT</version> <relativePath>../cps-parent/pom.xml</relativePath> </parent> diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/datajobs/models/DataJobRequest.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/datajobs/models/DataJobRequest.java new file mode 100644 index 0000000000..fe73a601b9 --- /dev/null +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/datajobs/models/DataJobRequest.java @@ -0,0 +1,24 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.api.datajobs.models; + +public record DataJobRequest(DataJobMetadata dataJobMetadata, DataJobWriteRequest dataJobWriteRequest) { +} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/datajobs/models/DmiWriteOperation.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/datajobs/models/DmiWriteOperation.java index 7e9ca7988b..2119f817e7 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/datajobs/models/DmiWriteOperation.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/datajobs/models/DmiWriteOperation.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation. + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,8 +20,6 @@ package org.onap.cps.ncmp.api.datajobs.models; -import java.util.Map; - /** * Describes the write data job operation to be forwarded to dmi. * @@ -32,12 +30,10 @@ import java.util.Map; * @param moduleSetTag The module set tag of the CM Handle. * @param value The value to be written depends on the type of operation. * @param operationId Unique identifier of the operation within the request. - * @param privateProperties Contains the private properties of a Cm Handle. 
*/ public record DmiWriteOperation( String path, String op, String moduleSetTag, Object value, - String operationId, - Map<String, String> privateProperties) {} + String operationId) {} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/NetworkCmProxyInventoryFacade.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/NetworkCmProxyInventoryFacade.java index 9bfb775d55..876a5e7c40 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/NetworkCmProxyInventoryFacade.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/NetworkCmProxyInventoryFacade.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,6 +30,7 @@ import org.onap.cps.ncmp.api.inventory.models.CompositeState; import org.onap.cps.ncmp.api.inventory.models.DmiPluginRegistration; import org.onap.cps.ncmp.api.inventory.models.DmiPluginRegistrationResponse; import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle; +import reactor.core.publisher.Flux; public interface NetworkCmProxyInventoryFacade { @@ -96,9 +97,9 @@ public interface NetworkCmProxyInventoryFacade { * Retrieve cm handles with details for the given query parameters. * * @param cmHandleQueryApiParameters cm handle query parameters - * @return cm handles with details + * @return cm handle objects as a reactive stream (flux) */ - Collection<NcmpServiceCmHandle> executeCmHandleSearch(final CmHandleQueryApiParameters cmHandleQueryApiParameters); + Flux<NcmpServiceCmHandle> executeCmHandleSearch(final CmHandleQueryApiParameters cmHandleQueryApiParameters); /** * Retrieve cm handle ids for the given query parameters. diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/config/CmHandleStateGaugeConfig.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/config/CmHandleStateGaugeConfig.java new file mode 100644 index 0000000000..88bd1333cd --- /dev/null +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/config/CmHandleStateGaugeConfig.java @@ -0,0 +1,124 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.config; + +import com.hazelcast.map.IMap; +import io.micrometer.core.instrument.Gauge; +import io.micrometer.core.instrument.MeterRegistry; +import lombok.RequiredArgsConstructor; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.DependsOn; + +@Configuration +@RequiredArgsConstructor +@DependsOn("cmHandleStateMonitor") +public class CmHandleStateGaugeConfig { + + private static final String STATE_TAG = "state"; + private static final String CM_HANDLE_STATE_GAUGE = "cps_ncmp_inventory_cm_handles_by_state"; + private final IMap<String, Integer> cmHandlesByState; + + /** + * Register gauge metric for cm handles with state 'advised'. + * + * @param meterRegistry meter registry + * @return cm handle state gauge + */ + @Bean + @TimedCustom(name = "cps_ncmp_inventory_cm_handles_by_state{state=ADVISED}", + description = "Current number of cm handles in advised state") + public Gauge advisedCmHandles(final MeterRegistry meterRegistry) { + return Gauge.builder(CM_HANDLE_STATE_GAUGE, cmHandlesByState, + value -> cmHandlesByState.get("advisedCmHandlesCount")) + .tag(STATE_TAG, "ADVISED") + .description("Current number of cm handles in advised state") + .register(meterRegistry); + } + + /** + * Register gauge metric for cm handles with state 'ready'. + * + * @param meterRegistry meter registry + * @return cm handle state gauge + */ + @Bean + @TimedCustom(name = "cps_ncmp_inventory_cm_handles_by_state{state=READY}", + description = "Current number of cm handles in ready state") + public Gauge readyCmHandles(final MeterRegistry meterRegistry) { + return Gauge.builder(CM_HANDLE_STATE_GAUGE, cmHandlesByState, + value -> cmHandlesByState.get("readyCmHandlesCount")) + .tag(STATE_TAG, "READY") + .description("Current number of cm handles in ready state") + .register(meterRegistry); + } + + /** + * Register gauge metric for cm handles with state 'locked'. + * + * @param meterRegistry meter registry + * @return cm handle state gauge + */ + @Bean + @TimedCustom(name = "cps_ncmp_inventory_cm_handles_by_state{state=LOCKED}", + description = "Current number of cm handles in locked state") + public Gauge lockedCmHandles(final MeterRegistry meterRegistry) { + return Gauge.builder(CM_HANDLE_STATE_GAUGE, cmHandlesByState, + value -> cmHandlesByState.get("lockedCmHandlesCount")) + .tag(STATE_TAG, "LOCKED") + .description("Current number of cm handles in locked state") + .register(meterRegistry); + } + + /** + * Register gauge metric for cm handles with state 'deleting'. + * + * @param meterRegistry meter registry + * @return cm handle state gauge + */ + @Bean + @TimedCustom(name = "cps_ncmp_inventory_cm_handles_by_state{state=DELETING}", + description = "Current number of cm handles in deleting state") + public Gauge deletingCmHandles(final MeterRegistry meterRegistry) { + return Gauge.builder(CM_HANDLE_STATE_GAUGE, cmHandlesByState, + value -> cmHandlesByState.get("deletingCmHandlesCount")) + .tag(STATE_TAG, "DELETING") + .description("Current number of cm handles in deleting state") + .register(meterRegistry); + } + + /** + * Register gauge metric for cm handles with state 'deleted'. 
+ * + * @param meterRegistry meter registry + * @return cm handle state gauge + */ + @Bean + @TimedCustom(name = "cps_ncmp_inventory_cm_handles_by_state{state=DELETED}", + description = "Number of cm handles that have been deleted since the application started") + public Gauge deletedCmHandles(final MeterRegistry meterRegistry) { + return Gauge.builder(CM_HANDLE_STATE_GAUGE, cmHandlesByState, + value -> cmHandlesByState.get("deletedCmHandlesCount")) + .tag(STATE_TAG, "DELETED") + .description("Number of cm handles that have been deleted since the application started") + .register(meterRegistry); + } +} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/config/KafkaConfig.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/config/KafkaConfig.java index 3d3c3db482..8475be6f6a 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/config/KafkaConfig.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/config/KafkaConfig.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -47,7 +47,7 @@ import org.springframework.kafka.support.serializer.JsonSerializer; /** * kafka Configuration for legacy and cloud events. * - * @param <T> valid legacy event to be published over the wire. + * @param <T> valid legacy event to be sent over the wire. */ @Configuration @EnableKafka diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/config/ServiceConfig.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/config/ServiceConfig.java index 775e9d7b14..3c98d69554 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/config/ServiceConfig.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/config/ServiceConfig.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024-2025 Nordix Foundation. + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -29,7 +29,7 @@ public abstract class ServiceConfig { private String connectionProviderName = ""; private int maximumInMemorySizeInMegabytes = 1; private int maximumConnectionsTotal = 1; - private int pendingAcquireMaxCount = 1; + private int pendingAcquireMaxCount = 10; private Integer connectionTimeoutInSeconds = 1; private long readTimeoutInSeconds = 1; private long writeTimeoutInSeconds = 1; diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetYangResourceRepository.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/config/TimedCustom.java index 410dcc2e26..7219147538 100644 --- a/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetYangResourceRepository.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/config/TimedCustom.java @@ -1,6 +1,6 @@ -/*- +/* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2025 Nordix Foundation. + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. 
* ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,19 +18,30 @@ * ============LICENSE_END========================================================= */ -package org.onap.cps.ri.repository; - -import java.util.List; - -public interface SchemaSetYangResourceRepository { +package org.onap.cps.ncmp.config; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +/** + * Custom annotation to enable metric scraping. + */ +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +public @interface TimedCustom { /** - * Link yang resources (ids) with a schema set (id). + * Stores the name for a metric. * - * @param schemaSetId the schema set id - * @param yangResourceIds list of yang resource ids + * @return the name of the metric. */ - void insertSchemaSetIdYangResourceId(final Integer schemaSetId, final List<Integer> yangResourceIds); + String name(); + /** + * Stores the description for a metric. + * + * @return the description of the metric. + */ + String description(); } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/NetworkCmProxyInventoryFacadeImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/NetworkCmProxyInventoryFacadeImpl.java index 118c2bba70..7130afdcfd 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/NetworkCmProxyInventoryFacadeImpl.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/NetworkCmProxyInventoryFacadeImpl.java @@ -1,7 +1,7 @@ /* * ============LICENSE_START======================================================= * Copyright (C) 2021 highstreet technologies GmbH - * Modifications Copyright (C) 2021-2024 Nordix Foundation + * Modifications Copyright (C) 2021-2025 OpenInfra Foundation Europe * Modifications Copyright (C) 2021 Pantheon.tech * Modifications Copyright (C) 2021-2022 Bell Canada * Modifications Copyright (C) 2023 TechMahindra Ltd. 
@@ -52,6 +52,7 @@ import org.onap.cps.ncmp.impl.utils.AlternateIdMatcher; import org.onap.cps.ncmp.impl.utils.YangDataConverter; import org.onap.cps.utils.JsonObjectMapper; import org.springframework.stereotype.Service; +import reactor.core.publisher.Flux; @Service @RequiredArgsConstructor @@ -118,15 +119,12 @@ public class NetworkCmProxyInventoryFacadeImpl implements NetworkCmProxyInventor } @Override - public Collection<NcmpServiceCmHandle> executeCmHandleSearch( + public Flux<NcmpServiceCmHandle> executeCmHandleSearch( final CmHandleQueryApiParameters cmHandleQueryApiParameters) { - final CmHandleQueryServiceParameters cmHandleQueryServiceParameters = jsonObjectMapper.convertToValueType( - cmHandleQueryApiParameters, CmHandleQueryServiceParameters.class); + final CmHandleQueryServiceParameters cmHandleQueryServiceParameters = + jsonObjectMapper.convertToValueType(cmHandleQueryApiParameters, CmHandleQueryServiceParameters.class); validateCmHandleQueryParameters(cmHandleQueryServiceParameters, CmHandleQueryConditions.ALL_CONDITION_NAMES); - final Collection<NcmpServiceCmHandle> ncmpServiceCmHandles = - parameterizedCmHandleQueryService.queryCmHandles(cmHandleQueryServiceParameters); - trustLevelManager.applyEffectiveTrustLevels(ncmpServiceCmHandles); - return ncmpServiceCmHandles; + return parameterizedCmHandleQueryService.queryCmHandles(cmHandleQueryServiceParameters); } @Override diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cache/AlternateIdCacheConfig.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cache/AlternateIdCacheConfig.java new file mode 100644 index 0000000000..14936c81d7 --- /dev/null +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cache/AlternateIdCacheConfig.java @@ -0,0 +1,44 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.impl.cache; + +import com.hazelcast.config.MapConfig; +import com.hazelcast.map.IMap; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +public class AlternateIdCacheConfig extends HazelcastCacheConfig { + + private static final MapConfig cmHandleIdPerAlternateIdMapConfig = + createMapConfig("cmHandleIdPerAlternateIdMapConfig"); + + /** + * Distributed instance used for mapping alternate id to cm handle id. 
+ * + * @return configured map of cm handle id by alternate id + */ + @Bean + public IMap<String, String> cmHandleIdPerAlternateId() { + return getOrCreateHazelcastInstance(cmHandleIdPerAlternateIdMapConfig).getMap("cmHandleIdPerAlternateId"); + } + +} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cache/HazelcastCacheConfig.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cache/HazelcastCacheConfig.java index 75007e2e35..1a7ef758d8 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cache/HazelcastCacheConfig.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cache/HazelcastCacheConfig.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================== - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,7 +25,6 @@ import com.hazelcast.config.MapConfig; import com.hazelcast.config.NamedConfig; import com.hazelcast.config.NearCacheConfig; import com.hazelcast.config.QueueConfig; -import com.hazelcast.config.RestEndpointGroup; import com.hazelcast.config.SetConfig; import com.hazelcast.core.Hazelcast; import com.hazelcast.core.HazelcastInstance; @@ -61,7 +60,6 @@ public class HazelcastCacheConfig { config.setClusterName(clusterName); config.setClassLoader(Dataspace.class.getClassLoader()); configureDataStructures(namedConfig, config); - exposeClusterInformation(config); updateDiscoveryMode(config); return config; } @@ -130,9 +128,4 @@ public class HazelcastCacheConfig { } } - protected void exposeClusterInformation(final Config config) { - config.getNetworkConfig().getRestApiConfig().setEnabled(true) - .enableGroups(RestEndpointGroup.HEALTH_CHECK, RestEndpointGroup.CLUSTER_READ); - } - } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/cmavc/CmAvcEventConsumer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/cmavc/CmAvcEventConsumer.java index 2d1f64802b..eca8380756 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/cmavc/CmAvcEventConsumer.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/cmavc/CmAvcEventConsumer.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (c) 2023-2024 Nordix Foundation. + * Copyright (c) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -24,7 +24,7 @@ import io.cloudevents.CloudEvent; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.onap.cps.events.EventsPublisher; +import org.onap.cps.events.EventsProducer; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.kafka.annotation.KafkaListener; @@ -43,7 +43,7 @@ public class CmAvcEventConsumer { @Value("${app.ncmp.avc.cm-events-topic}") private String cmEventsTopicName; - private final EventsPublisher<CloudEvent> eventsPublisher; + private final EventsProducer<CloudEvent> eventsProducer; /** * Incoming Cm AvcEvent in the form of Consumer Record, it will be forwarded as is to a target topic. @@ -58,6 +58,6 @@ public class CmAvcEventConsumer { final CloudEvent outgoingAvcEvent = cmAvcEventAsConsumerRecord.value(); final String outgoingAvcEventKey = cmAvcEventAsConsumerRecord.key(); log.debug("Consuming AVC event with key : {} and value : {}", outgoingAvcEventKey, outgoingAvcEvent); - eventsPublisher.publishCloudEvent(cmEventsTopicName, outgoingAvcEventKey, outgoingAvcEvent); + eventsProducer.sendCloudEvent(cmEventsTopicName, outgoingAvcEventKey, outgoingAvcEvent); } } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/dmi/DmiInEventProducer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/dmi/DmiInEventProducer.java index c62916f05c..baa9926a40 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/dmi/DmiInEventProducer.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/dmi/DmiInEventProducer.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,14 +20,14 @@ package org.onap.cps.ncmp.impl.cmnotificationsubscription.dmi; +import static org.onap.cps.ncmp.events.NcmpEventDataSchema.SUBSCRIPTIONS_V1; + import io.cloudevents.CloudEvent; -import io.cloudevents.core.builder.CloudEventBuilder; -import java.net.URI; -import java.util.UUID; +import java.util.Map; import lombok.RequiredArgsConstructor; -import org.onap.cps.events.EventsPublisher; +import org.onap.cps.events.EventsProducer; import org.onap.cps.ncmp.impl.cmnotificationsubscription_1_0_0.ncmp_to_dmi.DmiInEvent; -import org.onap.cps.utils.JsonObjectMapper; +import org.onap.cps.ncmp.utils.events.NcmpEvent; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.stereotype.Component; @@ -37,34 +37,35 @@ import org.springframework.stereotype.Component; @ConditionalOnProperty(name = "notification.enabled", havingValue = "true", matchIfMissing = true) public class DmiInEventProducer { - private final EventsPublisher<CloudEvent> eventsPublisher; - private final JsonObjectMapper jsonObjectMapper; + private final EventsProducer<CloudEvent> eventsProducer; @Value("${app.ncmp.avc.cm-subscription-dmi-in}") private String dmiInEventTopic; /** - * Publish the event to the provided dmi plugin with key as subscription id and the event is in Cloud Event format. + * Send the event to the provided dmi plugin with key as subscription id and the event is in Cloud Event format. * * @param subscriptionId Cm Subscription Id * @param dmiPluginName Dmi Plugin Name * @param eventType Type of event * @param dmiInEvent Cm Notification Subscription event for Dmi */ - public void publishDmiInEvent(final String subscriptionId, final String dmiPluginName, - final String eventType, final DmiInEvent dmiInEvent) { - eventsPublisher.publishCloudEvent(dmiInEventTopic, subscriptionId, + public void sendDmiInEvent(final String subscriptionId, final String dmiPluginName, + final String eventType, final DmiInEvent dmiInEvent) { + eventsProducer.sendCloudEvent(dmiInEventTopic, subscriptionId, buildAndGetDmiInEventAsCloudEvent(subscriptionId, dmiPluginName, eventType, dmiInEvent)); } - private CloudEvent buildAndGetDmiInEventAsCloudEvent(final String subscriptionId, - final String dmiPluginName, final String eventType, final DmiInEvent dmiInEvent) { - return CloudEventBuilder.v1().withId(UUID.randomUUID().toString()).withType(eventType) - .withSource(URI.create("NCMP")) - .withDataSchema(URI.create("org.onap.ncmp.dmi.cm.subscription:1.0.0")) - .withExtension("correlationid", subscriptionId.concat("#").concat(dmiPluginName)) - .withData(jsonObjectMapper.asJsonBytes(dmiInEvent)).build(); + private CloudEvent buildAndGetDmiInEventAsCloudEvent(final String subscriptionId, final String dmiPluginName, + final String eventType, final DmiInEvent dmiInEvent) { + return NcmpEvent.builder() + .type(eventType) + .dataSchema(SUBSCRIPTIONS_V1.getDataSchema()) + .extensions(Map.of("correlationid", String.join("#", subscriptionId, dmiPluginName))) + .data(dmiInEvent) + .build() + .asCloudEvent(); } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/dmi/DmiOutEventConsumer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/dmi/DmiOutEventConsumer.java index 98c66afe30..d5e7106795 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/dmi/DmiOutEventConsumer.java +++ 
b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/dmi/DmiOutEventConsumer.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -105,7 +105,7 @@ public class DmiOutEventConsumer { private void handleEventsStatusPerDmi(final String subscriptionId, final String eventType) { final Map<String, DmiCmSubscriptionDetails> dmiSubscriptionsPerDmi = dmiCacheHandler.get(subscriptionId); final NcmpOutEvent ncmpOutEvent = ncmpOutEventMapper.toNcmpOutEvent(subscriptionId, dmiSubscriptionsPerDmi); - ncmpOutEventProducer.publishNcmpOutEvent(subscriptionId, eventType, ncmpOutEvent, false); + ncmpOutEventProducer.sendNcmpOutEvent(subscriptionId, eventType, ncmpOutEvent, false); } private boolean checkStatusCodeAndMessage(final NcmpResponseStatus ncmpResponseStatus, diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/CmSubscriptionHandlerImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/CmSubscriptionHandlerImpl.java index 1b368dde6c..f6ac0cf699 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/CmSubscriptionHandlerImpl.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/CmSubscriptionHandlerImpl.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -75,7 +75,7 @@ public class CmSubscriptionHandlerImpl implements CmSubscriptionHandler { handleNewCmSubscription(subscriptionId); scheduleNcmpOutEventResponse(subscriptionId, "subscriptionCreateResponse"); } else { - rejectAndPublishCreateRequest(subscriptionId, predicates); + rejectAndSendCreateRequest(subscriptionId, predicates); } } @@ -87,7 +87,7 @@ public class CmSubscriptionHandlerImpl implements CmSubscriptionHandler { getLastRemainingAndOverlappingSubscriptionsPerDmi(subscriptionDataNodes); dmiCacheHandler.add(subscriptionId, mergeDmiCmSubscriptionDetailsPerDmiMaps(dmiCmSubscriptionTuple)); if (dmiCmSubscriptionTuple.lastRemainingSubscriptionsPerDmi().isEmpty()) { - acceptAndPublishDeleteRequest(subscriptionId); + acceptAndSendDeleteRequest(subscriptionId); } else { sendSubscriptionDeleteRequestToDmi(subscriptionId, dmiCmSubscriptionDetailsPerDmiMapper.toDmiCmSubscriptionsPerDmi( @@ -122,19 +122,19 @@ public class CmSubscriptionHandlerImpl implements CmSubscriptionHandler { } private void scheduleNcmpOutEventResponse(final String subscriptionId, final String eventType) { - ncmpOutEventProducer.publishNcmpOutEvent(subscriptionId, eventType, null, true); + ncmpOutEventProducer.sendNcmpOutEvent(subscriptionId, eventType, null, true); } - private void rejectAndPublishCreateRequest(final String subscriptionId, final List<Predicate> predicates) { + private void rejectAndSendCreateRequest(final String subscriptionId, final List<Predicate> predicates) { final Set<String> subscriptionTargetFilters = predicates.stream().flatMap(predicate -> predicate.getTargetFilter().stream()) .collect(Collectors.toSet()); final NcmpOutEvent ncmpOutEvent = ncmpOutEventMapper.toNcmpOutEventForRejectedRequest(subscriptionId, new ArrayList<>(subscriptionTargetFilters)); - ncmpOutEventProducer.publishNcmpOutEvent(subscriptionId, "subscriptionCreateResponse", ncmpOutEvent, false); + ncmpOutEventProducer.sendNcmpOutEvent(subscriptionId, "subscriptionCreateResponse", ncmpOutEvent, false); } - private void acceptAndPublishDeleteRequest(final String subscriptionId) { + private void acceptAndSendDeleteRequest(final String subscriptionId) { final Set<String> dmiServiceNames = dmiCacheHandler.get(subscriptionId).keySet(); for (final String dmiServiceName : dmiServiceNames) { dmiCacheHandler.updateDmiSubscriptionStatus(subscriptionId, dmiServiceName, @@ -143,7 +143,7 @@ public class CmSubscriptionHandlerImpl implements CmSubscriptionHandler { } final NcmpOutEvent ncmpOutEvent = ncmpOutEventMapper.toNcmpOutEvent(subscriptionId, dmiCacheHandler.get(subscriptionId)); - ncmpOutEventProducer.publishNcmpOutEvent(subscriptionId, "subscriptionDeleteResponse", ncmpOutEvent, + ncmpOutEventProducer.sendNcmpOutEvent(subscriptionId, "subscriptionDeleteResponse", ncmpOutEvent, false); } @@ -158,15 +158,15 @@ public class CmSubscriptionHandlerImpl implements CmSubscriptionHandler { if (dmiCmSubscriptionPredicates.isEmpty()) { acceptAndPersistCmSubscriptionPerDmi(subscriptionId, dmiPluginName); } else { - publishDmiInEventPerDmi(subscriptionId, dmiPluginName, dmiCmSubscriptionPredicates); + sendDmiInEventPerDmi(subscriptionId, dmiPluginName, dmiCmSubscriptionPredicates); } }); } - private void publishDmiInEventPerDmi(final String subscriptionId, final String dmiPluginName, - final List<DmiCmSubscriptionPredicate> dmiCmSubscriptionPredicates) { + private void sendDmiInEventPerDmi(final String subscriptionId, final String dmiPluginName, + final List<DmiCmSubscriptionPredicate> dmiCmSubscriptionPredicates) { final DmiInEvent dmiInEvent 
= dmiInEventMapper.toDmiInEvent(dmiCmSubscriptionPredicates); - dmiInEventProducer.publishDmiInEvent(subscriptionId, dmiPluginName, + dmiInEventProducer.sendDmiInEvent(subscriptionId, dmiPluginName, "subscriptionCreateRequest", dmiInEvent); } @@ -183,7 +183,7 @@ public class CmSubscriptionHandlerImpl implements CmSubscriptionHandler { final DmiInEvent dmiInEvent = dmiInEventMapper.toDmiInEvent( dmiCmSubscriptionDetails.getDmiCmSubscriptionPredicates()); - dmiInEventProducer.publishDmiInEvent(subscriptionId, + dmiInEventProducer.sendDmiInEvent(subscriptionId, dmiPluginName, "subscriptionDeleteRequest", dmiInEvent); }); } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/NcmpOutEventProducer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/NcmpOutEventProducer.java index 3371d59f7a..639fb65296 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/NcmpOutEventProducer.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/NcmpOutEventProducer.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,11 +20,10 @@ package org.onap.cps.ncmp.impl.cmnotificationsubscription.ncmp; +import static org.onap.cps.ncmp.events.NcmpEventDataSchema.SUBSCRIPTIONS_V1; + import io.cloudevents.CloudEvent; -import io.cloudevents.core.builder.CloudEventBuilder; -import java.net.URI; import java.util.Map; -import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; @@ -32,10 +31,10 @@ import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import org.onap.cps.events.EventsPublisher; +import org.onap.cps.events.EventsProducer; import org.onap.cps.ncmp.impl.cmnotificationsubscription.cache.DmiCacheHandler; import org.onap.cps.ncmp.impl.cmnotificationsubscription_1_0_0.ncmp_to_client.NcmpOutEvent; -import org.onap.cps.utils.JsonObjectMapper; +import org.onap.cps.ncmp.utils.events.NcmpEvent; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.stereotype.Component; @@ -52,8 +51,7 @@ public class NcmpOutEventProducer { @Value("${ncmp.timers.subscription-forwarding.dmi-response-timeout-ms}") private Integer dmiOutEventTimeoutInMs; - private final EventsPublisher<CloudEvent> eventsPublisher; - private final JsonObjectMapper jsonObjectMapper; + private final EventsProducer<CloudEvent> eventsProducer; private final NcmpOutEventMapper ncmpOutEventMapper; private final DmiCacheHandler dmiCacheHandler; private final ScheduledExecutorService scheduledExecutorService = Executors.newSingleThreadScheduledExecutor(); @@ -61,7 +59,7 @@ public class NcmpOutEventProducer { new ConcurrentHashMap<>(); /** - * Publish the event to the client who requested the subscription with key as subscription id and event is Cloud + * Send the event to the client who requested 
the subscription with key as subscription id and event is Cloud * Event compliant. * * @param subscriptionId Cm Subscription Id @@ -69,71 +67,70 @@ * @param ncmpOutEvent Cm Notification Subscription Event for the * client * @param isScheduledEvent Determines if the event is to be scheduled - * or published now + * or sent now */ - public void publishNcmpOutEvent(final String subscriptionId, final String eventType, - final NcmpOutEvent ncmpOutEvent, final boolean isScheduledEvent) { + public void sendNcmpOutEvent(final String subscriptionId, final String eventType, + final NcmpOutEvent ncmpOutEvent, final boolean isScheduledEvent) { final String taskKey = subscriptionId.concat(eventType); if (isScheduledEvent && !scheduledTasksPerSubscriptionIdAndEventType.containsKey(taskKey)) { - final ScheduledFuture<?> scheduledFuture = scheduleAndPublishNcmpOutEvent(subscriptionId, eventType); + final ScheduledFuture<?> scheduledFuture = scheduleAndSendNcmpOutEvent(subscriptionId, eventType); scheduledTasksPerSubscriptionIdAndEventType.putIfAbsent(taskKey, scheduledFuture); log.debug("Scheduled the Cm Subscription Event for subscriptionId : {} and eventType : {}", subscriptionId, eventType); } else { cancelScheduledTask(taskKey); if (ncmpOutEvent != null) { - publishNcmpOutEventNow(subscriptionId, eventType, ncmpOutEvent); - log.debug("Published Cm Subscription Event on demand for subscriptionId : {} and eventType : {}", + sendNcmpOutEventNow(subscriptionId, eventType, ncmpOutEvent); + log.debug("Sent Cm Subscription Event on demand for subscriptionId : {} and eventType : {}", subscriptionId, eventType); } } } - private ScheduledFuture<?> scheduleAndPublishNcmpOutEvent(final String subscriptionId, final String eventType) { + /** + * Get an NCMP out event as cloud event. 
+ * + * @param subscriptionId subscription id + * @param eventType event type + * @param ncmpOutEvent cm notification subscription NCMP out event + * @return cm notification subscription NCMP out event as cloud event + */ + public static CloudEvent buildAndGetNcmpOutEventAsCloudEvent( + final String subscriptionId, final String eventType, final NcmpOutEvent ncmpOutEvent) { + + return NcmpEvent.builder() + .type(eventType) + .dataSchema(SUBSCRIPTIONS_V1.getDataSchema()) + .extensions(Map.of("correlationid", subscriptionId)) + .data(ncmpOutEvent) + .build() + .asCloudEvent(); + } + + private ScheduledFuture<?> scheduleAndSendNcmpOutEvent(final String subscriptionId, final String eventType) { final NcmpOutEventPublishingTask ncmpOutEventPublishingTask = - new NcmpOutEventPublishingTask(ncmpOutEventTopic, subscriptionId, eventType, eventsPublisher, - jsonObjectMapper, ncmpOutEventMapper, dmiCacheHandler); + new NcmpOutEventPublishingTask(ncmpOutEventTopic, subscriptionId, eventType, eventsProducer, + ncmpOutEventMapper, dmiCacheHandler); return scheduledExecutorService.schedule(ncmpOutEventPublishingTask, dmiOutEventTimeoutInMs, TimeUnit.MILLISECONDS); } private void cancelScheduledTask(final String taskKey) { - final ScheduledFuture<?> scheduledFuture = scheduledTasksPerSubscriptionIdAndEventType.get(taskKey); if (scheduledFuture != null) { scheduledFuture.cancel(true); scheduledTasksPerSubscriptionIdAndEventType.remove(taskKey); } - } - - private void publishNcmpOutEventNow(final String subscriptionId, final String eventType, - final NcmpOutEvent ncmpOutEvent) { + private void sendNcmpOutEventNow(final String subscriptionId, final String eventType, + final NcmpOutEvent ncmpOutEvent) { final CloudEvent ncmpOutEventAsCloudEvent = - buildAndGetNcmpOutEventAsCloudEvent(jsonObjectMapper, subscriptionId, eventType, ncmpOutEvent); - eventsPublisher.publishCloudEvent(ncmpOutEventTopic, subscriptionId, ncmpOutEventAsCloudEvent); + buildAndGetNcmpOutEventAsCloudEvent(subscriptionId, eventType, ncmpOutEvent); + eventsProducer.sendCloudEvent(ncmpOutEventTopic, subscriptionId, ncmpOutEventAsCloudEvent); dmiCacheHandler.removeAcceptedAndRejectedDmiSubscriptionEntries(subscriptionId); } - /** - * Get an NCMP out event as cloud event. 
- * - * @param jsonObjectMapper JSON object mapper - * @param subscriptionId subscription id - * @param eventType event type - * @param ncmpOutEvent cm notification subscription NCMP out event - * @return cm notification subscription NCMP out event as cloud event - */ - public static CloudEvent buildAndGetNcmpOutEventAsCloudEvent(final JsonObjectMapper jsonObjectMapper, - final String subscriptionId, final String eventType, final NcmpOutEvent ncmpOutEvent) { - - return CloudEventBuilder.v1().withId(UUID.randomUUID().toString()).withType(eventType) - .withSource(URI.create("NCMP")).withDataSchema(URI.create("org.onap.ncmp.cm.subscription:1.0.0")) - .withExtension("correlationid", subscriptionId) - .withData(jsonObjectMapper.asJsonBytes(ncmpOutEvent)).build(); - } - } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/NcmpOutEventPublishingTask.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/NcmpOutEventPublishingTask.java index f8f253d275..80d7981db9 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/NcmpOutEventPublishingTask.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/NcmpOutEventPublishingTask.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -26,11 +26,10 @@ import io.cloudevents.CloudEvent; import java.util.Map; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import org.onap.cps.events.EventsPublisher; +import org.onap.cps.events.EventsProducer; import org.onap.cps.ncmp.impl.cmnotificationsubscription.cache.DmiCacheHandler; import org.onap.cps.ncmp.impl.cmnotificationsubscription.models.DmiCmSubscriptionDetails; import org.onap.cps.ncmp.impl.cmnotificationsubscription_1_0_0.ncmp_to_client.NcmpOutEvent; -import org.onap.cps.utils.JsonObjectMapper; @Slf4j @RequiredArgsConstructor @@ -39,13 +38,12 @@ public class NcmpOutEventPublishingTask implements Runnable { private final String topicName; private final String subscriptionId; private final String eventType; - private final EventsPublisher<CloudEvent> eventsPublisher; - private final JsonObjectMapper jsonObjectMapper; + private final EventsProducer<CloudEvent> eventsProducer; private final NcmpOutEventMapper ncmpOutEventMapper; private final DmiCacheHandler dmiCacheHandler; /** - * Delegating the responsibility of publishing NcmpOutEvent as a separate task which will + * Delegating the responsibility of sending NcmpOutEvent as a separate task which will * be called after a specified delay. 
*/ @Override @@ -54,9 +52,8 @@ public class NcmpOutEventPublishingTask implements Runnable { dmiCacheHandler.get(subscriptionId); final NcmpOutEvent ncmpOutEvent = ncmpOutEventMapper.toNcmpOutEvent(subscriptionId, dmiSubscriptionsPerDmi); - eventsPublisher.publishCloudEvent(topicName, subscriptionId, - buildAndGetNcmpOutEventAsCloudEvent(jsonObjectMapper, subscriptionId, eventType, - ncmpOutEvent)); + eventsProducer.sendCloudEvent(topicName, subscriptionId, + buildAndGetNcmpOutEventAsCloudEvent(subscriptionId, eventType, ncmpOutEvent)); dmiCacheHandler.removeAcceptedAndRejectedDmiSubscriptionEntries(subscriptionId); } } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/DmiDataOperations.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/DmiDataOperations.java index 189239ceb2..ed67be64d5 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/DmiDataOperations.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/DmiDataOperations.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation + * Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2022 Bell Canada * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -34,7 +34,6 @@ import java.util.Set; import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; import org.onap.cps.api.exceptions.CpsException; -import org.onap.cps.api.exceptions.DataValidationException; import org.onap.cps.ncmp.api.NcmpResponseStatus; import org.onap.cps.ncmp.api.data.models.CmResourceAddress; import org.onap.cps.ncmp.api.data.models.DataOperationRequest; @@ -51,6 +50,7 @@ import org.onap.cps.ncmp.impl.dmi.DmiRestClient; import org.onap.cps.ncmp.impl.inventory.InventoryPersistence; import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle; import org.onap.cps.ncmp.impl.models.DmiRequestBody; +import org.onap.cps.ncmp.impl.utils.AlternateIdMatcher; import org.onap.cps.ncmp.impl.utils.http.RestServiceUrlTemplateBuilder; import org.onap.cps.ncmp.impl.utils.http.UrlTemplateParameters; import org.onap.cps.utils.JsonObjectMapper; @@ -70,6 +70,7 @@ import reactor.core.publisher.Mono; public class DmiDataOperations { private final InventoryPersistence inventoryPersistence; + private final AlternateIdMatcher alternateIdMatcher; private final JsonObjectMapper jsonObjectMapper; private final DmiProperties dmiProperties; private final DmiRestClient dmiRestClient; @@ -79,7 +80,7 @@ public class DmiDataOperations { * This method fetches the resource data from the operational data store for a given CM handle * identifier on the specified resource using the DMI client. * - * @param cmResourceAddress Target datastore, CM handle, and resource identifier. + * @param cmResourceAddress Target datastore, CM handle reference, and resource identifier. * @param options Options query string. * @param topic Topic name for triggering asynchronous responses. * @param requestId Request ID for asynchronous responses. 
@@ -94,7 +95,8 @@ public class DmiDataOperations { final String topic, final String requestId, final String authorization) { - final YangModelCmHandle yangModelCmHandle = resolveYangModelCmHandleFromCmHandleReference(cmResourceAddress); + final YangModelCmHandle yangModelCmHandle = getYangModelCmHandle( + cmResourceAddress.resolveCmHandleReferenceToId()); final CmHandleState cmHandleState = yangModelCmHandle.getCompositeState().getCmHandleState(); validateIfCmHandleStateReady(yangModelCmHandle, cmHandleState); final String jsonRequestBody = getDmiRequestBody(READ, requestId, null, null, yangModelCmHandle); @@ -123,7 +125,7 @@ public class DmiDataOperations { PASSTHROUGH_OPERATIONAL.getDatastoreName(), yangModelCmHandle, "/", null, null); return dmiRestClient.synchronousPostOperationWithJsonData(DATA, urlTemplateParameters, jsonRequestBody, READ, - null); + DmiRestClient.NO_AUTHORIZATION); } /** @@ -140,10 +142,10 @@ public class DmiDataOperations { final String requestId, final String authorization) { - final Set<String> cmHandlesReferences = getDistinctCmHandleReferences(dataOperationRequest); + final Set<String> cmHandleIds = getDistinctCmHandleIds(dataOperationRequest); final Collection<YangModelCmHandle> yangModelCmHandles - = inventoryPersistence.getYangModelCmHandlesFromCmHandleReferences(cmHandlesReferences); + = inventoryPersistence.getYangModelCmHandles(cmHandleIds); final Map<String, List<DmiDataOperation>> operationsOutPerDmiServiceName = DmiDataOperationsHelper.processPerDefinitionInDataOperationsRequest(topicParamInQuery, @@ -157,22 +159,22 @@ public class DmiDataOperations { * This method creates the resource data from pass-through running data store for given cm handle * identifier on given resource using dmi client. * - * @param cmHandleId network resource identifier - * @param resourceId resource identifier - * @param operationType operation enum - * @param requestData the request data - * @param dataType data type - * @param authorization contents of Authorization header, or null if not present + * @param cmHandleReference network resource identifier + * @param resourceId resource identifier + * @param operationType operation enum + * @param requestData the request data + * @param dataType data type + * @param authorization contents of Authorization header, or null if not present * @return {@code ResponseEntity} response entity */ - public ResponseEntity<Object> writeResourceDataPassThroughRunningFromDmi(final String cmHandleId, + public ResponseEntity<Object> writeResourceDataPassThroughRunningFromDmi(final String cmHandleReference, final String resourceId, final OperationType operationType, final String requestData, final String dataType, final String authorization) { final CmResourceAddress cmResourceAddress = - new CmResourceAddress(PASSTHROUGH_RUNNING.getDatastoreName(), cmHandleId, resourceId); + new CmResourceAddress(PASSTHROUGH_RUNNING.getDatastoreName(), cmHandleReference, resourceId); final YangModelCmHandle yangModelCmHandle = getYangModelCmHandle(cmResourceAddress.resolveCmHandleReferenceToId()); @@ -248,10 +250,11 @@ public class DmiDataOperations { } } - private static Set<String> getDistinctCmHandleReferences(final DataOperationRequest dataOperationRequest) { + private Set<String> getDistinctCmHandleIds(final DataOperationRequest dataOperationRequest) { return dataOperationRequest.getDataOperationDefinitions().stream() - .flatMap(dataOperationDefinition -> - dataOperationDefinition.getCmHandleReferences().stream()).collect(Collectors.toSet()); + .flatMap(it 
-> it.getCmHandleReferences().stream()) + .map(alternateIdMatcher::getCmHandleId) + .collect(Collectors.toSet()); } private void asyncSendMultipleRequest(final String requestId, final String topicParamInQuery, @@ -281,16 +284,6 @@ public class DmiDataOperations { }).subscribe(); } - private YangModelCmHandle resolveYangModelCmHandleFromCmHandleReference(final CmResourceAddress cmResourceAddress) { - String cmHandleId = cmResourceAddress.getCmHandleReference(); - try { - return getYangModelCmHandle(cmHandleId); - } catch (final DataValidationException ignored) { - cmHandleId = cmResourceAddress.resolveCmHandleReferenceToId(); - return getYangModelCmHandle(cmHandleId); - } - } - private String createDmiDataOperationRequestAsJsonString( final List<DmiDataOperation> dmiDataOperationRequestBodies) { final DmiDataOperationRequest dmiDataOperationRequest = DmiDataOperationRequest.builder() @@ -316,7 +309,7 @@ public class DmiDataOperations { cmHandleIdsPerResponseCodesPerOperation.add(dmiDataOperationRequestBody, Map.of(dmiClientRequestException.getNcmpResponseStatus(), cmHandleIds)); }); - DmiDataOperationsHelper.publishErrorMessageToClientTopic(topicName, requestId, + DmiDataOperationsHelper.sendErrorMessageToClientTopic(topicName, requestId, cmHandleIdsPerResponseCodesPerOperation); } } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/NcmpCachedResourceRequestHandler.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/NcmpCachedResourceRequestHandler.java index 2d33234478..1b5dd2f853 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/NcmpCachedResourceRequestHandler.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/NcmpCachedResourceRequestHandler.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -47,7 +47,8 @@ public class NcmpCachedResourceRequestHandler extends NcmpDatastoreRequestHandle */ public Collection<DataNode> executeRequest(final String cmHandleId, final String resourceIdentifier, final boolean includeDescendants) { - final FetchDescendantsOption fetchDescendantsOption = getFetchDescendantsOption(includeDescendants); + final FetchDescendantsOption fetchDescendantsOption + = FetchDescendantsOption.getFetchDescendantsOption(includeDescendants); return networkCmProxyQueryService.queryResourceDataOperational(cmHandleId, resourceIdentifier, fetchDescendantsOption); } @@ -59,7 +60,8 @@ public class NcmpCachedResourceRequestHandler extends NcmpDatastoreRequestHandle final String requestId, final boolean includeDescendants, final String authorization) { - final FetchDescendantsOption fetchDescendantsOption = getFetchDescendantsOption(includeDescendants); + final FetchDescendantsOption fetchDescendantsOption + = FetchDescendantsOption.getFetchDescendantsOption(includeDescendants); final DataNode dataNode = cpsDataService.getDataNodes(cmResourceAddress.getDatastoreName(), cmResourceAddress.resolveCmHandleReferenceToId(), @@ -68,8 +70,4 @@ public class NcmpCachedResourceRequestHandler extends NcmpDatastoreRequestHandle return Mono.justOrEmpty(dataNode); } - private static FetchDescendantsOption getFetchDescendantsOption(final boolean includeDescendants) { - return includeDescendants ? FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS - : FetchDescendantsOption.OMIT_DESCENDANTS; - } } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/async/DataOperationEventConsumer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/async/DataOperationEventConsumer.java index 6f368da2d0..22f20c8784 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/async/DataOperationEventConsumer.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/async/DataOperationEventConsumer.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,7 +25,7 @@ import io.cloudevents.kafka.impl.KafkaHeaders; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.onap.cps.events.EventsPublisher; +import org.onap.cps.events.EventsProducer; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; @@ -39,11 +39,11 @@ import org.springframework.stereotype.Component; @ConditionalOnProperty(name = "notification.enabled", havingValue = "true", matchIfMissing = true) public class DataOperationEventConsumer { - private final EventsPublisher<CloudEvent> eventsPublisher; + private final EventsProducer<CloudEvent> eventsProducer; /** - * Consume the DataOperation cloud event published by producer to topic 'async-m2m.topic' - * and publish the same to client specified topic. + * Consume the DataOperation cloud event sent by producer to topic 'async-m2m.topic' + * and send the same to client specified topic. 
* * @param dataOperationEventConsumerRecord consuming event as a ConsumerRecord. */ @@ -52,12 +52,12 @@ public class DataOperationEventConsumer { filter = "includeDataOperationEventsOnly", groupId = "ncmp-data-operation-event-group", containerFactory = "cloudEventConcurrentKafkaListenerContainerFactory") - public void consumeAndPublish(final ConsumerRecord<String, CloudEvent> dataOperationEventConsumerRecord) { + public void consumeAndSend(final ConsumerRecord<String, CloudEvent> dataOperationEventConsumerRecord) { log.debug("Consuming event payload {} ...", dataOperationEventConsumerRecord.value()); final String eventTarget = KafkaHeaders.getParsedKafkaHeader( dataOperationEventConsumerRecord.headers(), "ce_destination"); final String correlationId = KafkaHeaders.getParsedKafkaHeader( dataOperationEventConsumerRecord.headers(), "ce_correlationid"); - eventsPublisher.publishCloudEvent(eventTarget, correlationId, dataOperationEventConsumerRecord.value()); + eventsProducer.sendCloudEvent(eventTarget, correlationId, dataOperationEventConsumerRecord.value()); } } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/async/AsyncRestRequestResponseEventConsumer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/async/DmiAsyncRequestResponseEventConsumer.java index f14bb15842..2575508807 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/async/AsyncRestRequestResponseEventConsumer.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/async/DmiAsyncRequestResponseEventConsumer.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (c) 2023-2024 Nordix Foundation. + * Copyright (c) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -22,7 +22,7 @@ package org.onap.cps.ncmp.impl.data.async; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import org.onap.cps.events.EventsPublisher; +import org.onap.cps.events.EventsProducer; import org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent; import org.onap.cps.ncmp.event.model.NcmpAsyncRequestResponseEvent; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; @@ -36,9 +36,9 @@ import org.springframework.stereotype.Component; @Slf4j @RequiredArgsConstructor @ConditionalOnProperty(name = "notification.enabled", havingValue = "true", matchIfMissing = true) -public class AsyncRestRequestResponseEventConsumer { +public class DmiAsyncRequestResponseEventConsumer { - private final EventsPublisher<NcmpAsyncRequestResponseEvent> eventsPublisher; + private final EventsProducer<NcmpAsyncRequestResponseEvent> eventsProducer; private final NcmpAsyncRequestResponseEventMapper ncmpAsyncRequestResponseEventMapper; /** @@ -50,12 +50,13 @@ public class AsyncRestRequestResponseEventConsumer { topics = "${app.ncmp.async-m2m.topic}", filter = "includeNonCloudEventsOnly", groupId = "ncmp-async-rest-request-event-group", + containerFactory = "legacyEventConcurrentKafkaListenerContainerFactory", properties = {"spring.json.value.default.type=org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent"}) public void consumeAndForward(final DmiAsyncRequestResponseEvent dmiAsyncRequestResponseEvent) { log.debug("Consuming event {} ...", dmiAsyncRequestResponseEvent); final NcmpAsyncRequestResponseEvent ncmpAsyncRequestResponseEvent = ncmpAsyncRequestResponseEventMapper.toNcmpAsyncEvent(dmiAsyncRequestResponseEvent); - eventsPublisher.publishEvent(ncmpAsyncRequestResponseEvent.getEventTarget(), + eventsProducer.sendEvent(ncmpAsyncRequestResponseEvent.getEventTarget(), ncmpAsyncRequestResponseEvent.getEventId(), ncmpAsyncRequestResponseEvent); } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/utils/DataOperationEventCreator.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/utils/DataOperationEventCreator.java index d74abb9935..14e2eda2d2 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/utils/DataOperationEventCreator.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/utils/DataOperationEventCreator.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,6 +20,8 @@ package org.onap.cps.ncmp.impl.data.utils; +import static org.onap.cps.ncmp.events.NcmpEventDataSchema.BATCH_RESPONSE_V1; + import io.cloudevents.CloudEvent; import java.util.ArrayList; import java.util.HashMap; @@ -27,7 +29,6 @@ import java.util.List; import java.util.Map; import lombok.AccessLevel; import lombok.NoArgsConstructor; -import lombok.extern.slf4j.Slf4j; import org.onap.cps.ncmp.api.NcmpResponseStatus; import org.onap.cps.ncmp.events.async1_0_0.Data; import org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent; @@ -36,7 +37,6 @@ import org.onap.cps.ncmp.impl.data.models.DmiDataOperation; import org.onap.cps.ncmp.utils.events.NcmpEvent; import org.springframework.util.MultiValueMap; -@Slf4j @NoArgsConstructor(access = AccessLevel.PRIVATE) public class DataOperationEventCreator { @@ -57,8 +57,13 @@ public class DataOperationEventCreator { final Data data = createPayloadFromDataOperationResponses(cmHandleIdsPerResponseCodesPerOperation); dataOperationEvent.setData(data); final Map<String, String> extensions = createDataOperationExtensions(requestId, clientTopic); - return NcmpEvent.builder().type(DataOperationEvent.class.getName()) - .data(dataOperationEvent).extensions(extensions).build().asCloudEvent(); + return NcmpEvent.builder() + .type(DataOperationEvent.class.getName()) + .data(dataOperationEvent) + .dataSchema(BATCH_RESPONSE_V1.getDataSchema()) + .extensions(extensions) + .build() + .asCloudEvent(); } private static Data createPayloadFromDataOperationResponses(final MultiValueMap<DmiDataOperation, diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/utils/DmiDataOperationsHelper.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/utils/DmiDataOperationsHelper.java index cb435f4a84..ee3f6fec70 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/utils/DmiDataOperationsHelper.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/utils/DmiDataOperationsHelper.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,7 +34,7 @@ import java.util.Set; import lombok.AccessLevel; import lombok.NoArgsConstructor; import lombok.extern.slf4j.Slf4j; -import org.onap.cps.events.EventsPublisher; +import org.onap.cps.events.EventsProducer; import org.onap.cps.ncmp.api.NcmpResponseStatus; import org.onap.cps.ncmp.api.data.models.DataOperationDefinition; import org.onap.cps.ncmp.api.data.models.DataOperationRequest; @@ -114,7 +114,7 @@ public class DmiDataOperationsHelper { DmiDataOperation.buildDmiDataOperationRequestBodyWithoutCmHandles(dataOperationDefinitionIn), CM_HANDLES_NOT_READY, nonReadyCmHandleReferences); } - publishErrorMessageToClientTopic(topicParamInQuery, requestId, cmHandleReferencesPerResponseCodesPerOperation); + sendErrorMessageToClientTopic(topicParamInQuery, requestId, cmHandleReferencesPerResponseCodesPerOperation); return dmiDataOperationsOutPerDmiServiceName; } @@ -127,24 +127,24 @@ public class DmiDataOperationsHelper { } /** - * Creates data operation cloud event and publish it to client topic. + * Creates data operation cloud event and sends it to client topic. 
* * @param clientTopic client given topic * @param requestId unique identifier per request * @param cmHandleIdsPerResponseCodesPerOperation list of cm handle ids per operation with response code */ - public static void publishErrorMessageToClientTopic(final String clientTopic, - final String requestId, - final MultiValueMap<DmiDataOperation, + public static void sendErrorMessageToClientTopic(final String clientTopic, + final String requestId, + final MultiValueMap<DmiDataOperation, Map<NcmpResponseStatus, List<String>>> cmHandleIdsPerResponseCodesPerOperation) { if (!cmHandleIdsPerResponseCodesPerOperation.isEmpty()) { final CloudEvent dataOperationCloudEvent = DataOperationEventCreator.createDataOperationEvent(clientTopic, requestId, cmHandleIdsPerResponseCodesPerOperation); - final EventsPublisher<CloudEvent> eventsPublisher = CpsApplicationContext.getCpsBean(EventsPublisher.class); - log.warn("publishing error message to client topic: {} ,requestId: {}, data operation cloud event id: {}", + final EventsProducer<CloudEvent> eventsProducer = CpsApplicationContext.getCpsBean(EventsProducer.class); + log.warn("sending error message to client topic: {} ,requestId: {}, data operation cloud event id: {}", clientTopic, requestId, dataOperationCloudEvent.getId()); - eventsPublisher.publishCloudEvent(clientTopic, requestId, dataOperationCloudEvent); + eventsProducer.sendCloudEvent(clientTopic, requestId, dataOperationCloudEvent); } } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImpl.java index 04c3ad2fc6..56352c1c81 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImpl.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImpl.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
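Two details of the error path above are easy to miss in the flattened hunks: the created cloud event now carries an explicit data schema (BATCH_RESPONSE_V1), and the static helper resolves the renamed producer bean through CpsApplicationContext before sending. A condensed sketch using only names that appear in the diff; dataOperationEvent, extensions, clientTopic and requestId are assumed to be in scope:

    // Build the data operation CloudEvent, now with an explicit data schema.
    final CloudEvent dataOperationCloudEvent = NcmpEvent.builder()
            .type(DataOperationEvent.class.getName())
            .data(dataOperationEvent)
            .dataSchema(BATCH_RESPONSE_V1.getDataSchema())
            .extensions(extensions)
            .build()
            .asCloudEvent();

    // The helper is static, so the producer bean is fetched from the application context rather than injected.
    final EventsProducer<CloudEvent> eventsProducer = CpsApplicationContext.getCpsBean(EventsProducer.class);
    eventsProducer.sendCloudEvent(clientTopic, requestId, dataOperationCloudEvent);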
@@ -31,6 +31,7 @@ import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest; import org.onap.cps.ncmp.api.datajobs.models.DmiWriteOperation; import org.onap.cps.ncmp.api.datajobs.models.ProducerKey; import org.onap.cps.ncmp.api.datajobs.models.SubJobWriteResponse; +import org.onap.cps.utils.JsonObjectMapper; import org.springframework.stereotype.Service; @Slf4j @@ -40,6 +41,7 @@ public class DataJobServiceImpl implements DataJobService { private final DmiSubJobRequestHandler dmiSubJobClient; private final WriteRequestExaminer writeRequestExaminer; + private final JsonObjectMapper jsonObjectMapper; @Override public void readDataJob(final String authorization, @@ -54,14 +56,25 @@ public class DataJobServiceImpl implements DataJobService { final String dataJobId, final DataJobMetadata dataJobMetadata, final DataJobWriteRequest dataJobWriteRequest) { - log.info("data job id for write operation is: {}", dataJobId); + + log.info("Data Job ID: {} - Total operations received: {}", dataJobId, dataJobWriteRequest.data().size()); + logJsonRepresentation("Initiating WRITE operation for Data Job ID: " + dataJobId, dataJobWriteRequest); final Map<ProducerKey, List<DmiWriteOperation>> dmiWriteOperationsPerProducerKey = writeRequestExaminer.splitDmiWriteOperationsFromRequest(dataJobId, dataJobWriteRequest); - return dmiSubJobClient.sendRequestsToDmi(authorization, - dataJobId, - dataJobMetadata, - dmiWriteOperationsPerProducerKey); + final List<SubJobWriteResponse> subJobWriteResponses = dmiSubJobClient.sendRequestsToDmi(authorization, + dataJobId, dataJobMetadata, dmiWriteOperationsPerProducerKey); + + log.info("Data Job ID: {} - Received {} sub-job(s) from DMI.", dataJobId, subJobWriteResponses.size()); + logJsonRepresentation("Finalized subJobWriteResponses for Data Job ID: " + dataJobId, subJobWriteResponses); + return subJobWriteResponses; + } + + private void logJsonRepresentation(final String description, final Object object) { + if (log.isDebugEnabled()) { + final String objectAsJsonString = jsonObjectMapper.asJsonString(object); + log.debug("{} (JSON): {}", description, objectAsJsonString); + } } } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DmiSubJobRequestHandler.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DmiSubJobRequestHandler.java index a118d53e7e..d74863a710 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DmiSubJobRequestHandler.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DmiSubJobRequestHandler.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -79,8 +79,11 @@ public class DmiSubJobRequestHandler { jsonObjectMapper.asJsonString(subJobWriteRequest), OperationType.CREATE, authorization); - final SubJobWriteResponse subJobWriteResponse = jsonObjectMapper - .convertToValueType(responseEntity.getBody(), SubJobWriteResponse.class); + final Map<String, String> responseAsKeyValuePairs = jsonObjectMapper + .convertToValueType(responseEntity.getBody(), Map.class); + final String subJobId = responseAsKeyValuePairs.get("subJobId"); + final SubJobWriteResponse subJobWriteResponse = new SubJobWriteResponse(subJobId, + producerKey.dmiServiceName(), producerKey.dataProducerIdentifier()); log.debug("Sub job write response: {}", subJobWriteResponse); subJobWriteResponses.add(subJobWriteResponse); }); diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/WriteRequestExaminer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/WriteRequestExaminer.java index 429a3790d4..f189edc89a 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/WriteRequestExaminer.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/WriteRequestExaminer.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -22,7 +22,6 @@ package org.onap.cps.ncmp.impl.datajobs; import java.util.ArrayList; import java.util.HashMap; -import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import lombok.RequiredArgsConstructor; @@ -31,7 +30,9 @@ import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest; import org.onap.cps.ncmp.api.datajobs.models.DmiWriteOperation; import org.onap.cps.ncmp.api.datajobs.models.ProducerKey; import org.onap.cps.ncmp.api.datajobs.models.WriteOperation; -import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle; +import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle; +import org.onap.cps.ncmp.impl.dmi.DmiServiceNameResolver; +import org.onap.cps.ncmp.impl.inventory.ParameterizedCmHandleQueryService; import org.onap.cps.ncmp.impl.models.RequiredDmiService; import org.onap.cps.ncmp.impl.utils.AlternateIdMatcher; import org.springframework.stereotype.Service; @@ -42,6 +43,7 @@ import org.springframework.stereotype.Service; public class WriteRequestExaminer { private final AlternateIdMatcher alternateIdMatcher; + private final ParameterizedCmHandleQueryService parameterizedCmHandleQueryService; private static final String PATH_SEPARATOR = "/"; /** @@ -52,25 +54,35 @@ public class WriteRequestExaminer { * @return {@code Map} map of Dmi Write Operations per Producer Key */ public Map<ProducerKey, List<DmiWriteOperation>> splitDmiWriteOperationsFromRequest( - final String dataJobId, - final DataJobWriteRequest dataJobWriteRequest) { + final String dataJobId, final DataJobWriteRequest dataJobWriteRequest) { final Map<ProducerKey, List<DmiWriteOperation>> dmiWriteOperationsPerProducerKey = new HashMap<>(); + final Map<String, NcmpServiceCmHandle> cmHandlePerAlternateId = getAllNcmpServiceCmHandlesWithoutProperties(); for (final WriteOperation writeOperation : dataJobWriteRequest.data()) { - examineWriteOperation(dataJobId, dmiWriteOperationsPerProducerKey, 
writeOperation); + examineWriteOperation(dataJobId, dmiWriteOperationsPerProducerKey, writeOperation, cmHandlePerAlternateId); } return dmiWriteOperationsPerProducerKey; } + private Map<String, NcmpServiceCmHandle> getAllNcmpServiceCmHandlesWithoutProperties() { + final Map<String, NcmpServiceCmHandle> ncmpServiceCmHandles = new HashMap<>(); + for (final NcmpServiceCmHandle ncmpServiceCmHandle + : parameterizedCmHandleQueryService.getAllCmHandlesWithoutProperties()) { + ncmpServiceCmHandles.put(ncmpServiceCmHandle.getAlternateId(), ncmpServiceCmHandle); + } + return ncmpServiceCmHandles; + } + private void examineWriteOperation(final String dataJobId, final Map<ProducerKey, List<DmiWriteOperation>> dmiWriteOperationsPerProducerKey, - final WriteOperation writeOperation) { + final WriteOperation writeOperation, + final Map<String, NcmpServiceCmHandle> cmHandlePerAlternateId) { log.debug("data job id for write operation is: {}", dataJobId); - final YangModelCmHandle yangModelCmHandle = alternateIdMatcher - .getYangModelCmHandleByLongestMatchingAlternateId(writeOperation.path(), PATH_SEPARATOR); + final NcmpServiceCmHandle ncmpServiceCmHandle = alternateIdMatcher + .getCmHandleByLongestMatchingAlternateId(writeOperation.path(), PATH_SEPARATOR, cmHandlePerAlternateId); - final DmiWriteOperation dmiWriteOperation = createDmiWriteOperation(writeOperation, yangModelCmHandle); + final DmiWriteOperation dmiWriteOperation = createDmiWriteOperation(writeOperation, ncmpServiceCmHandle); - final ProducerKey producerKey = createProducerKey(yangModelCmHandle); + final ProducerKey producerKey = createProducerKey(ncmpServiceCmHandle); final List<DmiWriteOperation> dmiWriteOperations; if (dmiWriteOperationsPerProducerKey.containsKey(producerKey)) { dmiWriteOperations = dmiWriteOperationsPerProducerKey.get(producerKey); @@ -81,27 +93,19 @@ public class WriteRequestExaminer { dmiWriteOperations.add(dmiWriteOperation); } - private ProducerKey createProducerKey(final YangModelCmHandle yangModelCmHandle) { - return new ProducerKey(yangModelCmHandle.resolveDmiServiceName(RequiredDmiService.DATA), - yangModelCmHandle.getDataProducerIdentifier()); + private ProducerKey createProducerKey(final NcmpServiceCmHandle ncmpServiceCmHandle) { + final String dmiDataServiceName = + DmiServiceNameResolver.resolveDmiServiceName(RequiredDmiService.DATA, ncmpServiceCmHandle); + return new ProducerKey(dmiDataServiceName, ncmpServiceCmHandle.getDataProducerIdentifier()); } private DmiWriteOperation createDmiWriteOperation(final WriteOperation writeOperation, - final YangModelCmHandle yangModelCmHandle) { + final NcmpServiceCmHandle ncmpServiceCmHandle) { return new DmiWriteOperation( writeOperation.path(), writeOperation.op(), - yangModelCmHandle.getModuleSetTag(), + ncmpServiceCmHandle.getModuleSetTag(), writeOperation.value(), - writeOperation.operationId(), - getPrivatePropertiesFromDataNode(yangModelCmHandle)); + writeOperation.operationId()); } - - private Map<String, String> getPrivatePropertiesFromDataNode(final YangModelCmHandle yangModelCmHandle) { - final Map<String, String> cmHandleDmiProperties = new LinkedHashMap<>(); - yangModelCmHandle.getDmiProperties() - .forEach(dmiProperty -> cmHandleDmiProperties.put(dmiProperty.getName(), dmiProperty.getValue())); - return cmHandleDmiProperties; - } - } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/dmi/DmiRestClient.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/dmi/DmiRestClient.java index a177272dff..060051eca7 100644 --- 
a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/dmi/DmiRestClient.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/dmi/DmiRestClient.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation + * Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2022 Bell Canada * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -53,8 +53,9 @@ import reactor.core.publisher.Mono; @Slf4j public class DmiRestClient { + public static final String NO_AUTHORIZATION = null; + private static final String NOT_SPECIFIED = ""; - private static final String NO_AUTHORIZATION = null; private final DmiProperties dmiProperties; private final JsonObjectMapper jsonObjectMapper; @@ -150,7 +151,7 @@ public class DmiRestClient { * * @param urlTemplateParameters The URL template parameters for the DMI data job status endpoint. * @param authorization The authorization token to be added to the request headers. - * @return A Mono emitting the status of the data job as a String. + * @return A Mono emitting the status of the data job in JSON format. * @throws DmiClientRequestException If there is an error during the DMI request. */ public Mono<String> getDataJobStatus(final UrlTemplateParameters urlTemplateParameters, @@ -160,8 +161,7 @@ public class DmiRestClient { .uri(urlTemplateParameters.urlTemplate(), urlTemplateParameters.urlVariables()) .headers(httpHeaders -> configureHttpHeaders(httpHeaders, authorization)) .retrieve() - .bodyToMono(JsonNode.class) - .map(jsonNode -> jsonNode.path("status").asText()) + .bodyToMono(String.class) .onErrorMap(throwable -> handleDmiClientException(throwable, OperationType.READ.getOperationName())); } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/AlternateIdChecker.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/AlternateIdChecker.java index a0ca44c9d0..aa7e261e8a 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/AlternateIdChecker.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/AlternateIdChecker.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================== - * Copyright (c) 2024 Nordix Foundation. + * Copyright (c) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
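With the change above, getDataJobStatus returns the DMI response body verbatim as a JSON string instead of extracting the status field inside the client. A hedged caller-side sketch of doing that extraction at the call site, assuming an injected DmiRestClient and Jackson (already used throughout the project); the variable names and error handling are illustrative:

    // assumes: com.fasterxml.jackson.databind.ObjectMapper, com.fasterxml.jackson.core.JsonProcessingException
    dmiRestClient.getDataJobStatus(urlTemplateParameters, authorization)
            .map(jsonBody -> {
                try {
                    // Same extraction the client used to perform internally via bodyToMono(JsonNode.class).
                    return new ObjectMapper().readTree(jsonBody).path("status").asText();
                } catch (final JsonProcessingException e) {
                    throw new IllegalStateException("DMI data job status response is not valid JSON", e);
                }
            })
            .subscribe(status -> log.debug("Data job status: {}", status));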
@@ -20,8 +20,10 @@ package org.onap.cps.ncmp.impl.inventory; +import com.hazelcast.map.IMap; import java.util.ArrayList; import java.util.Collection; +import java.util.HashSet; import java.util.Set; import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; @@ -29,7 +31,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.onap.cps.api.exceptions.DataNodeNotFoundException; import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle; -import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle; +import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Service; @Service @@ -42,6 +44,8 @@ public class AlternateIdChecker { } private final InventoryPersistence inventoryPersistence; + @Qualifier("cmHandleIdPerAlternateId") + private final IMap<String, String> cmHandleIdPerAlternateId; private static final String NO_CURRENT_ALTERNATE_ID = ""; @@ -54,9 +58,9 @@ public class AlternateIdChecker { * @return collection of cm handles ids which are acceptable */ public Collection<String> getIdsOfCmHandlesWithRejectedAlternateId( - final Collection<NcmpServiceCmHandle> newNcmpServiceCmHandles, - final Operation operation) { - final Set<String> assignedAlternateIds = getAlternateIdsAlreadyInDb(newNcmpServiceCmHandles); + final Collection<NcmpServiceCmHandle> newNcmpServiceCmHandles, + final Operation operation) { + final Set<String> assignedAlternateIds = new HashSet<>(getAlternateIdsAlreadyInDb(newNcmpServiceCmHandles)); final Collection<String> rejectedCmHandleIds = new ArrayList<>(); for (final NcmpServiceCmHandle ncmpServiceCmHandle : newNcmpServiceCmHandles) { final String cmHandleId = ncmpServiceCmHandle.getCmHandleId(); @@ -97,9 +101,7 @@ public class AlternateIdChecker { .map(NcmpServiceCmHandle::getAlternateId) .filter(StringUtils::isNotBlank) .collect(Collectors.toSet()); - return inventoryPersistence.getYangModelCmHandleByAlternateIds(alternateIdsToCheck).stream() - .map(YangModelCmHandle::getAlternateId) - .collect(Collectors.toSet()); + return cmHandleIdPerAlternateId.getAll(alternateIdsToCheck).keySet(); } private String getCurrentAlternateId(final Operation operation, final String cmHandleId) { diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryService.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryService.java index 9447f13b68..15aa1213aa 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryService.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryService.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -84,6 +84,16 @@ public interface CmHandleQueryService { Collection<DataNode> queryNcmpRegistryByCpsPath(String cpsPath, FetchDescendantsOption fetchDescendantsOption); /** + * Method to return data nodes representing the cm handles. 
+ * + * @param cpsPath cps path for which the cmHandle is requested + * @param queryResultLimit the maximum number of data nodes to return; if less than 1, returns all matching nodes + * @return a list of data nodes representing the cm handles. + */ + Collection<DataNode> queryNcmpRegistryByCpsPath(String cpsPath, FetchDescendantsOption fetchDescendantsOption, + int queryResultLimit); + + /** * Method to check the state of a cm handle with given id. * * @param cmHandleId cm handle id @@ -111,11 +121,22 @@ public interface CmHandleQueryService { boolean outputAlternateId); /** - * Get map of cmHandle references by DMI plugin identifier. + * Retrieves all CM handle references from the NCMP Inventory. + * Each CM handle reference represents a unique configuration management handle + * that can be identified by either its standard cm handle id or an alternate id. * - * @param dmiPluginIdentifier DMI plugin identifier - * @return map of cmHandle references key:CmHandleId Value:AlternateId + * @param outputAlternateId If {@code true}, returns alternate ids; if {@code false}, returns standard cm handle ids + * @return collection of cm handle references. Returns an empty collection if no references are found. + */ + Collection<String> getAllCmHandleReferences(boolean outputAlternateId); + + /** + * Retrieves all Cm handle references by cps path. + * + * @param cpsPath cps path for which the cmHandle is requested + * @param outputAlternateId If {@code true}, returns alternate ids; if {@code false}, returns standard cm handle ids + * @return collection of cm handle references. Returns an empty collection if no references are found. */ - Map<String, String> getCmHandleReferencesMapByDmiPluginIdentifier(String dmiPluginIdentifier); + Collection<String> getCmHandleReferencesByCpsPath(String cpsPath, boolean outputAlternateId); } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java index 59d0f9704e..8d1d50ec15 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2023 TechMahindra Ltd. 
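The interface changes above add a bounded registry query (queryResultLimit, where any value below 1 means no limit) and two leaf-level getters that return cm handle references directly instead of full data nodes. A small caller-side sketch, assuming an injected CmHandleQueryService; the cps paths and the limit of 100 are illustrative:

    // Bounded query: at most 100 matching cm-handle data nodes.
    final Collection<DataNode> advisedCmHandles = cmHandleQueryService.queryNcmpRegistryByCpsPath(
            "//state[@cm-handle-state='ADVISED']/ancestor::cm-handles",
            FetchDescendantsOption.OMIT_DESCENDANTS, 100);

    // Leaf-level queries return references only: alternate ids here, cm handle ids below.
    final Collection<String> allAlternateIds = cmHandleQueryService.getAllCmHandleReferences(true);
    final Collection<String> yellowCmHandleIds = cmHandleQueryService.getCmHandleReferencesByCpsPath(
            "//public-properties[@name=\"Color\" and @value=\"yellow\"]", false);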
* ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -26,19 +26,19 @@ import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DATASPACE_NA import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DMI_REGISTRY_ANCHOR; import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DMI_REGISTRY_PARENT; +import com.hazelcast.map.IMap; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; -import java.util.stream.Collectors; +import java.util.Set; import lombok.RequiredArgsConstructor; import org.onap.cps.api.CpsDataService; import org.onap.cps.api.CpsQueryService; import org.onap.cps.api.model.DataNode; import org.onap.cps.api.parameters.FetchDescendantsOption; import org.onap.cps.cpspath.parser.CpsPathUtil; -import org.onap.cps.impl.utils.CpsValidator; import org.onap.cps.ncmp.api.inventory.DataStoreSyncState; import org.onap.cps.ncmp.api.inventory.models.CmHandleState; import org.onap.cps.ncmp.api.inventory.models.TrustLevel; @@ -46,16 +46,17 @@ import org.onap.cps.ncmp.impl.inventory.models.ModelledDmiServiceLeaves; import org.onap.cps.ncmp.impl.inventory.models.PropertyType; import org.onap.cps.ncmp.impl.inventory.trustlevel.TrustLevelCacheConfig; import org.onap.cps.ncmp.impl.utils.YangDataConverter; +import org.onap.cps.utils.CpsValidator; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Component; @RequiredArgsConstructor @Component public class CmHandleQueryServiceImpl implements CmHandleQueryService { - - private static final String DESCENDANT_PATH = "//"; private static final String ANCESTOR_CM_HANDLES = "/ancestor::cm-handles"; + public static final String CM_HANDLE_ID = "id"; private static final String ALTERNATE_ID = "alternate-id"; + private static final Integer NO_LIMIT = 0; private final CpsDataService cpsDataService; private final CpsQueryService cpsQueryService; @@ -63,7 +64,7 @@ public class CmHandleQueryServiceImpl implements CmHandleQueryService { private final Map<String, TrustLevel> trustLevelPerDmiPlugin; @Qualifier(TrustLevelCacheConfig.TRUST_LEVEL_PER_CM_HANDLE) - private final Map<String, TrustLevel> trustLevelPerCmHandleId; + private final IMap<String, TrustLevel> trustLevelPerCmHandleId; private final CpsValidator cpsValidator; @@ -90,7 +91,8 @@ public class CmHandleQueryServiceImpl implements CmHandleQueryService { @Override public Collection<String> queryCmHandleIdsByState(final CmHandleState cmHandleState) { final Collection<DataNode> cmHandlesAsDataNodes = - queryNcmpRegistryByCpsPath("//state[@cm-handle-state='" + cmHandleState + "']", OMIT_DESCENDANTS); + queryNcmpRegistryByCpsPath("//state[@cm-handle-state='" + cmHandleState + "']", + OMIT_DESCENDANTS); return cmHandlesAsDataNodes.stream() .map(DataNode::getXpath) .map(YangDataConverter::extractCmHandleIdFromXpath) @@ -100,13 +102,20 @@ public class CmHandleQueryServiceImpl implements CmHandleQueryService { @Override public Collection<DataNode> queryNcmpRegistryByCpsPath(final String cpsPath, final FetchDescendantsOption fetchDescendantsOption) { + return queryNcmpRegistryByCpsPath(cpsPath, fetchDescendantsOption, NO_LIMIT); + } + + @Override + public Collection<DataNode> queryNcmpRegistryByCpsPath(final String cpsPath, + final FetchDescendantsOption fetchDescendantsOption, + final int queryResultLimit) { return 
cpsQueryService.queryDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, cpsPath, - fetchDescendantsOption); + fetchDescendantsOption, queryResultLimit); } @Override public Collection<DataNode> queryCmHandleAncestorsByCpsPath(final String cpsPath, - final FetchDescendantsOption fetchDescendantsOption) { + final FetchDescendantsOption fetchDescendantsOption) { if (CpsPathUtil.getCpsPathQuery(cpsPath).getXpathPrefix().endsWith("/cm-handles")) { return queryNcmpRegistryByCpsPath(cpsPath, fetchDescendantsOption); } @@ -128,50 +137,56 @@ public class CmHandleQueryServiceImpl implements CmHandleQueryService { @Override public Collection<String> getCmHandleReferencesByDmiPluginIdentifier(final String dmiPluginIdentifier, - final boolean outputAlternateId) { + final boolean outputAlternateId) { final Collection<String> cmHandleReferences = new HashSet<>(); for (final ModelledDmiServiceLeaves modelledDmiServiceLeaf : ModelledDmiServiceLeaves.values()) { - for (final DataNode cmHandleAsDataNode: getCmHandlesByDmiPluginIdentifierAndDmiProperty( - dmiPluginIdentifier, - modelledDmiServiceLeaf.getLeafName())) { - if (outputAlternateId) { - cmHandleReferences.add(cmHandleAsDataNode.getLeaves().get(ALTERNATE_ID).toString()); - } else { - cmHandleReferences.add(cmHandleAsDataNode.getLeaves().get("id").toString()); - } - } + cmHandleReferences.addAll(getIdsByDmiPluginIdentifierAndDmiProperty( + dmiPluginIdentifier, modelledDmiServiceLeaf.getLeafName(), outputAlternateId)); } return cmHandleReferences; } @Override - public Map<String, String> getCmHandleReferencesMapByDmiPluginIdentifier(final String dmiPluginIdentifier) { - final Map<String, String> cmHandleReferencesMap = new HashMap<>(); - for (final ModelledDmiServiceLeaves modelledDmiServiceLeaf : ModelledDmiServiceLeaves.values()) { - for (final DataNode cmHandleAsDataNode: getCmHandlesByDmiPluginIdentifierAndDmiProperty( - dmiPluginIdentifier, - modelledDmiServiceLeaf.getLeafName())) { - cmHandleReferencesMap.put(cmHandleAsDataNode.getLeaves().get("id").toString(), - cmHandleAsDataNode.getLeaves().get(ALTERNATE_ID).toString()); - } + public Collection<String> getAllCmHandleReferences(final boolean outputAlternateId) { + final String attributeName = outputAlternateId ? 
ALTERNATE_ID : CM_HANDLE_ID; + final String cpsPath = String.format("%s/cm-handles/@%s", NCMP_DMI_REGISTRY_PARENT, attributeName); + return cpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, cpsPath, String.class); + } + + @Override + public Collection<String> getCmHandleReferencesByCpsPath(final String cpsPath, final boolean outputAlternateId) { + final String cpsPathInQuery; + final String cpsPathInQueryWithAttribute; + if (CpsPathUtil.getCpsPathQuery(cpsPath).getXpathPrefix().endsWith("/cm-handles")) { + cpsPathInQuery = cpsPath; + } else { + cpsPathInQuery = cpsPath + ANCESTOR_CM_HANDLES; } - return cmHandleReferencesMap; + + if (outputAlternateId) { + cpsPathInQueryWithAttribute = cpsPathInQuery + "/@alternate-id"; + } else { + cpsPathInQueryWithAttribute = cpsPathInQuery + "/@id"; + } + return cpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, + cpsPathInQueryWithAttribute, String.class); } private Collection<String> getCmHandleReferencesByTrustLevel(final TrustLevel targetTrustLevel, final boolean outputAlternateId) { final Collection<String> selectedCmHandleReferences = new HashSet<>(); - for (final Map.Entry<String, TrustLevel> mapEntry : trustLevelPerDmiPlugin.entrySet()) { final String dmiPluginIdentifier = mapEntry.getKey(); final TrustLevel dmiTrustLevel = mapEntry.getValue(); final Map<String, String> candidateCmHandleReferences = - getCmHandleReferencesMapByDmiPluginIdentifier(dmiPluginIdentifier); + getCmHandleReferencesMapByDmiPluginIdentifier(dmiPluginIdentifier); + final Map<String, TrustLevel> trustLevelPerCmHandleIdInBatch = + trustLevelPerCmHandleId.getAll(candidateCmHandleReferences.keySet()); for (final Map.Entry<String, String> candidateCmHandleReference : candidateCmHandleReferences.entrySet()) { final TrustLevel candidateCmHandleTrustLevel = - trustLevelPerCmHandleId.get(candidateCmHandleReference.getKey()); + trustLevelPerCmHandleIdInBatch.get(candidateCmHandleReference.getKey()); final TrustLevel effectiveTrustlevel = - candidateCmHandleTrustLevel.getEffectiveTrustLevel(dmiTrustLevel); + candidateCmHandleTrustLevel.getEffectiveTrustLevel(dmiTrustLevel); if (targetTrustLevel.equals(effectiveTrustlevel)) { if (outputAlternateId) { selectedCmHandleReferences.add(candidateCmHandleReference.getValue()); @@ -184,36 +199,19 @@ public class CmHandleQueryServiceImpl implements CmHandleQueryService { return selectedCmHandleReferences; } - private Collection<String> collectCmHandleReferencesFromDataNodes(final Collection<DataNode> dataNodes, - final boolean outputAlternateId) { - if (outputAlternateId) { - return dataNodes.stream().map(dataNode -> - (String) dataNode.getLeaves().get(ALTERNATE_ID)).collect(Collectors.toSet()); - } else { - return dataNodes.stream().map(dataNode -> - (String) dataNode.getLeaves().get("id")).collect(Collectors.toSet()); - } - } - private Collection<String> queryCmHandleAnyProperties( - final Map<String, String> propertyQueryPairs, - final PropertyType propertyType, final boolean outputAlternateId) { + final Map<String, String> propertyQueryPairs, + final PropertyType propertyType, final boolean outputAlternateId) { if (propertyQueryPairs.isEmpty()) { return Collections.emptySet(); } Collection<String> cmHandleReferences = null; for (final Map.Entry<String, String> publicPropertyQueryPair : propertyQueryPairs.entrySet()) { - final String cpsPath = DESCENDANT_PATH + propertyType.getYangContainerName() + "[@name=\"" - + publicPropertyQueryPair.getKey() - + "\" and @value=\"" + 
publicPropertyQueryPair.getValue() + "\"]"; - - final Collection<DataNode> dataNodes = queryCmHandleAncestorsByCpsPath(cpsPath, - OMIT_DESCENDANTS); + final Collection<String> cmHandleReferencesToRetain = getCmHandleReferencesByProperties(propertyType, + publicPropertyQueryPair.getKey(), publicPropertyQueryPair.getValue(), outputAlternateId); if (cmHandleReferences == null) { - cmHandleReferences = collectCmHandleReferencesFromDataNodes(dataNodes, outputAlternateId); + cmHandleReferences = cmHandleReferencesToRetain; } else { - final Collection<String> cmHandleReferencesToRetain; - cmHandleReferencesToRetain = collectCmHandleReferencesFromDataNodes(dataNodes, outputAlternateId); cmHandleReferences.retainAll(cmHandleReferencesToRetain); } if (cmHandleReferences.isEmpty()) { @@ -223,13 +221,46 @@ public class CmHandleQueryServiceImpl implements CmHandleQueryService { return cmHandleReferences; } - private Collection<DataNode> getCmHandlesByDmiPluginIdentifierAndDmiProperty(final String dmiPluginIdentifier, + private Set<String> getIdsByDmiPluginIdentifierAndDmiProperty(final String dmiPluginIdentifier, + final String dmiProperty, + final boolean outputAlternateId) { + final String attributeName = outputAlternateId ? ALTERNATE_ID : CM_HANDLE_ID; + final String cpsPath = String.format("%s/cm-handles[@%s='%s']/@%s", + NCMP_DMI_REGISTRY_PARENT, dmiProperty, dmiPluginIdentifier, attributeName); + return cpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, cpsPath, String.class); + } + + private Collection<DataNode> getDataNodesByDmiPluginIdentifierAndDmiProperty(final String dmiPluginIdentifier, final String dmiProperty) { return cpsQueryService.queryDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, NCMP_DMI_REGISTRY_PARENT + "/cm-handles[@" + dmiProperty + "='" + dmiPluginIdentifier + "']", OMIT_DESCENDANTS); } + private Map<String, String> getCmHandleReferencesMapByDmiPluginIdentifier(final String dmiPluginIdentifier) { + final Map<String, String> cmHandleReferencesMap = new HashMap<>(); + for (final ModelledDmiServiceLeaves modelledDmiServiceLeaf : ModelledDmiServiceLeaves.values()) { + final Collection<DataNode> cmHandlesAsDataNodes = getDataNodesByDmiPluginIdentifierAndDmiProperty( + dmiPluginIdentifier, modelledDmiServiceLeaf.getLeafName()); + for (final DataNode cmHandleAsDataNode : cmHandlesAsDataNodes) { + final String cmHandleId = cmHandleAsDataNode.getLeaves().get(CM_HANDLE_ID).toString(); + final String alternateId = cmHandleAsDataNode.getLeaves().get(ALTERNATE_ID).toString(); + cmHandleReferencesMap.put(cmHandleId, alternateId); + } + } + return cmHandleReferencesMap; + } + + private Collection<String> getCmHandleReferencesByProperties(final PropertyType propertyType, + final String propertyName, + final String propertyValue, + final boolean outputAlternateId) { + final String attributeName = outputAlternateId ? 
ALTERNATE_ID : CM_HANDLE_ID; + final String cpsPath = String.format("//%s[@name='%s' and @value='%s']%s/@%s", + propertyType.getYangContainerName(), propertyName, propertyValue, ANCESTOR_CM_HANDLES, attributeName); + return cpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, cpsPath, String.class); + } + private DataNode getCmHandleState(final String cmHandleId) { cpsValidator.validateNameCharacters(cmHandleId); final String xpath = NCMP_DMI_REGISTRY_PARENT + "/cm-handles[@id='" + cmHandleId + "']/state"; diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleRegistrationService.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleRegistrationService.java index e7fd247a08..6153fd679f 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleRegistrationService.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleRegistrationService.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2025 Nordix Foundation + * Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2021 Pantheon.tech * Modifications Copyright (C) 2021-2022 Bell Canada * Modifications Copyright (C) 2023 TechMahindra Ltd. @@ -23,7 +23,6 @@ package org.onap.cps.ncmp.impl.inventory; -import static org.onap.cps.ncmp.api.NcmpResponseStatus.ALTERNATE_ID_ALREADY_ASSOCIATED; import static org.onap.cps.ncmp.api.NcmpResponseStatus.CM_HANDLES_NOT_FOUND; import static org.onap.cps.ncmp.api.NcmpResponseStatus.CM_HANDLES_NOT_READY; import static org.onap.cps.ncmp.api.NcmpResponseStatus.CM_HANDLE_ALREADY_EXIST; @@ -65,6 +64,7 @@ import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle; import org.onap.cps.ncmp.impl.inventory.sync.ModuleOperationsUtils; import org.onap.cps.ncmp.impl.inventory.sync.lcm.LcmEventsCmHandleStateHandler; import org.onap.cps.ncmp.impl.inventory.trustlevel.TrustLevelManager; +import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Service; @Slf4j @@ -72,7 +72,7 @@ import org.springframework.stereotype.Service; @RequiredArgsConstructor public class CmHandleRegistrationService { - private static final int DELETE_BATCH_SIZE = 100; + private static final int DELETE_BATCH_SIZE = 300; private final CmHandleRegistrationServicePropertyHandler cmHandleRegistrationServicePropertyHandler; private final InventoryPersistence inventoryPersistence; @@ -81,6 +81,8 @@ public class CmHandleRegistrationService { private final IMap<String, Object> moduleSyncStartedOnCmHandles; private final TrustLevelManager trustLevelManager; private final AlternateIdChecker alternateIdChecker; + @Qualifier("cmHandleIdPerAlternateId") + private final IMap<String, String> cmHandleIdPerAlternateId; /** * Registration of Created, Removed, Updated or Upgraded CM Handles. @@ -136,6 +138,27 @@ public class CmHandleRegistrationService { } } + /** + * Method to add alternate ids to cache by passing in yang model cm handle. + * Note: If alternate id does not exist for given cm handle, + * then the map is populated with cm handle id as both key and value. 
+ * + * @param yangModelCmHandles collection of yang model cm handles + */ + public void addAlternateIdsToCache(final Collection<YangModelCmHandle> yangModelCmHandles) { + final Map<String, String> cmHandleIdPerAlternateIdToRegister = new HashMap<>(yangModelCmHandles.size()); + for (final YangModelCmHandle yangModelCmHandle: yangModelCmHandles) { + final String cmHandleId = yangModelCmHandle.getId(); + final String alternateId = yangModelCmHandle.getAlternateId(); + if (StringUtils.isNotBlank(alternateId)) { + cmHandleIdPerAlternateIdToRegister.put(alternateId, cmHandleId); + } else { + cmHandleIdPerAlternateIdToRegister.put(cmHandleId, cmHandleId); + } + } + cmHandleIdPerAlternateId.putAll(cmHandleIdPerAlternateIdToRegister); + } + protected void processRemovedCmHandles(final DmiPluginRegistration dmiPluginRegistration, final DmiPluginRegistrationResponse dmiPluginRegistrationResponse) { final List<String> toBeRemovedCmHandleIds = dmiPluginRegistration.getRemovedCmHandles(); @@ -169,6 +192,7 @@ public class CmHandleRegistrationService { } yangModelCmHandles.removeIf(yangModelCmHandle -> notDeletedCmHandles.contains(yangModelCmHandle.getId())); updateCmHandleStateBatch(yangModelCmHandles, CmHandleState.DELETED); + removeAlternateIdsFromCache(yangModelCmHandles); dmiPluginRegistrationResponse.setRemovedCmHandles(cmHandleRegistrationResponses); } @@ -187,9 +211,11 @@ public class CmHandleRegistrationService { processTrustLevels(ncmpServiceCmHandles, succeededCmHandleIds); } catch (final AlreadyDefinedException alreadyDefinedException) { + log.error("Error while creating CM handles", alreadyDefinedException); failedCmHandleRegistrationResponses.addAll(CmHandleRegistrationResponse.createFailureResponsesFromXpaths( alreadyDefinedException.getAlreadyDefinedObjectNames(), CM_HANDLE_ALREADY_EXIST)); } catch (final Exception exception) { + log.error("Error while creating CM handles", exception); final Collection<String> cmHandleIds = ncmpServiceCmHandles.stream().map(NcmpServiceCmHandle::getCmHandleId).collect(Collectors.toList()); failedCmHandleRegistrationResponses.addAll(CmHandleRegistrationResponse @@ -347,7 +373,7 @@ public class CmHandleRegistrationService { final Collection<String> rejectedCmHandleIds = alternateIdChecker .getIdsOfCmHandlesWithRejectedAlternateId(cmHandlesToBeCreated, AlternateIdChecker.Operation.CREATE); cmHandleRegistrationResponses.addAll(CmHandleRegistrationResponse.createFailureResponses( - rejectedCmHandleIds, ALTERNATE_ID_ALREADY_ASSOCIATED)); + rejectedCmHandleIds, CM_HANDLE_ALREADY_EXIST)); return rejectedCmHandleIds; } @@ -368,6 +394,7 @@ public class CmHandleRegistrationService { } } lcmEventsCmHandleStateHandler.initiateStateAdvised(yangModelCmHandlesToRegister); + addAlternateIdsToCache(yangModelCmHandlesToRegister); dmiPluginRegistrationResponse.setCreatedCmHandles(cmHandleRegistrationResponses); return succeededCmHandleIds; } @@ -384,4 +411,16 @@ public class CmHandleRegistrationService { ncmpServiceCmHandle.getDataProducerIdentifier()); } + private void removeAlternateIdsFromCache(final Collection<YangModelCmHandle> yangModelCmHandles) { + for (final YangModelCmHandle yangModelCmHandle: yangModelCmHandles) { + final String cmHandleId = yangModelCmHandle.getId(); + final String alternateId = yangModelCmHandle.getAlternateId(); + if (StringUtils.isNotBlank(alternateId)) { + cmHandleIdPerAlternateId.delete(alternateId); + } else { + cmHandleIdPerAlternateId.delete(cmHandleId); + } + } + } + } diff --git 
a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleRegistrationServicePropertyHandler.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleRegistrationServicePropertyHandler.java index b7a13d9989..47d03c679f 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleRegistrationServicePropertyHandler.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleRegistrationServicePropertyHandler.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2022 Bell Canada * Modifications Copyright (C) 2024 TechMahindra Ltd. * ================================================================================ @@ -22,8 +22,9 @@ package org.onap.cps.ncmp.impl.inventory; -import static org.onap.cps.ncmp.api.NcmpResponseStatus.ALTERNATE_ID_ALREADY_ASSOCIATED; +import static org.onap.cps.api.parameters.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS; import static org.onap.cps.ncmp.api.NcmpResponseStatus.CM_HANDLES_NOT_FOUND; +import static org.onap.cps.ncmp.api.NcmpResponseStatus.CM_HANDLE_ALREADY_EXIST; import static org.onap.cps.ncmp.api.NcmpResponseStatus.CM_HANDLE_INVALID_ID; import static org.onap.cps.ncmp.impl.inventory.CmHandleRegistrationServicePropertyHandler.PropertyType.DMI_PROPERTY; import static org.onap.cps.ncmp.impl.inventory.CmHandleRegistrationServicePropertyHandler.PropertyType.PUBLIC_PROPERTY; @@ -32,6 +33,7 @@ import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DMI_REGISTRY import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DMI_REGISTRY_PARENT; import com.google.common.collect.ImmutableMap; +import com.hazelcast.map.IMap; import java.time.OffsetDateTime; import java.util.ArrayList; import java.util.Collection; @@ -49,13 +51,14 @@ import org.onap.cps.api.CpsDataService; import org.onap.cps.api.exceptions.DataNodeNotFoundException; import org.onap.cps.api.exceptions.DataValidationException; import org.onap.cps.api.model.DataNode; -import org.onap.cps.api.model.DataNodeBuilder; +import org.onap.cps.impl.DataNodeBuilder; import org.onap.cps.ncmp.api.inventory.models.CmHandleRegistrationResponse; import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle; import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle; import org.onap.cps.ncmp.impl.utils.YangDataConverter; import org.onap.cps.utils.ContentType; import org.onap.cps.utils.JsonObjectMapper; +import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Service; @Slf4j @@ -69,6 +72,8 @@ public class CmHandleRegistrationServicePropertyHandler { private final CpsDataService cpsDataService; private final JsonObjectMapper jsonObjectMapper; private final AlternateIdChecker alternateIdChecker; + @Qualifier("cmHandleIdPerAlternateId") + private final IMap<String, String> cmHandleIdPerAlternateId; /** * Iterates over incoming updatedNcmpServiceCmHandles and update the dataNodes based on the updated attributes. 
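The registration service and the property handler above both maintain the new cmHandleIdPerAlternateId Hazelcast map alongside the registry: entries are added on creation (falling back to the cm handle id itself when no alternate id is set), refreshed when an alternate id changes (see the updateAlternateId hunk just below), removed on deletion, and read in batches by AlternateIdChecker. A condensed lifecycle sketch using only IMap operations that appear in the diff; the sample ids are illustrative:

    // On registration: alternate id -> cm handle id, or cm handle id -> itself when no alternate id exists.
    cmHandleIdPerAlternateId.putAll(Map.of("subnetwork-1/me-1", "ch-1", "ch-2", "ch-2"));

    // On alternate-id update: drop the old entry, then store the new mapping.
    cmHandleIdPerAlternateId.delete("ch-2");
    cmHandleIdPerAlternateId.set("subnetwork-1/me-2", "ch-2");

    // Batch lookup, e.g. to find alternate ids that are already assigned.
    final Set<String> alreadyAssigned =
            cmHandleIdPerAlternateId.getAll(Set.of("subnetwork-1/me-1", "subnetwork-1/me-3")).keySet();

    // On de-registration: remove the entry again.
    cmHandleIdPerAlternateId.delete("subnetwork-1/me-1");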
@@ -81,14 +86,14 @@ public class CmHandleRegistrationServicePropertyHandler { final Collection<String> rejectedCmHandleIds = alternateIdChecker .getIdsOfCmHandlesWithRejectedAlternateId(updatedNcmpServiceCmHandles, AlternateIdChecker.Operation.UPDATE); final List<CmHandleRegistrationResponse> failureResponses = - CmHandleRegistrationResponse.createFailureResponses(rejectedCmHandleIds, ALTERNATE_ID_ALREADY_ASSOCIATED); + CmHandleRegistrationResponse.createFailureResponses(rejectedCmHandleIds, CM_HANDLE_ALREADY_EXIST); final List<CmHandleRegistrationResponse> cmHandleRegistrationResponses = new ArrayList<>(failureResponses); for (final NcmpServiceCmHandle updatedNcmpServiceCmHandle : updatedNcmpServiceCmHandles) { final String cmHandleId = updatedNcmpServiceCmHandle.getCmHandleId(); if (!rejectedCmHandleIds.contains(cmHandleId)) { try { final DataNode existingCmHandleDataNode = inventoryPersistence - .getCmHandleDataNodeByCmHandleId(cmHandleId).iterator().next(); + .getCmHandleDataNodeByCmHandleId(cmHandleId, INCLUDE_ALL_DESCENDANTS).iterator().next(); processUpdates(existingCmHandleDataNode, updatedNcmpServiceCmHandle); cmHandleRegistrationResponses.add(CmHandleRegistrationResponse.createSuccessResponse(cmHandleId)); } catch (final DataNodeNotFoundException e) { @@ -124,9 +129,12 @@ public class CmHandleRegistrationServicePropertyHandler { } private void updateAlternateId(final NcmpServiceCmHandle ncmpServiceCmHandle) { + final String cmHandleId = ncmpServiceCmHandle.getCmHandleId(); final String newAlternateId = ncmpServiceCmHandle.getAlternateId(); if (StringUtils.isNotBlank(newAlternateId)) { setAndUpdateCmHandleField(ncmpServiceCmHandle.getCmHandleId(), "alternate-id", newAlternateId); + cmHandleIdPerAlternateId.delete(cmHandleId); + cmHandleIdPerAlternateId.set(newAlternateId, cmHandleId); } } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistence.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistence.java index b81b5aae70..29eaf2e407 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistence.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistence.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2023 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -27,6 +27,7 @@ import java.util.Map; import org.onap.cps.api.model.DataNode; import org.onap.cps.api.model.ModuleDefinition; import org.onap.cps.api.model.ModuleReference; +import org.onap.cps.api.parameters.FetchDescendantsOption; import org.onap.cps.ncmp.api.inventory.models.CompositeState; import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle; @@ -72,14 +73,6 @@ public interface InventoryPersistence extends NcmpPersistence { Collection<YangModelCmHandle> getYangModelCmHandles(Collection<String> cmHandleIds); /** - * This method retrieves DMI service name, DMI properties and the state for a given list of cm handle references. 
- * - * @param cmHandleReferences a list of the ids of the cm handles - * @return collection of yang model cm handles - */ - Collection<YangModelCmHandle> getYangModelCmHandlesFromCmHandleReferences(Collection<String> cmHandleReferences); - - /** * Method to return module definitions by cmHandleId. * * @param cmHandleId cm handle ID @@ -125,33 +118,21 @@ public interface InventoryPersistence extends NcmpPersistence { * Get data node with the given cm handle id. * * @param cmHandleId cmHandle ID + * @param fetchDescendantsOption fetch descendants option * @return data node */ - Collection<DataNode> getCmHandleDataNodeByCmHandleId(String cmHandleId); - - /** - * Get yang model cm handle with the given alternate id. - * - * @param alternateId alternate ID - * @return yang model cm handle - */ - YangModelCmHandle getYangModelCmHandleByAlternateId(String alternateId); - - /** - * Get yang model cm handles for the given batch of alternate ids. - * - * @param alternateIds alternate IDs - * @return yang model cm handles - */ - Collection<YangModelCmHandle> getYangModelCmHandleByAlternateIds(Collection<String> alternateIds); + Collection<DataNode> getCmHandleDataNodeByCmHandleId(String cmHandleId, + FetchDescendantsOption fetchDescendantsOption); /** * Get collection of data nodes of given cm handles. * * @param cmHandleIds collection of cmHandle IDs + * @param fetchDescendantsOption fetch descendants option * @return collection of data nodes */ - Collection<DataNode> getCmHandleDataNodes(Collection<String> cmHandleIds); + Collection<DataNode> getCmHandleDataNodes(Collection<String> cmHandleIds, + FetchDescendantsOption fetchDescendantsOption); /** * get CM handles that has given module names. diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java index e7ec9cd13c..9bbc8b8e42 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2025 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2022 Bell Canada * Modifications Copyright (C) 2024 TechMahindra Ltd. 
* ================================================================================ @@ -34,8 +34,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import java.util.stream.Stream; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; import org.onap.cps.api.CpsAnchorService; import org.onap.cps.api.CpsDataService; import org.onap.cps.api.CpsModuleService; @@ -45,13 +45,12 @@ import org.onap.cps.api.model.DataNode; import org.onap.cps.api.model.ModuleDefinition; import org.onap.cps.api.model.ModuleReference; import org.onap.cps.api.parameters.FetchDescendantsOption; -import org.onap.cps.impl.utils.CpsValidator; -import org.onap.cps.ncmp.api.exceptions.CmHandleNotFoundException; import org.onap.cps.ncmp.api.inventory.models.CompositeState; import org.onap.cps.ncmp.api.inventory.models.CompositeStateBuilder; import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle; import org.onap.cps.ncmp.impl.utils.YangDataConverter; import org.onap.cps.utils.ContentType; +import org.onap.cps.utils.CpsValidator; import org.onap.cps.utils.JsonObjectMapper; import org.springframework.stereotype.Component; @@ -59,11 +58,10 @@ import org.springframework.stereotype.Component; @Component public class InventoryPersistenceImpl extends NcmpPersistenceImpl implements InventoryPersistence { - private static final int CMHANDLE_BATCH_SIZE = 100; + private static final int CMHANDLE_BATCH_SIZE = 300; private final CpsModuleService cpsModuleService; private final CpsValidator cpsValidator; - private final CmHandleQueryService cmHandleQueryService; /** * initialize an inventory persistence object. @@ -73,21 +71,17 @@ public class InventoryPersistenceImpl extends NcmpPersistenceImpl implements Inv * @param cpsAnchorService cps anchor service instance * @param cpsModuleService cps module service instance * @param cpsDataService cps data service instance - * @param cmHandleQueryService cm handle query service instance */ public InventoryPersistenceImpl(final CpsValidator cpsValidator, final JsonObjectMapper jsonObjectMapper, final CpsAnchorService cpsAnchorService, final CpsModuleService cpsModuleService, - final CpsDataService cpsDataService, - final CmHandleQueryService cmHandleQueryService) { + final CpsDataService cpsDataService) { super(jsonObjectMapper, cpsAnchorService, cpsDataService); this.cpsModuleService = cpsModuleService; this.cpsValidator = cpsValidator; - this.cmHandleQueryService = cmHandleQueryService; } - @Override public CompositeState getCmHandleState(final String cmHandleId) { final DataNode stateAsDataNode = cpsDataService.getDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, @@ -117,7 +111,8 @@ public class InventoryPersistenceImpl extends NcmpPersistenceImpl implements Inv @Override public YangModelCmHandle getYangModelCmHandle(final String cmHandleId) { cpsValidator.validateNameCharacters(cmHandleId); - final DataNode dataNode = getCmHandleDataNodeByCmHandleId(cmHandleId).iterator().next(); + final DataNode dataNode = + getCmHandleDataNodeByCmHandleId(cmHandleId, INCLUDE_ALL_DESCENDANTS).iterator().next(); return YangDataConverter.toYangModelCmHandle(dataNode); } @@ -133,20 +128,7 @@ public class InventoryPersistenceImpl extends NcmpPersistenceImpl implements Inv dataValidationException.getMessage()); } }); - return YangDataConverter.toYangModelCmHandles(getCmHandleDataNodes(validCmHandleIds)); - } - - @Override - public Collection<YangModelCmHandle> getYangModelCmHandlesFromCmHandleReferences( - final 
Collection<String> cmHandleReferences) { - - final String cpsPathForCmHandlesByReferences = getCpsPathForCmHandlesByReferences(cmHandleReferences); - - final Collection<DataNode> cmHandlesAsDataNodes = - cmHandleQueryService.queryNcmpRegistryByCpsPath( - cpsPathForCmHandlesByReferences, INCLUDE_ALL_DESCENDANTS); - - return YangDataConverter.toYangModelCmHandles(cmHandlesAsDataNodes); + return YangDataConverter.toYangModelCmHandles(getCmHandleDataNodes(validCmHandleIds, INCLUDE_ALL_DESCENDANTS)); } @Override @@ -185,55 +167,34 @@ public class InventoryPersistenceImpl extends NcmpPersistenceImpl implements Inv } @Override - public Collection<DataNode> getCmHandleDataNodeByCmHandleId(final String cmHandleId) { - return this.getDataNode(getXPathForCmHandleById(cmHandleId)); - } - - @Override - public YangModelCmHandle getYangModelCmHandleByAlternateId(final String alternateId) { - final String cpsPathForCmHandleByAlternateId = getCpsPathForCmHandleByAlternateId(alternateId); - final Collection<DataNode> dataNodes = cmHandleQueryService - .queryNcmpRegistryByCpsPath(cpsPathForCmHandleByAlternateId, OMIT_DESCENDANTS); - if (dataNodes.isEmpty()) { - throw new CmHandleNotFoundException(alternateId); - } - return YangDataConverter.toYangModelCmHandle(dataNodes.iterator().next()); + public Collection<DataNode> getCmHandleDataNodeByCmHandleId(final String cmHandleId, + final FetchDescendantsOption fetchDescendantsOption) { + return this.getDataNode(getXPathForCmHandleById(cmHandleId), fetchDescendantsOption); } @Override - public Collection<YangModelCmHandle> getYangModelCmHandleByAlternateIds(final Collection<String> alternateIds) { - if (alternateIds.isEmpty()) { - return Collections.emptyList(); - } - final String cpsPathForCmHandlesByAlternateIds = getCpsPathForCmHandlesByAlternateIds(alternateIds); - final Collection<DataNode> dataNodes = cmHandleQueryService.queryNcmpRegistryByCpsPath( - cpsPathForCmHandlesByAlternateIds, INCLUDE_ALL_DESCENDANTS); - return YangDataConverter.toYangModelCmHandles(dataNodes); - } - - @Override - public Collection<DataNode> getCmHandleDataNodes(final Collection<String> cmHandleIds) { + public Collection<DataNode> getCmHandleDataNodes(final Collection<String> cmHandleIds, + final FetchDescendantsOption fetchDescendantsOption) { final Collection<String> xpaths = new ArrayList<>(cmHandleIds.size()); cmHandleIds.forEach(cmHandleId -> xpaths.add(getXPathForCmHandleById(cmHandleId))); - return this.getDataNodes(xpaths); + return this.getDataNodes(xpaths, fetchDescendantsOption); } @Override public Collection<String> getCmHandleReferencesWithGivenModules(final Collection<String> moduleNamesForQuery, final boolean outputAlternateId) { - if (outputAlternateId) { - final Collection<String> cmHandleIds = + final Collection<String> cmHandleIds = cpsAnchorService.queryAnchorNames(NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, moduleNamesForQuery); - return getAlternateIdsFromDataNodes(getCmHandleDataNodes(cmHandleIds)); - } else { - return cpsAnchorService.queryAnchorNames(NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, moduleNamesForQuery); + if (outputAlternateId) { + return getAlternateIdsForCmHandleIds(cmHandleIds); } + return cmHandleIds; } @Override public boolean isExistingCmHandleId(final String cmHandleId) { try { - return !getCmHandleDataNodeByCmHandleId(cmHandleId).isEmpty(); + return !getCmHandleDataNodeByCmHandleId(cmHandleId, OMIT_DESCENDANTS).isEmpty(); } catch (final DataNodeNotFoundException exception) { return false; } @@ -243,21 +204,6 @@ public class 
InventoryPersistenceImpl extends NcmpPersistenceImpl implements Inv return NCMP_DMI_REGISTRY_PARENT + "/cm-handles[@id='" + cmHandleId + "']"; } - private static String getCpsPathForCmHandleByAlternateId(final String alternateId) { - return NCMP_DMI_REGISTRY_PARENT + "/cm-handles[@alternate-id='" + alternateId + "']"; - } - - private static String getCpsPathForCmHandlesByAlternateIds(final Collection<String> alternateIds) { - return alternateIds.stream().collect(Collectors.joining("' or @alternate-id='", - NCMP_DMI_REGISTRY_PARENT + "/cm-handles[@alternate-id='", "']")); - } - - private String getCpsPathForCmHandlesByReferences(final Collection<String> cmHandleReferences) { - return cmHandleReferences.stream() - .flatMap(id -> Stream.of("@id='" + id + "'", "@alternate-id='" + id + "'")) - .collect(Collectors.joining(" or ", NCMP_DMI_REGISTRY_PARENT + "/cm-handles[", "]")); - } - private static String createStateJsonData(final String state) { return "{\"state\":" + state + "}"; } @@ -266,8 +212,12 @@ public class InventoryPersistenceImpl extends NcmpPersistenceImpl implements Inv return "{\"cm-handles\":" + jsonObjectMapper.asJsonString(yangModelCmHandles) + "}"; } - private Collection<String> getAlternateIdsFromDataNodes(final Collection<DataNode> dataNodes) { - return dataNodes.stream().map(dataNode -> - (String) dataNode.getLeaves().get("alternate-id")).collect(Collectors.toSet()); + private Collection<String> getAlternateIdsForCmHandleIds(final Collection<String> cmHandleIds) { + final Collection<DataNode> dataNodes = getCmHandleDataNodes(cmHandleIds, OMIT_DESCENDANTS); + return dataNodes.stream() + .map(DataNode::getLeaves) + .map(leaves -> (String) leaves.get("alternate-id")) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toSet()); } } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryService.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryService.java index 3db4920d3e..5a105b347a 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryService.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryService.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
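An illustrative sketch (not part of the patch) of the filtering behaviour in the new getAlternateIdsForCmHandleIds above: the cm handle ids are resolved to data nodes without descendants, and any handle whose alternate-id leaf is missing or blank is silently dropped. The sketch uses plain maps in place of CPS DataNode leaves so it compiles without the CPS classes; the sample ids are hypothetical.

    import java.util.List;
    import java.util.Map;
    import java.util.Set;
    import java.util.stream.Collectors;
    import org.apache.commons.lang3.StringUtils;

    class AlternateIdFilterSketch {
        public static void main(final String[] args) {
            // Hypothetical leaf maps standing in for DataNode.getLeaves() results.
            final List<Map<String, String>> leavesPerCmHandle = List.of(
                    Map.of("id", "ch-1", "alternate-id", "fdn-1"),
                    Map.of("id", "ch-2", "alternate-id", ""),  // blank alternate id -> dropped
                    Map.of("id", "ch-3"));                     // no alternate id -> dropped
            final Set<String> alternateIds = leavesPerCmHandle.stream()
                    .map(leaves -> leaves.get("alternate-id"))
                    .filter(StringUtils::isNotBlank)
                    .collect(Collectors.toSet());
            System.out.println(alternateIds); // prints [fdn-1]
        }
    }
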
@@ -23,6 +23,7 @@ package org.onap.cps.ncmp.impl.inventory; import java.util.Collection; import org.onap.cps.ncmp.api.inventory.models.CmHandleQueryServiceParameters; import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle; +import reactor.core.publisher.Flux; public interface ParameterizedCmHandleQueryService { /** @@ -31,6 +32,7 @@ public interface ParameterizedCmHandleQueryService { * public properties * modules * cps-path + * trust level * * @param cmHandleQueryServiceParameters the cm handle query parameters * @param outputAlternateId Boolean for cm handle reference type either @@ -62,17 +64,20 @@ public interface ParameterizedCmHandleQueryService { * public properties * modules * cps-path + * trust level * * @param cmHandleQueryServiceParameters the cm handle query parameters - * @return collection of cm handles + * @return cm handle objects as a reactive stream (flux) */ - Collection<NcmpServiceCmHandle> queryCmHandles(CmHandleQueryServiceParameters cmHandleQueryServiceParameters); + Flux<NcmpServiceCmHandle> queryCmHandles(CmHandleQueryServiceParameters cmHandleQueryServiceParameters); /** - * Get all cm handle objects. - * Note: it is similar to all the queries above but simply no conditions and hence not 'parameterized' + * Retrieves all {@code NcmpServiceCmHandle} instances without their associated properties. + * This method fetches the relevant data nodes from the inventory persistence layer and + * converts them into {@code NcmpServiceCmHandle} objects. Only the handles are returned, + * without any additional properties. * - * @return collection of cm handles + * @return a collection of {@code NcmpServiceCmHandle} instances without properties. */ - Collection<NcmpServiceCmHandle> getAllCmHandles(); + Collection<NcmpServiceCmHandle> getAllCmHandlesWithoutProperties(); } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceImpl.java index 428c6f6491..bafb06578e 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceImpl.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceImpl.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
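The interface change above replaces the materialised Collection<NcmpServiceCmHandle> returned by queryCmHandles with a Reactor Flux, so callers now decide how to consume the stream. A hedged sketch of a hypothetical caller (class and field names are illustrative, the service and model types are the ones declared above, and the sketch assumes it sits in the same package as the interface so no extra import is needed for it); blocking collectList().block() is only for illustration, a fully reactive caller would keep working on the Flux.

    import java.util.List;
    import org.onap.cps.ncmp.api.inventory.models.CmHandleQueryServiceParameters;
    import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle;
    import reactor.core.publisher.Flux;

    class CmHandleQueryCallerSketch {
        private final ParameterizedCmHandleQueryService parameterizedCmHandleQueryService;

        CmHandleQueryCallerSketch(final ParameterizedCmHandleQueryService parameterizedCmHandleQueryService) {
            this.parameterizedCmHandleQueryService = parameterizedCmHandleQueryService;
        }

        List<NcmpServiceCmHandle> queryAndCollect(final CmHandleQueryServiceParameters queryParameters) {
            final Flux<NcmpServiceCmHandle> cmHandles =
                    parameterizedCmHandleQueryService.queryCmHandles(queryParameters);
            // Blocking here is only for the sketch; a reactive caller would keep the Flux.
            return cmHandles.collectList().block();
        }
    }
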
@@ -21,7 +21,6 @@ package org.onap.cps.ncmp.impl.inventory; import static org.onap.cps.api.parameters.FetchDescendantsOption.DIRECT_CHILDREN_ONLY; -import static org.onap.cps.api.parameters.FetchDescendantsOption.OMIT_DESCENDANTS; import static org.onap.cps.ncmp.impl.inventory.CmHandleQueryParametersValidator.validateCpsPathConditionProperties; import static org.onap.cps.ncmp.impl.inventory.CmHandleQueryParametersValidator.validateModuleNameConditionProperties; import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DMI_REGISTRY_PARENT; @@ -49,26 +48,30 @@ import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle; import org.onap.cps.ncmp.impl.inventory.models.InventoryQueryConditions; import org.onap.cps.ncmp.impl.inventory.models.PropertyType; import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle; +import org.onap.cps.ncmp.impl.inventory.trustlevel.TrustLevelManager; import org.onap.cps.ncmp.impl.utils.YangDataConverter; import org.springframework.stereotype.Service; +import reactor.core.publisher.Flux; @Service @RequiredArgsConstructor public class ParameterizedCmHandleQueryServiceImpl implements ParameterizedCmHandleQueryService { + private static final int FLUX_BUFFER_SIZE = 1000; private static final Collection<String> NO_QUERY_TO_EXECUTE = null; private final CmHandleQueryService cmHandleQueryService; private final InventoryPersistence inventoryPersistence; + private final TrustLevelManager trustLevelManager; @Override public Collection<String> queryCmHandleReferenceIds( - final CmHandleQueryServiceParameters cmHandleQueryServiceParameters, - final boolean outputAlternateId) { + final CmHandleQueryServiceParameters cmHandleQueryServiceParameters, + final boolean outputAlternateId) { return executeQueries(cmHandleQueryServiceParameters, outputAlternateId, - this::executeCpsPathQuery, - this::queryCmHandlesByPublicProperties, - this::executeModuleNameQuery, - this::queryCmHandlesByTrustLevel); + this::executeCpsPathQuery, + this::queryCmHandlesByPublicProperties, + this::executeModuleNameQuery, + this::queryCmHandlesByTrustLevel); } @Override @@ -83,21 +86,18 @@ public class ParameterizedCmHandleQueryServiceImpl implements ParameterizedCmHan } @Override - public Collection<NcmpServiceCmHandle> queryCmHandles( - final CmHandleQueryServiceParameters cmHandleQueryServiceParameters) { - - if (cmHandleQueryServiceParameters.getCmHandleQueryParameters().isEmpty()) { - return getAllCmHandles(); - } - - final Collection<String> cmHandleIds = queryCmHandleReferenceIds(cmHandleQueryServiceParameters, false); - + public Flux<NcmpServiceCmHandle> queryCmHandles(final CmHandleQueryServiceParameters queryParameters) { + final Collection<String> cmHandleIds = queryCmHandleReferenceIds(queryParameters, false); return getNcmpServiceCmHandles(cmHandleIds); } @Override - public Collection<NcmpServiceCmHandle> getAllCmHandles() { - final DataNode dataNode = inventoryPersistence.getDataNode(NCMP_DMI_REGISTRY_PARENT).iterator().next(); + public Collection<NcmpServiceCmHandle> getAllCmHandlesWithoutProperties() { + return toNcmpServiceCmHandles(inventoryPersistence.getDataNode(NCMP_DMI_REGISTRY_PARENT, DIRECT_CHILDREN_ONLY)); + } + + private Collection<NcmpServiceCmHandle> toNcmpServiceCmHandles(final Collection<DataNode> dataNodes) { + final DataNode dataNode = dataNodes.iterator().next(); return dataNode.getChildDataNodes().stream().map(this::createNcmpServiceCmHandle).collect(Collectors.toSet()); } @@ -111,15 +111,11 @@ public class ParameterizedCmHandleQueryServiceImpl 
implements ParameterizedCmHan } final String dmiPluginIdentifierValue = dmiPropertyQueryPairs - .get(PropertyType.DMI_PLUGIN.getYangContainerName()); + .get(PropertyType.DMI_PLUGIN.getYangContainerName()); + + return cmHandleQueryService.getCmHandleReferencesByDmiPluginIdentifier( + dmiPluginIdentifierValue, outputAlternateId); - if (outputAlternateId) { - return - cmHandleQueryService.getCmHandleReferencesMapByDmiPluginIdentifier(dmiPluginIdentifierValue).values(); - } else { - return cmHandleQueryService.getCmHandleReferencesByDmiPluginIdentifier(dmiPluginIdentifierValue, - outputAlternateId); - } } private Collection<String> queryCmHandlesByPrivateProperties( @@ -175,7 +171,7 @@ public class ParameterizedCmHandleQueryServiceImpl implements ParameterizedCmHan private Collection<String> executeCpsPathQuery( final CmHandleQueryServiceParameters cmHandleQueryServiceParameters, final boolean outputAlternateId) { final Map<String, String> cpsPathCondition - = getCpsPathCondition(cmHandleQueryServiceParameters.getCmHandleQueryParameters()); + = getCpsPathCondition(cmHandleQueryServiceParameters.getCmHandleQueryParameters()); if (!validateCpsPathConditionProperties(cpsPathCondition)) { return Collections.emptySet(); } @@ -184,9 +180,8 @@ public class ParameterizedCmHandleQueryServiceImpl implements ParameterizedCmHan return NO_QUERY_TO_EXECUTE; } try { - cpsPathQueryResult = collectCmHandleReferencesFromDataNodes( - cmHandleQueryService.queryCmHandleAncestorsByCpsPath(cpsPathCondition.get("cpsPath"), OMIT_DESCENDANTS), - outputAlternateId); + cpsPathQueryResult = cmHandleQueryService.getCmHandleReferencesByCpsPath(cpsPathCondition.get("cpsPath"), + outputAlternateId); } catch (final PathParsingException pathParsingException) { throw new DataValidationException(pathParsingException.getMessage(), pathParsingException.getDetails(), pathParsingException); @@ -211,7 +206,7 @@ public class ParameterizedCmHandleQueryServiceImpl implements ParameterizedCmHan } private Map<String, String> getPropertyPairs(final List<ConditionProperties> conditionProperties, - final String queryProperty) { + final String queryProperty) { final Map<String, String> result = new HashMap<>(); getConditions(conditionProperties, queryProperty).forEach(result::putAll); return result; @@ -228,20 +223,26 @@ public class ParameterizedCmHandleQueryServiceImpl implements ParameterizedCmHan } private Collection<String> getAllCmHandleReferences(final boolean outputAlternateId) { - final DataNode dataNode = inventoryPersistence.getDataNode(NCMP_DMI_REGISTRY_PARENT, DIRECT_CHILDREN_ONLY) - .iterator().next(); - return collectCmHandleReferencesFromDataNodes(dataNode.getChildDataNodes(), outputAlternateId); + return cmHandleQueryService.getAllCmHandleReferences(outputAlternateId); } - private Collection<NcmpServiceCmHandle> getNcmpServiceCmHandles(final Collection<String> cmHandleIds) { + private Flux<NcmpServiceCmHandle> getNcmpServiceCmHandles(final Collection<String> cmHandleIds) { + return Flux.fromIterable(cmHandleIds) + .buffer(FLUX_BUFFER_SIZE) + .map(this::getNcmpServiceCmHandleBatch) + .flatMap(Flux::fromIterable); + } + + private Collection<NcmpServiceCmHandle> getNcmpServiceCmHandleBatch(final Collection<String> cmHandleIds) { final Collection<YangModelCmHandle> yangModelcmHandles - = inventoryPersistence.getYangModelCmHandles(cmHandleIds); + = inventoryPersistence.getYangModelCmHandles(cmHandleIds); final Collection<NcmpServiceCmHandle> ncmpServiceCmHandles = new ArrayList<>(yangModelcmHandles.size()); 
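// Illustrative comment, not part of the patch: each buffered batch holds at most
// FLUX_BUFFER_SIZE (1000) cm handle ids, resolved in a single inventoryPersistence call above;
// the loop below converts them, and applyEffectiveTrustLevels enriches the whole batch before
// it is emitted downstream via flatMap.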
yangModelcmHandles.forEach(yangModelcmHandle -> - ncmpServiceCmHandles.add(YangDataConverter.toNcmpServiceCmHandle(yangModelcmHandle)) + ncmpServiceCmHandles.add(YangDataConverter.toNcmpServiceCmHandle(yangModelcmHandle)) ); + trustLevelManager.applyEffectiveTrustLevels(ncmpServiceCmHandles); return ncmpServiceCmHandles; } @@ -252,15 +253,15 @@ public class ParameterizedCmHandleQueryServiceImpl implements ParameterizedCmHan private Collection<String> executeQueries(final CmHandleQueryServiceParameters cmHandleQueryServiceParameters, final boolean outputAlternateId, final BiFunction<CmHandleQueryServiceParameters, Boolean, - Collection<String>>... queryFunctions) { + Collection<String>>... queryFunctions) { if (cmHandleQueryServiceParameters.getCmHandleQueryParameters().isEmpty()) { return getAllCmHandleReferences(outputAlternateId); } Collection<String> combinedQueryResult = NO_QUERY_TO_EXECUTE; for (final BiFunction<CmHandleQueryServiceParameters, Boolean, - Collection<String>> queryFunction : queryFunctions) { + Collection<String>> queryFunction : queryFunctions) { final Collection<String> queryResult = queryFunction.apply(cmHandleQueryServiceParameters, - outputAlternateId); + outputAlternateId); if (noEntriesFoundCanStopQuerying(queryResult)) { return Collections.emptySet(); } @@ -287,14 +288,4 @@ public class ParameterizedCmHandleQueryServiceImpl implements ParameterizedCmHan } } - private Collection<String> collectCmHandleReferencesFromDataNodes(final Collection<DataNode> dataNodes, - final boolean outputAlternateId) { - if (outputAlternateId) { - return dataNodes.stream().map(dataNode -> - (String) dataNode.getLeaves().get("alternate-id")).collect(Collectors.toSet()); - } else { - return dataNodes.stream().map(dataNode -> - (String) dataNode.getLeaves().get("id")).collect(Collectors.toSet()); - } - } } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/AsyncTaskExecutor.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/AsyncTaskExecutor.java deleted file mode 100644 index 80bc4ab69f..0000000000 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/AsyncTaskExecutor.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.ncmp.impl.inventory.sync; - -import static java.util.concurrent.TimeUnit.MILLISECONDS; - -import jakarta.annotation.PostConstruct; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeoutException; -import java.util.function.Supplier; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.stereotype.Service; - -@Slf4j -@Service -public class AsyncTaskExecutor { - - @Value("${ncmp.modules-sync-watchdog.async-executor.parallelism-level:10}") - @Getter - private int asyncTaskParallelismLevel; - private ExecutorService executorService; - private static final int DEFAULT_PARALLELISM_LEVEL = 10; - - /** - * Set up executor service with thread-pool size as per configuration parameter. - * If modules-sync-watchdog.async-executor.parallelism-level not set a default of 10 threads will be applied. - */ - @PostConstruct - public void setupThreadPool() { - executorService = Executors.newWorkStealingPool( - asyncTaskParallelismLevel == 0 ? DEFAULT_PARALLELISM_LEVEL : asyncTaskParallelismLevel); - } - - /** - * Execute supplied task asynchronously. - * - * @param taskSupplier functional method is get() task need to executed asynchronously - * @param timeOutInMillis the task timeout value in milliseconds - */ - public void executeTask(final Supplier<Object> taskSupplier, final long timeOutInMillis) { - CompletableFuture.supplyAsync(taskSupplier::get, executorService) - .orTimeout(timeOutInMillis, MILLISECONDS) - .whenCompleteAsync(this::handleTaskCompletion); - } - - private void handleTaskCompletion(final Object response, final Throwable throwable) { - if (throwable != null) { - if (throwable instanceof TimeoutException) { - log.error("Async task didn't complete within the required time.", throwable); - } else { - log.error("Watchdog async batch failed.", throwable); - } - } - } -} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/DataSyncWatchdog.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/DataSyncWatchdog.java index af78d95742..708077915b 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/DataSyncWatchdog.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/DataSyncWatchdog.java @@ -55,7 +55,8 @@ public class DataSyncWatchdog { * Execute Cm Handle poll which queries the cm handle state in 'READY' and Operational Datastore Sync State in * 'UNSYNCHRONIZED'. 
*/ - @Scheduled(fixedDelayString = "${ncmp.timers.cm-handle-data-sync.sleep-time-ms:30000}") + @Scheduled(initialDelayString = "${ncmp.timers.cm-handle-data-sync.initial-delay-ms:40000}", + fixedDelayString = "${ncmp.timers.cm-handle-data-sync.sleep-time-ms:30000}") public void executeUnSynchronizedReadyCmHandlePoll() { moduleOperationsUtils.getUnsynchronizedReadyCmHandles().forEach(unSynchronizedReadyCmHandle -> { final String cmHandleId = unSynchronizedReadyCmHandle.getId(); diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/DmiModelOperations.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/DmiModelOperations.java index 1e24671f8d..2cc4375447 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/DmiModelOperations.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/DmiModelOperations.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation + * Copyright (C) 2021-2025 Nordix Foundation * Modifications Copyright (C) 2022 Bell Canada * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -61,13 +61,14 @@ public class DmiModelOperations { * Retrieves module references. * * @param yangModelCmHandle the yang model cm handle + * @param targetModuleSetTag module set tag to send to dmi * @return module references */ @Timed(value = "cps.ncmp.inventory.module.references.from.dmi", description = "Time taken to get all module references for a cm handle from dmi") - public List<ModuleReference> getModuleReferences(final YangModelCmHandle yangModelCmHandle) { - final DmiRequestBody dmiRequestBody = DmiRequestBody.builder() - .moduleSetTag(yangModelCmHandle.getModuleSetTag()).build(); + public List<ModuleReference> getModuleReferences(final YangModelCmHandle yangModelCmHandle, + final String targetModuleSetTag) { + final DmiRequestBody dmiRequestBody = DmiRequestBody.builder().moduleSetTag(targetModuleSetTag).build(); dmiRequestBody.asDmiProperties(yangModelCmHandle.getDmiProperties()); final ResponseEntity<Object> dmiFetchModulesResponseEntity = getResourceFromDmiWithJsonData( yangModelCmHandle.resolveDmiServiceName(MODEL), @@ -79,18 +80,20 @@ public class DmiModelOperations { * Retrieve yang resources from dmi for any modules that CPS-NCMP hasn't cached before. 
* * @param yangModelCmHandle the yangModelCmHandle + * @param targetModuleSetTag module set tag to send to dmi * @param newModuleReferences the unknown module references * @return yang resources as map of module name to yang(re)source */ @Timed(value = "cps.ncmp.inventory.yang.resources.from.dmi", description = "Time taken to get list of yang resources from dmi") public Map<String, String> getNewYangResourcesFromDmi(final YangModelCmHandle yangModelCmHandle, + final String targetModuleSetTag, final Collection<ModuleReference> newModuleReferences) { if (newModuleReferences.isEmpty()) { return Collections.emptyMap(); } final String jsonWithDataAndDmiProperties = getRequestBodyToFetchYangResources(newModuleReferences, - yangModelCmHandle.getDmiProperties(), yangModelCmHandle.getModuleSetTag()); + yangModelCmHandle.getDmiProperties(), targetModuleSetTag); final ResponseEntity<Object> responseEntity = getResourceFromDmiWithJsonData( yangModelCmHandle.resolveDmiServiceName(MODEL), jsonWithDataAndDmiProperties, @@ -123,13 +126,13 @@ public class DmiModelOperations { private static String getRequestBodyToFetchYangResources(final Collection<ModuleReference> newModuleReferences, final List<YangModelCmHandle.Property> dmiProperties, - final String moduleSetTag) { + final String targetModuleSetTag) { final JsonArray moduleReferencesAsJson = getModuleReferencesAsJson(newModuleReferences); final JsonObject data = new JsonObject(); data.add("modules", moduleReferencesAsJson); final JsonObject jsonRequestObject = new JsonObject(); - if (!moduleSetTag.isEmpty()) { - jsonRequestObject.addProperty("moduleSetTag", moduleSetTag); + if (!targetModuleSetTag.isEmpty()) { + jsonRequestObject.addProperty("moduleSetTag", targetModuleSetTag); } jsonRequestObject.add("data", data); jsonRequestObject.add("cmHandleProperties", toJsonObject(dmiProperties)); diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleOperationsUtils.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleOperationsUtils.java index e9f3d9b475..80e41652ee 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleOperationsUtils.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleOperationsUtils.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 Nordix Foundation * Modifications Copyright (C) 2022 Bell Canada * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -189,7 +189,12 @@ public class ModuleOperationsUtils { .getLockReasonCategory())); } - public static String getTargetModuleSetTagFromLockReason(final CompositeState.LockReason lockReason) { + public static String getTargetModuleSetTagForUpgrade(final YangModelCmHandle yangModelCmHandle) { + final CompositeState.LockReason lockReason = yangModelCmHandle.getCompositeState().getLockReason(); + return getTargetModuleSetTagFromLockReason(lockReason); + } + + private static String getTargetModuleSetTagFromLockReason(final CompositeState.LockReason lockReason) { return getLockedCompositeStateDetails(lockReason).getOrDefault(MODULE_SET_TAG_KEY, ""); } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncService.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncService.java index 
9534cf35b1..79f5496eb7 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncService.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncService.java @@ -36,7 +36,6 @@ import org.onap.cps.api.CpsAnchorService; import org.onap.cps.api.CpsDataService; import org.onap.cps.api.CpsModuleService; import org.onap.cps.api.exceptions.AlreadyDefinedException; -import org.onap.cps.api.exceptions.DuplicatedYangResourceException; import org.onap.cps.api.model.ModuleReference; import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle; import org.onap.cps.utils.ContentType; @@ -57,7 +56,7 @@ public class ModuleSyncService { @AllArgsConstructor private static final class ModuleDelta { Collection<ModuleReference> allModuleReferences; - Map<String, String> newModuleNameToContentMap; + Map<String, String> newYangResourceContentPerName; } /** @@ -67,10 +66,15 @@ public class ModuleSyncService { */ public void syncAndCreateSchemaSetAndAnchor(final YangModelCmHandle yangModelCmHandle) { final String cmHandleId = yangModelCmHandle.getId(); - final String moduleSetTag = yangModelCmHandle.getModuleSetTag(); - final String schemaSetName = getSchemaSetName(cmHandleId, moduleSetTag); - syncAndCreateSchemaSet(yangModelCmHandle, schemaSetName); - cpsAnchorService.createAnchor(NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, schemaSetName, cmHandleId); + final String targetModuleSetTag = yangModelCmHandle.getModuleSetTag(); + final String schemaSetName = getSchemaSetNameForModuleSetTag(cmHandleId, targetModuleSetTag); + syncAndCreateSchemaSet(yangModelCmHandle, schemaSetName, targetModuleSetTag); + try { + cpsAnchorService.createAnchor(NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, schemaSetName, cmHandleId); + } catch (final AlreadyDefinedException alreadyDefinedException) { + log.warn("Ignoring (Anchor) already exists exception for {}. 
Exception details: {}", cmHandleId, + alreadyDefinedException.getDetails()); + } } /** @@ -81,39 +85,40 @@ public class ModuleSyncService { public void syncAndUpgradeSchemaSet(final YangModelCmHandle yangModelCmHandle) { final String cmHandleId = yangModelCmHandle.getId(); final String sourceModuleSetTag = yangModelCmHandle.getModuleSetTag(); - final String targetModuleSetTag = ModuleOperationsUtils.getTargetModuleSetTagFromLockReason( - yangModelCmHandle.getCompositeState().getLockReason()); + final String targetModuleSetTag = ModuleOperationsUtils.getTargetModuleSetTagForUpgrade(yangModelCmHandle); + final String schemaSetName = getSchemaSetNameForModuleSetTag(cmHandleId, targetModuleSetTag); if (sourceModuleSetTag.isEmpty() && targetModuleSetTag.isEmpty()) { - final ModuleDelta moduleDelta = getModuleDelta(yangModelCmHandle); + final ModuleDelta moduleDelta = getModuleDelta(yangModelCmHandle, targetModuleSetTag); cpsModuleService.upgradeSchemaSetFromModules(NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, - cmHandleId, moduleDelta.newModuleNameToContentMap, moduleDelta.allModuleReferences); + schemaSetName, moduleDelta.newYangResourceContentPerName, moduleDelta.allModuleReferences); } else { - final String targetSchemaSetName = getSchemaSetName(cmHandleId, targetModuleSetTag); - syncAndCreateSchemaSet(yangModelCmHandle, targetSchemaSetName); - cpsAnchorService.updateAnchorSchemaSet(NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, cmHandleId, - targetSchemaSetName); + syncAndCreateSchemaSet(yangModelCmHandle, schemaSetName, targetModuleSetTag); + cpsAnchorService.updateAnchorSchemaSet(NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, cmHandleId, schemaSetName); setCmHandleModuleSetTag(yangModelCmHandle, targetModuleSetTag); log.info("Upgrading schema set for CM handle ID: {}, Source Tag: {}, Target Tag: {}", cmHandleId, sourceModuleSetTag, targetModuleSetTag); } } - private void syncAndCreateSchemaSet(final YangModelCmHandle yangModelCmHandle, final String schemaSetName) { + private void syncAndCreateSchemaSet(final YangModelCmHandle yangModelCmHandle, + final String schemaSetName, + final String targetModuleSetTag) { if (isNewSchemaSet(schemaSetName)) { - final ModuleDelta moduleDelta = getModuleDelta(yangModelCmHandle); + final String cmHandleId = yangModelCmHandle.getId(); + final ModuleDelta moduleDelta = getModuleDelta(yangModelCmHandle, targetModuleSetTag); try { - log.info("Creating Schema Set {} for CM Handle {}", schemaSetName, yangModelCmHandle.getId()); + log.info("Creating Schema Set {} for CM Handle {}", schemaSetName, cmHandleId); cpsModuleService.createSchemaSetFromModules( - NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, - schemaSetName, - moduleDelta.newModuleNameToContentMap, - moduleDelta.allModuleReferences + NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, + schemaSetName, + moduleDelta.newYangResourceContentPerName, + moduleDelta.allModuleReferences ); - log.info("Successfully created Schema Set {} for CM Handle {}", - schemaSetName, yangModelCmHandle.getId()); - } catch (final AlreadyDefinedException | DuplicatedYangResourceException exception) { - log.warn("Schema Set {} already exists, no need to (re)create it for {}", - schemaSetName, yangModelCmHandle.getId()); + log.info("Successfully created Schema Set {} for CM Handle {}", schemaSetName, + yangModelCmHandle.getId()); + } catch (final AlreadyDefinedException alreadyDefinedException) { + log.warn("Ignoring (Schema Set) already exists exception for {}. 
Exception details: {}", cmHandleId, + alreadyDefinedException.getDetails()); } } } @@ -122,16 +127,17 @@ public class ModuleSyncService { return !cpsModuleService.schemaSetExists(NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, schemaSetName); } - private ModuleDelta getModuleDelta(final YangModelCmHandle yangModelCmHandle) { + private ModuleDelta getModuleDelta(final YangModelCmHandle yangModelCmHandle, + final String targetModuleSetTag) { final Collection<ModuleReference> allModuleReferences = - dmiModelOperations.getModuleReferences(yangModelCmHandle); + dmiModelOperations.getModuleReferences(yangModelCmHandle, targetModuleSetTag); final Collection<ModuleReference> newModuleReferences = cpsModuleService.identifyNewModuleReferences(allModuleReferences); - final Map<String, String> newYangResources = dmiModelOperations.getNewYangResourcesFromDmi(yangModelCmHandle, - newModuleReferences); + final Map<String, String> newYangResourceContentPerName = + dmiModelOperations.getNewYangResourcesFromDmi(yangModelCmHandle, targetModuleSetTag, newModuleReferences); log.debug("Module delta calculated for CM handle ID: {}. All references: {}. New modules: {}", - yangModelCmHandle.getId(), allModuleReferences, newYangResources.keySet()); - return new ModuleDelta(allModuleReferences, newYangResources); + yangModelCmHandle.getId(), allModuleReferences, newYangResourceContentPerName.keySet()); + return new ModuleDelta(allModuleReferences, newYangResourceContentPerName); } private void setCmHandleModuleSetTag(final YangModelCmHandle yangModelCmHandle, final String newModuleSetTag) { @@ -141,7 +147,7 @@ public class ModuleSyncService { jsonForUpdate, OffsetDateTime.now(), ContentType.JSON); } - private static String getSchemaSetName(final String cmHandleId, final String moduleSetTag) { + private static String getSchemaSetNameForModuleSetTag(final String cmHandleId, final String moduleSetTag) { return moduleSetTag.isEmpty() ? cmHandleId : moduleSetTag; } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncTasks.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncTasks.java index 40404b719a..f8f023e0f8 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncTasks.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncTasks.java @@ -24,8 +24,6 @@ import com.hazelcast.map.IMap; import java.util.Collection; import java.util.HashMap; import java.util.Map; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.atomic.AtomicInteger; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.onap.cps.api.exceptions.DataNodeNotFoundException; @@ -51,12 +49,8 @@ public class ModuleSyncTasks { * Perform module sync on a batch of cm handles. 
* * @param cmHandleIds a batch of cm handle ids to perform module sync on - * @param batchCounter the number of batches currently being processed, will be decreased when - * task is finished or fails - * @return completed future to handle post-processing */ - public CompletableFuture<Void> performModuleSync(final Collection<String> cmHandleIds, - final AtomicInteger batchCounter) { + public void performModuleSync(final Collection<String> cmHandleIds) { final Map<YangModelCmHandle, CmHandleState> cmHandleStatePerCmHandle = new HashMap<>(cmHandleIds.size()); try { for (final String cmHandleId : cmHandleIds) { @@ -74,11 +68,8 @@ public class ModuleSyncTasks { } } } finally { - batchCounter.getAndDecrement(); lcmEventsCmHandleStateHandler.updateCmHandleStateBatch(cmHandleStatePerCmHandle); - log.info("Processing module sync batch finished. {} batch(es) active.", batchCounter.get()); } - return CompletableFuture.completedFuture(null); } /** @@ -114,7 +105,7 @@ public class ModuleSyncTasks { compositeState.setLockReason(null); return CmHandleState.READY; } catch (final Exception e) { - log.warn("Processing of {} failed,reason : {}.", yangModelCmHandle.getId(), e.getMessage()); + log.warn("Processing of {} failed, reason: {}.", yangModelCmHandle.getId(), e.getMessage()); final LockReasonCategory lockReasonCategory = inUpgrade ? LockReasonCategory.MODULE_UPGRADE_FAILED : LockReasonCategory.MODULE_SYNC_FAILED; @@ -124,8 +115,8 @@ public class ModuleSyncTasks { } private void removeResetCmHandleFromModuleSyncMap(final String resetCmHandleId) { - moduleSyncStartedOnCmHandles.removeAsync(resetCmHandleId); - log.info("{} will be removed asynchronously from in progress map", resetCmHandleId); + moduleSyncStartedOnCmHandles.delete(resetCmHandleId); + log.info("{} removed from in progress map", resetCmHandleId); } private static boolean isCmHandleInAdvisedState(final YangModelCmHandle yangModelCmHandle) { diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncWatchdog.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncWatchdog.java index 32e1c49f17..12e013f223 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncWatchdog.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncWatchdog.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. 
* Modifications Copyright (C) 2022 Bell Canada * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -27,13 +27,10 @@ import com.hazelcast.map.IMap; import java.util.Collection; import java.util.HashSet; import java.util.concurrent.BlockingQueue; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; -import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle; -import org.onap.cps.ncmp.impl.utils.Sleeper; +import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Service; @@ -46,16 +43,11 @@ public class ModuleSyncWatchdog { private final BlockingQueue<String> moduleSyncWorkQueue; private final IMap<String, Object> moduleSyncStartedOnCmHandles; private final ModuleSyncTasks moduleSyncTasks; - private final AsyncTaskExecutor asyncTaskExecutor; + @Qualifier("cpsAndNcmpLock") private final IMap<String, String> cpsAndNcmpLock; - private final Sleeper sleeper; - private static final int MODULE_SYNC_BATCH_SIZE = 100; - private static final long PREVENT_CPU_BURN_WAIT_TIME_MILLIS = 10; + private static final int MODULE_SYNC_BATCH_SIZE = 300; private static final String VALUE_FOR_HAZELCAST_IN_PROGRESS_MAP = "Started"; - private static final long ASYNC_TASK_TIMEOUT_IN_MILLISECONDS = TimeUnit.MINUTES.toMillis(5); - @Getter - private AtomicInteger batchCounter = new AtomicInteger(1); /** * Check DB for any cm handles in 'ADVISED' state. @@ -63,24 +55,17 @@ public class ModuleSyncWatchdog { * This method will only finish when there are no more 'ADVISED' cm handles in the DB. * This method is triggered on a configurable interval (ncmp.timers.advised-modules-sync.sleep-time-ms) */ - @Scheduled(initialDelayString = "${test.ncmp.timers.advised-modules-sync.initial-delay-ms:0}", + @Scheduled(initialDelayString = "${ncmp.timers.advised-modules-sync.initial-delay-ms:40000}", fixedDelayString = "${ncmp.timers.advised-modules-sync.sleep-time-ms:5000}") public void moduleSyncAdvisedCmHandles() { log.debug("Processing module sync watchdog waking up."); populateWorkQueueIfNeeded(); while (!moduleSyncWorkQueue.isEmpty()) { - if (batchCounter.get() <= asyncTaskExecutor.getAsyncTaskParallelismLevel()) { - final Collection<String> nextBatch = prepareNextBatch(); - log.info("Processing module sync batch of {}. {} batch(es) active.", - nextBatch.size(), batchCounter.get()); - if (!nextBatch.isEmpty()) { - asyncTaskExecutor.executeTask(() -> - moduleSyncTasks.performModuleSync(nextBatch, batchCounter), - ASYNC_TASK_TIMEOUT_IN_MILLISECONDS); - batchCounter.getAndIncrement(); - } - } else { - preventBusyWait(); + final Collection<String> nextBatch = prepareNextBatch(); + if (!nextBatch.isEmpty()) { + log.info("Processing module sync batch of {}. 1 batch(es) active.", nextBatch.size()); + moduleSyncTasks.performModuleSync(nextBatch); + log.info("Processing module sync batch finished. 
0 batch(es) active."); } } } @@ -153,13 +138,4 @@ public class ModuleSyncWatchdog { log.info("nextBatch size : {}", nextBatch.size()); return nextBatch; } - - private void preventBusyWait() { - try { - log.debug("Busy waiting now"); - sleeper.haveALittleRest(PREVENT_CPU_BURN_WAIT_TIME_MILLIS); - } catch (final InterruptedException e) { - Thread.currentThread().interrupt(); - } - } } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/CmHandleStateMonitor.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/CmHandleStateMonitor.java index 708508e9d8..3d8e8b6e31 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/CmHandleStateMonitor.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/CmHandleStateMonitor.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2025 Nordix Foundation. + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -31,12 +31,14 @@ import org.onap.cps.ncmp.api.inventory.models.CompositeState; import org.onap.cps.ncmp.impl.inventory.CmHandleQueryService; import org.onap.cps.ncmp.impl.inventory.sync.lcm.LcmEventsCmHandleStateHandlerImpl.CmHandleTransitionPair; import org.onap.cps.ncmp.utils.events.NcmpInventoryModelOnboardingFinishedEvent; +import org.springframework.context.annotation.DependsOn; import org.springframework.context.event.EventListener; import org.springframework.scheduling.annotation.Async; import org.springframework.stereotype.Component; @Component @RequiredArgsConstructor +@DependsOn("adminCacheConfig") @Slf4j public class CmHandleStateMonitor { private static final String METRIC_POSTFIX = "CmHandlesCount"; diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCmHandleStateHandler.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCmHandleStateHandler.java index 69409bbbca..9d4f956259 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCmHandleStateHandler.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCmHandleStateHandler.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -26,7 +26,7 @@ import org.onap.cps.ncmp.api.inventory.models.CmHandleState; import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle; /** - * The implementation of it should handle the persisting of composite state and delegate the request to publish the + * The implementation of it should handle the persisting of composite state and delegate the request to send the * corresponding lcm event. */ public interface LcmEventsCmHandleStateHandler { @@ -41,7 +41,7 @@ public interface LcmEventsCmHandleStateHandler { /** * Sets the initial state of cmHandles to ADVISED. 
* - * @param yangModelCmHandles List of Yang Model Cm Handle. + * @param yangModelCmHandles List of Yang Model Cm Handles. */ void initiateStateAdvised(Collection<YangModelCmHandle> yangModelCmHandles); } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCmHandleStateHandlerAsyncHelper.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCmHandleStateHandlerAsyncHelper.java index a53c902683..6717f8f7c5 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCmHandleStateHandlerAsyncHelper.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCmHandleStateHandlerAsyncHelper.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -35,30 +35,31 @@ import org.springframework.stereotype.Service; @RequiredArgsConstructor public class LcmEventsCmHandleStateHandlerAsyncHelper { - private final LcmEventsCreator lcmEventsCreator; - private final LcmEventsService lcmEventsService; + private final LcmEventsProducerHelper lcmEventsProducerHelper; + private final LcmEventsProducer lcmEventsProducer; /** - * Publish LcmEvent in batches and in asynchronous manner. + * Send LcmEvent in batches and in asynchronous manner. * * @param cmHandleTransitionPairs Pair of existing and modified cm handle represented as YangModelCmHandle */ @Async("notificationExecutor") - public void publishLcmEventBatchAsynchronously(final Collection<CmHandleTransitionPair> cmHandleTransitionPairs) { - cmHandleTransitionPairs.forEach(cmHandleTransitionPair -> publishLcmEvent( + public void sendLcmEventBatchAsynchronously(final Collection<CmHandleTransitionPair> cmHandleTransitionPairs) { + cmHandleTransitionPairs.forEach(cmHandleTransitionPair -> sendLcmEvent( toNcmpServiceCmHandle(cmHandleTransitionPair.getTargetYangModelCmHandle()), toNcmpServiceCmHandle(cmHandleTransitionPair.getCurrentYangModelCmHandle()))); } - private void publishLcmEvent(final NcmpServiceCmHandle targetNcmpServiceCmHandle, - final NcmpServiceCmHandle existingNcmpServiceCmHandle) { + private void sendLcmEvent(final NcmpServiceCmHandle targetNcmpServiceCmHandle, + final NcmpServiceCmHandle existingNcmpServiceCmHandle) { final String cmHandleId = targetNcmpServiceCmHandle.getCmHandleId(); final LcmEventHeader lcmEventHeader = - lcmEventsCreator.populateLcmEventHeader(cmHandleId, targetNcmpServiceCmHandle, + lcmEventsProducerHelper.populateLcmEventHeader(cmHandleId, targetNcmpServiceCmHandle, existingNcmpServiceCmHandle); final LcmEvent lcmEvent = - lcmEventsCreator.populateLcmEvent(cmHandleId, targetNcmpServiceCmHandle, existingNcmpServiceCmHandle); - lcmEventsService.publishLcmEvent(cmHandleId, lcmEvent, lcmEventHeader); + lcmEventsProducerHelper.populateLcmEvent(cmHandleId, targetNcmpServiceCmHandle, + existingNcmpServiceCmHandle); + lcmEventsProducer.sendLcmEvent(cmHandleId, lcmEvent, lcmEventHeader); } private static NcmpServiceCmHandle toNcmpServiceCmHandle(final YangModelCmHandle yangModelCmHandle) { diff --git 
a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCmHandleStateHandlerImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCmHandleStateHandlerImpl.java index 5ca19886fa..5bbc3a0e7c 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCmHandleStateHandlerImpl.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCmHandleStateHandlerImpl.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2025 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -60,7 +60,7 @@ public class LcmEventsCmHandleStateHandlerImpl implements LcmEventsCmHandleState final Collection<CmHandleTransitionPair> cmHandleTransitionPairs = prepareCmHandleTransitionBatch(cmHandleStatePerCmHandle); persistCmHandleBatch(cmHandleTransitionPairs); - lcmEventsCmHandleStateHandlerAsyncHelper.publishLcmEventBatchAsynchronously(cmHandleTransitionPairs); + lcmEventsCmHandleStateHandlerAsyncHelper.sendLcmEventBatchAsynchronously(cmHandleTransitionPairs); cmHandleStateMonitor.updateCmHandleStateMetrics(cmHandleTransitionPairs); } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCreator.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCreator.java deleted file mode 100644 index 5137515758..0000000000 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCreator.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.ncmp.impl.inventory.sync.lcm; - -import java.util.UUID; -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.RequiredArgsConstructor; -import lombok.Setter; -import lombok.extern.slf4j.Slf4j; -import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle; -import org.onap.cps.ncmp.events.lcm.v1.Event; -import org.onap.cps.ncmp.events.lcm.v1.LcmEvent; -import org.onap.cps.ncmp.events.lcm.v1.LcmEventHeader; -import org.onap.cps.ncmp.events.lcm.v1.Values; -import org.onap.cps.ncmp.impl.utils.EventDateTimeFormatter; -import org.springframework.stereotype.Component; - - -/** - * LcmEventsCreator to create LcmEvent based on relevant operation. 
- */ -@Slf4j -@Component -@RequiredArgsConstructor -public class LcmEventsCreator { - - private final LcmEventHeaderMapper lcmEventHeaderMapper; - - /** - * Populate Lifecycle Management Event. - * - * @param cmHandleId cm handle identifier - * @param targetNcmpServiceCmHandle target ncmp service cmhandle - * @param existingNcmpServiceCmHandle existing ncmp service cmhandle - * @return Populated LcmEvent - */ - public LcmEvent populateLcmEvent(final String cmHandleId, final NcmpServiceCmHandle targetNcmpServiceCmHandle, - final NcmpServiceCmHandle existingNcmpServiceCmHandle) { - return createLcmEvent(cmHandleId, targetNcmpServiceCmHandle, existingNcmpServiceCmHandle); - } - - /** - * Populate Lifecycle Management Event Header. - * - * @param cmHandleId cm handle identifier - * @param targetNcmpServiceCmHandle target ncmp service cmhandle - * @param existingNcmpServiceCmHandle existing ncmp service cmhandle - * @return Populated LcmEventHeader - */ - public LcmEventHeader populateLcmEventHeader(final String cmHandleId, - final NcmpServiceCmHandle targetNcmpServiceCmHandle, - final NcmpServiceCmHandle existingNcmpServiceCmHandle) { - return createLcmEventHeader(cmHandleId, targetNcmpServiceCmHandle, existingNcmpServiceCmHandle); - } - - private LcmEvent createLcmEvent(final String cmHandleId, final NcmpServiceCmHandle targetNcmpServiceCmHandle, - final NcmpServiceCmHandle existingNcmpServiceCmHandle) { - final LcmEventType lcmEventType = - LcmEventsCreatorHelper.determineEventType(targetNcmpServiceCmHandle, existingNcmpServiceCmHandle); - final LcmEvent lcmEvent = lcmEventHeader(cmHandleId, lcmEventType); - lcmEvent.setEvent( - lcmEventPayload(cmHandleId, targetNcmpServiceCmHandle, existingNcmpServiceCmHandle, lcmEventType)); - return lcmEvent; - } - - private LcmEventHeader createLcmEventHeader(final String cmHandleId, - final NcmpServiceCmHandle targetNcmpServiceCmHandle, - final NcmpServiceCmHandle existingNcmpServiceCmHandle) { - final LcmEventType lcmEventType = - LcmEventsCreatorHelper.determineEventType(targetNcmpServiceCmHandle, existingNcmpServiceCmHandle); - final LcmEvent lcmEventWithHeaderInformation = lcmEventHeader(cmHandleId, lcmEventType); - return lcmEventHeaderMapper.toLcmEventHeader(lcmEventWithHeaderInformation); - } - - private Event lcmEventPayload(final String eventCorrelationId, final NcmpServiceCmHandle targetNcmpServiceCmHandle, - final NcmpServiceCmHandle existingNcmpServiceCmHandle, final LcmEventType lcmEventType) { - final Event event = new Event(); - event.setCmHandleId(eventCorrelationId); - event.setAlternateId(targetNcmpServiceCmHandle.getAlternateId()); - event.setModuleSetTag(targetNcmpServiceCmHandle.getModuleSetTag()); - event.setDataProducerIdentifier(targetNcmpServiceCmHandle.getDataProducerIdentifier()); - final CmHandleValuesHolder cmHandleValuesHolder = - LcmEventsCreatorHelper.determineEventValues(targetNcmpServiceCmHandle, existingNcmpServiceCmHandle, - lcmEventType); - event.setOldValues(cmHandleValuesHolder.getOldValues()); - event.setNewValues(cmHandleValuesHolder.getNewValues()); - - return event; - } - - private LcmEvent lcmEventHeader(final String eventCorrelationId, final LcmEventType lcmEventType) { - final LcmEvent lcmEvent = new LcmEvent(); - lcmEvent.setEventId(UUID.randomUUID().toString()); - lcmEvent.setEventCorrelationId(eventCorrelationId); - lcmEvent.setEventTime(EventDateTimeFormatter.getCurrentIsoFormattedDateTime()); - lcmEvent.setEventSource("org.onap.ncmp"); - lcmEvent.setEventType(lcmEventType.getEventType()); - 
lcmEvent.setEventSchema("org.onap.ncmp:cmhandle-lcm-event"); - lcmEvent.setEventSchemaVersion("1.0"); - return lcmEvent; - } - - @NoArgsConstructor - @Getter - @Setter - static class CmHandleValuesHolder { - - private Values oldValues; - private Values newValues; - } - -} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsService.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsProducer.java index 192667175e..521da0cb83 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsService.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsProducer.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -28,7 +28,7 @@ import java.util.List; import java.util.Map; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import org.onap.cps.events.EventsPublisher; +import org.onap.cps.events.EventsProducer; import org.onap.cps.ncmp.events.lcm.v1.LcmEvent; import org.onap.cps.ncmp.events.lcm.v1.LcmEventHeader; import org.onap.cps.ncmp.events.lcm.v1.Values; @@ -38,18 +38,18 @@ import org.springframework.kafka.KafkaException; import org.springframework.stereotype.Service; /** - * LcmEventsService to call the publisher and publish on the dedicated topic. + * LcmEventsProducer to call the producer and send on the dedicated topic. */ @Slf4j @Service @RequiredArgsConstructor -public class LcmEventsService { +public class LcmEventsProducer { - private static final Tag TAG_METHOD = Tag.of("method", "publishLcmEvent"); - private static final Tag TAG_CLASS = Tag.of("class", LcmEventsService.class.getName()); + private static final Tag TAG_METHOD = Tag.of("method", "sendLcmEvent"); + private static final Tag TAG_CLASS = Tag.of("class", LcmEventsProducer.class.getName()); private static final String UNAVAILABLE_CM_HANDLE_STATE = "N/A"; - private final EventsPublisher<LcmEvent> eventsPublisher; + private final EventsProducer<LcmEvent> eventsProducer; private final JsonObjectMapper jsonObjectMapper; private final MeterRegistry meterRegistry; @@ -60,23 +60,23 @@ public class LcmEventsService { private boolean notificationsEnabled; /** - * Publishes an LCM event to the dedicated topic with optional notification headers. - * Capture and log KafkaException If an error occurs while publishing the event to Kafka + * Sends an LCM event to the dedicated topic with optional notification headers. 
+ * Capture and log KafkaException If an error occurs while sending the event to Kafka * * @param cmHandleId Cm Handle Id associated with the LCM event - * @param lcmEvent The LCM event object to be published + * @param lcmEvent The LCM event object to be sent * @param lcmEventHeader Optional headers associated with the LCM event */ - public void publishLcmEvent(final String cmHandleId, final LcmEvent lcmEvent, final LcmEventHeader lcmEventHeader) { + public void sendLcmEvent(final String cmHandleId, final LcmEvent lcmEvent, final LcmEventHeader lcmEventHeader) { if (notificationsEnabled) { final Timer.Sample timerSample = Timer.start(meterRegistry); try { final Map<String, Object> lcmEventHeadersMap = jsonObjectMapper.convertToValueType(lcmEventHeader, Map.class); - eventsPublisher.publishEvent(topicName, cmHandleId, lcmEventHeadersMap, lcmEvent); + eventsProducer.sendEvent(topicName, cmHandleId, lcmEventHeadersMap, lcmEvent); } catch (final KafkaException e) { - log.error("Unable to publish message to topic : {} and cause : {}", topicName, e.getMessage()); + log.error("Unable to send message to topic : {} and cause : {}", topicName, e.getMessage()); } finally { recordMetrics(lcmEvent, timerSample); } @@ -96,8 +96,8 @@ public class LcmEventsService { final String newCmHandleState = extractCmHandleStateValue(lcmEvent.getEvent().getNewValues()); tags.add(Tag.of("newCmHandleState", newCmHandleState)); - timerSample.stop(Timer.builder("cps.ncmp.lcm.events.publish") - .description("Time taken to publish a LCM event") + timerSample.stop(Timer.builder("cps.ncmp.lcm.events.send") + .description("Time taken to send a LCM event") .tags(tags) .register(meterRegistry)); } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCreatorHelper.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsProducerHelper.java index e4fb5c3ce8..bc12b22481 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCreatorHelper.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsProducerHelper.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,30 +30,78 @@ import com.google.common.collect.Maps; import java.util.HashMap; import java.util.List; import java.util.Map; -import lombok.AccessLevel; +import java.util.UUID; +import lombok.Getter; import lombok.NoArgsConstructor; +import lombok.RequiredArgsConstructor; +import lombok.Setter; import lombok.extern.slf4j.Slf4j; import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle; +import org.onap.cps.ncmp.events.lcm.v1.Event; +import org.onap.cps.ncmp.events.lcm.v1.LcmEvent; +import org.onap.cps.ncmp.events.lcm.v1.LcmEventHeader; import org.onap.cps.ncmp.events.lcm.v1.Values; +import org.onap.cps.ncmp.impl.utils.EventDateTimeFormatter; +import org.springframework.stereotype.Component; /** - * LcmEventsCreatorHelper has helper methods to create LcmEvent. - * Determine the lcm event type i.e create,update and delete. - * Based on lcm event type create the LcmEvent payload. 
+ * LcmEventsProducerHelper to create LcmEvent based on relevant operation. */ @Slf4j -@NoArgsConstructor(access = AccessLevel.PRIVATE) -public class LcmEventsCreatorHelper { +@Component +@RequiredArgsConstructor +public class LcmEventsProducerHelper { + + private final LcmEventHeaderMapper lcmEventHeaderMapper; + + /** + * Populate Lifecycle Management Event. + * + * @param cmHandleId cm handle identifier + * @param targetNcmpServiceCmHandle target ncmp service cmhandle + * @param existingNcmpServiceCmHandle existing ncmp service cmhandle + * @return Populated LcmEvent + */ + public LcmEvent populateLcmEvent(final String cmHandleId, final NcmpServiceCmHandle targetNcmpServiceCmHandle, + final NcmpServiceCmHandle existingNcmpServiceCmHandle) { + return createLcmEvent(cmHandleId, targetNcmpServiceCmHandle, existingNcmpServiceCmHandle); + } /** - * Determining the event type based on the composite state. + * Populate Lifecycle Management Event Header. * - * @param targetNcmpServiceCmHandle target ncmpServiceCmHandle - * @param existingNcmpServiceCmHandle existing ncmpServiceCmHandle - * @return Event Type + * @param cmHandleId cm handle identifier + * @param targetNcmpServiceCmHandle target ncmp service cmhandle + * @param existingNcmpServiceCmHandle existing ncmp service cmhandle + * @return Populated LcmEventHeader */ - public static LcmEventType determineEventType(final NcmpServiceCmHandle targetNcmpServiceCmHandle, + public LcmEventHeader populateLcmEventHeader(final String cmHandleId, + final NcmpServiceCmHandle targetNcmpServiceCmHandle, final NcmpServiceCmHandle existingNcmpServiceCmHandle) { + return createLcmEventHeader(cmHandleId, targetNcmpServiceCmHandle, existingNcmpServiceCmHandle); + } + + private LcmEvent createLcmEvent(final String cmHandleId, final NcmpServiceCmHandle targetNcmpServiceCmHandle, + final NcmpServiceCmHandle existingNcmpServiceCmHandle) { + final LcmEventType lcmEventType = + determineEventType(targetNcmpServiceCmHandle, existingNcmpServiceCmHandle); + final LcmEvent lcmEvent = lcmEventHeader(cmHandleId, lcmEventType); + lcmEvent.setEvent( + lcmEventPayload(cmHandleId, targetNcmpServiceCmHandle, existingNcmpServiceCmHandle, lcmEventType)); + return lcmEvent; + } + + private LcmEventHeader createLcmEventHeader(final String cmHandleId, + final NcmpServiceCmHandle targetNcmpServiceCmHandle, + final NcmpServiceCmHandle existingNcmpServiceCmHandle) { + final LcmEventType lcmEventType = + determineEventType(targetNcmpServiceCmHandle, existingNcmpServiceCmHandle); + final LcmEvent lcmEventWithHeaderInformation = lcmEventHeader(cmHandleId, lcmEventType); + return lcmEventHeaderMapper.toLcmEventHeader(lcmEventWithHeaderInformation); + } + + private static LcmEventType determineEventType(final NcmpServiceCmHandle targetNcmpServiceCmHandle, + final NcmpServiceCmHandle existingNcmpServiceCmHandle) { if (existingNcmpServiceCmHandle.getCompositeState() == null) { return CREATE; @@ -63,15 +111,7 @@ public class LcmEventsCreatorHelper { return UPDATE; } - /** - * Determine the cmhandle value difference pair.Contains the difference in the form of oldValues and newValues. 
- * - * @param targetNcmpServiceCmHandle target ncmpServiceCmHandle - * @param existingNcmpServiceCmHandle existing ncmpServiceCmHandle - * @param lcmEventType lcm event type - * @return Lcm Event Value difference pair - */ - public static LcmEventsCreator.CmHandleValuesHolder determineEventValues( + private static CmHandleValuesHolder determineEventValues( final NcmpServiceCmHandle targetNcmpServiceCmHandle, final NcmpServiceCmHandle existingNcmpServiceCmHandle, final LcmEventType lcmEventType) { @@ -80,13 +120,42 @@ public class LcmEventsCreatorHelper { } else if (UPDATE == lcmEventType) { return determineUpdateEventValues(targetNcmpServiceCmHandle, existingNcmpServiceCmHandle); } - return new LcmEventsCreator.CmHandleValuesHolder(); + return new CmHandleValuesHolder(); + + } + + private Event lcmEventPayload(final String eventCorrelationId, final NcmpServiceCmHandle targetNcmpServiceCmHandle, + final NcmpServiceCmHandle existingNcmpServiceCmHandle, final LcmEventType lcmEventType) { + final Event event = new Event(); + event.setCmHandleId(eventCorrelationId); + event.setAlternateId(targetNcmpServiceCmHandle.getAlternateId()); + event.setModuleSetTag(targetNcmpServiceCmHandle.getModuleSetTag()); + event.setDataProducerIdentifier(targetNcmpServiceCmHandle.getDataProducerIdentifier()); + final CmHandleValuesHolder cmHandleValuesHolder = + determineEventValues(targetNcmpServiceCmHandle, existingNcmpServiceCmHandle, + lcmEventType); + event.setOldValues(cmHandleValuesHolder.getOldValues()); + event.setNewValues(cmHandleValuesHolder.getNewValues()); + + return event; + } + private LcmEvent lcmEventHeader(final String eventCorrelationId, final LcmEventType lcmEventType) { + final LcmEvent lcmEvent = new LcmEvent(); + lcmEvent.setEventId(UUID.randomUUID().toString()); + lcmEvent.setEventCorrelationId(eventCorrelationId); + lcmEvent.setEventTime(EventDateTimeFormatter.getCurrentIsoFormattedDateTime()); + lcmEvent.setEventSource("org.onap.ncmp"); + lcmEvent.setEventType(lcmEventType.getEventType()); + lcmEvent.setEventSchema("org.onap.ncmp:cmhandle-lcm-event"); + lcmEvent.setEventSchemaVersion("1.0"); + return lcmEvent; } - private static LcmEventsCreator.CmHandleValuesHolder determineCreateEventValues( + + private static CmHandleValuesHolder determineCreateEventValues( final NcmpServiceCmHandle ncmpServiceCmHandle) { - final LcmEventsCreator.CmHandleValuesHolder cmHandleValuesHolder = new LcmEventsCreator.CmHandleValuesHolder(); + final CmHandleValuesHolder cmHandleValuesHolder = new CmHandleValuesHolder(); cmHandleValuesHolder.setNewValues(new Values()); cmHandleValuesHolder.getNewValues().setDataSyncEnabled(getDataSyncEnabledFlag(ncmpServiceCmHandle)); cmHandleValuesHolder.getNewValues() @@ -95,7 +164,7 @@ public class LcmEventsCreatorHelper { return cmHandleValuesHolder; } - private static LcmEventsCreator.CmHandleValuesHolder determineUpdateEventValues( + private static CmHandleValuesHolder determineUpdateEventValues( final NcmpServiceCmHandle targetNcmpServiceCmHandle, final NcmpServiceCmHandle existingNcmpServiceCmHandle) { @@ -107,7 +176,7 @@ public class LcmEventsCreatorHelper { arePublicCmHandlePropertiesEqual(targetNcmpServiceCmHandle.getPublicProperties(), existingNcmpServiceCmHandle.getPublicProperties()); - final LcmEventsCreator.CmHandleValuesHolder cmHandleValuesHolder = new LcmEventsCreator.CmHandleValuesHolder(); + final CmHandleValuesHolder cmHandleValuesHolder = new CmHandleValuesHolder(); if (hasDataSyncFlagEnabledChanged || hasCmHandleStateChanged || 
(!arePublicCmHandlePropertiesEqual)) { cmHandleValuesHolder.setOldValues(new Values()); @@ -134,8 +203,8 @@ public class LcmEventsCreatorHelper { } private static void setDataSyncEnabledFlag(final NcmpServiceCmHandle targetNcmpServiceCmHandle, - final NcmpServiceCmHandle existingNcmpServiceCmHandle, - final LcmEventsCreator.CmHandleValuesHolder cmHandleValuesHolder) { + final NcmpServiceCmHandle existingNcmpServiceCmHandle, + final CmHandleValuesHolder cmHandleValuesHolder) { cmHandleValuesHolder.getOldValues().setDataSyncEnabled(getDataSyncEnabledFlag(existingNcmpServiceCmHandle)); cmHandleValuesHolder.getNewValues().setDataSyncEnabled(getDataSyncEnabledFlag(targetNcmpServiceCmHandle)); @@ -143,8 +212,8 @@ public class LcmEventsCreatorHelper { } private static void setCmHandleStateChange(final NcmpServiceCmHandle targetNcmpServiceCmHandle, - final NcmpServiceCmHandle existingNcmpServiceCmHandle, - final LcmEventsCreator.CmHandleValuesHolder cmHandleValuesHolder) { + final NcmpServiceCmHandle existingNcmpServiceCmHandle, + final CmHandleValuesHolder cmHandleValuesHolder) { cmHandleValuesHolder.getOldValues() .setCmHandleState(mapCmHandleStateToLcmEventCmHandleState(existingNcmpServiceCmHandle)); cmHandleValuesHolder.getNewValues() @@ -152,8 +221,8 @@ public class LcmEventsCreatorHelper { } private static void setPublicCmHandlePropertiesChange(final NcmpServiceCmHandle targetNcmpServiceCmHandle, - final NcmpServiceCmHandle existingNcmpServiceCmHandle, - final LcmEventsCreator.CmHandleValuesHolder cmHandleValuesHolder) { + final NcmpServiceCmHandle existingNcmpServiceCmHandle, + final CmHandleValuesHolder cmHandleValuesHolder) { final Map<String, Map<String, String>> publicCmHandlePropertiesDifference = getPublicCmHandlePropertiesDifference(targetNcmpServiceCmHandle.getPublicProperties(), @@ -175,7 +244,7 @@ public class LcmEventsCreatorHelper { } private static boolean hasDataSyncEnabledFlagChanged(final NcmpServiceCmHandle targetNcmpServiceCmHandle, - final NcmpServiceCmHandle existingNcmpServiceCmHandle) { + final NcmpServiceCmHandle existingNcmpServiceCmHandle) { final Boolean targetDataSyncFlag = targetNcmpServiceCmHandle.getCompositeState().getDataSyncEnabled(); final Boolean existingDataSyncFlag = existingNcmpServiceCmHandle.getCompositeState().getDataSyncEnabled(); @@ -188,14 +257,14 @@ public class LcmEventsCreatorHelper { } private static boolean hasCmHandleStateChanged(final NcmpServiceCmHandle targetNcmpServiceCmHandle, - final NcmpServiceCmHandle existingNcmpServiceCmHandle) { + final NcmpServiceCmHandle existingNcmpServiceCmHandle) { return targetNcmpServiceCmHandle.getCompositeState().getCmHandleState() - != existingNcmpServiceCmHandle.getCompositeState().getCmHandleState(); + != existingNcmpServiceCmHandle.getCompositeState().getCmHandleState(); } private static boolean arePublicCmHandlePropertiesEqual(final Map<String, String> targetCmHandleProperties, - final Map<String, String> existingCmHandleProperties) { + final Map<String, String> existingCmHandleProperties) { if (targetCmHandleProperties.size() != existingCmHandleProperties.size()) { return false; } @@ -224,4 +293,14 @@ public class LcmEventsCreatorHelper { return oldAndNewPropertiesDifferenceMap; } + + @NoArgsConstructor + @Getter + @Setter + static class CmHandleValuesHolder { + + private Values oldValues; + private Values newValues; + } + } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/trustlevel/DeviceTrustLevelMessageConsumer.java 
b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/trustlevel/DeviceTrustLevelMessageConsumer.java index efcbb78ace..44befab574 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/trustlevel/DeviceTrustLevelMessageConsumer.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/trustlevel/DeviceTrustLevelMessageConsumer.java @@ -27,11 +27,13 @@ import org.apache.kafka.clients.consumer.ConsumerRecord; import org.onap.cps.ncmp.api.inventory.models.TrustLevel; import org.onap.cps.ncmp.events.trustlevel.DeviceTrustLevel; import org.onap.cps.ncmp.utils.events.CloudEventMapper; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; @Component @RequiredArgsConstructor +@ConditionalOnProperty(name = "notification.enabled", havingValue = "true", matchIfMissing = true) public class DeviceTrustLevelMessageConsumer { private static final String CLOUD_EVENT_ID_HEADER_NAME = "ce_id"; diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/trustlevel/TrustLevelManager.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/trustlevel/TrustLevelManager.java index f68bb3b543..944b5eb9a0 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/trustlevel/TrustLevelManager.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/trustlevel/TrustLevelManager.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -36,7 +36,7 @@ import org.onap.cps.ncmp.impl.dmi.DmiServiceNameResolver; import org.onap.cps.ncmp.impl.inventory.InventoryPersistence; import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle; import org.onap.cps.ncmp.impl.models.RequiredDmiService; -import org.onap.cps.ncmp.utils.events.CmAvcEventPublisher; +import org.onap.cps.ncmp.utils.events.InventoryEventProducer; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Service; @@ -52,7 +52,7 @@ public class TrustLevelManager { private final IMap<String, TrustLevel> trustLevelPerDmiPlugin; private final InventoryPersistence inventoryPersistence; - private final CmAvcEventPublisher cmAvcEventPublisher; + private final InventoryEventProducer inventoryEventProducer; private static final String AVC_CHANGED_ATTRIBUTE_NAME = "trustLevel"; private static final String AVC_NO_OLD_VALUE = null; @@ -68,7 +68,7 @@ public class TrustLevelManager { } /** - * Add cmHandles to the cache and publish notification for initial trust level of cmHandles if it is NONE. + * Add cmHandles to the cache and send notification for initial trust level of cmHandles if it is NONE. 
* * @param cmHandlesToBeCreated a list of cmHandles being created */ @@ -82,7 +82,7 @@ public class TrustLevelManager { } trustLevelPerCmHandleIdForCache.put(cmHandleId, initialTrustLevel); if (TrustLevel.NONE.equals(initialTrustLevel)) { - cmAvcEventPublisher.publishAvcEvent(cmHandleId, + inventoryEventProducer.sendAvcEvent(cmHandleId, AVC_CHANGED_ATTRIBUTE_NAME, AVC_NO_OLD_VALUE, initialTrustLevel.name()); @@ -92,7 +92,7 @@ public class TrustLevelManager { } /** - * Updates trust level of dmi plugin in the cache and publish notification for trust level of cmHandles if it + * Updates trust level of dmi plugin in the cache and sends notification for trust level of cmHandles if it * has changed. * * @param dmiServiceName dmi service name @@ -113,7 +113,7 @@ public class TrustLevelManager { } /** - * Updates trust level of device in the cache and publish notification for trust level of device if it has + * Updates trust level of device in the cache and send notification for trust level of device if it has * changed. * * @param cmHandleId cm handle id @@ -193,14 +193,14 @@ public class TrustLevelManager { final TrustLevel newEffectiveTrustLevel) { if (oldEffectiveTrustLevel.equals(newEffectiveTrustLevel)) { log.debug("The Cm Handle: {} has already the same trust level: {}", notificationCandidateCmHandleId, - newEffectiveTrustLevel); + newEffectiveTrustLevel); } else { log.info("The trust level for Cm Handle: {} is now: {} ", notificationCandidateCmHandleId, - newEffectiveTrustLevel); - cmAvcEventPublisher.publishAvcEvent(notificationCandidateCmHandleId, - AVC_CHANGED_ATTRIBUTE_NAME, - oldEffectiveTrustLevel.name(), - newEffectiveTrustLevel.name()); + newEffectiveTrustLevel); + inventoryEventProducer.sendAvcEvent(notificationCandidateCmHandleId, + AVC_CHANGED_ATTRIBUTE_NAME, + oldEffectiveTrustLevel.name(), + newEffectiveTrustLevel.name()); } } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/utils/AlternateIdMatcher.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/utils/AlternateIdMatcher.java index b97da2977a..3a0201e029 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/utils/AlternateIdMatcher.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/utils/AlternateIdMatcher.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,39 +20,46 @@ package org.onap.cps.ncmp.impl.utils; +import com.hazelcast.map.IMap; +import java.util.Map; import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.onap.cps.ncmp.api.exceptions.CmHandleNotFoundException; +import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle; import org.onap.cps.ncmp.exceptions.NoAlternateIdMatchFoundException; -import org.onap.cps.ncmp.impl.inventory.InventoryPersistence; -import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle; +import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Service; +@Slf4j @Service @RequiredArgsConstructor public class AlternateIdMatcher { - private final InventoryPersistence inventoryPersistence; + @Qualifier("cmHandleIdPerAlternateId") + private final IMap<String, String> cmHandleIdPerAlternateId; /** - * Get yang model cm handle that matches longest alternate id by removing elements + * Get cm handle that matches longest alternate id by removing elements * (as defined by the separator string) from right to left. * If alternate id contains a hash then all elements after that hash are ignored. * - * @param alternateId alternate ID - * @param separator a string that separates each element from the next. - * @return yang model cm handle + * @param alternateId alternate ID + * @param separator a string that separates each element from the next. + * @param cmHandlePerAlternateId all CM-handles by alternate ID + * @return ncmp service cm handle */ - public YangModelCmHandle getYangModelCmHandleByLongestMatchingAlternateId(final String alternateId, - final String separator) { + public NcmpServiceCmHandle getCmHandleByLongestMatchingAlternateId( + final String alternateId, final String separator, + final Map<String, NcmpServiceCmHandle> cmHandlePerAlternateId) { final String[] splitPath = alternateId.split("#", 2); String bestMatch = splitPath[0]; while (StringUtils.isNotEmpty(bestMatch)) { - try { - return inventoryPersistence.getYangModelCmHandleByAlternateId(bestMatch); - } catch (final CmHandleNotFoundException ignored) { - bestMatch = getParentPath(bestMatch, separator); + final NcmpServiceCmHandle ncmpServiceCmHandle = cmHandlePerAlternateId.get(bestMatch); + if (ncmpServiceCmHandle != null) { + return ncmpServiceCmHandle; } + bestMatch = getParentPath(bestMatch, separator); } throw new NoAlternateIdMatchFoundException(alternateId); } @@ -64,11 +71,15 @@ public class AlternateIdMatcher { * @return cm handle id string */ public String getCmHandleId(final String cmHandleReference) { - if (inventoryPersistence.isExistingCmHandleId(cmHandleReference)) { - return cmHandleReference; - } else { - return inventoryPersistence.getYangModelCmHandleByAlternateId(cmHandleReference).getId(); + final String cmHandleId = cmHandleIdPerAlternateId.get(cmHandleReference); + if (cmHandleId == null) { + if (cmHandleIdPerAlternateId.containsValue(cmHandleReference)) { + return cmHandleReference; + } else { + throw new CmHandleNotFoundException(cmHandleReference); + } } + return cmHandleId; } private String getParentPath(final String path, final String separator) { diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/init/AlternateIdCacheDataLoader.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/init/AlternateIdCacheDataLoader.java new file mode 100644 index 0000000000..0629c51d70 --- /dev/null +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/init/AlternateIdCacheDataLoader.java @@ 
-0,0 +1,75 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.init; + +import static org.onap.cps.api.parameters.FetchDescendantsOption.DIRECT_CHILDREN_ONLY; +import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DMI_REGISTRY_PARENT; + +import com.hazelcast.map.IMap; +import java.util.Collection; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.onap.cps.api.model.DataNode; +import org.onap.cps.ncmp.impl.inventory.CmHandleRegistrationService; +import org.onap.cps.ncmp.impl.inventory.InventoryPersistence; +import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle; +import org.onap.cps.ncmp.impl.utils.YangDataConverter; +import org.onap.cps.ncmp.utils.events.NcmpInventoryModelOnboardingFinishedEvent; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.event.EventListener; +import org.springframework.stereotype.Service; + +@Slf4j +@Service +@RequiredArgsConstructor +public class AlternateIdCacheDataLoader { + + private final InventoryPersistence inventoryPersistence; + private final CmHandleRegistrationService cmHandleRegistrationService; + + @Qualifier("cmHandleIdPerAlternateId") + private final IMap<String, String> cmHandleIdPerAlternateId; + + /** + * Method to initialise the Alternate ID Cache by querying the current inventory. + * This method is triggered by NcmpInventoryModelOnboardingFinishedEvent. 
+ * + * @param event the event that triggers the initialization + */ + @EventListener + public void populateCmHandleIdPerAlternateIdMap(final NcmpInventoryModelOnboardingFinishedEvent event) { + if (cmHandleIdPerAlternateId.isEmpty()) { + log.info("Populating Alternate ID map from inventory"); + final Collection<DataNode> dataNodes = inventoryPersistence.getDataNode( + NCMP_DMI_REGISTRY_PARENT, DIRECT_CHILDREN_ONLY).iterator().next().getChildDataNodes(); + final Collection<YangModelCmHandle> yangModelCmHandles = dataNodes.stream() + .map(YangDataConverter::toYangModelCmHandle).toList(); + addAlternateIdsToCache(yangModelCmHandles); + } + log.info("Alternate ID map has {} entries", cmHandleIdPerAlternateId.size()); + } + + + public void addAlternateIdsToCache(final Collection<YangModelCmHandle> yangModelCmHandles) { + cmHandleRegistrationService.addAlternateIdsToCache(yangModelCmHandles); + } + +} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/utils/events/CmAvcEventPublisher.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/utils/events/InventoryEventProducer.java index 2a9717cc1a..8f83e28a7c 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/utils/events/CmAvcEventPublisher.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/utils/events/InventoryEventProducer.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,12 +20,14 @@ package org.onap.cps.ncmp.utils.events; +import static org.onap.cps.ncmp.events.NcmpEventDataSchema.INVENTORY_EVENTS_V1; + import io.cloudevents.CloudEvent; import java.util.Collections; import java.util.HashMap; import java.util.Map; import lombok.RequiredArgsConstructor; -import org.onap.cps.events.EventsPublisher; +import org.onap.cps.events.EventsProducer; import org.onap.cps.ncmp.events.avc.ncmp_to_client.Avc; import org.onap.cps.ncmp.events.avc.ncmp_to_client.AvcEvent; import org.onap.cps.ncmp.events.avc.ncmp_to_client.Data; @@ -34,28 +36,32 @@ import org.springframework.stereotype.Service; @Service @RequiredArgsConstructor -public class CmAvcEventPublisher { +public class InventoryEventProducer { - private final EventsPublisher<CloudEvent> eventsPublisher; + private final EventsProducer<CloudEvent> eventsProducer; - @Value("${app.ncmp.avc.cm-events-topic}") - private String avcTopic; + @Value("${app.ncmp.avc.inventory-events-topic}") + private String ncmpInventoryEventsTopicName; /** - * Publish attribute value change event. + * Send attribute value change event. 
* * @param eventKey id of the cmHandle being registered */ - public void publishAvcEvent(final String eventKey, final String attributeName, - final String oldAttributeValue, final String newAttributeValue) { + public void sendAvcEvent(final String eventKey, final String attributeName, + final String oldAttributeValue, final String newAttributeValue) { final AvcEvent avcEvent = buildAvcEvent(attributeName, oldAttributeValue, newAttributeValue); final Map<String, String> extensions = createAvcEventExtensions(eventKey); - final CloudEvent avcCloudEvent = - NcmpEvent.builder().type(AvcEvent.class.getTypeName()) - .data(avcEvent).extensions(extensions).build().asCloudEvent(); + final CloudEvent avcCloudEvent = NcmpEvent.builder() + .type(AvcEvent.class.getTypeName()) + .dataSchema(INVENTORY_EVENTS_V1.getDataSchema()) + .data(avcEvent) + .extensions(extensions) + .build() + .asCloudEvent(); - eventsPublisher.publishCloudEvent(avcTopic, eventKey, avcCloudEvent); + eventsProducer.sendCloudEvent(ncmpInventoryEventsTopicName, eventKey, avcCloudEvent); } private AvcEvent buildAvcEvent(final String attributeName, @@ -78,4 +84,4 @@ public class CmAvcEventPublisher { extensions.put("correlationid", eventKey); return extensions; } -} +}
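For context on the publish-to-send rename in the hunks above, here is a minimal caller-side sketch of the renamed inventory producer API, in the spirit of the TrustLevelManager changes in this patch. InventoryEventProducer, its sendAvcEvent signature and the "trustLevel" attribute name are taken from the diff itself; the wrapper class and method names below are hypothetical.

    // Hedged sketch: a caller notifying a trust-level attribute change via the renamed producer.
    // Only InventoryEventProducer.sendAvcEvent(...) and the "trustLevel" attribute name come from the patch;
    // the surrounding class is illustrative.
    import org.onap.cps.ncmp.utils.events.InventoryEventProducer;

    class TrustLevelChangeNotifierSketch {

        private final InventoryEventProducer inventoryEventProducer;

        TrustLevelChangeNotifierSketch(final InventoryEventProducer inventoryEventProducer) {
            this.inventoryEventProducer = inventoryEventProducer;
        }

        void notifyTrustLevelChange(final String cmHandleId, final String oldValue, final String newValue) {
            // eventKey = cm handle id; the producer forwards it as the event key for the inventory events topic
            inventoryEventProducer.sendAvcEvent(cmHandleId, "trustLevel", oldValue, newValue);
        }
    }

Because the cm handle id is passed as the event key, all attribute-value-change events for one cm handle are keyed consistently on the inventory events topic.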
\ No newline at end of file diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/utils/events/NcmpEvent.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/utils/events/NcmpEvent.java index 8d3190eb00..6db962c068 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/utils/events/NcmpEvent.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/utils/events/NcmpEvent.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,12 +34,11 @@ import org.onap.cps.utils.JsonObjectMapper; @Builder public class NcmpEvent { + private String dataSchema; private Object data; private Map<String, String> extensions; private String type; @Builder.Default - private static final String CLOUD_EVENT_SPEC_VERSION_V1 = "1.0.0"; - @Builder.Default private static final String CLOUD_EVENT_SOURCE = "NCMP"; /** @@ -53,7 +52,7 @@ public class NcmpEvent { .withId(UUID.randomUUID().toString()) .withSource(URI.create(CLOUD_EVENT_SOURCE)) .withType(type) - .withDataSchema(URI.create("urn:cps:" + type + ":" + CLOUD_EVENT_SPEC_VERSION_V1)) + .withDataSchema(URI.create(dataSchema)) .withTime(EventDateTimeFormatter.toIsoOffsetDateTime( EventDateTimeFormatter.getCurrentIsoFormattedDateTime())) .withData(jsonObjectMapper.asJsonBytes(data)); diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/models/CmHandleRegistrationResponseSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/models/CmHandleRegistrationResponseSpec.groovy index 055a6e7448..c49af0f01b 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/models/CmHandleRegistrationResponseSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/models/CmHandleRegistrationResponseSpec.groovy @@ -1,7 +1,7 @@ /* * ============LICENSE_START======================================================= * Copyright (C) 2022 Bell Canada - * Modifications Copyright (C) 2023-2024 Nordix Foundation + * Modifications Copyright (C) 2023-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -26,7 +26,6 @@ import spock.lang.Specification import java.util.stream.Collectors -import static org.onap.cps.ncmp.api.NcmpResponseStatus.ALTERNATE_ID_ALREADY_ASSOCIATED import static org.onap.cps.ncmp.api.NcmpResponseStatus.CM_HANDLE_ALREADY_EXIST import static org.onap.cps.ncmp.api.NcmpResponseStatus.UNKNOWN_ERROR @@ -89,14 +88,14 @@ class CmHandleRegistrationResponseSpec extends Specification { } def 'Failed cm-handle registration based on cm handle id and registration error'() { - when: 'the failure response is created with "alternate id already associated" error code for 1 cm handle' + when: 'the failure response is created with "cm-handle already exists" error code for 1 cm handle' def cmHandleRegistrationResponses = - CmHandleRegistrationResponse.createFailureResponses(['ch 1'], ALTERNATE_ID_ALREADY_ASSOCIATED) + CmHandleRegistrationResponse.createFailureResponses(['ch 1'], CM_HANDLE_ALREADY_EXIST) then: 'the response with expected values' assert cmHandleRegistrationResponses[0].cmHandle == 'ch 1' assert cmHandleRegistrationResponses[0].status == Status.FAILURE - assert cmHandleRegistrationResponses[0].ncmpResponseStatus == ALTERNATE_ID_ALREADY_ASSOCIATED - assert cmHandleRegistrationResponses[0].errorText == 'alternate id already associated' + assert cmHandleRegistrationResponses[0].ncmpResponseStatus == CM_HANDLE_ALREADY_EXIST + assert cmHandleRegistrationResponses[0].errorText == 'cm-handle already exists' } } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/models/CompositeStateBuilderSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/models/CompositeStateBuilderSpec.groovy index 4d42e62025..8b04568239 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/models/CompositeStateBuilderSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/models/CompositeStateBuilderSpec.groovy @@ -24,7 +24,7 @@ package org.onap.cps.ncmp.api.inventory.models import org.onap.cps.ncmp.api.inventory.DataStoreSyncState import org.onap.cps.api.model.DataNode -import org.onap.cps.api.model.DataNodeBuilder +import org.onap.cps.impl.DataNodeBuilder import spock.lang.Specification import java.time.OffsetDateTime diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/config/CmHandleStateGaugeConfigSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/config/CmHandleStateGaugeConfigSpec.groovy new file mode 100644 index 0000000000..499f8b8e54 --- /dev/null +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/config/CmHandleStateGaugeConfigSpec.groovy @@ -0,0 +1,75 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.config + +import com.hazelcast.map.IMap +import io.micrometer.core.instrument.MeterRegistry +import io.micrometer.core.instrument.simple.SimpleMeterRegistry +import org.onap.cps.ncmp.impl.cache.AdminCacheConfig +import org.onap.cps.ncmp.impl.inventory.CmHandleQueryService +import org.onap.cps.ncmp.impl.inventory.sync.lcm.CmHandleStateMonitor +import org.spockframework.spring.SpringBean +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.boot.test.context.SpringBootTest +import org.springframework.test.context.ContextConfiguration +import org.springframework.test.context.TestPropertySource +import spock.lang.Specification + +@SpringBootTest(classes = [CmHandleStateGaugeConfig, CmHandleStateMonitor, AdminCacheConfig]) +@ContextConfiguration(classes = [CpsApplicationContext]) +@TestPropertySource(properties = ["hazelcast.mode.kubernetes.enabled=false"]) +class CmHandleStateGaugeConfigSpec extends Specification { + + @Autowired + CpsApplicationContext cpsApplicationContext + @SpringBean + CmHandleQueryService cmHandleQueryService = Mock() + @SpringBean + MeterRegistry meterRegistry = Mock() + + def cmHandlesByState = Mock(IMap) + def objectUnderTest = new CmHandleStateGaugeConfig(cmHandlesByState) + def simpleMeterRegistry = new SimpleMeterRegistry() + + def 'Creating gauges for cm handle states.'() { + given: 'cache returns a test value (123) for each state' + cmHandlesByState.get(_) >> 123 + when: 'gauges for each state are created' + objectUnderTest.advisedCmHandles(simpleMeterRegistry) + objectUnderTest.readyCmHandles(simpleMeterRegistry) + objectUnderTest.lockedCmHandles(simpleMeterRegistry) + objectUnderTest.deletingCmHandles(simpleMeterRegistry) + objectUnderTest.deletedCmHandles(simpleMeterRegistry) + then: 'each state has the correct value when queried' + ['ADVISED', 'READY', 'LOCKED', 'DELETING', 'DELETED'].each { state -> + def gaugeValue = simpleMeterRegistry.get(objectUnderTest.CM_HANDLE_STATE_GAUGE).tag('state',state).gauge().value() + assert gaugeValue == 123 + } + } + + def 'Controlling order of bean initialization'() { + when: 'cm handle state gauge config is retrieved' + cpsApplicationContext.getCpsBean(CmHandleStateGaugeConfig.class) + then: 'cm handle state monitor should already be available' + cpsApplicationContext.getCpsBean(CmHandleStateMonitor.class) != null + } + +} diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cache/HazelcastCacheConfigSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cache/HazelcastCacheConfigSpec.groovy index c08ff75a44..0026d7c4e6 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cache/HazelcastCacheConfigSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cache/HazelcastCacheConfigSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023 Nordix Foundation + * Copyright (C) 2023-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,8 +20,7 @@ package org.onap.cps.ncmp.impl.cache -import com.hazelcast.config.Config -import com.hazelcast.config.RestEndpointGroup + import com.hazelcast.core.Hazelcast import spock.lang.Specification @@ -60,17 +59,4 @@ class HazelcastCacheConfigSpec extends Specification { 'Set Config' | HazelcastCacheConfig.createSetConfig('my set config') || false | false | true } - def 'Verify Hazelcast Cluster Information'() { - given: 'a test configuration' - def testConfig = new Config() - when: 'cluster information is exposed' - objectUnderTest.exposeClusterInformation(testConfig) - then: 'REST api configs are enabled' - assert testConfig.networkConfig.restApiConfig.enabled - and: 'only health check and cluster read are enabled' - def enabledGroups = testConfig.networkConfig.restApiConfig.enabledGroups - assert enabledGroups.size() == 2 - assert enabledGroups.containsAll([RestEndpointGroup.CLUSTER_READ, RestEndpointGroup.HEALTH_CHECK]) - } - } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/cmavc/CmAvcEventConsumerSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/cmavc/CmAvcEventConsumerSpec.groovy index ad5f42ed94..b0a8f20ccb 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/cmavc/CmAvcEventConsumerSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/cmavc/CmAvcEventConsumerSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (c) 2023-2024 Nordix Foundation. + * Copyright (c) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -27,7 +27,7 @@ import io.cloudevents.kafka.CloudEventDeserializer import io.cloudevents.kafka.impl.KafkaHeaders import org.apache.kafka.clients.consumer.ConsumerRecord import org.apache.kafka.clients.consumer.KafkaConsumer -import org.onap.cps.events.EventsPublisher +import org.onap.cps.events.EventsProducer import org.onap.cps.ncmp.events.avc1_0_0.AvcEvent import org.onap.cps.ncmp.utils.TestUtils import org.onap.cps.ncmp.utils.events.MessagingBaseSpec @@ -41,16 +41,16 @@ import java.time.Duration import static org.onap.cps.ncmp.utils.events.CloudEventMapper.toTargetEvent -@SpringBootTest(classes = [EventsPublisher, CmAvcEventConsumer, ObjectMapper, JsonObjectMapper]) +@SpringBootTest(classes = [EventsProducer, CmAvcEventConsumer, ObjectMapper, JsonObjectMapper]) @Testcontainers @DirtiesContext class CmAvcEventConsumerSpec extends MessagingBaseSpec { @SpringBean - EventsPublisher eventsPublisher = new EventsPublisher<CloudEvent>(legacyEventKafkaTemplate, cloudEventKafkaTemplate) + EventsProducer eventsProducer = new EventsProducer<CloudEvent>(legacyEventKafkaTemplate, cloudEventKafkaTemplate) @SpringBean - CmAvcEventConsumer acvEventConsumer = new CmAvcEventConsumer(eventsPublisher) + CmAvcEventConsumer acvEventConsumer = new CmAvcEventConsumer(eventsProducer) @Autowired JsonObjectMapper jsonObjectMapper diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/dmi/DmiInEventProducerSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/dmi/DmiInEventProducerSpec.groovy index 34fa4549f5..5a101471f5 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/dmi/DmiInEventProducerSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/dmi/DmiInEventProducerSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (c) 2024 Nordix Foundation. + * Copyright (c) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -22,22 +22,27 @@ package org.onap.cps.ncmp.impl.cmnotificationsubscription.dmi import com.fasterxml.jackson.databind.ObjectMapper import io.cloudevents.CloudEvent -import org.onap.cps.events.EventsPublisher +import io.cloudevents.core.v1.CloudEventBuilder +import org.onap.cps.events.EventsProducer +import org.onap.cps.ncmp.config.CpsApplicationContext import org.onap.cps.ncmp.impl.cmnotificationsubscription_1_0_0.ncmp_to_dmi.CmHandle import org.onap.cps.ncmp.impl.cmnotificationsubscription_1_0_0.ncmp_to_dmi.Data import org.onap.cps.ncmp.impl.cmnotificationsubscription_1_0_0.ncmp_to_dmi.DmiInEvent import org.onap.cps.ncmp.utils.events.CloudEventMapper import org.onap.cps.utils.JsonObjectMapper +import org.springframework.boot.test.context.SpringBootTest +import org.springframework.test.context.ContextConfiguration import spock.lang.Specification +@SpringBootTest(classes = [ObjectMapper, JsonObjectMapper, CloudEventBuilder]) +@ContextConfiguration(classes = [CpsApplicationContext]) class DmiInEventProducerSpec extends Specification { - def mockEventsPublisher = Mock(EventsPublisher) - def jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) + def mockEventsProducer = Mock(EventsProducer) - def objectUnderTest = new DmiInEventProducer(mockEventsPublisher, jsonObjectMapper) + def objectUnderTest = new DmiInEventProducer(mockEventsProducer) - def 'Create and Publish Cm Notification Subscription DMI In Event'() { + def 'Create and Send Cm Notification Subscription DMI In Event'() { given: 'a cm subscription for a dmi plugin' def subscriptionId = 'test-subscription-id' def dmiPluginName = 'test-dmiplugin' @@ -45,10 +50,10 @@ class DmiInEventProducerSpec extends Specification { def dmiInEvent = new DmiInEvent(data: new Data(cmHandles: [new CmHandle(cmhandleId: 'test-1', privateProperties: [:])])) and: 'also we have target topic for dmiPlugin' objectUnderTest.dmiInEventTopic = 'dmiplugin-test-topic' - when: 'the event is published' - objectUnderTest.publishDmiInEvent(subscriptionId, dmiPluginName, eventType, dmiInEvent) + when: 'the event is sent' + objectUnderTest.sendDmiInEvent(subscriptionId, dmiPluginName, eventType, dmiInEvent) then: 'the event contains the required attributes' - 1 * mockEventsPublisher.publishCloudEvent(_, _, _) >> { + 1 * mockEventsProducer.sendCloudEvent(_, _, _) >> { args -> { assert args[0] == 'dmiplugin-test-topic' diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/dmi/DmiOutEventConsumerSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/dmi/DmiOutEventConsumerSpec.groovy index bcf8780873..2ab15d231c 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/dmi/DmiOutEventConsumerSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/dmi/DmiOutEventConsumerSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (c) 2024 Nordix Foundation. + * Copyright (c) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -108,8 +108,8 @@ class DmiOutEventConsumerSpec extends MessagingBaseSpec { expectedPersistenceCalls * mockDmiCacheHandler.persistIntoDatabasePerDmi('sub-1','test-dmi-plugin-name') and: 'correct number of calls to map the ncmp out event' 1 * mockNcmpOutEventMapper.toNcmpOutEvent('sub-1', _) - and: 'correct number of calls to publish the ncmp out event to client' - 1 * mockNcmpOutEventProducer.publishNcmpOutEvent('sub-1', 'subscriptionCreateResponse', _, false) + and: 'correct number of calls to send the ncmp out event to client' + 1 * mockNcmpOutEventProducer.sendNcmpOutEvent('sub-1', 'subscriptionCreateResponse', _, false) where: 'the following parameters are used' scenario | subscriptionStatus | statusCode || expectedCacheCalls | expectedPersistenceCalls 'Accepted Status' | ACCEPTED | '1' || 1 | 1 diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/CmSubscriptionHandlerImplSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/CmSubscriptionHandlerImplSpec.groovy index 1a54deea6a..e4321ff718 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/CmSubscriptionHandlerImplSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/CmSubscriptionHandlerImplSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (c) 2024 Nordix Foundation. + * Copyright (c) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -81,11 +81,11 @@ class CmSubscriptionHandlerImplSpec extends Specification { objectUnderTest.processSubscriptionCreateRequest(subscriptionId, predicates) then: 'the subscription cache handler is called once' 1 * mockDmiCacheHandler.add('test-id', _) - and: 'the events handler method to publish DMI event is called correct number of times with the correct parameters' - testDmiSubscriptionsPerDmi.size() * mockDmiInEventProducer.publishDmiInEvent( + and: 'the events handler method to send DMI event is called correct number of times with the correct parameters' + testDmiSubscriptionsPerDmi.size() * mockDmiInEventProducer.sendDmiInEvent( "test-id", "dmi-1", "subscriptionCreateRequest", testDmiInEvent) and: 'we schedule to send the response after configured time from the cache' - 1 * mockNcmpOutEventProducer.publishNcmpOutEvent('test-id', 'subscriptionCreateResponse', null, true) + 1 * mockNcmpOutEventProducer.sendNcmpOutEvent('test-id', 'subscriptionCreateResponse', null, true) } def 'Consume valid and Overlapping Cm Notification Subscription NcmpIn Event'() { @@ -105,7 +105,7 @@ class CmSubscriptionHandlerImplSpec extends Specification { and: 'the subscription details are updated in the cache' 1 * mockDmiCacheHandler.updateDmiSubscriptionStatus('test-id', _, ACCEPTED) and: 'we schedule to send the response after configured time from the cache' - 1 * mockNcmpOutEventProducer.publishNcmpOutEvent('test-id', 'subscriptionCreateResponse', null, true) + 1 * mockNcmpOutEventProducer.sendNcmpOutEvent('test-id', 'subscriptionCreateResponse', null, true) } def 'Consume valid and but non-unique CmNotificationSubscription create message'() { @@ -122,10 +122,10 @@ class CmSubscriptionHandlerImplSpec extends Specification { "test-id", _) >> testNcmpOutEvent when: 'the valid but non-unique event is consumed' objectUnderTest.processSubscriptionCreateRequest(subscriptionId, predicates) - then: 'the events handler method to publish DMI event is never called' - 0 * mockDmiInEventProducer.publishDmiInEvent(_, _, _, _) - and: 'the events handler method to publish NCMP out event is called once' - 1 * mockNcmpOutEventProducer.publishNcmpOutEvent('test-id', 'subscriptionCreateResponse', testNcmpOutEvent, false) + then: 'the events handler method to send DMI event is never called' + 0 * mockDmiInEventProducer.sendDmiInEvent(_, _, _, _) + and: 'the events handler method to send NCMP out event is called once' + 1 * mockNcmpOutEventProducer.sendNcmpOutEvent('test-id', 'subscriptionCreateResponse', testNcmpOutEvent, false) } def 'Consume valid CmNotificationSubscriptionNcmpInEvent delete message'() { @@ -140,11 +140,11 @@ class CmSubscriptionHandlerImplSpec extends Specification { 1 * mockInventoryPersistence.getYangModelCmHandle('ch-2') >> new YangModelCmHandle(dmiServiceName: 'dmi-2') when: 'the subscription delete request is processed' objectUnderTest.processSubscriptionDeleteRequest(subscriptionId) - then: 'the method to publish a dmi event is called with correct parameters' - 1 * mockDmiInEventProducer.publishDmiInEvent(subscriptionId,'dmi-1','subscriptionDeleteRequest',_) - 1 * mockDmiInEventProducer.publishDmiInEvent(subscriptionId,'dmi-2','subscriptionDeleteRequest',_) - and: 'the method to publish nmcp out event is called with correct parameters' - 1 * mockNcmpOutEventProducer.publishNcmpOutEvent(subscriptionId, 'subscriptionDeleteResponse', null, true) + then: 'the method to send a dmi event is called with correct parameters' + 1 * 
mockDmiInEventProducer.sendDmiInEvent(subscriptionId,'dmi-1','subscriptionDeleteRequest',_) + 1 * mockDmiInEventProducer.sendDmiInEvent(subscriptionId,'dmi-2','subscriptionDeleteRequest',_) + and: 'the method to send nmcp out event is called with correct parameters' + 1 * mockNcmpOutEventProducer.sendNcmpOutEvent(subscriptionId, 'subscriptionDeleteResponse', null, true) } def 'Delete a subscriber for fully overlapping subscriptions'() { @@ -161,12 +161,12 @@ class CmSubscriptionHandlerImplSpec extends Specification { 2 * mockDmiCacheHandler.get(subscriptionId) >> ['dmi-1':[:],'dmi-2':[:]] when: 'the subscription delete request is processed' objectUnderTest.processSubscriptionDeleteRequest(subscriptionId) - then: 'the method to publish a dmi event is never called' - 0 * mockDmiInEventProducer.publishDmiInEvent(_,_,_,_) + then: 'the method to send a dmi event is never called' + 0 * mockDmiInEventProducer.sendDmiInEvent(_,_,_,_) and: 'the cache handler is called to remove subscriber from database per dmi' 1 * mockDmiCacheHandler.removeFromDatabase('test-id', 'dmi-1') 1 * mockDmiCacheHandler.removeFromDatabase('test-id', 'dmi-2') - and: 'the method to publish nmcp out event is called with correct parameters' - 1 * mockNcmpOutEventProducer.publishNcmpOutEvent(subscriptionId, 'subscriptionDeleteResponse', null, false) + and: 'the method to send ncmp out event is called with correct parameters' + 1 * mockNcmpOutEventProducer.sendNcmpOutEvent(subscriptionId, 'subscriptionDeleteResponse', null, false) } } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/NcmpOutEventProducerSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/NcmpOutEventProducerSpec.groovy index afa2e9874e..09aebf3499 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/NcmpOutEventProducerSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/cmnotificationsubscription/ncmp/NcmpOutEventProducerSpec.groovy @@ -1,39 +1,64 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + package org.onap.cps.ncmp.impl.cmnotificationsubscription.ncmp import com.fasterxml.jackson.databind.ObjectMapper import io.cloudevents.CloudEvent -import org.onap.cps.events.EventsPublisher +import io.cloudevents.core.v1.CloudEventBuilder +import org.onap.cps.events.EventsProducer +import org.onap.cps.ncmp.config.CpsApplicationContext import org.onap.cps.ncmp.impl.cmnotificationsubscription.cache.DmiCacheHandler import org.onap.cps.ncmp.impl.cmnotificationsubscription_1_0_0.ncmp_to_client.Data import org.onap.cps.ncmp.impl.cmnotificationsubscription_1_0_0.ncmp_to_client.NcmpOutEvent import org.onap.cps.ncmp.utils.events.CloudEventMapper import org.onap.cps.utils.JsonObjectMapper +import org.springframework.boot.test.context.SpringBootTest +import org.springframework.test.context.ContextConfiguration import spock.lang.Specification +@SpringBootTest(classes = [ObjectMapper, JsonObjectMapper, CloudEventBuilder]) +@ContextConfiguration(classes = [CpsApplicationContext]) class NcmpOutEventProducerSpec extends Specification { - def mockEventsPublisher = Mock(EventsPublisher) - def jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) + def mockEventsProducer = Mock(EventsProducer) def mockNcmpOutEventMapper = Mock(NcmpOutEventMapper) def mockDmiCacheHandler = Mock(DmiCacheHandler) - def objectUnderTest = new NcmpOutEventProducer(mockEventsPublisher, jsonObjectMapper, mockNcmpOutEventMapper, mockDmiCacheHandler) + def objectUnderTest = new NcmpOutEventProducer(mockEventsProducer, mockNcmpOutEventMapper, mockDmiCacheHandler) def 'Create and #scenario Cm Notification Subscription NCMP out event'() { given: 'a cm subscription response for the client' def subscriptionId = 'test-subscription-id-2' def eventType = 'subscriptionCreateResponse' def ncmpOutEvent = new NcmpOutEvent(data: new Data(subscriptionId: 'test-subscription-id-2', acceptedTargets: ['ch-1', 'ch-2'])) - and: 'also we have target topic for publishing to client' + and: 'also we have target topic for sending to client' objectUnderTest.ncmpOutEventTopic = 'client-test-topic' and: 'a deadline to an event' objectUnderTest.dmiOutEventTimeoutInMs = 1000 - when: 'the event is published' - objectUnderTest.publishNcmpOutEvent(subscriptionId, eventType, ncmpOutEvent, eventPublishingTaskToBeScheduled) + when: 'the event is sent' + objectUnderTest.sendNcmpOutEvent(subscriptionId, eventType, ncmpOutEvent, eventPublishingTaskToBeScheduled) then: 'we conditionally wait for a while' Thread.sleep(delayInMs) then: 'the event contains the required attributes' - 1 * mockEventsPublisher.publishCloudEvent(_, _, _) >> { + 1 * mockEventsProducer.sendCloudEvent(_, _, _) >> { args -> { assert args[0] == 'client-test-topic' @@ -47,27 +72,27 @@ class NcmpOutEventProducerSpec extends Specification { } where: 'following scenarios are considered' scenario | delayInMs | eventPublishingTaskToBeScheduled - 'publish event now' | 0 | false - 'schedule and publish after the configured time ' | 1500 | true + 'send event now' | 0 | false + 'schedule and send after the configured time ' | 1500 | true } - def 'Schedule Cm Notification Subscription NCMP out event but later publish it on demand'() { + def 'Schedule Cm Notification Subscription NCMP out event but later send it on demand'() { given: 'a cm subscription response for the client' def subscriptionId = 'test-subscription-id-3' def eventType = 'subscriptionCreateResponse' def 
ncmpOutEvent = new NcmpOutEvent(data: new Data(subscriptionId: 'test-subscription-id-3', acceptedTargets: ['ch-2', 'ch-3'])) - and: 'also we have target topic for publishing to client' + and: 'also we have target topic for sending to client' objectUnderTest.ncmpOutEventTopic = 'client-test-topic' and: 'a deadline to an event' objectUnderTest.dmiOutEventTimeoutInMs = 1000 - when: 'the event is scheduled to be published' - objectUnderTest.publishNcmpOutEvent(subscriptionId, eventType, ncmpOutEvent, true) + when: 'the event is scheduled to be sent' + objectUnderTest.sendNcmpOutEvent(subscriptionId, eventType, ncmpOutEvent, true) then: 'we wait for 10ms and then we receive response from DMI' Thread.sleep(10) - and: 'we receive response from DMI so we publish the message on demand' - objectUnderTest.publishNcmpOutEvent(subscriptionId, eventType, ncmpOutEvent, false) + and: 'we receive response from DMI so we send the message on demand' + objectUnderTest.sendNcmpOutEvent(subscriptionId, eventType, ncmpOutEvent, false) then: 'the event contains the required attributes' - 1 * mockEventsPublisher.publishCloudEvent(_, _, _) >> { + 1 * mockEventsProducer.sendCloudEvent(_, _, _) >> { args -> { assert args[0] == 'client-test-topic' @@ -83,24 +108,23 @@ class NcmpOutEventProducerSpec extends Specification { 1 * mockDmiCacheHandler.removeAcceptedAndRejectedDmiSubscriptionEntries(subscriptionId) } - def 'No event published when NCMP out event is null'() { + def 'No event sent when NCMP out event is null'() { given: 'a cm subscription response for the client' def subscriptionId = 'test-subscription-id-3' def eventType = 'subscriptionCreateResponse' def ncmpOutEvent = null - and: 'also we have target topic for publishing to client' + and: 'also we have target topic for sending to client' objectUnderTest.ncmpOutEventTopic = 'client-test-topic' and: 'a deadline to an event' objectUnderTest.dmiOutEventTimeoutInMs = 1000 - when: 'the event is scheduled to be published' - objectUnderTest.publishNcmpOutEvent(subscriptionId, eventType, ncmpOutEvent, true) + when: 'the event is scheduled to be sent' + objectUnderTest.sendNcmpOutEvent(subscriptionId, eventType, ncmpOutEvent, true) then: 'we wait for 10ms and then we receive response from DMI' Thread.sleep(10) - and: 'we receive NO response from DMI so we publish the message on demand' - objectUnderTest.publishNcmpOutEvent(subscriptionId, eventType, ncmpOutEvent, false) - and: 'no event published' - 0 * mockEventsPublisher.publishCloudEvent(*_) + and: 'we receive NO response from DMI so we send the message on demand' + objectUnderTest.sendNcmpOutEvent(subscriptionId, eventType, ncmpOutEvent, false) + and: 'no event sent' + 0 * mockEventsProducer.sendCloudEvent(*_) } - } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/DmiDataOperationsSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/DmiDataOperationsSpec.groovy index 01a08e7bb5..93338c97ec 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/DmiDataOperationsSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/DmiDataOperationsSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation + * Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. 
* Modifications Copyright (C) 2022 Bell Canada * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -22,9 +22,7 @@ package org.onap.cps.ncmp.impl.data import com.fasterxml.jackson.databind.ObjectMapper -import org.onap.cps.api.exceptions.DataNodeNotFoundException -import org.onap.cps.api.exceptions.DataValidationException -import org.onap.cps.events.EventsPublisher +import org.onap.cps.events.EventsProducer import org.onap.cps.ncmp.api.data.models.CmResourceAddress import org.onap.cps.ncmp.api.data.models.DataOperationRequest import org.onap.cps.ncmp.api.exceptions.DmiClientRequestException @@ -59,7 +57,7 @@ import static org.onap.cps.ncmp.impl.models.RequiredDmiService.DATA import static org.onap.cps.ncmp.utils.events.CloudEventMapper.toTargetEvent @SpringBootTest -@ContextConfiguration(classes = [EventsPublisher, CpsApplicationContext, DmiProperties, DmiDataOperations, PolicyExecutor]) +@ContextConfiguration(classes = [EventsProducer, CpsApplicationContext, DmiProperties, DmiDataOperations, PolicyExecutor]) class DmiDataOperationsSpec extends DmiOperationsBaseSpec { def NO_TOPIC = null @@ -76,7 +74,7 @@ class DmiDataOperationsSpec extends DmiOperationsBaseSpec { DmiDataOperations objectUnderTest @SpringBean - EventsPublisher eventsPublisher = Stub() + EventsProducer eventsProducer = Stub() @SpringBean PolicyExecutor policyExecutor = Mock() @@ -86,6 +84,7 @@ class DmiDataOperationsSpec extends DmiOperationsBaseSpec { def 'call get resource data for #expectedDataStore from DMI without topic #scenario.'() { given: 'a cm handle for #cmHandleId' + alternateIdMatcher.getCmHandleId(cmHandleId) >> cmHandleId mockYangModelCmHandleRetrieval(dmiProperties) and: 'a positive response from DMI service when it is called with the expected parameters' def responseFromDmi = Mono.just(new ResponseEntity<Object>('{some-key:some-value}', HttpStatus.OK)) @@ -110,7 +109,7 @@ class DmiDataOperationsSpec extends DmiOperationsBaseSpec { def 'Execute (async) data operation from DMI service.'() { given: 'collection of yang model cm Handles and data operation request' - mockYangModelCmHandleCollectionRetrieval([yangModelCmHandleProperty]) + mockYangModelCmHandleRetrievalByCmHandleId([yangModelCmHandleProperty]) def dataOperationBatchRequestJsonData = TestUtils.getResourceFileContent('dataOperationRequest.json') def dataOperationRequest = spiedJsonObjectMapper.convertJsonString(dataOperationBatchRequestJsonData, DataOperationRequest.class) dataOperationRequest.dataOperationDefinitions[0].cmHandleReferences = [cmHandleId] @@ -127,13 +126,13 @@ class DmiDataOperationsSpec extends DmiOperationsBaseSpec { def 'Execute (async) data operation from DMI service with Exception.'() { given: 'collection of yang model cm Handles and data operation request' - mockYangModelCmHandleCollectionRetrieval([yangModelCmHandleProperty]) + mockYangModelCmHandleRetrievalByCmHandleId([yangModelCmHandleProperty]) def dataOperationBatchRequestJsonData = TestUtils.getResourceFileContent('dataOperationRequest.json') def dataOperationRequest = spiedJsonObjectMapper.convertJsonString(dataOperationBatchRequestJsonData, DataOperationRequest.class) dataOperationRequest.dataOperationDefinitions[0].cmHandleReferences = [cmHandleId] - and: 'the published cloud event will be captured' + and: 'the sent cloud event will be captured' def actualDataOperationCloudEvent = null - eventsPublisher.publishCloudEvent('my-topic-name', 'my-request-id', _) >> { args -> 
actualDataOperationCloudEvent = args[2] } + eventsProducer.sendCloudEvent('my-topic-name', 'my-request-id', _) >> { args -> actualDataOperationCloudEvent = args[2] } and: 'a DMI client request exception is thrown when DMI service is called' mockDmiRestClient.asynchronousPostOperationWithJsonData(*_) >> { Mono.error(new DmiClientRequestException(123, '', '', UNKNOWN_ERROR)) } when: 'attempt to get resource data for group of cm handles is invoked' @@ -206,33 +205,6 @@ class DmiDataOperationsSpec extends DmiOperationsBaseSpec { CmHandleState.ADVISED || true } - def 'Resolving cm handle references with cm handle id.'() { - given: 'a resource address with a cm handle id' - def cmResourceAddress = new CmResourceAddress('some store', 'cm-handle-id', 'some resource') - and: 'the given cm handle id is available in the inventory' - mockInventoryPersistence.getYangModelCmHandle('cm-handle-id') >> yangModelCmHandle - expect: 'resolving the cm handle id returns the cm handle' - assert objectUnderTest.resolveYangModelCmHandleFromCmHandleReference(cmResourceAddress) == yangModelCmHandle - } - - def 'Resolving cm handle references with alternate id #scenario.'() { - given: 'a resource with a alternate id' - def cmResourceAddress = new CmResourceAddress('some store', alternateId, 'some resource') - and: 'the alternate id cannot be found in the inventory directly and that results in an exception' - mockInventoryPersistence.getYangModelCmHandle(alternateId) >> { throw errorThrownDuringCmHandleIdSearch } - and: 'the alternate id can be matched to a cm handle id' - alternateIdMatcher.getCmHandleId(alternateId) >> 'cm-handle-id' - and: 'that cm handle id is available in the inventory' - mockInventoryPersistence.getYangModelCmHandle('cm-handle-id') >> yangModelCmHandle - expect: 'resolving that cm handle id returns the cm handle' - assert objectUnderTest.resolveYangModelCmHandleFromCmHandleReference(cmResourceAddress) == yangModelCmHandle - where: 'the following alternate ids are used' - scenario | alternateId | errorThrownDuringCmHandleIdSearch - 'alternate id with no special characters' | 'alternate-id' | new DataNodeNotFoundException('','') - 'alternate id with special characters' | 'alternate#id' | new DataValidationException('','') - } - - def extractDataValue(actualDataOperationCloudEvent) { return toTargetEvent(actualDataOperationCloudEvent, DataOperationEvent).data.responses[0] } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy index 4bcafe8c61..8ea73b672f 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (c) 2022-2024 Nordix Foundation. + * Copyright (c) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -24,7 +24,7 @@ import com.fasterxml.jackson.databind.ObjectMapper import org.apache.kafka.clients.consumer.KafkaConsumer import org.apache.kafka.common.serialization.StringDeserializer import org.mapstruct.factory.Mappers -import org.onap.cps.events.EventsPublisher +import org.onap.cps.events.EventsProducer import org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent import org.onap.cps.ncmp.event.model.NcmpAsyncRequestResponseEvent import org.onap.cps.ncmp.utils.TestUtils @@ -35,17 +35,16 @@ import org.springframework.beans.factory.annotation.Autowired import org.springframework.boot.test.context.SpringBootTest import org.springframework.test.annotation.DirtiesContext import org.testcontainers.spock.Testcontainers - import java.time.Duration -@SpringBootTest(classes = [EventsPublisher, AsyncRestRequestResponseEventConsumer, ObjectMapper, JsonObjectMapper]) +@SpringBootTest(classes = [EventsProducer, DmiAsyncRequestResponseEventConsumer, ObjectMapper, JsonObjectMapper]) @Testcontainers @DirtiesContext class NcmpAsyncRequestResponseEventProducerIntegrationSpec extends MessagingBaseSpec { @SpringBean - EventsPublisher cpsAsyncRequestResponseEventPublisher = - new EventsPublisher<NcmpAsyncRequestResponseEvent>(legacyEventKafkaTemplate, cloudEventKafkaTemplate); + EventsProducer cpsAsyncRequestResponseEventProducer = + new EventsProducer<NcmpAsyncRequestResponseEvent>(legacyEventKafkaTemplate, cloudEventKafkaTemplate); @SpringBean @@ -53,8 +52,8 @@ class NcmpAsyncRequestResponseEventProducerIntegrationSpec extends MessagingBase Mappers.getMapper(NcmpAsyncRequestResponseEventMapper.class) @SpringBean - AsyncRestRequestResponseEventConsumer ncmpAsyncRequestResponseEventConsumer = - new AsyncRestRequestResponseEventConsumer(cpsAsyncRequestResponseEventPublisher, + DmiAsyncRequestResponseEventConsumer dmiAsyncRequestResponseEventConsumer = + new DmiAsyncRequestResponseEventConsumer(cpsAsyncRequestResponseEventProducer, ncmpAsyncRequestResponseEventMapper) @Autowired @@ -69,7 +68,7 @@ class NcmpAsyncRequestResponseEventProducerIntegrationSpec extends MessagingBase def jsonData = TestUtils.getResourceFileContent('dmiAsyncRequestResponseEvent.json') def testEventSent = jsonObjectMapper.convertJsonString(jsonData, DmiAsyncRequestResponseEvent.class) when: 'the event is consumed' - ncmpAsyncRequestResponseEventConsumer.consumeAndForward(testEventSent) + dmiAsyncRequestResponseEventConsumer.consumeAndForward(testEventSent) and: 'the topic is polled' def records = legacyEventKafkaConsumer.poll(Duration.ofMillis(1500)) then: 'poll returns one record' diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/DataOperationEventConsumerSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/DataOperationEventConsumerSpec.groovy index afb594ab04..9c9768ab1f 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/DataOperationEventConsumerSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/DataOperationEventConsumerSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -29,7 +29,7 @@ import io.cloudevents.kafka.impl.KafkaHeaders import org.apache.kafka.clients.consumer.ConsumerRecord import org.apache.kafka.clients.consumer.KafkaConsumer import org.apache.kafka.common.header.internals.RecordHeaders -import org.onap.cps.events.EventsPublisher +import org.onap.cps.events.EventsProducer import org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent import org.onap.cps.ncmp.utils.TestUtils import org.onap.cps.ncmp.utils.events.MessagingBaseSpec @@ -45,16 +45,16 @@ import java.time.Duration import static org.onap.cps.ncmp.utils.events.CloudEventMapper.toTargetEvent -@SpringBootTest(classes = [EventsPublisher, DataOperationEventConsumer, RecordFilterStrategies, JsonObjectMapper, ObjectMapper]) +@SpringBootTest(classes = [EventsProducer, DataOperationEventConsumer, RecordFilterStrategies, JsonObjectMapper, ObjectMapper]) @Testcontainers @DirtiesContext class DataOperationEventConsumerSpec extends MessagingBaseSpec { @SpringBean - EventsPublisher asyncDataOperationEventPublisher = new EventsPublisher<CloudEvent>(legacyEventKafkaTemplate, cloudEventKafkaTemplate) + EventsProducer asyncDataOperationEventProducer = new EventsProducer<CloudEvent>(legacyEventKafkaTemplate, cloudEventKafkaTemplate) @SpringBean - DataOperationEventConsumer objectUnderTest = new DataOperationEventConsumer(asyncDataOperationEventPublisher) + DataOperationEventConsumer objectUnderTest = new DataOperationEventConsumer(asyncDataOperationEventProducer) @Autowired JsonObjectMapper jsonObjectMapper @@ -66,13 +66,13 @@ class DataOperationEventConsumerSpec extends MessagingBaseSpec { def static clientTopic = 'client-topic' def static dataOperationType = 'org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent' - def 'Consume and publish event to client specified topic'() { + def 'Consume and send event to client specified topic'() { given: 'consumer subscribing to client topic' cloudEventKafkaConsumer.subscribe([clientTopic]) and: 'consumer record for data operation event' def consumerRecordIn = createConsumerRecord(dataOperationType) - when: 'the data operation event is consumed and published to client specified topic' - objectUnderTest.consumeAndPublish(consumerRecordIn) + when: 'the data operation event is consumed and sent to client specified topic' + objectUnderTest.consumeAndSend(consumerRecordIn) and: 'the client specified topic is polled' def consumerRecordOut = cloudEventKafkaConsumer.poll(Duration.ofMillis(1500))[0] then: 'verify cloud compliant headers' @@ -84,7 +84,7 @@ class DataOperationEventConsumerSpec extends MessagingBaseSpec { assert KafkaHeaders.getParsedKafkaHeader(consumerRecordOutHeaders, 'ce_destination') == clientTopic and: 'map consumer record to expected event type' def dataOperationResponseEvent = toTargetEvent(consumerRecordOut.value(), DataOperationEvent.class) - and: 'verify published response data properties' + and: 'verify sent response data properties' def response = dataOperationResponseEvent.data.responses[0] response.operationId == 'some-operation-id' response.statusCode == 'any-success-status-code' diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/FilterStrategiesIntegrationSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/FilterStrategiesIntegrationSpec.groovy index 01d2a3666b..baca4450dd 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/FilterStrategiesIntegrationSpec.groovy +++ 
b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/FilterStrategiesIntegrationSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (c) 2023-2024 Nordix Foundation. + * Copyright (c) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,7 +21,7 @@ package org.onap.cps.ncmp.impl.data.async import io.cloudevents.core.builder.CloudEventBuilder -import org.onap.cps.events.EventsPublisher +import org.onap.cps.events.EventsProducer import org.onap.cps.ncmp.config.KafkaConfig import org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent import org.onap.cps.ncmp.utils.events.ConsumerBaseSpec @@ -35,14 +35,14 @@ import spock.util.concurrent.PollingConditions import java.util.concurrent.TimeUnit -@SpringBootTest(classes =[DataOperationEventConsumer, AsyncRestRequestResponseEventConsumer, RecordFilterStrategies, KafkaConfig]) +@SpringBootTest(classes =[DataOperationEventConsumer, DmiAsyncRequestResponseEventConsumer, RecordFilterStrategies, KafkaConfig]) @DirtiesContext @Testcontainers @EnableAutoConfiguration class FilterStrategiesIntegrationSpec extends ConsumerBaseSpec { @SpringBean - EventsPublisher mockEventsPublisher = Mock() + EventsProducer mockEventsProducer = Mock() @SpringBean NcmpAsyncRequestResponseEventMapper mapper = Stub() @@ -61,23 +61,23 @@ class FilterStrategiesIntegrationSpec extends ConsumerBaseSpec { then: 'wait a little for async processing of message (must wait to try to avoid false positives)' TimeUnit.MILLISECONDS.sleep(300) and: 'event is not consumed' - 0 * mockEventsPublisher.publishEvent(*_) + 0 * mockEventsProducer.sendEvent(*_) } def 'Legacy event consumer with valid legacy event.'() { given: 'a legacy event' DmiAsyncRequestResponseEvent legacyEvent = new DmiAsyncRequestResponseEvent(eventId:'legacyEventId', eventTarget:'legacyEventTarget') - and: 'a flag to track the publish event call' - def publishEventMethodCalled = false - and: 'the (mocked) events publisher will use the flag to indicate if it is called' - mockEventsPublisher.publishEvent(*_) >> { - publishEventMethodCalled = true + and: 'a flag to track the send event call' + def sendEventMethodCalled = false + and: 'the (mocked) events producer will use the flag to indicate if it is called' + mockEventsProducer.sendEvent(*_) >> { + sendEventMethodCalled = true } when: 'send the cloud event' legacyEventKafkaTemplate.send(topic, legacyEvent) then: 'the event is consumed by the (legacy) AsynRestRequest consumer' new PollingConditions().within(1) { - assert publishEventMethodCalled == true + assert sendEventMethodCalled == true } } @@ -87,20 +87,20 @@ class FilterStrategiesIntegrationSpec extends ConsumerBaseSpec { .withType(eventType) .withSource(URI.create('some-source')) .build() - and: 'a flag to track the publish event call' - def publishEventMethodCalled = false - and: 'the (mocked) events publisher will use the flag to indicate if it is called' - mockEventsPublisher.publishCloudEvent(*_) >> { - publishEventMethodCalled = true + and: 'a flag to track the sent event call' + def sendEventMethodCalled = false + and: 'the (mocked) events producer will use the flag to indicate if it is called' + mockEventsProducer.sendCloudEvent(*_) >> { + sendEventMethodCalled = true } when: 'send the cloud 
event' cloudEventKafkaTemplate.send(topic, cloudEvent) then: 'the event has only been forwarded for the correct type' new PollingConditions(initialDelay: 0.3).within(1) { - assert publishEventMethodCalled == expectCallToPublishEventMethod + assert sendEventMethodCalled == expectCallToSendEventMethod } where: 'the following event types are used' - eventType || expectCallToPublishEventMethod + eventType || expectCallToSendEventMethod 'DataOperationEvent' || true 'other type' || false 'any type contain the word "DataOperationEvent"' || true @@ -114,7 +114,7 @@ class FilterStrategiesIntegrationSpec extends ConsumerBaseSpec { then: 'wait a little for async processing of message (must wait to try to avoid false positives)' TimeUnit.MILLISECONDS.sleep(300) and: 'the event is not processed by this consumer' - 0 * mockEventsPublisher.publishCloudEvent(*_) + 0 * mockEventsProducer.sendCloudEvent(*_) } } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/SerializationIntegrationSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/SerializationIntegrationSpec.groovy index 3fe7ec222e..65e8af8e48 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/SerializationIntegrationSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/SerializationIntegrationSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (c) 2023-2024 Nordix Foundation. + * Copyright (c) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -22,7 +22,7 @@ package org.onap.cps.ncmp.impl.data.async import com.fasterxml.jackson.databind.ObjectMapper import io.cloudevents.core.builder.CloudEventBuilder -import org.onap.cps.events.EventsPublisher +import org.onap.cps.events.EventsProducer import org.onap.cps.ncmp.config.KafkaConfig import org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent import org.onap.cps.ncmp.event.model.NcmpAsyncRequestResponseEvent @@ -39,14 +39,14 @@ import org.springframework.test.annotation.DirtiesContext import org.testcontainers.spock.Testcontainers import spock.util.concurrent.PollingConditions -@SpringBootTest(classes =[DataOperationEventConsumer, AsyncRestRequestResponseEventConsumer, RecordFilterStrategies, KafkaConfig]) +@SpringBootTest(classes =[DataOperationEventConsumer, DmiAsyncRequestResponseEventConsumer, RecordFilterStrategies, KafkaConfig]) @DirtiesContext @Testcontainers @EnableAutoConfiguration class SerializationIntegrationSpec extends ConsumerBaseSpec { @SpringBean - EventsPublisher mockEventsPublisher = Mock() + EventsProducer mockEventsProducer = Mock() @SpringBean NcmpAsyncRequestResponseEventMapper mapper = Stub() { toNcmpAsyncEvent(_) >> new NcmpAsyncRequestResponseEvent(eventId: 'my-event-id', eventTarget: 'some client topic')} @@ -60,34 +60,34 @@ class SerializationIntegrationSpec extends ConsumerBaseSpec { def 'Forwarding DataOperation Event Data.'() { given: 'a data operation cloud event' def cloudEvent = createCloudEvent() - and: 'a flag to track the publish cloud event call' - def publishCloudEventMethodCalled = false - and: 'the (mocked) events publisher will use the flag to indicate if it is called and will capture the cloud event' - mockEventsPublisher.publishCloudEvent('some client topic', 'some-correlation-id', cloudEvent) >> { - publishCloudEventMethodCalled = true + and: 'a flag to track the send cloud event call' + def sendCloudEventMethodCalled = false + and: 'the (mocked) events producer will use the flag to indicate if it is called and will capture the cloud event' + mockEventsProducer.sendCloudEvent('some client topic', 'some-correlation-id', cloudEvent) >> { + sendCloudEventMethodCalled = true } when: 'send the event' cloudEventKafkaTemplate.send(topic, cloudEvent) then: 'the event has been forwarded' new PollingConditions().within(1) { - assert publishCloudEventMethodCalled == true + assert sendCloudEventMethodCalled == true } } def 'Forwarding AsyncRestRequestResponse Event Data.'() { given: 'async request response legacy event' def dmiAsyncRequestResponseEvent = new DmiAsyncRequestResponseEvent(eventId: 'my-event-id',eventTarget: 'some client topic') - and: 'a flag to track the publish event call' - def publishEventMethodCalled = false - and: 'the (mocked) events publisher will use the flag to indicate if it is called and will capture the event' - mockEventsPublisher.publishEvent(*_) >> { - publishEventMethodCalled = true + and: 'a flag to track the send event call' + def sendEventMethodCalled = false + and: 'the (mocked) events producer will use the flag to indicate if it is called and will capture the event' + mockEventsProducer.sendEvent(*_) >> { + sendEventMethodCalled = true } when: 'send the event' legacyEventKafkaTemplate.send(topic, dmiAsyncRequestResponseEvent) then: 'the event has been forwarded' new PollingConditions().within(1) { - assert publishEventMethodCalled == true + assert sendEventMethodCalled == true } } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/utils/DmiDataOperationsHelperSpec.groovy 
b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/utils/DmiDataOperationsHelperSpec.groovy index 33b8490e5e..22ce4ab084 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/utils/DmiDataOperationsHelperSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/utils/DmiDataOperationsHelperSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,7 +25,7 @@ import io.cloudevents.CloudEvent import io.cloudevents.kafka.CloudEventDeserializer import io.cloudevents.kafka.impl.KafkaHeaders import org.apache.kafka.clients.consumer.KafkaConsumer -import org.onap.cps.events.EventsPublisher +import org.onap.cps.events.EventsProducer import org.onap.cps.ncmp.api.data.models.DataOperationRequest import org.onap.cps.ncmp.api.data.models.OperationType import org.onap.cps.ncmp.api.inventory.models.CompositeStateBuilder @@ -46,7 +46,7 @@ import static org.onap.cps.ncmp.api.inventory.models.CmHandleState.ADVISED import static org.onap.cps.ncmp.api.inventory.models.CmHandleState.READY import static org.onap.cps.ncmp.utils.events.CloudEventMapper.toTargetEvent -@ContextConfiguration(classes = [EventsPublisher, CpsApplicationContext]) +@ContextConfiguration(classes = [EventsProducer, CpsApplicationContext]) class DmiDataOperationsHelperSpec extends MessagingBaseSpec { def static clientTopic = 'my-topic-name' @@ -56,7 +56,7 @@ class DmiDataOperationsHelperSpec extends MessagingBaseSpec { JsonObjectMapper jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) @SpringBean - EventsPublisher eventPublisher = new EventsPublisher<CloudEvent>(legacyEventKafkaTemplate, cloudEventKafkaTemplate) + EventsProducer eventProducer = new EventsProducer<CloudEvent>(legacyEventKafkaTemplate, cloudEventKafkaTemplate) def 'Process per data operation request with #serviceName.'() { given: 'data operation request with 3 operations' @@ -106,7 +106,7 @@ class DmiDataOperationsHelperSpec extends MessagingBaseSpec { assert cmHandlesInRequestBody[0].moduleSetTag == 'module-set-tag1' } - def 'Process per data operation request with non-ready, non-existing cm handle and publish event to client specified topic'() { + def 'Process per data operation request with non-ready, non-existing cm handle and send event to client specified topic'() { given: 'consumer subscribing to client topic' def cloudEventKafkaConsumer = new KafkaConsumer<>(eventConsumerConfigProperties('test-1', CloudEventDeserializer)) cloudEventKafkaConsumer.subscribe([clientTopic]) @@ -129,7 +129,7 @@ class DmiDataOperationsHelperSpec extends MessagingBaseSpec { toTargetEvent(consumerRecordOut.value(), DataOperationEvent.class) and: 'data operation response event response size is 3' dataOperationResponseEvent.data.responses.size() == 3 - and: 'verify published data operation response as json string' + and: 'verify sent data operation response as json string' def dataOperationResponseEventJson = TestUtils.getResourceFileContent('dataOperationResponseEvent.json') jsonObjectMapper.asJsonString(dataOperationResponseEvent.data.responses) == dataOperationResponseEventJson } diff --git 
a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImplSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImplSpec.groovy index 4b536b9710..9f0e134466 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImplSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImplSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -29,6 +29,7 @@ import org.onap.cps.ncmp.api.datajobs.models.DataJobReadRequest import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest import org.onap.cps.ncmp.api.datajobs.models.ReadOperation import org.onap.cps.ncmp.api.datajobs.models.WriteOperation +import org.onap.cps.utils.JsonObjectMapper import org.slf4j.LoggerFactory import spock.lang.Specification @@ -36,8 +37,9 @@ class DataJobServiceImplSpec extends Specification { def mockWriteRequestExaminer = Mock(WriteRequestExaminer) def mockDmiSubJobRequestHandler = Mock(DmiSubJobRequestHandler) + def mockJsonObjectMapper = Mock(JsonObjectMapper) - def objectUnderTest = new DataJobServiceImpl(mockDmiSubJobRequestHandler, mockWriteRequestExaminer) + def objectUnderTest = new DataJobServiceImpl(mockDmiSubJobRequestHandler, mockWriteRequestExaminer, mockJsonObjectMapper) def myDataJobMetadata = new DataJobMetadata('', '', '') def authorization = 'my authorization header' @@ -45,7 +47,7 @@ class DataJobServiceImplSpec extends Specification { def logger = Spy(ListAppender<ILoggingEvent>) def setup() { - setupLogger() + setupLogger(Level.DEBUG) } def cleanup() { @@ -62,22 +64,32 @@ class DataJobServiceImplSpec extends Specification { assert loggingEvent.formattedMessage.contains('data job id for read operation is: my-job-id') } - def 'Write data-job request.'() { + def 'Write data-job request and verify logging when info enabled.'() { given: 'data job metadata and write request' def dataJobWriteRequest = new DataJobWriteRequest([new WriteOperation('', '', '', null)]) - and: 'a map of producer key and dmi 3gpp write operation' + and: 'a map of producer key and DMI 3GPP write operations' def dmiWriteOperationsPerProducerKey = [:] - when: 'write data job request is processed' + and: 'mocking the splitDmiWriteOperationsFromRequest method to return the expected data' + mockWriteRequestExaminer.splitDmiWriteOperationsFromRequest(_, _) >> dmiWriteOperationsPerProducerKey + and: 'mocking the sendRequestsToDmi method to simulate empty sub-job responses from the DMI request handler' + mockDmiSubJobRequestHandler.sendRequestsToDmi(authorization, 'my-job-id', myDataJobMetadata, dmiWriteOperationsPerProducerKey) >> [] + when: 'the write data job request is processed' objectUnderTest.writeDataJob(authorization, 'my-job-id', myDataJobMetadata, dataJobWriteRequest) then: 'the examiner service is called and a map is returned' 1 * mockWriteRequestExaminer.splitDmiWriteOperationsFromRequest('my-job-id', dataJobWriteRequest) >> dmiWriteOperationsPerProducerKey - and: 'the dmi request handler is called with the result from the examiner' - 1 * 
mockDmiSubJobRequestHandler.sendRequestsToDmi(authorization, 'my-job-id', myDataJobMetadata, dmiWriteOperationsPerProducerKey) + and: 'write operation details are logged at debug level' + with(logger.list.find { it.level == Level.DEBUG }) { + assert it.formattedMessage.contains("Initiating WRITE operation for Data Job ID: my-job-id") + } + and: 'number of operations are logged at info level' + with(logger.list.find { it.level == Level.INFO }) { + assert it.formattedMessage.contains("Data Job ID: my-job-id - Total operations received: 1") + } } - def setupLogger() { + def setupLogger(Level level) { def setupLogger = ((Logger) LoggerFactory.getLogger(DataJobServiceImpl.class)) - setupLogger.setLevel(Level.DEBUG) + setupLogger.setLevel(level) setupLogger.addAppender(logger) logger.start() } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DmiSubJobRequestHandlerSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DmiSubJobRequestHandlerSpec.groovy index 041fbd95ee..175fb1877b 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DmiSubJobRequestHandlerSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DmiSubJobRequestHandlerSpec.groovy @@ -25,16 +25,17 @@ class DmiSubJobRequestHandlerSpec extends Specification { given: 'a data job id, metadata and a map of producer keys and write operations to create a request' def dataJobId = 'some-job-id' def dataJobMetadata = new DataJobMetadata('d1', 't1', 't2') - def dmiWriteOperation = new DmiWriteOperation('p', 'operation', 'tag', null, 'o1', [:]) - def dmiWriteOperationsPerProducerKey = [new ProducerKey('dmi1', 'prod1'): [dmiWriteOperation]] + def dmiWriteOperation = new DmiWriteOperation('p', 'operation', 'tag', null, 'o1') + def dmiWriteOperationsPerProducerKey = [(new ProducerKey('dmi1', 'prod1')): [dmiWriteOperation]] def authorization = 'my authorization header' and: 'the dmi rest client will return a response (for the correct parameters)' - def responseEntity = new ResponseEntity<>(new SubJobWriteResponse('my-sub-job-id', 'dmi1', 'prod1'), HttpStatus.OK) - def expectedJson = '{"destination":"d1","dataAcceptType":"t1","dataContentType":"t2","dataProducerId":"prod1","dataJobId":"some-job-id","data":[{"path":"p","op":"operation","moduleSetTag":"tag","value":null,"operationId":"o1","privateProperties":{}}]}' + def responseAsKeyValuePairs = [subJobId:'my-sub-job-id'] + def responseEntity = new ResponseEntity<>(responseAsKeyValuePairs, HttpStatus.OK) + def expectedJson = '{"destination":"d1","dataAcceptType":"t1","dataContentType":"t2","dataProducerId":"prod1","dataJobId":"some-job-id","data":[{"path":"p","op":"operation","moduleSetTag":"tag","value":null,"operationId":"o1"}]}' mockDmiRestClient.synchronousPostOperationWithJsonData(RequiredDmiService.DATA, _, expectedJson, OperationType.CREATE, authorization) >> responseEntity when: 'sending request to DMI invoked' objectUnderTest.sendRequestsToDmi(authorization, dataJobId, dataJobMetadata, dmiWriteOperationsPerProducerKey) then: 'the result contains the expected sub-job id' - assert responseEntity.body.subJobId == 'my-sub-job-id' + assert responseEntity.body.get('subJobId') == 'my-sub-job-id' } } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/WriteRequestExaminerSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/WriteRequestExaminerSpec.groovy index 6aa84d1c7f..d051927b3d 100644 --- 
a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/WriteRequestExaminerSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/WriteRequestExaminerSpec.groovy @@ -23,24 +23,27 @@ package org.onap.cps.ncmp.impl.datajobs import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest import org.onap.cps.ncmp.api.datajobs.models.WriteOperation import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle -import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle +import org.onap.cps.ncmp.impl.inventory.ParameterizedCmHandleQueryService import org.onap.cps.ncmp.impl.utils.AlternateIdMatcher import spock.lang.Specification class WriteRequestExaminerSpec extends Specification { def mockAlternateIdMatcher = Mock(AlternateIdMatcher) - def objectUnderTest = new WriteRequestExaminer(mockAlternateIdMatcher) + def mockParameterizedCmHandleQueryService = Mock(ParameterizedCmHandleQueryService) + def objectUnderTest = new WriteRequestExaminer(mockAlternateIdMatcher, mockParameterizedCmHandleQueryService) def setup() { - def ch1 = new YangModelCmHandle(id: 'ch1', dmiServiceName: 'dmiA', dataProducerIdentifier: 'p1', dmiProperties: []) - def ch2 = new YangModelCmHandle(id: 'ch2', dmiServiceName: 'dmiA', dataProducerIdentifier: 'p1', dmiProperties: []) - def ch3 = new YangModelCmHandle(id: 'ch3', dmiServiceName: 'dmiA', dataProducerIdentifier: 'p2', dmiProperties: []) - def ch4 = new YangModelCmHandle(id: 'ch4', dmiServiceName: 'dmiB', dataProducerIdentifier: 'p1', dmiProperties: []) - mockAlternateIdMatcher.getYangModelCmHandleByLongestMatchingAlternateId('fdn1', '/') >> ch1 - mockAlternateIdMatcher.getYangModelCmHandleByLongestMatchingAlternateId('fdn2', '/') >> ch2 - mockAlternateIdMatcher.getYangModelCmHandleByLongestMatchingAlternateId('fdn3', '/') >> ch3 - mockAlternateIdMatcher.getYangModelCmHandleByLongestMatchingAlternateId('fdn4', '/') >> ch4 + def ch1 = new NcmpServiceCmHandle(cmHandleId: 'ch1', dmiServiceName: 'dmiA', moduleSetTag: 'someModuleSetTag', alternateId: 'fdn1', dataProducerIdentifier: 'p1') + def ch2 = new NcmpServiceCmHandle(cmHandleId: 'ch2', dmiServiceName: 'dmiA', moduleSetTag: 'someModuleSetTag', alternateId: 'fdn2', dataProducerIdentifier: 'p1') + def ch3 = new NcmpServiceCmHandle(cmHandleId: 'ch3', dmiServiceName: 'dmiA', moduleSetTag: 'someModuleSetTag', alternateId: 'fdn3', dataProducerIdentifier: 'p2') + def ch4 = new NcmpServiceCmHandle(cmHandleId: 'ch4', dmiServiceName: 'dmiB', moduleSetTag: 'someModuleSetTag', alternateId: 'fdn4', dataProducerIdentifier: 'p1') + def cmHandlePerAlternateId = ['fdn1': ch1, 'fdn2': ch2, 'fdn3': ch3, 'fdn4': ch4] + mockAlternateIdMatcher.getCmHandleByLongestMatchingAlternateId('fdn1', '/', cmHandlePerAlternateId) >> ch1 + mockAlternateIdMatcher.getCmHandleByLongestMatchingAlternateId('fdn2', '/', cmHandlePerAlternateId) >> ch2 + mockAlternateIdMatcher.getCmHandleByLongestMatchingAlternateId('fdn3', '/', cmHandlePerAlternateId) >> ch3 + mockAlternateIdMatcher.getCmHandleByLongestMatchingAlternateId('fdn4', '/', cmHandlePerAlternateId) >> ch4 + mockParameterizedCmHandleQueryService.getAllCmHandlesWithoutProperties() >> [ch1, ch2, ch3, ch4] } def 'Create a map of dmi write requests per producer key with #scenario.'() { @@ -83,9 +86,9 @@ class WriteRequestExaminerSpec extends Specification { def 'Validate the creation of a ProducerKey with correct dmiservicename.'() { given: 'yangModelCmHandles with service name: "#dmiServiceName" and data service name: "#dataServiceName"' - def 
yangModelCmHandle = YangModelCmHandle.toYangModelCmHandle(dmiServiceName, dataServiceName, '', new NcmpServiceCmHandle(cmHandleId: 'cm-handle-id-1'), '', '', 'dpi1') + def ncmpServiceCmHandle = new NcmpServiceCmHandle(dmiServiceName: dmiServiceName, dmiDataServiceName: dataServiceName, dataProducerIdentifier: 'dpi1') when: 'the ProducerKey is created' - def result = objectUnderTest.createProducerKey(yangModelCmHandle).toString() + def result = objectUnderTest.createProducerKey(ncmpServiceCmHandle).toString() then: 'we get the ProducerKey with the correct service name' assert result == expectedProducerKey where: 'the following services are registered' diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/dmi/DmiOperationsBaseSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/dmi/DmiOperationsBaseSpec.groovy index 1edee4e355..c5c1397048 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/dmi/DmiOperationsBaseSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/dmi/DmiOperationsBaseSpec.groovy @@ -59,9 +59,9 @@ abstract class DmiOperationsBaseSpec extends Specification { mockInventoryPersistence.getYangModelCmHandle(cmHandleId) >> yangModelCmHandle } - def mockYangModelCmHandleCollectionRetrieval(dmiProperties) { + def mockYangModelCmHandleRetrievalByCmHandleId(dmiProperties) { populateYangModelCmHandle(dmiProperties, '') - mockInventoryPersistence.getYangModelCmHandlesFromCmHandleReferences(_) >> [yangModelCmHandle] + mockInventoryPersistence.getYangModelCmHandles(_) >> [yangModelCmHandle] } def populateYangModelCmHandle(dmiProperties, moduleSetTag) { diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/dmi/DmiRestClientSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/dmi/DmiRestClientSpec.groovy index 4d47ef14a0..c968a32b3c 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/dmi/DmiRestClientSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/dmi/DmiRestClientSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation + * Copyright (C) 2021-2025 Nordix Foundation * Modifications Copyright (C) 2022 Bell Canada * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -168,13 +168,12 @@ class DmiRestClientSpec extends Specification { def 'DMI GET Operation for DMI Data Service '() { given: 'the Data web client returns a valid response entity for the expected parameters' mockDataServicesWebClient.get() >> mockRequestBody - def jsonNode = jsonObjectMapper.convertJsonString('{"status":"some status"}', JsonNode.class) - ((ObjectNode) jsonNode).put('status', 'some status') - mockResponse.bodyToMono(JsonNode.class) >> Mono.just(jsonNode) + def result = '{"status":"some status"}' + mockResponse.bodyToMono(String.class) >> Mono.just(result) when: 'GET operation is invoked for Data Service' def response = objectUnderTest.getDataJobStatus(urlTemplateParameters, NO_AUTH_HEADER).block() then: 'the response equals to the expected value' - assert response == 'some status' + assert response == '{"status":"some status"}' } def 'Get data job result from DMI.'() { diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/AlternateIdCheckerSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/AlternateIdCheckerSpec.groovy index 
aba9bf96bc..d1ced0d8c2 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/AlternateIdCheckerSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/AlternateIdCheckerSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================== - * Copyright (c) 2024 Nordix Foundation. + * Copyright (c) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,6 +20,7 @@ package org.onap.cps.ncmp.impl.inventory +import com.hazelcast.map.IMap import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle import org.onap.cps.api.exceptions.DataNodeNotFoundException @@ -28,53 +29,52 @@ import spock.lang.Specification class AlternateIdCheckerSpec extends Specification { def mockInventoryPersistenceService = Mock(InventoryPersistence) + def mockCmHandleIdPerAlternateId = Mock(IMap) - def objectUnderTest = new AlternateIdChecker(mockInventoryPersistenceService) + def objectUnderTest = new AlternateIdChecker(mockInventoryPersistenceService, mockCmHandleIdPerAlternateId) + + def setup() { + mockCmHandleIdPerAlternateId.getAll(_) >> [fdnInCache1:'ch-1',fdnInCache2:'ch-2'] + } def 'Check a batch of created cm handles with #scenario.'() { given: 'a batch of 2 new cm handles with alternate ids #alt1 and #alt2' def batch = [new NcmpServiceCmHandle(cmHandleId: 'ch-1', alternateId: alt1), new NcmpServiceCmHandle(cmHandleId: 'ch-2', alternateId: alt2)] - and: 'the database already contains cm handle(s) with these alternate ids: #altAlreadyInDb' - mockInventoryPersistenceService.getYangModelCmHandleByAlternateIds(_ as Collection<String>) >> - { args -> args[0].stream().filter(altId -> altAlreadyInDb.contains(altId)).map(altId -> new YangModelCmHandle(alternateId: altId)).toList() } when: 'the batch of new cm handles is checked' def result = objectUnderTest.getIdsOfCmHandlesWithRejectedAlternateId(batch, AlternateIdChecker.Operation.CREATE) then: 'the result contains ids of the rejected cm handles' assert result == expectedRejectedCmHandleIds where: 'the following alternate ids are used' - scenario | alt1 | alt2 | altAlreadyInDb || expectedRejectedCmHandleIds - 'blank alternate ids' | '' | '' | ['dont matter'] || [] - 'null alternate ids' | null | null | ['dont matter'] || [] - 'new alternate ids' | 'fdn1' | 'fdn2' | ['other fdn'] || [] - 'one already used alternate id' | 'fdn1' | 'fdn2' | ['fdn1'] || ['ch-1'] - 'two already used alternate ids' | 'fdn1' | 'fdn2' | ['fdn1', 'fdn2'] || ['ch-1', 'ch-2'] - 'duplicate alternate id in batch' | 'fdn1' | 'fdn1' | ['dont matter'] || ['ch-2'] + scenario | alt1 | alt2 || expectedRejectedCmHandleIds + 'blank alternate ids' | '' | '' || [] + 'null alternate ids' | null | null || [] + 'new alternate ids' | 'newFdn1' | 'newFdn2' || [] + 'one already used alternate id' | 'fdnInCache1'| 'newFdn' || ['ch-1'] + 'two already used alternate ids' | 'fdnInCache1'| 'fdnInCache2'|| ['ch-1', 'ch-2'] + 'duplicate alternate id in batch' | 'newFdn1' | 'newFdn1' || ['ch-2'] } def 'Check a batch of updates to existing cm handles with #scenario.'() { given: 'a batch of 1 existing cm handle to update alternate id to #proposedAlt' def batch = [new NcmpServiceCmHandle(cmHandleId: 'ch-1', alternateId: proposedAlt)] 
and: 'the database already contains a cm handle with alternate id: #altAlreadyInDb' - mockInventoryPersistenceService.getYangModelCmHandleByAlternateIds(_ as Collection<String>) >> - { args -> args[0].stream().filter(altId -> altAlreadyInDb == altId).map(altId -> new YangModelCmHandle(alternateId: altId)).toList() } mockInventoryPersistenceService.getYangModelCmHandle(_) >> new YangModelCmHandle(alternateId: altAlreadyInDb) when: 'the batch of cm handle updates is checked' def result = objectUnderTest.getIdsOfCmHandlesWithRejectedAlternateId(batch, AlternateIdChecker.Operation.UPDATE) then: 'the result contains ids of the rejected cm handles' assert result == expectedRejectedCmHandleIds where: 'the following parameters are used' - scenario | proposedAlt | altAlreadyInDb || expectedRejectedCmHandleIds - 'no alternate id' | 'fdn1' | '' || [] - 'used the same alternate id' | 'fdn1' | 'fdn1' || [] - 'used different alternate id' | 'otherFdn' | 'fdn1' || ['ch-1'] + scenario | proposedAlt | altAlreadyInDb|| expectedRejectedCmHandleIds + 'no alternate id' | 'newFdn1' | '' || [] + 'used the same alternate id' | 'fdnInCache1'| 'fdnInCache1' || [] + 'used different alternate id' | 'otherFdn' | 'fdnInCache1' || ['ch-1'] } def 'Check update of non-existing cm handle.'() { given: 'a batch of 1 non-existing cm handle to update alternate id' def batch = [new NcmpServiceCmHandle(cmHandleId: 'non-existing', alternateId: 'altId')] and: 'the database does not contain any cm handles' - mockInventoryPersistenceService.getYangModelCmHandleByAlternateIds(_) >> [] mockInventoryPersistenceService.getYangModelCmHandle(_) >> { throwDataNodeNotFoundException() } when: 'the batch of cm handle updates is checked' def result = objectUnderTest.getIdsOfCmHandlesWithRejectedAlternateId(batch, AlternateIdChecker.Operation.UPDATE) diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImplSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImplSpec.groovy index 811e4ea526..e978121644 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImplSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImplSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2023 TechMahindra Ltd. 
* ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -21,24 +21,24 @@ package org.onap.cps.ncmp.impl.inventory +import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DATASPACE_NAME +import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DMI_REGISTRY_ANCHOR +import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DMI_REGISTRY_PARENT +import static org.onap.cps.api.parameters.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS +import static org.onap.cps.api.parameters.FetchDescendantsOption.OMIT_DESCENDANTS + import com.hazelcast.config.Config import com.hazelcast.core.Hazelcast import com.hazelcast.instance.impl.HazelcastInstanceFactory import org.onap.cps.api.CpsDataService import org.onap.cps.api.CpsQueryService -import org.onap.cps.impl.utils.CpsValidator +import org.onap.cps.utils.CpsValidator import org.onap.cps.ncmp.api.inventory.DataStoreSyncState import org.onap.cps.ncmp.api.inventory.models.TrustLevel import org.onap.cps.ncmp.api.inventory.models.CmHandleState import org.onap.cps.api.model.DataNode import spock.lang.Specification -import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DATASPACE_NAME -import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DMI_REGISTRY_ANCHOR -import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DMI_REGISTRY_PARENT -import static org.onap.cps.api.parameters.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS -import static org.onap.cps.api.parameters.FetchDescendantsOption.OMIT_DESCENDANTS - class CmHandleQueryServiceImplSpec extends Specification { def mockCpsQueryService = Mock(CpsQueryService) @@ -51,7 +51,7 @@ class CmHandleQueryServiceImplSpec extends Specification { def static sampleDataNodes = [new DataNode(xpath: "/dmi-registry/cm-handles[@id='ch-1']"), new DataNode(xpath: "/dmi-registry/cm-handles[@id='ch-2']")] - def dataNodeWithPrivateField = '//additional-properties[@name=\"Contact3\" and @value=\"newemailforstore3@bookstore.com\"]/ancestor::cm-handles' + def cpsPathWithPrivateField = '//additional-properties[@name=\'Contact3\' and @value=\'newemailforstore3@bookstore.com\']/ancestor::cm-handles/@id' def static pnfDemo = createDataNode('PNFDemo') def static pnfDemo2 = createDataNode('PNFDemo2') @@ -86,21 +86,21 @@ class CmHandleQueryServiceImplSpec extends Specification { } def 'Query cm handles on trust level'() { - given: 'query properties for trust level COMPLETE' - def trustLevelPropertyQueryPairs = ['trustLevel' : TrustLevel.COMPLETE.toString()] + given: 'query properties for #trustLevel' + def trustLevelPropertyQueryPairs = ['trustLevel' : trustLevel.toString()] and: 'the dmi cache has been initialised and "knows" about my-dmi-plugin-identifier' - trustLevelPerDmiPlugin.put('my-dmi-plugin-identifier', TrustLevel.COMPLETE) + trustLevelPerDmiPlugin.put('my-dmi-plugin-identifier', trustLevel) and: 'the DataNodes queried for a given cpsPath are returned from the persistence service' mockResponses() when: 'the query is run' def result = objectUnderTest.queryCmHandlesByTrustLevel(trustLevelPropertyQueryPairs, outputAlternateId) then: 'the result contain trusted cmHandle reference' - assert result.size() == 1 - assert result[0] == expectedCmHandleReference + assert result as Set == expectedCmHandleReference as Set where: 'the following data is used' - senario | outputAlternateId | expectedCmHandleReference - 'output cmHandleId' | false | 'PNFDemo' - 'output AlternateId' | 
true | 'alt-PNFDemo' + senario | outputAlternateId | expectedCmHandleReference | trustLevel || resultSize + 'output cmHandleId for trustLevel Complete' | false | ['PNFDemo'] | TrustLevel.COMPLETE || 1 + 'output alternateId for trustLevel Complete'| true | ['alt-PNFDemo'] | TrustLevel.COMPLETE || 1 + 'output alternateIds for trustLevel None' | true | ['alt-PNFDemo2', 'alt-PNFDemo', 'alt-PNFDemo4']| TrustLevel.NONE || 3 } def 'Query CmHandles using empty public properties query pair.'() { @@ -119,7 +119,7 @@ class CmHandleQueryServiceImplSpec extends Specification { def 'Query CmHandles by a private field\'s value.'() { given: 'a data node exists with a certain additional-property' - mockCpsQueryService.queryDataNodes(_, _, dataNodeWithPrivateField, _) >> [pnfDemo5] + mockCpsQueryService.queryDataLeaf(_, _, cpsPathWithPrivateField, _) >> [pnfDemo5.getLeaves().get('id')] when: 'a query on CmHandle private properties is executed using a map' def result = objectUnderTest.queryCmHandleAdditionalProperties(['Contact3': 'newemailforstore3@bookstore.com'], false) then: 'one cm handle is returned' @@ -131,7 +131,7 @@ class CmHandleQueryServiceImplSpec extends Specification { def cmHandleState = CmHandleState.ADVISED and: 'the persistence service returns a list of data nodes' mockCpsQueryService.queryDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, - "//state[@cm-handle-state='ADVISED']", OMIT_DESCENDANTS) >> sampleDataNodes + "//state[@cm-handle-state='ADVISED']", OMIT_DESCENDANTS, 0) >> sampleDataNodes when: 'cm handles are fetched by state' def result = objectUnderTest.queryCmHandleIdsByState(cmHandleState) then: 'the returned result matches the result from the persistence service' @@ -143,8 +143,8 @@ class CmHandleQueryServiceImplSpec extends Specification { def cmHandleState = state and: 'the persistence service returns a list of data nodes' mockCpsDataService.getDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, - NCMP_DMI_REGISTRY_PARENT + '/cm-handles[@id=\'some-cm-handle\']/state', - OMIT_DESCENDANTS) >> [new DataNode(leaves: ['cm-handle-state': 'READY'])] + NCMP_DMI_REGISTRY_PARENT + '/cm-handles[@id=\'some-cm-handle\']/state', + OMIT_DESCENDANTS) >> [new DataNode(leaves: ['cm-handle-state': 'READY'])] when: 'cm handles are compared by state' def result = objectUnderTest.cmHandleHasState('some-cm-handle', cmHandleState) then: 'the returned result matches the expected result from the persistence service' @@ -160,8 +160,8 @@ class CmHandleQueryServiceImplSpec extends Specification { def cmHandleState = CmHandleState.READY and: 'cps data service returns a list of data nodes' mockCpsDataService.getDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, - NCMP_DMI_REGISTRY_PARENT + '/cm-handles[@id=\'some-cm-handle\']/state', - OMIT_DESCENDANTS) >> [new DataNode(leaves: ['cm-handle-state': 'READY'])] + NCMP_DMI_REGISTRY_PARENT + '/cm-handles[@id=\'some-cm-handle\']/state', + OMIT_DESCENDANTS) >> [new DataNode(leaves: ['cm-handle-state': 'READY'])] when: 'cm handles are fetched by state and id' def result = objectUnderTest.getCmHandleState('some-cm-handle') then: 'the returned result is a list of data nodes returned by cps data service' @@ -169,11 +169,9 @@ class CmHandleQueryServiceImplSpec extends Specification { } def 'Retrieve Cm Handles By Operational Sync State : UNSYNCHRONIZED'() { - given: 'a cm handle state to query' - def cmHandleState = CmHandleState.READY - and: 'cps data service returns a list of data nodes' + given: 'cps data service returns a list of data nodes' 
mockCpsQueryService.queryDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, - '//state/datastores/operational[@sync-state="'+'UNSYNCHRONIZED'+'"]/ancestor::cm-handles', OMIT_DESCENDANTS) >> sampleDataNodes + '//state/datastores/operational[@sync-state="'+'UNSYNCHRONIZED'+'"]/ancestor::cm-handles', OMIT_DESCENDANTS, 0) >> sampleDataNodes when: 'cm handles are fetched by the UNSYNCHRONIZED operational sync state' def result = objectUnderTest.queryCmHandlesByOperationalSyncState(DataStoreSyncState.UNSYNCHRONIZED) then: 'the returned result is a list of data nodes returned by cps data service' @@ -186,7 +184,7 @@ class CmHandleQueryServiceImplSpec extends Specification { def cpsPath = "//state[@cm-handle-state='LOCKED']" and: 'cps data service returns a valid data node for cm handle ancestor' mockCpsQueryService.queryDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, - cpsPath + '/ancestor::cm-handles', INCLUDE_ALL_DESCENDANTS) + cpsPath + '/ancestor::cm-handles', INCLUDE_ALL_DESCENDANTS, 0) >> Arrays.asList(cmHandleDataNode) when: 'get cm handles by cps path is invoked' def result = objectUnderTest.queryCmHandleAncestorsByCpsPath(cpsPath, INCLUDE_ALL_DESCENDANTS) @@ -200,8 +198,8 @@ class CmHandleQueryServiceImplSpec extends Specification { def cpsPath = "//cm-handles[@alternate-id='1']" and: 'cps data service returns a valid data node' mockCpsQueryService.queryDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, - cpsPath, INCLUDE_ALL_DESCENDANTS) - >> Arrays.asList(cmHandleDataNode) + cpsPath, INCLUDE_ALL_DESCENDANTS, 0) + >> Arrays.asList(cmHandleDataNode) when: 'get cm handles by cps path is invoked' def result = objectUnderTest.queryCmHandleAncestorsByCpsPath(cpsPath, INCLUDE_ALL_DESCENDANTS) then: 'the returned result is a list of data nodes returned by cps data service' @@ -223,30 +221,75 @@ class CmHandleQueryServiceImplSpec extends Specification { 'output is for cm handle ids' | false || ['PNFDemo', 'PNFDemo2', 'PNFDemo4'] } - def 'Get all alternateIds by dmi plugin identifier'() { - given: 'the DataNodes queried for a given cpsPath are returned from the persistence service.' 
+ def 'Get all cm handle references when #scenario'() { + given: 'the query service returns all references' mockResponses() - when: 'cm Handles are fetched for a given dmi plugin identifier' - def result = objectUnderTest.getCmHandleReferencesMapByDmiPluginIdentifier('my-dmi-plugin-identifier').values() - then: 'result is the correct size' - assert result.size() == 3 - and: 'result contains the correct alternate Ids' - assert result.containsAll('alt-PNFDemo', 'alt-PNFDemo2', 'alt-PNFDemo4') + when: 'the all cm hande references is retrieved with #scenario' + def result = objectUnderTest.getAllCmHandleReferences(outputAlternateId) + then: 'result contains all the correct cm handle references' + result.containsAll(expectedResult) + where: + scenario | outputAlternateId || expectedResult + 'output is alternate ids' | true || ['alt-PNFDemo', 'alt-PNFDemo2', 'alt-PNFDemo3', 'alt-PNFDemo4', 'alt-PNFDemo5'] + 'output is cm handle ids' | false || ['PNFDemo', 'PNFDemo2', 'PNFDemo3', 'PNFDemo4', 'PNFDemo5'] + } + + def 'Get all cm handle references by cps path'() { + when: 'the all cm handle references is retrieved via cps path' + objectUnderTest.getCmHandleReferencesByCpsPath(sampleCpsPath, outputAlternateId) + then: 'query service to query data leaf is called once with the correct cps path as parameter' + 1 * mockCpsQueryService.queryDataLeaf(_, _, expectedCpsPathForQuery,_) + where: + scenario | sampleCpsPath | outputAlternateId || expectedCpsPathForQuery + 'cps path suffixes with cm-handles and outputs alternateId' | '/some/path/ending/in/cm-handles' | true || '/some/path/ending/in/cm-handles/@alternate-id' + 'cps path suffixes without cm-handles and outputs alternateId'| '/some/path/NotEnding/incmhandles'| true || '/some/path/NotEnding/incmhandles/ancestor::cm-handles/@alternate-id' + 'cps path suffixes with cm-handles and outputs cmHandleId' | '/some/path/ending/in/cm-handles' | false || '/some/path/ending/in/cm-handles/@id' + 'cps path suffixes without cm-handles and outputs cmhandleId' | '/some/path/NotEnding/incmhandles'| false || '/some/path/NotEnding/incmhandles/ancestor::cm-handles/@id' + } void mockResponses() { - mockCpsQueryService.queryDataNodes(_, _, '//public-properties[@name=\"Contact\" and @value=\"newemailforstore@bookstore.com\"]/ancestor::cm-handles', _) >> [pnfDemo, pnfDemo2, pnfDemo4] - mockCpsQueryService.queryDataNodes(_, _, '//public-properties[@name=\"wont_match\" and @value=\"wont_match\"]/ancestor::cm-handles', _) >> [] - mockCpsQueryService.queryDataNodes(_, _, '//public-properties[@name=\"Contact2\" and @value=\"newemailforstore2@bookstore.com\"]/ancestor::cm-handles', _) >> [pnfDemo4] - mockCpsQueryService.queryDataNodes(_, _, '//public-properties[@name=\"Contact2\" and @value=\"\"]/ancestor::cm-handles', _) >> [] + + mockCpsQueryService.queryDataLeaf(_, _, '//public-properties[@name=\'Contact\' and @value=\'newemailforstore@bookstore.com\']/ancestor::cm-handles/@id', _) >> [pnfDemo.getLeaves().get('id'), pnfDemo2.getLeaves().get('id'), pnfDemo4.getLeaves().get('id')] + mockCpsQueryService.queryDataLeaf(_, _, '//public-properties[@name=\'wont_match\' and @value=\'wont_match\']/ancestor::cm-handles/@id', _) >> [] + mockCpsQueryService.queryDataLeaf(_, _, '//public-properties[@name=\'Contact2\' and @value=\'newemailforstore2@bookstore.com\']/ancestor::cm-handles/@alternate-id', _) >> [pnfDemo4.getLeaves().get('alternate-id')] + mockCpsQueryService.queryDataLeaf(_, _, '//public-properties[@name=\'Contact\' and 
@value=\'newemailforstore@bookstore.com\']/ancestor::cm-handles/@alternate-id', _) >> [pnfDemo.getLeaves().get('alternate-id'), pnfDemo2.getLeaves().get('alternate-id'), pnfDemo4.getLeaves().get('alternate-id')] + mockCpsQueryService.queryDataLeaf(_, _, '//public-properties[@name=\'Contact2\' and @value=\'\']/ancestor::cm-handles/@id', _) >> [] mockCpsQueryService.queryDataNodes(_, _, '//state[@cm-handle-state=\"READY\"]/ancestor::cm-handles', _) >> [pnfDemo, pnfDemo3] mockCpsQueryService.queryDataNodes(_, _, '//state[@cm-handle-state=\"LOCKED\"]/ancestor::cm-handles', _) >> [pnfDemo2, pnfDemo4] mockCpsQueryService.queryDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, '/dmi-registry/cm-handles[@dmi-service-name=\'my-dmi-plugin-identifier\']', OMIT_DESCENDANTS) >> [pnfDemo, pnfDemo2] mockCpsQueryService.queryDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, '/dmi-registry/cm-handles[@dmi-data-service-name=\'my-dmi-plugin-identifier\']', OMIT_DESCENDANTS) >> [pnfDemo, pnfDemo4] mockCpsQueryService.queryDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, '/dmi-registry/cm-handles[@dmi-model-service-name=\'my-dmi-plugin-identifier\']', OMIT_DESCENDANTS) >> [pnfDemo2, pnfDemo4] + + + mockCpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, '/dmi-registry/cm-handles[@dmi-service-name=\'my-dmi-plugin-identifier\']/@id', _) >> [pnfDemo.getLeaves().get('id'), pnfDemo2.getLeaves().get('id')] + mockCpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, '/dmi-registry/cm-handles[@dmi-data-service-name=\'my-dmi-plugin-identifier\']/@id', _) >> [pnfDemo.getLeaves().get('id'), pnfDemo4.getLeaves().get('id')] + mockCpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, '/dmi-registry/cm-handles[@dmi-model-service-name=\'my-dmi-plugin-identifier\']/@id', _) >> [pnfDemo2.getLeaves().get('id'), pnfDemo4.getLeaves().get('id')] + + mockCpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, '/dmi-registry/cm-handles[@dmi-service-name=\'my-dmi-plugin-identifier\']/@alternate-id', _) >> [pnfDemo.getLeaves().get('alternate-id'), pnfDemo2.getLeaves().get('alternate-id')] + mockCpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, '/dmi-registry/cm-handles[@dmi-data-service-name=\'my-dmi-plugin-identifier\']/@alternate-id', _) >> [pnfDemo.getLeaves().get('alternate-id'), pnfDemo4.getLeaves().get('alternate-id')] + mockCpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, '/dmi-registry/cm-handles[@dmi-model-service-name=\'my-dmi-plugin-identifier\']/@alternate-id', _) >> [pnfDemo2.getLeaves().get('alternate-id'), pnfDemo4.getLeaves().get('alternate-id')] + mockCpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, '/dmi-registry/cm-handles[@id=\'PNFDemo\']/@alternate-id', _) >> [pnfDemo.getLeaves().get('alternate-id')] + mockCpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, '/dmi-registry/cm-handles[@id=\'PNFDemo2\' or @id=\'PNFDemo4\' or @id=\'PNFDemo\']/@alternate-id', _) >> [pnfDemo2.getLeaves().get('alternate-id'), pnfDemo.getLeaves().get('alternate-id'), pnfDemo4.getLeaves().get('alternate-id')] + + mockCpsQueryService.queryDataLeaf(_, _, '/dmi-registry/cm-handles/@alternate-id', _) >> getAllCmHandleReferences(true) + mockCpsQueryService.queryDataLeaf(_, _, '/dmi-registry/cm-handles/@id', _) >> getAllCmHandleReferences(false) + + } def static createDataNode(dataNodeId) { return new DataNode(xpath: 
'/dmi-registry/cm-handles[@id=\'' + dataNodeId + '\']', leaves: ['id':dataNodeId, 'alternate-id':'alt-' + dataNodeId]) } + + def static getAllCmHandleReferences(outputAlternateId) { + def sampleNodes = [pnfDemo, pnfDemo2, pnfDemo3, pnfDemo4, pnfDemo5] + return sampleNodes.collect { dataNode -> + if (outputAlternateId) { + return dataNode.getLeaves().get('alternate-id') + } else { + return dataNode.getLeaves().get('id') + } + } + } } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/CmHandleRegistrationServicePropertyHandlerSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/CmHandleRegistrationServicePropertyHandlerSpec.groovy index b600d02be5..cec3acbb1f 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/CmHandleRegistrationServicePropertyHandlerSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/CmHandleRegistrationServicePropertyHandlerSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2022 Bell Canada * Modifications Copyright (C) 2024 TechMahindra Ltd. * ================================================================================ @@ -27,17 +27,19 @@ import ch.qos.logback.classic.Logger import ch.qos.logback.classic.spi.ILoggingEvent import ch.qos.logback.core.read.ListAppender import com.fasterxml.jackson.databind.ObjectMapper +import com.hazelcast.map.IMap import org.onap.cps.api.CpsDataService import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle import org.onap.cps.api.exceptions.DataNodeNotFoundException import org.onap.cps.api.exceptions.DataValidationException import org.onap.cps.api.model.DataNode -import org.onap.cps.api.model.DataNodeBuilder +import org.onap.cps.impl.DataNodeBuilder import org.onap.cps.utils.ContentType import org.onap.cps.utils.JsonObjectMapper import org.slf4j.LoggerFactory import spock.lang.Specification +import static org.onap.cps.api.parameters.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS import static org.onap.cps.ncmp.api.NcmpResponseStatus.CM_HANDLES_NOT_FOUND import static org.onap.cps.ncmp.api.NcmpResponseStatus.CM_HANDLE_INVALID_ID import static org.onap.cps.ncmp.api.NcmpResponseStatus.UNKNOWN_ERROR @@ -51,8 +53,9 @@ class CmHandleRegistrationServicePropertyHandlerSpec extends Specification { def mockCpsDataService = Mock(CpsDataService) def jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) def mockAlternateIdChecker = Mock(AlternateIdChecker) + def mockCmHandleIdPerAlternateId = Mock(IMap) - def objectUnderTest = new CmHandleRegistrationServicePropertyHandler(mockInventoryPersistence, mockCpsDataService, jsonObjectMapper, mockAlternateIdChecker) + def objectUnderTest = new CmHandleRegistrationServicePropertyHandler(mockInventoryPersistence, mockCpsDataService, jsonObjectMapper, mockAlternateIdChecker, mockCmHandleIdPerAlternateId) def logger = Spy(ListAppender<ILoggingEvent>) void setup() { @@ -79,7 +82,7 @@ class CmHandleRegistrationServicePropertyHandlerSpec extends Specification { def 'Update CM Handle Public Properties: #scenario'() { given: 'the CPS service return a CM handle' - mockInventoryPersistence.getCmHandleDataNodeByCmHandleId(cmHandleId) >> cmHandleDataNodeAsCollection + mockInventoryPersistence.getCmHandleDataNodeByCmHandleId(cmHandleId, INCLUDE_ALL_DESCENDANTS) >> 
cmHandleDataNodeAsCollection and: 'an update cm handle request with public properties updates' def cmHandleUpdateRequest = [new NcmpServiceCmHandle(cmHandleId: cmHandleId, publicProperties: updatedPublicProperties)] when: 'update data node leaves is called with the update request' @@ -101,7 +104,7 @@ class CmHandleRegistrationServicePropertyHandlerSpec extends Specification { def 'Update DMI Properties: #scenario'() { given: 'the CPS service return a CM handle' - mockInventoryPersistence.getCmHandleDataNodeByCmHandleId(cmHandleId) >> cmHandleDataNodeAsCollection + mockInventoryPersistence.getCmHandleDataNodeByCmHandleId(cmHandleId, INCLUDE_ALL_DESCENDANTS) >> cmHandleDataNodeAsCollection and: 'an update cm handle request with DMI properties updates' def cmHandleUpdateRequest = [new NcmpServiceCmHandle(cmHandleId: cmHandleId, dmiProperties: updatedDmiProperties)] when: 'update data node leaves is called with the update request' @@ -125,7 +128,7 @@ class CmHandleRegistrationServicePropertyHandlerSpec extends Specification { def 'Update CM Handle Properties, remove all properties: #scenario'() { given: 'the CPS service return a CM handle' def cmHandleDataNode = new DataNode(xpath: cmHandleXpath, leaves: ['id': cmHandleId], childDataNodes: originalPropertyDataNodes) - mockInventoryPersistence.getCmHandleDataNodeByCmHandleId(cmHandleId) >> [cmHandleDataNode] + mockInventoryPersistence.getCmHandleDataNodeByCmHandleId(cmHandleId, INCLUDE_ALL_DESCENDANTS) >> [cmHandleDataNode] and: 'an update cm handle request that removes all public properties(existing and non-existing)' def cmHandleUpdateRequest = [new NcmpServiceCmHandle(cmHandleId: cmHandleId, publicProperties: ['publicProp3': null, 'publicProp4': null])] when: 'update data node leaves is called with the update request' @@ -202,9 +205,11 @@ class CmHandleRegistrationServicePropertyHandlerSpec extends Specification { def 'Update alternate id of existing CM Handle.'() { given: 'cm handles request' def cmHandleUpdateRequest = [new NcmpServiceCmHandle(cmHandleId: cmHandleId, alternateId: 'alt-1')] + and: 'the cm handle per alternate id cache returns a value' + mockCmHandleIdPerAlternateId.get(_) >> 'someId' and: 'a data node found' def dataNode = new DataNode(xpath: cmHandleXpath, leaves: ['id': cmHandleId, 'alternate-id': 'alt-1']) - mockInventoryPersistence.getCmHandleDataNodeByCmHandleId(cmHandleId) >> [dataNode] + mockInventoryPersistence.getCmHandleDataNodeByCmHandleId(cmHandleId, INCLUDE_ALL_DESCENDANTS) >> [dataNode] when: 'cm handle properties is updated' def response = objectUnderTest.updateCmHandleProperties(cmHandleUpdateRequest) then: 'the update is delegated to cps data service with correct parameters' @@ -224,7 +229,7 @@ class CmHandleRegistrationServicePropertyHandlerSpec extends Specification { def updatedNcmpServiceCmHandles = [new NcmpServiceCmHandle(cmHandleId: cmHandleId, alternateId: 'alt-1')] and: 'a data node found' def dataNode = new DataNode(xpath: cmHandleXpath, leaves: ['id': cmHandleId, 'alternate-id': 'alt-1']) - mockInventoryPersistence.getCmHandleDataNodeByCmHandleId(cmHandleId) >> [dataNode] + mockInventoryPersistence.getCmHandleDataNodeByCmHandleId(cmHandleId, INCLUDE_ALL_DESCENDANTS) >> [dataNode] when: 'attempt to update the cm handle' def response = objectUnderTest.updateCmHandleProperties(updatedNcmpServiceCmHandles) then: 'the update is NOT delegated to cps data service' diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/CmHandleRegistrationServiceSpec.groovy 
b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/CmHandleRegistrationServiceSpec.groovy index 953e1c7d0e..f99fe2d650 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/CmHandleRegistrationServiceSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/CmHandleRegistrationServiceSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2025 Nordix Foundation + * Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2022 Bell Canada * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -59,10 +59,11 @@ class CmHandleRegistrationServiceSpec extends Specification { def mockModuleSyncStartedOnCmHandles = Mock(IMap<String, Object>) def mockTrustLevelManager = Mock(TrustLevelManager) def mockAlternateIdChecker = Mock(AlternateIdChecker) + def mockCmHandleIdPerAlternateId = Mock(IMap) def objectUnderTest = Spy(new CmHandleRegistrationService( mockNetworkCmProxyDataServicePropertyHandler, mockInventoryPersistence, mockCpsDataService, mockLcmEventsCmHandleStateHandler, - mockModuleSyncStartedOnCmHandles as IMap<String, Object>, mockTrustLevelManager, mockAlternateIdChecker)) + mockModuleSyncStartedOnCmHandles, mockTrustLevelManager, mockAlternateIdChecker, mockCmHandleIdPerAlternateId)) def setup() { // always accept all cm handles diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy index d8d92e99f5..0755554c85 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2025 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2022 Bell Canada * Modifications Copyright (C) 2024 TechMahindra Ltd. 
* ================================================================================ @@ -34,13 +34,10 @@ import org.onap.cps.api.exceptions.DataValidationException import org.onap.cps.api.model.DataNode import org.onap.cps.api.model.ModuleDefinition import org.onap.cps.api.model.ModuleReference -import org.onap.cps.api.parameters.FetchDescendantsOption -import org.onap.cps.impl.utils.CpsValidator -import org.onap.cps.ncmp.api.exceptions.CmHandleNotFoundException +import org.onap.cps.utils.CpsValidator import org.onap.cps.ncmp.api.inventory.models.CompositeState import org.onap.cps.ncmp.api.inventory.models.CmHandleState import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle -import org.onap.cps.ncmp.impl.utils.YangDataConverter import org.onap.cps.utils.ContentType import org.onap.cps.utils.JsonObjectMapper import spock.lang.Shared @@ -66,11 +63,7 @@ class InventoryPersistenceImplSpec extends Specification { def mockCpsValidator = Mock(CpsValidator) - def mockCmHandleQueries = Mock(CmHandleQueryService) - - def mockYangDataConverter = Mock(YangDataConverter) - - def objectUnderTest = new InventoryPersistenceImpl(mockCpsValidator, spiedJsonObjectMapper, mockCpsAnchorService, mockCpsModuleService, mockCpsDataService, mockCmHandleQueries) + def objectUnderTest = new InventoryPersistenceImpl(mockCpsValidator, spiedJsonObjectMapper, mockCpsAnchorService, mockCpsModuleService, mockCpsDataService) def formattedDateAndTime = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ") .format(OffsetDateTime.of(2022, 12, 31, 20, 30, 40, 1, ZoneOffset.UTC)) @@ -162,24 +155,13 @@ class InventoryPersistenceImplSpec extends Specification { assert results.size() == 0 } - def "Retrieve multiple YangModelCmHandles using cm handle references"() { - given: 'the cps data service returns 2 data nodes from the DMI registry' - def dataNodes = [new DataNode(xpath: xpath, leaves: ['id': cmHandleId, 'alternate-id':alternateId]), new DataNode(xpath: xpath2, leaves: ['id': cmHandleId2,'alternate-id':alternateId2])] - mockCmHandleQueries.queryNcmpRegistryByCpsPath(_, INCLUDE_ALL_DESCENDANTS) >> dataNodes - when: 'retrieving the yang modelled cm handle' - def results = objectUnderTest.getYangModelCmHandlesFromCmHandleReferences([cmHandleId, cmHandleId2]) - then: 'verify both have returned and cmhandleIds are correct' - assert results.size() == 2 - assert results.id.containsAll([cmHandleId, cmHandleId2]) - } - def 'Get a Cm Handle Composite State'() { given: 'a valid cm handle id' def cmHandleId = 'Some-Cm-Handle' def dataNode = new DataNode(leaves: ['cm-handle-state': 'ADVISED']) and: 'cps data service returns a valid data node' mockCpsDataService.getDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, - '/dmi-registry/cm-handles[@id=\'Some-Cm-Handle\']/state', FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) >> [dataNode] + '/dmi-registry/cm-handles[@id=\'Some-Cm-Handle\']/state', INCLUDE_ALL_DESCENDANTS) >> [dataNode] when: 'get cm handle state is invoked' def result = objectUnderTest.getCmHandleState(cmHandleId) then: 'result has returned the correct cm handle state' @@ -302,44 +284,11 @@ class InventoryPersistenceImplSpec extends Specification { given: 'expected xPath to get cmHandle data node' def expectedXPath = '/dmi-registry/cm-handles[@id=\'sample cmHandleId\']' when: 'the method to get data nodes is called' - objectUnderTest.getCmHandleDataNodeByCmHandleId('sample cmHandleId') + objectUnderTest.getCmHandleDataNodeByCmHandleId('sample cmHandleId', INCLUDE_ALL_DESCENDANTS) then: 'the data persistence 
service method to get cmHandle data node is invoked once with expected xPath' 1 * mockCpsDataService.getDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, expectedXPath, INCLUDE_ALL_DESCENDANTS) } - def 'Get yang model cm handle by alternate id'() { - given: 'expected xPath to get cmHandle data node' - def expectedXPath = '/dmi-registry/cm-handles[@alternate-id=\'alternate id\']' - def expectedDataNode = new DataNode(xpath: expectedXPath, leaves: [id: 'id', alternateId: 'alternate id']) - and: 'query service is invoked with expected xpath' - mockCmHandleQueries.queryNcmpRegistryByCpsPath(expectedXPath, OMIT_DESCENDANTS) >> [expectedDataNode] - mockYangDataConverter.toYangModelCmHandle(expectedDataNode) >> new YangModelCmHandle(id: 'id') - expect: 'getting the yang model cm handle' - assert objectUnderTest.getYangModelCmHandleByAlternateId('alternate id') == new YangModelCmHandle(id: 'id') - } - - def 'Attempt to get non existing yang model cm handle by alternate id'() { - given: 'query service is invoked and returns empty collection of data nodes' - mockCmHandleQueries.queryNcmpRegistryByCpsPath(*_) >> [] - when: 'getting the yang model cm handle' - objectUnderTest.getYangModelCmHandleByAlternateId('alternate id') - then: 'no data found exception thrown' - def thrownException = thrown(CmHandleNotFoundException) - assert thrownException.getMessage().contains('Cm handle not found') - assert thrownException.getDetails().contains('No cm handles found with reference alternate id') - } - - def 'Get multiple yang model cm handles by alternate ids #scenario'() { - when: 'getting the yang model cm handle with a empty/populated collection of alternate Ids' - objectUnderTest.getYangModelCmHandleByAlternateIds(alternateIdCollection) - then: 'query service invoked when needed' - expectedInvocations * mockCmHandleQueries.queryNcmpRegistryByCpsPath(*_) >> [dataNode] - where: 'collections are either empty or populated with alternate ids' - scenario | alternateIdCollection || expectedInvocations - 'empty collection' | [] || 0 - 'populated collection' | ['alt'] || 1 - } - def 'Get CM handle ids for CM Handles that has given module names'() { when: 'the method to get cm handles is called' objectUnderTest.getCmHandleReferencesWithGivenModules(['sample-module-name'], false) @@ -348,15 +297,15 @@ class InventoryPersistenceImplSpec extends Specification { } def 'Get Alternate Ids for CM Handles that has given module names'() { - given: 'A Collection of data nodes' - def dataNodes = [new DataNode(xpath: "/dmi-registry/cm-handles[@id='ch-1']", leaves: ['id': 'ch-1', 'alternate-id': 'alt-1'])] - when: 'the methods to get dataNodes is called and returns correct values' + given: 'cps anchor service returns a CM-handle ID for the given module name' mockCpsAnchorService.queryAnchorNames(NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, ['sample-module-name']) >> ['ch-1'] - mockCpsDataService.getDataNodesForMultipleXpaths(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, ["/dmi-registry/cm-handles[@id='ch-1']"], INCLUDE_ALL_DESCENDANTS) >> dataNodes - and: 'the method returns a result' + and: 'cps data service returns some data nodes for the given CM-handle ID' + def dataNodes = [new DataNode(xpath: "/dmi-registry/cm-handles[@id='ch-1']", leaves: ['id': 'ch-1', 'alternate-id': 'alt-1'])] + mockCpsDataService.getDataNodesForMultipleXpaths(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, ["/dmi-registry/cm-handles[@id='ch-1']"], OMIT_DESCENDANTS) >> dataNodes + when: 'the method to get cm-handle references by modules is called 
(outputting alternate IDs)' def result = objectUnderTest.getCmHandleReferencesWithGivenModules(['sample-module-name'], true) then: 'the result contains the correct alternate Id' - assert result == ['alt-1'] as HashSet + assert result == ['alt-1'] as Set } def 'Replace list content'() { @@ -382,21 +331,21 @@ class InventoryPersistenceImplSpec extends Specification { def 'CM handle exists'() { given: 'data service returns a datanode with correct cm handle id' - mockCpsDataService.getDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, xpath, INCLUDE_ALL_DESCENDANTS) >> [dataNode] + mockCpsDataService.getDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, xpath, OMIT_DESCENDANTS) >> [dataNode] expect: 'cm handle exists for given cm handle id' assert true == objectUnderTest.isExistingCmHandleId(cmHandleId) } def 'CM handle does not exist, empty dataNode collection returned'() { given: 'data service returns an empty datanode' - mockCpsDataService.getDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, xpath, INCLUDE_ALL_DESCENDANTS) >> [] + mockCpsDataService.getDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, xpath, OMIT_DESCENDANTS) >> [] expect: 'false is returned for non-existent cm handle' assert false == objectUnderTest.isExistingCmHandleId(cmHandleId) } def 'CM handle does not exist, exception thrown'() { given: 'data service throws an exception' - mockCpsDataService.getDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, "/dmi-registry/cm-handles[@id='non-existent-cm-handle']", INCLUDE_ALL_DESCENDANTS) >> {throw new DataNodeNotFoundException('','')} + mockCpsDataService.getDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, "/dmi-registry/cm-handles[@id='non-existent-cm-handle']", OMIT_DESCENDANTS) >> {throw new DataNodeNotFoundException('','')} expect: 'false is returned for non-existent cm handle' assert false == objectUnderTest.isExistingCmHandleId('non-existent-cm-handle') } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/NetworkCmProxyInventoryFacadeSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/NetworkCmProxyInventoryFacadeSpec.groovy index eff8082a0d..29cd92db3f 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/NetworkCmProxyInventoryFacadeSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/NetworkCmProxyInventoryFacadeSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation + * Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2021 Pantheon.tech * Modifications Copyright (C) 2021-2022 Bell Canada * Modifications Copyright (C) 2023 TechMahindra Ltd. 
@@ -40,6 +40,7 @@ import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle import org.onap.cps.ncmp.impl.inventory.trustlevel.TrustLevelManager import org.onap.cps.ncmp.impl.utils.AlternateIdMatcher import org.onap.cps.utils.JsonObjectMapper +import reactor.core.publisher.Flux import spock.lang.Specification class NetworkCmProxyInventoryFacadeSpec extends Specification { @@ -249,11 +250,10 @@ class NetworkCmProxyInventoryFacadeSpec extends Specification { and: 'query cm handle method returns two cm handles' mockParameterizedCmHandleQueryService.queryCmHandles( spiedJsonObjectMapper.convertToValueType(cmHandleQueryApiParameters, CmHandleQueryServiceParameters.class)) - >> [new NcmpServiceCmHandle(cmHandleId: 'ch-0'), new NcmpServiceCmHandle(cmHandleId: 'ch-1')] - and: 'a trust level for cm handles' - 1 * mockTrustLevelManager.applyEffectiveTrustLevels(_) >> { args -> args[0].forEach{it.currentTrustLevel = TrustLevel.COMPLETE } } + >> Flux.fromIterable([new NcmpServiceCmHandle(cmHandleId: 'ch-0', currentTrustLevel: TrustLevel.COMPLETE), + new NcmpServiceCmHandle(cmHandleId: 'ch-1', currentTrustLevel: TrustLevel.COMPLETE)]) when: 'execute cm handle search is called' - def result = objectUnderTest.executeCmHandleSearch(cmHandleQueryApiParameters) + def result = objectUnderTest.executeCmHandleSearch(cmHandleQueryApiParameters).collectList().block() then: 'result consists of the two cm handles returned by the CPS Data Service' assert result.size() == 2 assert result[0].cmHandleId == 'ch-0' diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceSpec.groovy index bf6ea36d65..594d7fb31d 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -29,6 +29,7 @@ import org.onap.cps.api.exceptions.DataInUseException import org.onap.cps.api.exceptions.DataValidationException import org.onap.cps.api.model.ConditionProperties import org.onap.cps.api.model.DataNode +import org.onap.cps.ncmp.impl.inventory.trustlevel.TrustLevelManager import spock.lang.Specification import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DMI_REGISTRY_PARENT @@ -38,27 +39,28 @@ class ParameterizedCmHandleQueryServiceSpec extends Specification { def cmHandleQueries = Mock(CmHandleQueryService) def partiallyMockedCmHandleQueries = Spy(CmHandleQueryService) def mockInventoryPersistence = Mock(InventoryPersistence) + def mockTrustLevelManager = Mock(TrustLevelManager) def dmiRegistry = new DataNode(xpath: NCMP_DMI_REGISTRY_PARENT, childDataNodes: createDataNodeList(['PNFDemo1', 'PNFDemo2', 'PNFDemo3', 'PNFDemo4'])) - def objectUnderTest = new ParameterizedCmHandleQueryServiceImpl(cmHandleQueries, mockInventoryPersistence) - def objectUnderTestWithPartiallyMockedQueries = new ParameterizedCmHandleQueryServiceImpl(partiallyMockedCmHandleQueries, mockInventoryPersistence) + def objectUnderTest = new ParameterizedCmHandleQueryServiceImpl(cmHandleQueries, mockInventoryPersistence, mockTrustLevelManager) + def objectUnderTestWithPartiallyMockedQueries = new ParameterizedCmHandleQueryServiceImpl(partiallyMockedCmHandleQueries, mockInventoryPersistence, mockTrustLevelManager) def 'Query cm handle ids with cpsPath.'() { given: 'a cmHandleWithCpsPath condition property' def cmHandleQueryParameters = new CmHandleQueryServiceParameters() def conditionProperties = createConditionProperties('cmHandleWithCpsPath', [['cpsPath' : '/some/cps/path']]) cmHandleQueryParameters.setCmHandleQueryParameters([conditionProperties]) - and: 'the query get the cm handle datanodes excluding all descendants returns a datanode' - cmHandleQueries.queryCmHandleAncestorsByCpsPath('/some/cps/path', FetchDescendantsOption.OMIT_DESCENDANTS) >> [new DataNode(leaves: ['id':'some-cmhandle-id', 'alternate-id':'some-alternate-id'])] + and: 'the query get the cm handle references' + cmHandleQueries.getCmHandleReferencesByCpsPath('/some/cps/path', outputAlternateId) >> cmHandleReferences.asCollection() when: 'the query is executed for cm handle ids' def result = objectUnderTest.queryCmHandleReferenceIds(cmHandleQueryParameters, outputAlternateId) then: 'the correct expected cm handles ids are returned' assert result == expectedCmhandleReference where: 'the following data is used' - senario | outputAlternateId || expectedCmhandleReference - 'output CmHandle Ids' | false || ['some-cmhandle-id'] as Set - 'output Alternate Ids' | true || ['some-alternate-id'] as Set + senario | outputAlternateId | cmHandleReferences || expectedCmhandleReference + 'output CmHandle Ids' | false | ['some-cmhandle-id'] as Set || ['some-cmhandle-id'] as Set + 'output Alternate Ids' | true | ['some-alternate-id'] as Set || ['some-alternate-id'] as Set } def 'Query cm handle where cps path itself is ancestor axis.'() { @@ -66,16 +68,16 @@ class ParameterizedCmHandleQueryServiceSpec extends Specification { def cmHandleQueryParameters = new CmHandleQueryServiceParameters() def conditionProperties = createConditionProperties('cmHandleWithCpsPath', [['cpsPath' : '/some/cps/path']]) cmHandleQueryParameters.setCmHandleQueryParameters([conditionProperties]) - and: 'the query get the cm handle data nodes excluding all descendants returns a datanode' - cmHandleQueries.queryCmHandleAncestorsByCpsPath('/some/cps/path', 
FetchDescendantsOption.OMIT_DESCENDANTS) >> [new DataNode(leaves: ['id':'some-cmhandle-id', 'alternate-id':'some-alternate-id'])] + and: 'the query get the cm handle references' + cmHandleQueries.getCmHandleReferencesByCpsPath('/some/cps/path', outputAlternateId) >> cmHandleReferences.asCollection() when: 'the query is executed for cm handle ids' def result = objectUnderTest.queryCmHandleIdsForInventory(cmHandleQueryParameters, outputAlternateId) then: 'the correct expected cm handles ids are returned' assert result == expectedCmhandleReference where: 'the following data is used' - senario | outputAlternateId || expectedCmhandleReference - 'outputAlternate is false' | false || ['some-cmhandle-id'] as Set - 'outputAlternate is true' | true || ['some-alternate-id'] as Set + senario | outputAlternateId | cmHandleReferences || expectedCmhandleReference + 'outputAlternate is false' | false | ['some-cmhandle-id'] as Set || ['some-cmhandle-id'] as Set + 'outputAlternate is true' | true | ['some-alternate-id'] as Set|| ['some-alternate-id'] as Set } def 'Cm handle ids query with error: #scenario.'() { @@ -84,7 +86,7 @@ class ParameterizedCmHandleQueryServiceSpec extends Specification { def conditionProperties = createConditionProperties('cmHandleWithCpsPath', [['cpsPath' : '/some/cps/path']]) cmHandleQueryParameters.setCmHandleQueryParameters([conditionProperties]) and: 'cmHandleQueries throws a path parsing exception' - cmHandleQueries.queryCmHandleAncestorsByCpsPath('/some/cps/path', FetchDescendantsOption.OMIT_DESCENDANTS) >> { throw thrownException } + cmHandleQueries.getCmHandleReferencesByCpsPath('/some/cps/path', _) >> { throw thrownException } when: 'the query is executed for cm handle ids' objectUnderTest.queryCmHandleReferenceIds(cmHandleQueryParameters, false) then: 'a data validation exception is thrown' @@ -141,7 +143,7 @@ class ParameterizedCmHandleQueryServiceSpec extends Specification { def conditionProperties = createConditionProperties('hasAllModules', [['moduleName': 'some-module-name']]) cmHandleQueryParameters.setCmHandleQueryParameters([conditionProperties]) when: 'the query is executed for cm handle ids' - def result = objectUnderTest.queryCmHandles(cmHandleQueryParameters) + def result = objectUnderTest.queryCmHandles(cmHandleQueryParameters).collectList().block() then: 'the inventory service is called with the correct module names' 1 * mockInventoryPersistence.getCmHandleReferencesWithGivenModules(['some-module-name'], false) >> ['ch1'] and: 'the inventory service is called with teh correct if and returns a yang model cm handle' @@ -156,8 +158,8 @@ class ParameterizedCmHandleQueryServiceSpec extends Specification { def 'Query cm handle references when the query is empty.'() { given: 'We use an empty query' def cmHandleQueryParameters = new CmHandleQueryServiceParameters() - and: 'the inventory persistence returns the dmi registry datanode with just ids' - mockInventoryPersistence.getDataNode(NCMP_DMI_REGISTRY_PARENT, FetchDescendantsOption.DIRECT_CHILDREN_ONLY) >> [dmiRegistry] + and: 'the inventory persistence returns the dmi registry datanode with just cm handle references' + cmHandleQueries.getAllCmHandleReferences(outputAlternateId) >> getCmHandleReferencesForDmiRegistry(outputAlternateId) when: 'the query is executed for both cm handle ids' def result = objectUnderTest.queryCmHandleReferenceIds(cmHandleQueryParameters, outputAlternateId) then: 'the correct expected cm handles are returned' @@ -171,10 +173,12 @@ class ParameterizedCmHandleQueryServiceSpec extends 
Specification { def 'Query cm handle details when the query is empty.'() { given: 'We use an empty query' def cmHandleQueryParameters = new CmHandleQueryServiceParameters() - and: 'the inventory persistence returns the dmi registry datanode with just ids' - mockInventoryPersistence.getDataNode(NCMP_DMI_REGISTRY_PARENT) >> [dmiRegistry] + and: 'the inventory persistence returns the cm handle ids of all cm handles' + cmHandleQueries.getAllCmHandleReferences(false) >> getCmHandleReferencesForDmiRegistry(false) + and: 'the inventory persistence returns the cm handle details when requested' + mockInventoryPersistence.getYangModelCmHandles(_) >> dmiRegistry.childDataNodes.collect { new YangModelCmHandle(id: it.leaves.get("id").toString(), dmiProperties: [], publicProperties: []) } when: 'the query is executed for both cm handle details' - def result = objectUnderTest.queryCmHandles(cmHandleQueryParameters) + def result = objectUnderTest.queryCmHandles(cmHandleQueryParameters).collectList().block() then: 'the correct cm handles are returned' assert result.size() == 4 assert result.cmHandleId.containsAll('PNFDemo1', 'PNFDemo2', 'PNFDemo3', 'PNFDemo4') @@ -206,7 +210,7 @@ class ParameterizedCmHandleQueryServiceSpec extends Specification { def conditionProperties = createConditionProperties('cmHandleWithDmiPlugin', [['some-key': 'some-value']]) cmHandleQueryParameters.setCmHandleQueryParameters([conditionProperties]) and: 'the inventoryPersistence returns different CmHandleIds' - partiallyMockedCmHandleQueries.getCmHandleReferencesMapByDmiPluginIdentifier(*_) >> [:] + partiallyMockedCmHandleQueries.getCmHandleReferencesByDmiPluginIdentifier(_,_) >> [] when: 'the query executed' def result = objectUnderTestWithPartiallyMockedQueries.queryCmHandleIdsForInventory(cmHandleQueryParameters, true) then: 'the expected number of results are returned.' @@ -252,4 +256,14 @@ class ParameterizedCmHandleQueryServiceSpec extends Specification { dataNodeIds.each{ dataNodes << new DataNode(xpath: "/dmi-registry/cm-handles[@id='${it}']", leaves: ['id':it, 'alternate-id':'alt-' + it]) } return dataNodes } + + def getCmHandleReferencesForDmiRegistry(outputAlternateId) { + def cmHandles = dmiRegistry.childDataNodes ?: [] + def cmHandleReferences = [] + def attributeName = outputAlternateId ? 'alternate-id' : 'id' + cmHandles.each { cmHandle -> + cmHandleReferences.add(cmHandle.leaves.get(attributeName)) + } + return cmHandleReferences + } } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/AsyncTaskExecutorSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/AsyncTaskExecutorSpec.groovy deleted file mode 100644 index 751c97a4d0..0000000000 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/AsyncTaskExecutorSpec.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.ncmp.impl.inventory.sync - - -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.boot.test.context.SpringBootTest -import spock.lang.Specification - -import java.util.concurrent.TimeoutException -import java.util.function.Supplier - -@SpringBootTest(classes = AsyncTaskExecutor) -class AsyncTaskExecutorSpec extends Specification { - - @Autowired - AsyncTaskExecutor objectUnderTest - def mockTaskSupplier = Mock(Supplier<Object>) - - def 'Parallelism level configuration.'() { - expect: 'Parallelism level is configured with the correct value' - assert objectUnderTest.getAsyncTaskParallelismLevel() == 3 - } - - def 'Task completion with #caseDescriptor.'() { - when: 'task completion is handled' - def irrelevantResponse = null - objectUnderTest.handleTaskCompletion(irrelevantResponse, exception); - then: 'any exception is swallowed by the task completion (logged)' - noExceptionThrown() - where: 'following cases are tested' - caseDescriptor | exception - 'no exception' | null - 'time out exception' | new TimeoutException("time-out") - 'unexpected exception' | new Exception("some exception") - } - - def 'Task execution.'() { - when: 'a task is submitted for execution' - objectUnderTest.executeTask(() -> mockTaskSupplier, 0) - then: 'the task submission is successful' - noExceptionThrown() - } - -} diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/DmiModelOperationsSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/DmiModelOperationsSpec.groovy index 714555958a..302e43f170 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/DmiModelOperationsSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/DmiModelOperationsSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation + * Copyright (C) 2021-2025 Nordix Foundation * Modifications Copyright (C) 2022 Bell Canada * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -21,14 +21,11 @@ package org.onap.cps.ncmp.impl.inventory.sync -import com.fasterxml.jackson.core.JsonProcessingException -import com.fasterxml.jackson.databind.ObjectMapper import org.onap.cps.ncmp.impl.dmi.DmiOperationsBaseSpec import org.onap.cps.ncmp.impl.dmi.DmiProperties import org.onap.cps.ncmp.impl.utils.http.UrlTemplateParameters import org.onap.cps.api.model.ModuleReference import org.onap.cps.utils.JsonObjectMapper -import org.spockframework.spring.SpringBean import org.springframework.beans.factory.annotation.Autowired import org.springframework.boot.test.context.SpringBootTest import org.springframework.http.HttpStatus @@ -40,9 +37,12 @@ import static org.onap.cps.ncmp.api.data.models.OperationType.READ import static org.onap.cps.ncmp.impl.models.RequiredDmiService.MODEL @SpringBootTest -@ContextConfiguration(classes = [DmiProperties, DmiModelOperations]) +@ContextConfiguration(classes = [DmiProperties, DmiModelOperations, JsonObjectMapper]) class DmiModelOperationsSpec extends DmiOperationsBaseSpec { + def NO_AUTH_HEADER = null + def 
NO_MODULE_SET_TAG = '' + def expectedModulesUrlTemplateWithVariables = new UrlTemplateParameters('myServiceName/dmi/v1/ch/{cmHandleId}/modules', ['cmHandleId': cmHandleId]) def expectedModuleResourcesUrlTemplateWithVariables = new UrlTemplateParameters('myServiceName/dmi/v1/ch/{cmHandleId}/moduleResources', ['cmHandleId': cmHandleId]) @@ -52,11 +52,6 @@ class DmiModelOperationsSpec extends DmiOperationsBaseSpec { @Autowired DmiModelOperations objectUnderTest - @SpringBean - JsonObjectMapper spiedJsonObjectMapper = Spy(new JsonObjectMapper(new ObjectMapper())) - - def NO_AUTH_HEADER = null - def 'Retrieving module references.'() { given: 'a cm handle' mockYangModelCmHandleRetrieval([]) @@ -65,7 +60,7 @@ class DmiModelOperationsSpec extends DmiOperationsBaseSpec { def responseFromDmi = new ResponseEntity([schemas: moduleReferencesAsLisOfMaps], HttpStatus.OK) mockDmiRestClient.synchronousPostOperationWithJsonData(MODEL, expectedModulesUrlTemplateWithVariables, '{"cmHandleProperties":{},"moduleSetTag":""}', READ, NO_AUTH_HEADER) >> responseFromDmi when: 'get module references is called' - def result = objectUnderTest.getModuleReferences(yangModelCmHandle) + def result = objectUnderTest.getModuleReferences(yangModelCmHandle, NO_MODULE_SET_TAG) then: 'the result consists of expected module references' assert result == [new ModuleReference(moduleName: 'mod1', revision: 'A'), new ModuleReference(moduleName: 'mod2', revision: 'X')] } @@ -78,7 +73,7 @@ class DmiModelOperationsSpec extends DmiOperationsBaseSpec { def responseFromDmi = new ResponseEntity(bodyAsMap, HttpStatus.NO_CONTENT) mockDmiRestClient.synchronousPostOperationWithJsonData(*_) >> responseFromDmi when: 'get module references is called' - def result = objectUnderTest.getModuleReferences(yangModelCmHandle) + def result = objectUnderTest.getModuleReferences(yangModelCmHandle, NO_MODULE_SET_TAG) then: 'the result is empty' assert result == [] where: 'the DMI response body has the following content' @@ -97,7 +92,7 @@ class DmiModelOperationsSpec extends DmiOperationsBaseSpec { mockDmiRestClient.synchronousPostOperationWithJsonData(MODEL, expectedModulesUrlTemplateWithVariables, '{"cmHandleProperties":' + expectedAdditionalPropertiesInRequest + ',"moduleSetTag":""}', READ, NO_AUTH_HEADER) >> responseFromDmi when: 'a get module references is called' - def result = objectUnderTest.getModuleReferences(yangModelCmHandle) + def result = objectUnderTest.getModuleReferences(yangModelCmHandle, NO_MODULE_SET_TAG) then: 'the result is the response from DMI service' assert result == [] where: 'the following DMI properties are used' @@ -116,7 +111,7 @@ class DmiModelOperationsSpec extends DmiOperationsBaseSpec { mockDmiRestClient.synchronousPostOperationWithJsonData(MODEL, expectedModuleResourcesUrlTemplateWithVariables, '{"data":{"modules":[' + expectedModuleReferencesInRequest + ']},"cmHandleProperties":{}}', READ, NO_AUTH_HEADER) >> responseFromDmi when: 'get new yang resources from DMI service' - def result = objectUnderTest.getNewYangResourcesFromDmi(yangModelCmHandle, newModuleReferences) + def result = objectUnderTest.getNewYangResourcesFromDmi(yangModelCmHandle, NO_MODULE_SET_TAG, newModuleReferences) then: 'the result has the 2 expected yang (re)sources (order is not guaranteed)' assert result.size() == 2 assert result.get('mod1') == 'some yang source' @@ -131,7 +126,7 @@ class DmiModelOperationsSpec extends DmiOperationsBaseSpec { def responseFromDmi = new ResponseEntity(responseFromDmiBody, HttpStatus.NO_CONTENT) 
mockDmiRestClient.synchronousPostOperationWithJsonData(*_) >> responseFromDmi when: 'get new yang resources from DMI service' - def result = objectUnderTest.getNewYangResourcesFromDmi(yangModelCmHandle, newModuleReferences) + def result = objectUnderTest.getNewYangResourcesFromDmi(yangModelCmHandle, NO_MODULE_SET_TAG, newModuleReferences) then: 'the result is empty' assert result == [:] where: 'the DMI response body has the following content' @@ -149,7 +144,7 @@ class DmiModelOperationsSpec extends DmiOperationsBaseSpec { '{"data":{"modules":[{"name":"mod1","revision":"A"},{"name":"mod2","revision":"X"}]},"cmHandleProperties":' + expectedAdditionalPropertiesInRequest + '}', READ, NO_AUTH_HEADER) >> responseFromDmi when: 'get new yang resources from DMI service' - def result = objectUnderTest.getNewYangResourcesFromDmi(yangModelCmHandle, newModuleReferences) + def result = objectUnderTest.getNewYangResourcesFromDmi(yangModelCmHandle, NO_MODULE_SET_TAG, newModuleReferences) then: 'the result is the response from DMI service' assert result == [mod1:'some yang source'] where: 'the following DMI properties are used' @@ -166,7 +161,7 @@ class DmiModelOperationsSpec extends DmiOperationsBaseSpec { mockDmiRestClient.synchronousPostOperationWithJsonData(MODEL, expectedModuleResourcesUrlTemplateWithVariables, '{' + expectedModuleSetTagInRequest + '"data":{"modules":[{"name":"mod1","revision":"A"},{"name":"mod2","revision":"X"}]},"cmHandleProperties":{}}', READ, NO_AUTH_HEADER) >> responseFromDmi when: 'get new yang resources from DMI service' - def result = objectUnderTest.getNewYangResourcesFromDmi(yangModelCmHandle, newModuleReferences) + def result = objectUnderTest.getNewYangResourcesFromDmi(yangModelCmHandle, moduleSetTag, newModuleReferences) then: 'the result is the response from DMI service' assert result == [mod1:'some yang source'] where: 'the following Module Set Tags are used' @@ -180,7 +175,7 @@ class DmiModelOperationsSpec extends DmiOperationsBaseSpec { given: 'a cm handle' mockYangModelCmHandleRetrieval([]) when: 'a get new yang resources from DMI is called with no module references' - def result = objectUnderTest.getNewYangResourcesFromDmi(yangModelCmHandle, []) + def result = objectUnderTest.getNewYangResourcesFromDmi(yangModelCmHandle, NO_MODULE_SET_TAG, []) then: 'no resources are returned' assert result == [:] and: 'no request is sent to DMI' @@ -191,21 +186,35 @@ class DmiModelOperationsSpec extends DmiOperationsBaseSpec { given: 'a cm handle' mockYangModelCmHandleRetrieval(null) when: 'a get new yang resources from DMI is called' - objectUnderTest.getNewYangResourcesFromDmi(yangModelCmHandle, [new ModuleReference('mod1', 'A')]) + objectUnderTest.getNewYangResourcesFromDmi(yangModelCmHandle, NO_MODULE_SET_TAG, [new ModuleReference('mod1', 'A')]) then: 'a null pointer is thrown (we might need to address this later)' thrown(NullPointerException) } - def 'Retrieving module references with Json processing exception.'() { - given: 'a cm handle' - mockYangModelCmHandleRetrieval([]) - and: 'a Json processing exception occurs' - spiedJsonObjectMapper.asJsonString(_) >> {throw (new JsonProcessingException('parsing error'))} - when: 'a DMI operation is executed' - objectUnderTest.getModuleReferences(yangModelCmHandle) - then: 'an ncmp exception is thrown' - def exceptionThrown = thrown(JsonProcessingException) - and: 'the message indicates a parsing error' - exceptionThrown.message.toLowerCase().contains('parsing error') + def 'Retrieving module references forwards the new module set tag 
to DMI during CM-handle upgrade.'() { + given: 'a cm handle with an existing module set tag' + mockYangModelCmHandleRetrieval([], 'OLD-TAG') + when: 'get module references is called' + objectUnderTest.getModuleReferences(yangModelCmHandle, 'NEW-TAG') + then: 'a request was sent to DMI with the NEW module set tag in the body' + 1 * mockDmiRestClient.synchronousPostOperationWithJsonData(*_) >> { args -> + def requestBodyAsJson = args[2] as String + assert requestBodyAsJson.contains('"moduleSetTag":"NEW-TAG"') + return new ResponseEntity([schemas: [[moduleName: 'mod1', revision: 'A'], [moduleName: 'mod2', revision: 'X']]], HttpStatus.OK) + } + } + + def 'Retrieving yang resources forwards the new module set tag to DMI during CM-handle upgrade.'() { + given: 'a cm handle with an existing module set tag' + mockYangModelCmHandleRetrieval([], 'OLD-TAG') + when: 'get new yang resources from DMI service' + objectUnderTest.getNewYangResourcesFromDmi(yangModelCmHandle, 'NEW-TAG', newModuleReferences) + then: 'a request was sent to DMI with the NEW module set tag in the body' + 1 * mockDmiRestClient.synchronousPostOperationWithJsonData(*_) >> { args -> + def requestBodyAsJson = args[2] as String + assert requestBodyAsJson.contains('"moduleSetTag":"NEW-TAG"') + return new ResponseEntity([[moduleName: 'mod1', revision: 'A', yangSource: 'some yang source'], + [moduleName: 'mod2', revision: 'X', yangSource: 'other yang source']], HttpStatus.OK) + } } } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncServiceSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncServiceSpec.groovy index f8adfe5578..b4837f7bab 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncServiceSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncServiceSpec.groovy @@ -39,6 +39,8 @@ import static org.onap.cps.ncmp.api.inventory.models.LockReasonCategory.MODULE_U class ModuleSyncServiceSpec extends Specification { + def NO_MODULE_SET_TAG = '' + def mockCpsModuleService = Mock(CpsModuleService) def mockDmiModelOperations = Mock(DmiModelOperations) def mockCpsAnchorService = Mock(CpsAnchorService) @@ -53,9 +55,9 @@ class ModuleSyncServiceSpec extends Specification { def yangModelCmHandle = createAdvisedCmHandle(moduleSetTag) and: 'DMI operations returns some module references' def moduleReferences = [ new ModuleReference('module1','1'), new ModuleReference('module2','2') ] - mockDmiModelOperations.getModuleReferences(yangModelCmHandle) >> moduleReferences + mockDmiModelOperations.getModuleReferences(yangModelCmHandle, moduleSetTag) >> moduleReferences and: 'DMI-Plugin returns resource(s) for "new" module(s)' - mockDmiModelOperations.getNewYangResourcesFromDmi(yangModelCmHandle, identifiedNewModuleReferences) >> newModuleNameContentToMap + mockDmiModelOperations.getNewYangResourcesFromDmi(yangModelCmHandle, moduleSetTag, identifiedNewModuleReferences) >> newModuleNameContentToMap and: 'the module service identifies #identifiedNewModuleReferences.size() new modules' mockCpsModuleService.identifyNewModuleReferences(moduleReferences) >> identifiedNewModuleReferences when: 'module sync is triggered' @@ -90,20 +92,45 @@ class ModuleSyncServiceSpec extends Specification { 'without' | '' } - def 'Attempt Sync models for a cm handle with existing schema set (#exception).'() { + def 'Sync models for a cm handle with already defined exception upon schema set creation.'() { 
given: 'a cm handle to be synced' def yangModelCmHandle = createAdvisedCmHandle('existing tag') and: 'dmi returns no new yang resources' mockDmiModelOperations.getNewYangResourcesFromDmi(*_) >> [:] and: 'already defined exception occurs when creating schema (existing)' - mockCpsModuleService.createSchemaSetFromModules(*_) >> { throw exception } + mockCpsModuleService.createSchemaSetFromModules(*_) >> { throw AlreadyDefinedException.forSchemaSet('', '', null) } when: 'module sync is triggered' objectUnderTest.syncAndCreateSchemaSetAndAnchor(yangModelCmHandle) - then: 'no exception is thrown up' + then: 'the exception is ignored' noExceptionThrown() - where: 'following exceptions occur' - exception << [ AlreadyDefinedException.forSchemaSet('', '', null), - new DuplicatedYangResourceException('', '', null) ] + } + + def 'Sync models for a cm handle with already defined exception upon anchor creation.'() { + given: 'a cm handle to be synced' + def yangModelCmHandle = createAdvisedCmHandle('existing tag') + and: 'dmi returns no new yang resources' + mockDmiModelOperations.getNewYangResourcesFromDmi(*_) >> [:] + and: 'already defined exception occurs when creating the anchor (existing)' + mockCpsAnchorService.createAnchor(*_) >> { throw AlreadyDefinedException.forAnchor('', '', null) } + when: 'module sync is triggered' + objectUnderTest.syncAndCreateSchemaSetAndAnchor(yangModelCmHandle) + then: 'the exception is ignored' + noExceptionThrown() + } + + def 'Attempt Sync models for a cm handle with duplicate yang resources exception.'() { + given: 'a cm handle to be synced' + def yangModelCmHandle = createAdvisedCmHandle('existing tag') + and: 'dmi returns no new yang resources' + mockDmiModelOperations.getNewYangResourcesFromDmi(*_) >> [:] + and: 'duplicate yang resource exception occurs when creating schema' + def originalException = new DuplicatedYangResourceException('', '', null) + mockCpsModuleService.createSchemaSetFromModules(*_) >> { throw originalException } + when: 'module sync is triggered' + objectUnderTest.syncAndCreateSchemaSetAndAnchor(yangModelCmHandle) + then: 'same exception is thrown up' + def thrownException = thrown(Exception) + assert thrownException == originalException } def 'Model upgrade without using Module Set Tags (legacy) where the modules are in database.'() { @@ -115,8 +142,8 @@ class ModuleSyncServiceSpec extends Specification { def yangModelCmHandle = YangModelCmHandle.toYangModelCmHandle(dmiServiceName, '', '', ncmpServiceCmHandle,'', '', '') and: 'DMI operations returns some module references for upgraded cm handle' def moduleReferences = [ new ModuleReference('module1','1') ] - mockDmiModelOperations.getModuleReferences(yangModelCmHandle) >> moduleReferences - mockDmiModelOperations.getNewYangResourcesFromDmi(_, []) >> [:] + mockDmiModelOperations.getModuleReferences(yangModelCmHandle, NO_MODULE_SET_TAG) >> moduleReferences + mockDmiModelOperations.getNewYangResourcesFromDmi(_, NO_MODULE_SET_TAG, []) >> [:] and: 'none of these module references are new (all already known to the system)' mockCpsModuleService.identifyNewModuleReferences(_) >> [] when: 'module sync is triggered' @@ -138,7 +165,7 @@ class ModuleSyncServiceSpec extends Specification { mockCpsModuleService.schemaSetExists(NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, tagTo) >> schemaExists and: 'DMI operations returns some module references for upgraded cm handle' def moduleReferences = [ new ModuleReference('module1','1') ] - expectedCallsToDmi * 
mockDmiModelOperations.getModuleReferences(yangModelCmHandle) >> moduleReferences + expectedCallsToDmi * mockDmiModelOperations.getModuleReferences(yangModelCmHandle, tagTo) >> moduleReferences and: 'dmi returns no new yang resources' mockDmiModelOperations.getNewYangResourcesFromDmi(*_) >> [:] and: 'none of these module references are new (all already known to the system)' diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncTasksSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncTasksSpec.groovy index 92f4b38f31..a2f38c89eb 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncTasksSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncTasksSpec.groovy @@ -39,8 +39,6 @@ import org.onap.cps.ncmp.impl.inventory.sync.lcm.LcmEventsCmHandleStateHandler import org.slf4j.LoggerFactory import spock.lang.Specification -import java.util.concurrent.atomic.AtomicInteger - import static org.onap.cps.ncmp.api.inventory.models.LockReasonCategory.MODULE_SYNC_FAILED import static org.onap.cps.ncmp.api.inventory.models.LockReasonCategory.MODULE_UPGRADE import static org.onap.cps.ncmp.api.inventory.models.LockReasonCategory.MODULE_UPGRADE_FAILED @@ -70,8 +68,6 @@ class ModuleSyncTasksSpec extends Specification { .getOrCreateHazelcastInstance(new Config('hazelcastInstanceName')) .getMap('mapInstanceName') - def batchCount = new AtomicInteger(5) - def objectUnderTest = new ModuleSyncTasks(mockInventoryPersistence, mockSyncUtils, mockModuleSyncService, mockLcmEventsCmHandleStateHandler, moduleSyncStartedOnCmHandles) @@ -87,7 +83,7 @@ class ModuleSyncTasksSpec extends Specification { mockInventoryPersistence.getYangModelCmHandle('cm-handle-1') >> cmHandle1 mockInventoryPersistence.getYangModelCmHandle('cm-handle-2') >> cmHandle2 when: 'module sync poll is executed' - objectUnderTest.performModuleSync(['cm-handle-1', 'cm-handle-2'], batchCount) + objectUnderTest.performModuleSync(['cm-handle-1', 'cm-handle-2']) then: 'module sync service is invoked for each cm handle' 1 * mockModuleSyncService.syncAndCreateSchemaSetAndAnchor(_) >> { args -> assert args[0].id == 'cm-handle-1' } 1 * mockModuleSyncService.syncAndCreateSchemaSetAndAnchor(_) >> { args -> assert args[0].id == 'cm-handle-2' } @@ -95,8 +91,6 @@ class ModuleSyncTasksSpec extends Specification { 1 * mockLcmEventsCmHandleStateHandler.updateCmHandleStateBatch(_) >> { args -> assertBatch(args, ['cm-handle-1', 'cm-handle-2'], CmHandleState.READY) } - and: 'batch count is decremented by one' - assert batchCount.get() == 4 } def 'Handle CM handle failure during #scenario and log MODULE_UPGRADE lock reason'() { @@ -108,15 +102,13 @@ class ModuleSyncTasksSpec extends Specification { mockModuleSyncService.syncAndCreateSchemaSetAndAnchor(_) >> { throw new Exception('some exception') } mockModuleSyncService.syncAndUpgradeSchemaSet(_) >> { throw new Exception('some exception') } when: 'module sync is executed' - objectUnderTest.performModuleSync(['cm-handle'], batchCount) + objectUnderTest.performModuleSync(['cm-handle']) then: 'lock reason is updated with number of attempts' 1 * mockSyncUtils.updateLockReasonWithAttempts(_, expectedLockReasonCategory, 'some exception') and: 'the state handler is called to update the state to LOCKED' 1 * mockLcmEventsCmHandleStateHandler.updateCmHandleStateBatch(_) >> { args -> assertBatch(args, ['cm-handle'], CmHandleState.LOCKED) } - and: 'batch count is 
decremented by one' - assert batchCount.get() == 4 where: scenario | lockReasonCategory | lockReasonDetails || expectedLockReasonCategory 'module sync' | MODULE_SYNC_FAILED | 'some lock details' || MODULE_SYNC_FAILED @@ -132,7 +124,7 @@ class ModuleSyncTasksSpec extends Specification { and: 'a cm handle in advised state' mockInventoryPersistence.getYangModelCmHandle('cm-handle-3') >> cmHandleByIdAndState('cm-handle-3', CmHandleState.ADVISED) when: 'module sync poll is executed' - objectUnderTest.performModuleSync(['cm-handle-1', 'cm-handle-2', 'cm-handle-3'], batchCount) + objectUnderTest.performModuleSync(['cm-handle-1', 'cm-handle-2', 'cm-handle-3']) then: 'no exception is thrown' noExceptionThrown() and: 'the deleted cm-handle did not sync' @@ -176,7 +168,7 @@ class ModuleSyncTasksSpec extends Specification { and: 'entry in progress map for other cm handle' moduleSyncStartedOnCmHandles.put('other-cm-handle', 'started') when: 'module sync poll is executed' - objectUnderTest.performModuleSync(['cm-handle-1'], batchCount) + objectUnderTest.performModuleSync(['cm-handle-1']) then: 'module sync service is invoked for cm handle' 1 * mockModuleSyncService.syncAndCreateSchemaSetAndAnchor(_) >> { args -> assert args[0].id == 'cm-handle-1' } and: 'the entry for other cm handle is still in the progress map' @@ -192,7 +184,7 @@ class ModuleSyncTasksSpec extends Specification { def loggingEvent = getLoggingEvent() assert loggingEvent.level == Level.INFO and: 'the log indicates the cm handle entry is removed successfully' - assert loggingEvent.formattedMessage == 'ch-1 will be removed asynchronously from in progress map' + assert loggingEvent.formattedMessage == 'ch-1 removed from in progress map' } def 'Sync and upgrade CM handle if in upgrade state for #scenario'() { @@ -201,7 +193,7 @@ class ModuleSyncTasksSpec extends Specification { cmHandle.compositeState.setLockReason(CompositeState.LockReason.builder().lockReasonCategory(lockReasonCategory).build()) mockInventoryPersistence.getYangModelCmHandle('cm-handle') >> cmHandle when: 'module sync is executed' - objectUnderTest.performModuleSync(['cm-handle'], batchCount) + objectUnderTest.performModuleSync(['cm-handle']) then: 'the module sync service should attempt to sync and upgrade the CM handle' 1 * mockModuleSyncService.syncAndUpgradeSchemaSet(_) >> { args -> assert args[0].id == 'cm-handle' diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncWatchdogSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncWatchdogSpec.groovy index a9b88c2d3b..68aa6a1b6a 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncWatchdogSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncWatchdogSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 Nordix Foundation * Modifications Copyright (C) 2022 Bell Canada * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -22,14 +22,10 @@ package org.onap.cps.ncmp.impl.inventory.sync import com.hazelcast.map.IMap +import java.util.concurrent.ArrayBlockingQueue import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle -import org.onap.cps.ncmp.impl.utils.Sleeper -import org.onap.cps.api.model.DataNode import 
spock.lang.Specification -import java.util.concurrent.ArrayBlockingQueue -import java.util.concurrent.locks.Lock - class ModuleSyncWatchdogSpec extends Specification { def mockModuleOperationsUtils = Mock(ModuleOperationsUtils) @@ -42,17 +38,9 @@ class ModuleSyncWatchdogSpec extends Specification { def mockModuleSyncTasks = Mock(ModuleSyncTasks) - def spiedAsyncTaskExecutor = Spy(AsyncTaskExecutor) - def mockCpsAndNcmpLock = Mock(IMap<String,String>) - def spiedSleeper = Spy(Sleeper) - - def objectUnderTest = new ModuleSyncWatchdog(mockModuleOperationsUtils, moduleSyncWorkQueue , mockModuleSyncStartedOnCmHandles, mockModuleSyncTasks, spiedAsyncTaskExecutor, mockCpsAndNcmpLock, spiedSleeper) - - void setup() { - spiedAsyncTaskExecutor.setupThreadPool() - } + def objectUnderTest = new ModuleSyncWatchdog(mockModuleOperationsUtils, moduleSyncWorkQueue , mockModuleSyncStartedOnCmHandles, mockModuleSyncTasks, mockCpsAndNcmpLock) def 'Module sync advised cm handles with #scenario.'() { given: 'module sync utilities returns #numberOfAdvisedCmHandles advised cm handles' @@ -61,12 +49,10 @@ class ModuleSyncWatchdogSpec extends Specification { mockModuleOperationsUtils.getCmHandlesThatFailedModelSyncOrUpgrade() >> [] and: 'the work queue can be locked' mockCpsAndNcmpLock.tryLock('workQueueLock') >> true - and: 'the executor has enough available threads' - spiedAsyncTaskExecutor.getAsyncTaskParallelismLevel() >> 3 when: ' module sync is started' objectUnderTest.moduleSyncAdvisedCmHandles() then: 'it performs #expectedNumberOfTaskExecutions tasks' - expectedNumberOfTaskExecutions * spiedAsyncTaskExecutor.executeTask(*_) + expectedNumberOfTaskExecutions * mockModuleSyncTasks.performModuleSync(*_) and: 'the executing thread is unlocked' 1 * mockCpsAndNcmpLock.unlock('workQueueLock') where: 'the following parameter are used' @@ -84,12 +70,10 @@ class ModuleSyncWatchdogSpec extends Specification { mockModuleOperationsUtils.getAdvisedCmHandleIds() >> createCmHandleIds(1) and: 'the work queue can be locked' mockCpsAndNcmpLock.tryLock('workQueueLock') >> true - and: 'the executor first has no threads but has one thread on the second attempt' - spiedAsyncTaskExecutor.getAsyncTaskParallelismLevel() >>> [ 0, 1 ] when: ' module sync is started' objectUnderTest.moduleSyncAdvisedCmHandles() then: 'it performs one task' - 1 * spiedAsyncTaskExecutor.executeTask(*_) + 1 * mockModuleSyncTasks.performModuleSync(*_) } def 'Module sync advised cm handle already handled by other thread.'() { @@ -97,27 +81,21 @@ class ModuleSyncWatchdogSpec extends Specification { mockModuleOperationsUtils.getAdvisedCmHandleIds() >> createCmHandleIds(1) and: 'the work queue can be locked' mockCpsAndNcmpLock.tryLock('workQueueLock') >> true - and: 'the executor has a thread available' - spiedAsyncTaskExecutor.getAsyncTaskParallelismLevel() >> 1 and: 'the semaphore cache indicates the cm handle is already being processed' mockModuleSyncStartedOnCmHandles.putIfAbsent(*_) >> 'Started' - when: ' module sync is started' + when: 'module sync is started' objectUnderTest.moduleSyncAdvisedCmHandles() then: 'it does NOT execute a task to process the (empty) batch' - 0 * spiedAsyncTaskExecutor.executeTask(*_) + 0 * mockModuleSyncTasks.performModuleSync(*_) } def 'Module sync with previous cm handle(s) left in work queue.'() { given: 'there is still a cm handle in the queue' moduleSyncWorkQueue.offer('ch-1') - and: 'sync utilities returns many advise cm handles' - mockModuleOperationsUtils.getAdvisedCmHandleIds() >> createCmHandleIds(500) - and: 'the 
executor has plenty threads available' - spiedAsyncTaskExecutor.getAsyncTaskParallelismLevel() >> 10 - when: ' module sync is started' + when: 'module sync is started' objectUnderTest.moduleSyncAdvisedCmHandles() then: 'it does executes only one task to process the remaining handle in the queue' - 1 * spiedAsyncTaskExecutor.executeTask(*_) + 1 * mockModuleSyncTasks.performModuleSync(*_) } def 'Reset failed cm handles.'() { @@ -147,15 +125,6 @@ class ModuleSyncWatchdogSpec extends Specification { true || false || 1 } - def 'Sleeper gets interrupted.'() { - given: 'sleeper gets interrupted' - spiedSleeper.haveALittleRest(_) >> { throw new InterruptedException() } - when: 'the watchdog attempts to sleep to save cpu cycles' - objectUnderTest.preventBusyWait() - then: 'no exception is thrown' - noExceptionThrown() - } - def createCmHandleIds(numberOfCmHandles) { return (numberOfCmHandles > 0) ? (1..numberOfCmHandles).collect { 'ch-'+it } : [] } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsPublisherSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/lcm/EventsProducerSpec.groovy index 3e7ed9aff6..9d2511a996 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsPublisherSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/lcm/EventsProducerSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
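
The ModuleSyncWatchdogSpec changes above remove the AsyncTaskExecutor, Sleeper and parallelism-level plumbing, so the watchdog now calls ModuleSyncTasks.performModuleSync directly on batches drained from the Hazelcast-guarded work queue. A rough sketch of that simplified flow follows; field and method names are taken from the spec, while the batch size, the putIfAbsent filtering and the loop itself are assumptions, not the actual implementation.

    import com.hazelcast.map.IMap;
    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.concurrent.BlockingQueue;

    // Illustrative sketch only; ModuleOperationsUtils and ModuleSyncTasks are the project
    // collaborators mocked in ModuleSyncWatchdogSpec above.
    class ModuleSyncWatchdogSketch {
        private static final String WORK_QUEUE_LOCK_NAME = "workQueueLock";
        private static final int ASSUMED_BATCH_SIZE = 100; // assumption, not taken from the diff

        ModuleOperationsUtils moduleOperationsUtils;
        BlockingQueue<String> moduleSyncWorkQueue;
        IMap<String, Object> moduleSyncStartedOnCmHandles;
        ModuleSyncTasks moduleSyncTasks;
        IMap<String, String> workQueueLock;

        void moduleSyncAdvisedCmHandles() {
            if (!workQueueLock.tryLock(WORK_QUEUE_LOCK_NAME)) {
                return; // another thread or instance already owns the work queue
            }
            try {
                if (moduleSyncWorkQueue.isEmpty()) {
                    moduleOperationsUtils.getAdvisedCmHandleIds().forEach(moduleSyncWorkQueue::offer);
                }
                while (!moduleSyncWorkQueue.isEmpty()) {
                    Collection<String> batch = new ArrayList<>();
                    moduleSyncWorkQueue.drainTo(batch, ASSUMED_BATCH_SIZE);
                    Collection<String> toProcess = batch.stream()
                            .filter(id -> moduleSyncStartedOnCmHandles.putIfAbsent(id, "Started") == null)
                            .toList();
                    if (!toProcess.isEmpty()) {
                        moduleSyncTasks.performModuleSync(toProcess); // no batch counter any more
                    }
                }
            } finally {
                workQueueLock.unlock(WORK_QUEUE_LOCK_NAME);
            }
        }
    }
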
@@ -23,7 +23,7 @@ package org.onap.cps.ncmp.impl.inventory.sync.lcm import com.fasterxml.jackson.databind.ObjectMapper import org.apache.kafka.clients.consumer.KafkaConsumer import org.apache.kafka.common.serialization.StringDeserializer -import org.onap.cps.events.EventsPublisher +import org.onap.cps.events.EventsProducer import org.onap.cps.ncmp.events.lcm.v1.Event import org.onap.cps.ncmp.events.lcm.v1.LcmEvent import org.onap.cps.ncmp.utils.TestUtils @@ -41,20 +41,20 @@ import java.time.Duration @SpringBootTest(classes = [ObjectMapper, JsonObjectMapper]) @Testcontainers @DirtiesContext -class LcmEventsPublisherSpec extends MessagingBaseSpec { +class EventsProducerSpec extends MessagingBaseSpec { def legacyEventKafkaConsumer = new KafkaConsumer<>(eventConsumerConfigProperties('ncmp-group', StringDeserializer)) def testTopic = 'ncmp-events-test' @SpringBean - EventsPublisher<LcmEvent> lcmEventsPublisher = new EventsPublisher(legacyEventKafkaTemplate, cloudEventKafkaTemplate) + EventsProducer<LcmEvent> eventsProducer = new EventsProducer(legacyEventKafkaTemplate, cloudEventKafkaTemplate) @Autowired JsonObjectMapper jsonObjectMapper - def 'Produce and Consume Lcm Event'() { + def 'Produce and Consume Event'() { given: 'event key and event data' def eventKey = 'lcm' def eventId = 'test-uuid' @@ -84,8 +84,8 @@ class LcmEventsPublisherSpec extends MessagingBaseSpec { eventSchemaVersion: eventSchemaVersion] and: 'consumer has a subscription' legacyEventKafkaConsumer.subscribe([testTopic] as List<String>) - when: 'an event is published' - lcmEventsPublisher.publishEvent(testTopic, eventKey, eventHeader, eventData) + when: 'an event is sent' + eventsProducer.sendEvent(testTopic, eventKey, eventHeader, eventData) and: 'topic is polled' def records = legacyEventKafkaConsumer.poll(Duration.ofMillis(1500)) then: 'poll returns one record' diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCmHandleStateHandlerImplSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCmHandleStateHandlerImplSpec.groovy index 62db2e34ad..ebf2eee120 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCmHandleStateHandlerImplSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCmHandleStateHandlerImplSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2025 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
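
The rename from EventsPublisher/publishEvent to EventsProducer/sendEvent carries through the LCM layer: EventsProducerSpec above drives eventsProducer.sendEvent(topic, key, headers, payload), and the state-handler and producer specs that follow stub LcmEventsProducer.sendLcmEvent(cmHandleId, lcmEvent, lcmEventHeader). A minimal sketch of what the renamed LCM producer could look like, assuming it simply delegates to EventsProducer; the topic name is an assumption, while the metric name and the swallowed KafkaException come from LcmEventsProducerSpec further below.

    import io.micrometer.core.instrument.MeterRegistry;
    import io.micrometer.core.instrument.Timer;
    import java.util.Map;
    import org.springframework.kafka.KafkaException;

    // Sketch only; EventsProducer, LcmEvent, LcmEventHeader and JsonObjectMapper are the
    // project types used by the specs and are assumed to be on the classpath.
    class LcmEventsProducerSketch {
        private static final String ASSUMED_TOPIC = "ncmp-events"; // assumption, not from the diff

        private final EventsProducer<LcmEvent> eventsProducer;
        private final JsonObjectMapper jsonObjectMapper;
        private final MeterRegistry meterRegistry;
        boolean notificationsEnabled;

        LcmEventsProducerSketch(EventsProducer<LcmEvent> eventsProducer,
                                JsonObjectMapper jsonObjectMapper,
                                MeterRegistry meterRegistry) {
            this.eventsProducer = eventsProducer;
            this.jsonObjectMapper = jsonObjectMapper;
            this.meterRegistry = meterRegistry;
        }

        void sendLcmEvent(String cmHandleId, LcmEvent lcmEvent, LcmEventHeader lcmEventHeader) {
            if (!notificationsEnabled) {
                return; // nothing is sent and no timer is recorded
            }
            Timer.Sample sample = Timer.start(meterRegistry);
            try {
                Map eventHeaders = jsonObjectMapper.convertToValueType(lcmEventHeader, Map.class);
                eventsProducer.sendEvent(ASSUMED_TOPIC, cmHandleId, eventHeaders, lcmEvent);
            } catch (KafkaException e) {
                // sending failures are only logged; the spec asserts no exception bubbles up
            } finally {
                sample.stop(meterRegistry.timer("cps.ncmp.lcm.events.send"));
            }
        }
    }
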
@@ -54,18 +54,18 @@ class LcmEventsCmHandleStateHandlerImplSpec extends Specification { } def mockInventoryPersistence = Mock(InventoryPersistence) - def mockLcmEventsCreator = Mock(LcmEventsCreator) - def mockLcmEventsService = Mock(LcmEventsService) + def mockLcmEventsCreator = Mock(LcmEventsProducerHelper) + def mockLcmEventsProducer = Mock(LcmEventsProducer) def mockCmHandleStateMonitor = Mock(CmHandleStateMonitor) - def lcmEventsCmHandleStateHandlerAsyncHelper = new LcmEventsCmHandleStateHandlerAsyncHelper(mockLcmEventsCreator, mockLcmEventsService) + def lcmEventsCmHandleStateHandlerAsyncHelper = new LcmEventsCmHandleStateHandlerAsyncHelper(mockLcmEventsCreator, mockLcmEventsProducer) def objectUnderTest = new LcmEventsCmHandleStateHandlerImpl(mockInventoryPersistence, lcmEventsCmHandleStateHandlerAsyncHelper, mockCmHandleStateMonitor) def cmHandleId = 'cmhandle-id-1' def compositeState def yangModelCmHandle - def 'Update and Publish Events on State Change #stateChange'() { + def 'Update and Send Events on State Change #stateChange'() { given: 'Cm Handle represented as YangModelCmHandle' compositeState = new CompositeState(cmHandleState: fromCmHandleState) yangModelCmHandle = new YangModelCmHandle(id: cmHandleId, dmiProperties: [], publicProperties: [], compositeState: compositeState) @@ -82,8 +82,8 @@ class LcmEventsCmHandleStateHandlerImplSpec extends Specification { def loggingEvent = (ILoggingEvent) logger.list[0] assert loggingEvent.level == Level.INFO assert loggingEvent.formattedMessage == "${cmHandleId} is now in ${toCmHandleState} state" - and: 'event service is called to publish event' - 1 * mockLcmEventsService.publishLcmEvent(cmHandleId, _, _) + and: 'event service is called to send event' + 1 * mockLcmEventsProducer.sendLcmEvent(cmHandleId, _, _) where: 'state change parameters are provided' stateChange | fromCmHandleState | toCmHandleState 'ADVISED to READY' | ADVISED | READY @@ -92,20 +92,20 @@ class LcmEventsCmHandleStateHandlerImplSpec extends Specification { 'ADVISED to DELETING' | ADVISED | DELETING } - def 'Update and Publish Events on State Change from non-existing to ADVISED'() { + def 'Update and Send Events on State Change from non-existing to ADVISED'() { given: 'Cm Handle represented as YangModelCmHandle' yangModelCmHandle = new YangModelCmHandle(id: cmHandleId, dmiProperties: [], publicProperties: []) when: 'update state is invoked' objectUnderTest.updateCmHandleStateBatch(Map.of(yangModelCmHandle, ADVISED)) then: 'CM-handle is saved using inventory persistence' 1 * mockInventoryPersistence.saveCmHandleBatch(List.of(yangModelCmHandle)) - and: 'event service is called to publish event' - 1 * mockLcmEventsService.publishLcmEvent(cmHandleId, _, _) + and: 'event service is called to send event' + 1 * mockLcmEventsProducer.sendLcmEvent(cmHandleId, _, _) and: 'a log entry is written' assert getLogMessage(0) == "${cmHandleId} is now in ADVISED state" } - def 'Update and Publish Events on State Change from LOCKED to ADVISED'() { + def 'Update and Send Events on State Change from LOCKED to ADVISED'() { given: 'Cm Handle represented as YangModelCmHandle in LOCKED state' compositeState = new CompositeState(cmHandleState: LOCKED, lockReason: CompositeState.LockReason.builder().lockReasonCategory(MODULE_SYNC_FAILED).details('some lock details').build()) @@ -119,13 +119,13 @@ class LcmEventsCmHandleStateHandlerImplSpec extends Specification { assert cmHandleStatePerCmHandleId.get(cmHandleId).lockReason.details == 'some lock details' } } - and: 'event service is called to 
publish event' - 1 * mockLcmEventsService.publishLcmEvent(cmHandleId, _, _) + and: 'event service is called to send event' + 1 * mockLcmEventsProducer.sendLcmEvent(cmHandleId, _, _) and: 'a log entry is written' assert getLogMessage(0) == "${cmHandleId} is now in ADVISED state" } - def 'Update and Publish Events on State Change to from ADVISED to READY'() { + def 'Update and Send Events on State Change to from ADVISED to READY'() { given: 'Cm Handle represented as YangModelCmHandle' compositeState = new CompositeState(cmHandleState: ADVISED) yangModelCmHandle = new YangModelCmHandle(id: cmHandleId, dmiProperties: [], publicProperties: [], compositeState: compositeState) @@ -141,8 +141,8 @@ class LcmEventsCmHandleStateHandlerImplSpec extends Specification { assert cmHandleStatePerCmHandleId.get(cmHandleId).dataStores.operationalDataStore.dataStoreSyncState == DataStoreSyncState.NONE_REQUESTED } } - and: 'event service is called to publish event' - 1 * mockLcmEventsService.publishLcmEvent(cmHandleId, _, _) + and: 'event service is called to send event' + 1 * mockLcmEventsProducer.sendLcmEvent(cmHandleId, _, _) and: 'a log entry is written' assert getLogMessage(0) == "${cmHandleId} is now in READY state" } @@ -157,8 +157,8 @@ class LcmEventsCmHandleStateHandlerImplSpec extends Specification { yangModelCmHandle.getCompositeState().getCmHandleState() == DELETING and: 'method to persist cm handle state is called once' 1 * mockInventoryPersistence.saveCmHandleStateBatch(Map.of(yangModelCmHandle.getId(), yangModelCmHandle.getCompositeState())) - and: 'the method to publish Lcm event is called once' - 1 * mockLcmEventsService.publishLcmEvent(cmHandleId, _, _) + and: 'the method to send Lcm event is called once' + 1 * mockLcmEventsProducer.sendLcmEvent(cmHandleId, _, _) } def 'Update cmHandle state to DELETING to DELETED' (){ @@ -169,11 +169,11 @@ class LcmEventsCmHandleStateHandlerImplSpec extends Specification { objectUnderTest.updateCmHandleStateBatch(Map.of(yangModelCmHandle, DELETED)) then: 'the cm handle state is as expected' yangModelCmHandle.getCompositeState().getCmHandleState() == DELETED - and: 'the method to publish Lcm event is called once' - 1 * mockLcmEventsService.publishLcmEvent(cmHandleId, _, _) + and: 'the method to send Lcm event is called once' + 1 * mockLcmEventsProducer.sendLcmEvent(cmHandleId, _, _) } - def 'No state change and no event to be published'() { + def 'No state change and no event to be sent'() { given: 'Cm Handle batch with same state transition as before' def cmHandleStateMap = setupBatch('NO_CHANGE') when: 'updating a batch of changes' @@ -181,8 +181,8 @@ class LcmEventsCmHandleStateHandlerImplSpec extends Specification { then: 'no changes are persisted' 1 * mockInventoryPersistence.saveCmHandleBatch(EMPTY_LIST) 1 * mockInventoryPersistence.saveCmHandleStateBatch(EMPTY_MAP) - and: 'no event will be published' - 0 * mockLcmEventsService.publishLcmEvent(*_) + and: 'no event will be sent' + 0 * mockLcmEventsProducer.sendLcmEvent(*_) and: 'no log entries are written' assert logger.list.empty } @@ -200,8 +200,8 @@ class LcmEventsCmHandleStateHandlerImplSpec extends Specification { } and: 'no state updates are persisted' 1 * mockInventoryPersistence.saveCmHandleStateBatch(EMPTY_MAP) - and: 'event service is called to publish events' - 2 * mockLcmEventsService.publishLcmEvent(_, _, _) + and: 'event service is called to send events' + 2 * mockLcmEventsProducer.sendLcmEvent(_, _, _) and: 'two log entries are written' assert getLogMessage(0) == 'cmhandle1 is now in 
ADVISED state' assert getLogMessage(1) == 'cmhandle2 is now in ADVISED state' @@ -220,8 +220,8 @@ class LcmEventsCmHandleStateHandlerImplSpec extends Specification { } and: 'no new handles are persisted' 1 * mockInventoryPersistence.saveCmHandleBatch(EMPTY_LIST) - and: 'event service is called to publish events' - 2 * mockLcmEventsService.publishLcmEvent(_, _, _) + and: 'event service is called to send events' + 2 * mockLcmEventsProducer.sendLcmEvent(_, _, _) and: 'two log entries are written' assert getLogMessage(0) == 'cmhandle1 is now in READY state' assert getLogMessage(1) == 'cmhandle2 is now in DELETING state' @@ -236,8 +236,8 @@ class LcmEventsCmHandleStateHandlerImplSpec extends Specification { 1 * mockInventoryPersistence.saveCmHandleStateBatch(EMPTY_MAP) and: 'no new handles are persisted' 1 * mockInventoryPersistence.saveCmHandleBatch(EMPTY_LIST) - and: 'event service is called to publish events' - 2 * mockLcmEventsService.publishLcmEvent(_, _, _) + and: 'event service is called to send events' + 2 * mockLcmEventsProducer.sendLcmEvent(_, _, _) and: 'two log entries are written' assert getLogMessage(0) == 'cmhandle1 is now in DELETED state' assert getLogMessage(1) == 'cmhandle2 is now in DELETED state' @@ -252,8 +252,8 @@ class LcmEventsCmHandleStateHandlerImplSpec extends Specification { objectUnderTest.updateCmHandleStateBatch(cmHandleStateMap) then: 'the exception is not handled' thrown(RuntimeException) - and: 'no events are published' - 0 * mockLcmEventsService.publishLcmEvent(_, _, _) + and: 'no events are sent' + 0 * mockLcmEventsProducer.sendLcmEvent(_, _, _) and: 'no log entries are written' assert logger.list.empty } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCreatorSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsProducerHelperSpec.groovy index c63e3d9c8e..b7c486d3e3 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsCreatorSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsProducerHelperSpec.groovy @@ -31,11 +31,11 @@ import static org.onap.cps.ncmp.api.inventory.models.CmHandleState.ADVISED import static org.onap.cps.ncmp.api.inventory.models.CmHandleState.DELETING import static org.onap.cps.ncmp.api.inventory.models.CmHandleState.READY -class LcmEventsCreatorSpec extends Specification { +class LcmEventsProducerHelperSpec extends Specification { LcmEventHeaderMapper lcmEventsHeaderMapper = Mappers.getMapper(LcmEventHeaderMapper) - def objectUnderTest = new LcmEventsCreator(lcmEventsHeaderMapper) + def objectUnderTest = new LcmEventsProducerHelper(lcmEventsHeaderMapper) def cmHandleId = 'test-cm-handle' def 'Map the LcmEvent for #operation'() { diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsServiceSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsProducerSpec.groovy index 73c66089a3..d9944a707a 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsServiceSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/sync/lcm/LcmEventsProducerSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. 
* ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,7 +25,7 @@ import static org.onap.cps.ncmp.events.lcm.v1.Values.CmHandleState.READY import io.micrometer.core.instrument.Tag import io.micrometer.core.instrument.simple.SimpleMeterRegistry -import org.onap.cps.events.EventsPublisher +import org.onap.cps.events.EventsProducer import org.onap.cps.ncmp.events.lcm.v1.Event import org.onap.cps.ncmp.events.lcm.v1.LcmEvent import org.onap.cps.ncmp.events.lcm.v1.LcmEventHeader @@ -34,16 +34,16 @@ import org.onap.cps.utils.JsonObjectMapper import org.springframework.kafka.KafkaException import spock.lang.Specification -class LcmEventsServiceSpec extends Specification { +class LcmEventsProducerSpec extends Specification { - def mockLcmEventsPublisher = Mock(EventsPublisher) + def mockLcmEventsProducer = Mock(EventsProducer) def mockJsonObjectMapper = Mock(JsonObjectMapper) def meterRegistry = new SimpleMeterRegistry() - def objectUnderTest = new LcmEventsService(mockLcmEventsPublisher, mockJsonObjectMapper, meterRegistry) + def objectUnderTest = new LcmEventsProducer(mockLcmEventsProducer, mockJsonObjectMapper, meterRegistry) - def 'Create and Publish lcm event where events are #scenario'() { - given: 'a cm handle id, Lcm Event, and headers' + def 'Create and send lcm event where events are #scenario'() { + given: 'a cm handle id and Lcm Event' def cmHandleId = 'test-cm-handle-id' def eventId = UUID.randomUUID().toString() def event = getEventWithCmHandleState(ADVISED, READY) @@ -54,10 +54,10 @@ class LcmEventsServiceSpec extends Specification { objectUnderTest.notificationsEnabled = notificationsEnabled and: 'lcm event header is transformed to headers map' mockJsonObjectMapper.convertToValueType(lcmEventHeader, Map.class) >> ['eventId': eventId, 'eventCorrelationId': cmHandleId] - when: 'service is called to publish lcm event' - objectUnderTest.publishLcmEvent('test-cm-handle-id', lcmEvent, lcmEventHeader) - then: 'publisher is called #expectedTimesMethodCalled times' - expectedTimesMethodCalled * mockLcmEventsPublisher.publishEvent(_, cmHandleId, _, lcmEvent) >> { + when: 'service is called to send lcm event' + objectUnderTest.sendLcmEvent('test-cm-handle-id', lcmEvent, lcmEventHeader) + then: 'producer is called #expectedTimesMethodCalled times' + expectedTimesMethodCalled * mockLcmEventsProducer.sendEvent(_, cmHandleId, _, lcmEvent) >> { args -> { def eventHeaders = (args[2] as Map<String,Object>) assert eventHeaders.containsKey('eventId') @@ -67,7 +67,7 @@ class LcmEventsServiceSpec extends Specification { } } and: 'metrics are recorded with correct tags' - def timer = meterRegistry.find('cps.ncmp.lcm.events.publish').timer() + def timer = meterRegistry.find('cps.ncmp.lcm.events.send').timer() if (notificationsEnabled) { assert timer != null assert timer.count() == expectedTimesMethodCalled @@ -90,14 +90,14 @@ class LcmEventsServiceSpec extends Specification { def lcmEvent = new LcmEvent(event: event, eventId: eventId, eventCorrelationId: cmHandleId) def lcmEventHeader = new LcmEventHeader(eventId: eventId, eventCorrelationId: cmHandleId) objectUnderTest.notificationsEnabled = true - when: 'publisher set to throw an exception' - mockLcmEventsPublisher.publishEvent(_, _, _, _) >> { throw new KafkaException('publishing failed')} + when: 'producer set to throw an exception' + mockLcmEventsProducer.sendEvent(_, _, _, _) >> { 
throw new KafkaException('sending failed')} and: 'an event is publised' - objectUnderTest.publishLcmEvent(cmHandleId, lcmEvent, lcmEventHeader) + objectUnderTest.sendLcmEvent(cmHandleId, lcmEvent, lcmEventHeader) then: 'the exception is just logged and not bubbled up' noExceptionThrown() and: 'metrics are recorded with error tags' - def timer = meterRegistry.find('cps.ncmp.lcm.events.publish').timer() + def timer = meterRegistry.find('cps.ncmp.lcm.events.send').timer() assert timer != null assert timer.count() == 1 def expectedTags = [Tag.of('oldCmHandleState', 'N/A'), Tag.of('newCmHandleState', 'N/A')] diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/trustlevel/TrustLevelManagerSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/trustlevel/TrustLevelManagerSpec.groovy index 1ab517cdcf..020834e6d6 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/trustlevel/TrustLevelManagerSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/trustlevel/TrustLevelManagerSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -27,7 +27,7 @@ import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle import org.onap.cps.ncmp.api.inventory.models.TrustLevel import org.onap.cps.ncmp.impl.inventory.InventoryPersistence import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle -import org.onap.cps.ncmp.utils.events.CmAvcEventPublisher +import org.onap.cps.ncmp.utils.events.InventoryEventProducer import spock.lang.Specification class TrustLevelManagerSpec extends Specification { @@ -39,13 +39,13 @@ class TrustLevelManagerSpec extends Specification { IMap<String, TrustLevel> trustLevelPerDmiPlugin def mockInventoryPersistence = Mock(InventoryPersistence) - def mockAttributeValueChangeEventPublisher = Mock(CmAvcEventPublisher) + def mockInventoryEventProducer = Mock(InventoryEventProducer) def setup() { hazelcastInstance = Hazelcast.newHazelcastInstance() trustLevelPerCmHandleId = hazelcastInstance.getMap("trustLevelPerCmHandle") trustLevelPerDmiPlugin = hazelcastInstance.getMap("trustLevelPerCmHandle") - objectUnderTest = new TrustLevelManager(trustLevelPerCmHandleId, trustLevelPerDmiPlugin, mockInventoryPersistence, mockAttributeValueChangeEventPublisher) + objectUnderTest = new TrustLevelManager(trustLevelPerCmHandleId, trustLevelPerDmiPlugin, mockInventoryPersistence, mockInventoryEventProducer) } def cleanup() { @@ -71,7 +71,7 @@ class TrustLevelManagerSpec extends Specification { when: 'method to register to the cache is called' objectUnderTest.registerCmHandles(cmHandleModelsToBeCreated) then: 'no notification sent' - 0 * mockAttributeValueChangeEventPublisher.publishAvcEvent(*_) + 0 * mockInventoryEventProducer.sendAvcEvent(*_) and: 'both cm handles are in the cache and are trusted' assert trustLevelPerCmHandleId.get('ch-1') == TrustLevel.COMPLETE assert trustLevelPerCmHandleId.get('ch-2') == TrustLevel.COMPLETE @@ -83,7 +83,7 @@ class TrustLevelManagerSpec extends Specification { when: 'method to register to the cache is called' 
objectUnderTest.registerCmHandles(cmHandleModelsToBeCreated) then: 'notification is sent' - 1 * mockAttributeValueChangeEventPublisher.publishAvcEvent(*_) + 1 * mockInventoryEventProducer.sendAvcEvent(*_) } def 'Dmi trust level updated'() { @@ -94,7 +94,7 @@ class TrustLevelManagerSpec extends Specification { when: 'the update is handled' objectUnderTest.updateDmi('my-dmi', ['ch-1'], TrustLevel.NONE) then: 'notification is sent' - 1 * mockAttributeValueChangeEventPublisher.publishAvcEvent('ch-1', 'trustLevel', 'COMPLETE', 'NONE') + 1 * mockInventoryEventProducer.sendAvcEvent('ch-1', 'trustLevel', 'COMPLETE', 'NONE') and: 'the dmi in the cache is not trusted' assert trustLevelPerDmiPlugin.get('my-dmi') == TrustLevel.NONE } @@ -107,7 +107,7 @@ class TrustLevelManagerSpec extends Specification { when: 'the update is handled' objectUnderTest.updateDmi('my-dmi', ['ch-1'], TrustLevel.COMPLETE) then: 'no notification is sent' - 0 * mockAttributeValueChangeEventPublisher.publishAvcEvent(*_) + 0 * mockInventoryEventProducer.sendAvcEvent(*_) and: 'the dmi in the cache is trusted' assert trustLevelPerDmiPlugin.get('my-dmi') == TrustLevel.COMPLETE } @@ -124,7 +124,7 @@ class TrustLevelManagerSpec extends Specification { then: 'the cm handle in the cache is trusted' assert trustLevelPerCmHandleId.get('ch-1', TrustLevel.COMPLETE) and: 'notification is sent' - 1 * mockAttributeValueChangeEventPublisher.publishAvcEvent('ch-1', 'trustLevel', 'NONE', 'COMPLETE') + 1 * mockInventoryEventProducer.sendAvcEvent('ch-1', 'trustLevel', 'NONE', 'COMPLETE') } def 'CmHandle trust level updated with same value'() { @@ -139,7 +139,7 @@ class TrustLevelManagerSpec extends Specification { then: 'the cm handle in the cache is not trusted' assert trustLevelPerCmHandleId.get('ch-1', TrustLevel.NONE) and: 'no notification is sent' - 0 * mockAttributeValueChangeEventPublisher.publishAvcEvent(*_) + 0 * mockInventoryEventProducer.sendAvcEvent(*_) } def 'Dmi trust level restored to complete with non trusted CmHandle'() { @@ -152,7 +152,7 @@ class TrustLevelManagerSpec extends Specification { then: 'the cm handle in the cache is still NONE' assert trustLevelPerCmHandleId.get('ch-1') == TrustLevel.NONE and: 'no notification is sent' - 0 * mockAttributeValueChangeEventPublisher.publishAvcEvent(*_) + 0 * mockInventoryEventProducer.sendAvcEvent(*_) } def 'Apply effective trust level among CmHandle and dmi plugin'() { diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/utils/AlternateIdMatcherSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/utils/AlternateIdMatcherSpec.groovy index a6d21afd30..984d45e420 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/utils/AlternateIdMatcherSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/utils/AlternateIdMatcherSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the 'License'); * you may not use this file except in compliance with the License. 
@@ -20,42 +20,39 @@ package org.onap.cps.ncmp.impl.utils +import com.hazelcast.map.IMap import org.onap.cps.ncmp.api.exceptions.CmHandleNotFoundException +import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle import org.onap.cps.ncmp.exceptions.NoAlternateIdMatchFoundException -import org.onap.cps.ncmp.impl.inventory.InventoryPersistence -import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle import spock.lang.Specification class AlternateIdMatcherSpec extends Specification { - def mockInventoryPersistence = Mock(InventoryPersistence) - def objectUnderTest = new AlternateIdMatcher(mockInventoryPersistence) + def mockCmHandleIdPerAlternateId = Mock(IMap) - def setup() { - given: 'cm handle in the registry with alternate id /a/b' - mockInventoryPersistence.getYangModelCmHandleByAlternateId('/a/b') >> new YangModelCmHandle() - and: 'no other cm handle' - mockInventoryPersistence.getYangModelCmHandleByAlternateId(_) >> { throw new CmHandleNotFoundException('') } - } + def objectUnderTest = new AlternateIdMatcher(mockCmHandleIdPerAlternateId) def 'Finding longest alternate id matches.'() { + given: 'a cm handle with alternate id /a/b in the cached map of all cm handles' + def ch1 = new NcmpServiceCmHandle(cmHandleId: 'ch1', alternateId: '/a/b') + def cmHandlePerAlternateId = ['/a/b': ch1] expect: 'querying for alternate id a matching result found' - assert objectUnderTest.getYangModelCmHandleByLongestMatchingAlternateId(targetAlternateId, '/') != null + assert objectUnderTest.getCmHandleByLongestMatchingAlternateId(targetAlternateId, '/', cmHandlePerAlternateId) != null where: 'the following parameters are used' - scenario | targetAlternateId - 'exact match' | '/a/b' - 'parent match' | '/a/b/c' - 'grand parent match' | '/a/b/c/d' - 'trailing separator match' | '/a/b/' - 'trailing hash' | '/a/b#q' - 'trailing hash parent match' | '/a/b/c#q' - 'trailing hash grand parent match' | '/a/b/c/d#q' - 'trailing separator then hash match' | '/a/b/#q' + scenario | targetAlternateId + 'exact match' | '/a/b' + 'parent match' | '/a/b/c' + 'grand parent match' | '/a/b/c/d' + 'trailing separator match' | '/a/b/' + 'trailing hash' | '/a/b#q' + 'trailing hash parent match' | '/a/b/c#q' + 'trailing hash grand parent match' | '/a/b/c/d#q' + 'trailing separator then hash match' | '/a/b/#q' } def 'Attempt to find longest alternate id match without any matches.'() { when: 'attempt to find alternateId' - objectUnderTest.getYangModelCmHandleByLongestMatchingAlternateId(targetAlternateId, '/') + objectUnderTest.getCmHandleByLongestMatchingAlternateId(targetAlternateId, '/', [:]) then: 'no alternate id match found exception thrown' def thrown = thrown(NoAlternateIdMatchFoundException) and: 'the exception has the relevant details from the error response' @@ -68,17 +65,27 @@ class AlternateIdMatcherSpec extends Specification { 'no match at all' | '/x/y' } - def 'Get cmHandle id from passed cmHandleReference (cmHandleId scenario)' () { - when: 'a cmHandleCmReference is passed in' + def 'Get cm handle id from a cm handle reference that is a #scenario id.' 
() { + given: 'cmHandleIdPerAlternateId cache contains the given reference' + mockCmHandleIdPerAlternateId.get(cmHandleReference) >> returnedCacheValue + mockCmHandleIdPerAlternateId.containsValue(cmHandleReference) >> true + when: 'getting a cm handle id from the reference' def result = objectUnderTest.getCmHandleId(cmHandleReference) - then: 'the inventory persistence service returns a cm handle (or not)' - mockInventoryPersistence.isExistingCmHandleId(cmHandleReference) >> existingCmHandleIdResponse - mockInventoryPersistence.getYangModelCmHandleByAlternateId(cmHandleReference) >> alternateIdGetResponse - and: 'correct result is returned' - assert result == cmHandleReference + then: 'the expected cm handle id is returned' + assert result == expectedResult where: 'the following parameters are used' - cmHandleReference | existingCmHandleIdResponse | alternateIdGetResponse - 'ch-1' | true | '' - 'alt-1' | false | new YangModelCmHandle(id: 'alt-1') + scenario | cmHandleReference| returnedCacheValue|| expectedResult + 'standard' | 'ch-id-1' | null || 'ch-id-1' + 'alternate' | 'alt-id=1' | 'ch-id-2' || 'ch-id-2' + } + + def 'Get cm handle id when given reference DOES NOT exist in cache.'() { + given: 'cmHandleIdPerAlternateId cache returns null' + mockCmHandleIdPerAlternateId.get('nonExistingId') >> null + when: 'getting a cm handle id from the reference' + objectUnderTest.getCmHandleId('nonExistingId') + then: 'an exception is thrown' + def thrownException = thrown(CmHandleNotFoundException) + assert thrownException.getMessage().contains('Cm handle not found') } -} +}
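
The reworked AlternateIdMatcherSpec above describes the new behaviour purely through its where-tables: exact, parent and grandparent alternate ids match, trailing separators and '#' fragments are ignored, unmatched ids raise NoAlternateIdMatchFoundException, and getCmHandleId now resolves references via the cmHandleIdPerAlternateId cache. One plausible, non-authoritative implementation of that behaviour (exception constructors simplified):

    import java.util.Map;

    class AlternateIdMatcherSketch {

        private final Map<String, String> cmHandleIdPerAlternateId; // a Hazelcast IMap in the spec

        AlternateIdMatcherSketch(Map<String, String> cmHandleIdPerAlternateId) {
            this.cmHandleIdPerAlternateId = cmHandleIdPerAlternateId;
        }

        NcmpServiceCmHandle getCmHandleByLongestMatchingAlternateId(String targetAlternateId,
                String separator, Map<String, NcmpServiceCmHandle> cmHandlePerAlternateId) {
            int hashIndex = targetAlternateId.indexOf('#');
            String candidate = hashIndex >= 0 ? targetAlternateId.substring(0, hashIndex) : targetAlternateId;
            while (!candidate.isEmpty()) {
                if (candidate.endsWith(separator)) {                       // '/a/b/' matches '/a/b'
                    candidate = candidate.substring(0, candidate.length() - separator.length());
                }
                NcmpServiceCmHandle match = cmHandlePerAlternateId.get(candidate);
                if (match != null) {
                    return match;                                          // exact, parent or grandparent match
                }
                int lastSeparator = candidate.lastIndexOf(separator);
                if (lastSeparator <= 0) {
                    break;                                                 // nothing left to trim
                }
                candidate = candidate.substring(0, lastSeparator);         // drop the last segment and retry
            }
            throw new NoAlternateIdMatchFoundException(targetAlternateId); // constructor simplified
        }

        String getCmHandleId(String cmHandleReference) {
            String cmHandleId = cmHandleIdPerAlternateId.get(cmHandleReference);
            if (cmHandleId != null) {
                return cmHandleId;                                         // reference was an alternate id
            }
            if (cmHandleIdPerAlternateId.containsValue(cmHandleReference)) {
                return cmHandleReference;                                  // reference already is a cm handle id
            }
            throw new CmHandleNotFoundException(cmHandleReference);        // message contains 'Cm handle not found'
        }
    }
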
\ No newline at end of file diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/init/AlternateIdCacheDataLoaderSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/init/AlternateIdCacheDataLoaderSpec.groovy new file mode 100644 index 0000000000..8941c4c5b3 --- /dev/null +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/init/AlternateIdCacheDataLoaderSpec.groovy @@ -0,0 +1,49 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.init + +import com.hazelcast.map.IMap +import org.onap.cps.api.model.DataNode +import org.onap.cps.ncmp.impl.inventory.CmHandleRegistrationService +import org.onap.cps.ncmp.impl.inventory.InventoryPersistence +import org.onap.cps.ncmp.utils.events.NcmpInventoryModelOnboardingFinishedEvent +import spock.lang.Specification + +class AlternateIdCacheDataLoaderSpec extends Specification { + + def mockInventoryPersistence = Mock(InventoryPersistence) + def mockCmHandleRegistrationService = Mock(CmHandleRegistrationService) + def mockCmHandleIdPerAlternateId = Mock(IMap) + + def objectUnderTest = new AlternateIdCacheDataLoader(mockInventoryPersistence, mockCmHandleRegistrationService, mockCmHandleIdPerAlternateId) + + def 'Populate cm handle id per alternate id cache.'() { + given: 'cache is empty' + mockCmHandleIdPerAlternateId.isEmpty() >> true + and: 'inventory persistence returns some data nodes' + def childDataNodes = [new DataNode(xpath: "", leaves: ['id': 'ch-1', 'alternate-id': 'alt-1'])] + mockInventoryPersistence.getDataNode(_, _) >> [new DataNode(childDataNodes:childDataNodes, leaves: ['id':''])] + when: 'the method to populate the cache is invoked by the ncmp model onboarding event' + objectUnderTest.populateCmHandleIdPerAlternateIdMap(Mock(NcmpInventoryModelOnboardingFinishedEvent)) + then: 'the cm handle registration service is called once to add ids to cache' + 1 * mockCmHandleRegistrationService.addAlternateIdsToCache(_) + } +} diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/utils/events/CmAvcEventPublisherSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/utils/events/InventoryEventProducerSpec.groovy index 051f5df4cf..21fc6563c2 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/utils/events/CmAvcEventPublisherSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/utils/events/InventoryEventProducerSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (c) 2023-2024 Nordix Foundation. + * Copyright (c) 2023-2025 OpenInfra Foundation Europe. All rights reserved. 
* ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -22,7 +22,7 @@ package org.onap.cps.ncmp.utils.events import com.fasterxml.jackson.databind.ObjectMapper import io.cloudevents.CloudEvent -import org.onap.cps.events.EventsPublisher +import org.onap.cps.events.EventsProducer import org.onap.cps.ncmp.config.CpsApplicationContext import org.onap.cps.ncmp.events.avc.ncmp_to_client.Avc import org.onap.cps.ncmp.events.avc.ncmp_to_client.AvcEvent @@ -30,12 +30,12 @@ import org.onap.cps.utils.JsonObjectMapper import org.springframework.test.context.ContextConfiguration @ContextConfiguration(classes = [CpsApplicationContext, ObjectMapper, JsonObjectMapper]) -class CmAvcEventPublisherSpec extends MessagingBaseSpec { +class InventoryEventProducerSpec extends MessagingBaseSpec { - def mockEventsPublisher = Mock(EventsPublisher<CloudEvent>) - def objectUnderTest = new CmAvcEventPublisher(mockEventsPublisher) + def mockEventsProducer = Mock(EventsProducer<CloudEvent>) + def objectUnderTest = new InventoryEventProducer(mockEventsProducer) - def 'Publish an attribute value change event'() { + def 'Send an attribute value change event'() { given: 'the event key' def someEventKey = 'someEventKey' and: 'the name of the attribute being changed' @@ -44,10 +44,10 @@ class CmAvcEventPublisherSpec extends MessagingBaseSpec { def someOldAttributeValue = 'someOldAttributeValue' and: 'the new value of the attribute' def someNewAttributeValue = 'someNewAttributeValue' - when: 'an attribute value change event is published' - objectUnderTest.publishAvcEvent(someEventKey, someAttributeName, someOldAttributeValue, someNewAttributeValue) - then: 'the cloud event publisher is invoked with the correct data' - 1 * mockEventsPublisher.publishCloudEvent(_, someEventKey, + when: 'an attribute value change event is sent' + objectUnderTest.sendAvcEvent(someEventKey, someAttributeName, someOldAttributeValue, someNewAttributeValue) + then: 'the cloud event producer is invoked with the correct data' + 1 * mockEventsProducer.sendCloudEvent(_, someEventKey, cloudEvent -> { def actualAvcs = CloudEventMapper.toTargetEvent(cloudEvent, AvcEvent.class).data.attributeValueChange def expectedAvc = new Avc(attributeName: someAttributeName, diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/utils/events/MessagingBaseSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/utils/events/MessagingBaseSpec.groovy index 377a1a6637..ab6c3fddbf 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/utils/events/MessagingBaseSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/utils/events/MessagingBaseSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (c) 2023 Nordix Foundation. + * Copyright (c) 2023-2025 Nordix Foundation. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
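
The MessagingBaseSpec hunk that follows swaps the deprecated KafkaContainer/DockerImageName pair for org.testcontainers.kafka.ConfluentKafkaContainer pinned to confluentinc/cp-kafka:7.8.0. A minimal usage sketch, assuming Testcontainers 1.20+; the main method and property wiring are illustrative only, the spec itself keeps its @DynamicPropertySource setup.

    import org.testcontainers.kafka.ConfluentKafkaContainer;

    class KafkaTestContainerSketch {
        static ConfluentKafkaContainer kafkaTestContainer =
                new ConfluentKafkaContainer("confluentinc/cp-kafka:7.8.0");

        public static void main(String[] args) {
            kafkaTestContainer.start();
            // the bootstrap servers feed the Kafka producer/consumer properties (assumed accessor)
            System.out.println(kafkaTestContainer.getBootstrapServers());
            kafkaTestContainer.stop();
        }
    }
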
@@ -29,8 +29,7 @@ import org.springframework.kafka.core.KafkaTemplate import org.springframework.kafka.support.serializer.JsonSerializer import org.springframework.test.context.DynamicPropertyRegistry import org.springframework.test.context.DynamicPropertySource -import org.testcontainers.containers.KafkaContainer -import org.testcontainers.utility.DockerImageName +import org.testcontainers.kafka.ConfluentKafkaContainer import spock.lang.Specification class MessagingBaseSpec extends Specification { @@ -43,9 +42,9 @@ class MessagingBaseSpec extends Specification { kafkaTestContainer.stop() } - static kafkaTestContainer = new KafkaContainer(DockerImageName.parse('registry.nordix.org/onaptest/confluentinc/cp-kafka:6.2.1').asCompatibleSubstituteFor('confluentinc/cp-kafka')) + static kafkaTestContainer = new ConfluentKafkaContainer("confluentinc/cp-kafka:7.8.0") - def legacyEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<String, String>(eventProducerConfigProperties(JsonSerializer))) + def legacyEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<String, ?>(eventProducerConfigProperties(JsonSerializer))) def cloudEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<String, CloudEvent>(eventProducerConfigProperties(CloudEventSerializer))) diff --git a/cps-ncmp-service/src/test/resources/application.yml b/cps-ncmp-service/src/test/resources/application.yml index 12db639633..3276ceb534 100644 --- a/cps-ncmp-service/src/test/resources/application.yml +++ b/cps-ncmp-service/src/test/resources/application.yml @@ -77,10 +77,6 @@ ncmp: trust-level: dmi-availability-watchdog-ms: 30000 - modules-sync-watchdog: - async-executor: - parallelism-level: 3 - policy-executor: enabled: true defaultDecision: "some default decision" diff --git a/cps-parent/pom.xml b/cps-parent/pom.xml index 554cc63956..28073d5c10 100644 --- a/cps-parent/pom.xml +++ b/cps-parent/pom.xml @@ -27,7 +27,7 @@ <modelVersion>4.0.0</modelVersion> <groupId>org.onap.cps</groupId> <artifactId>cps-parent</artifactId> - <version>3.6.0-SNAPSHOT</version> + <version>3.6.3-SNAPSHOT</version> <packaging>pom</packaging> <properties> @@ -60,7 +60,7 @@ <sonar.version>4.0.0.4121</sonar.version> <spotbugs.plugin.version>4.8.6.4</spotbugs.plugin.version> <spotbugs.version>4.8.6</spotbugs.version> - <spring.boot.maven.plugin.version>3.4.1</spring.boot.maven.plugin.version> + <spring.boot.maven.plugin.version>3.4.4</spring.boot.maven.plugin.version> <swagger.codegen.version>1.2.1</swagger.codegen.version> <!-- Reporting paths and coverage --> diff --git a/cps-path-parser/pom.xml b/cps-path-parser/pom.xml index 469357462e..3e7cb30f10 100644 --- a/cps-path-parser/pom.xml +++ b/cps-path-parser/pom.xml @@ -23,7 +23,7 @@ <parent> <groupId>org.onap.cps</groupId> <artifactId>cps-parent</artifactId> - <version>3.6.0-SNAPSHOT</version> + <version>3.6.3-SNAPSHOT</version> <relativePath>../cps-parent/pom.xml</relativePath> </parent> diff --git a/cps-path-parser/src/main/java/org/onap/cps/cpspath/parser/CpsPathUtil.java b/cps-path-parser/src/main/java/org/onap/cps/cpspath/parser/CpsPathUtil.java index 4ede0d9c90..2c896dc3cd 100644 --- a/cps-path-parser/src/main/java/org/onap/cps/cpspath/parser/CpsPathUtil.java +++ b/cps-path-parser/src/main/java/org/onap/cps/cpspath/parser/CpsPathUtil.java @@ -1,6 +1,7 @@ /* * ============LICENSE_START======================================================= * Copyright (C) 2022-2024 Nordix Foundation + * Modifications Copyright (C) 2025 TechMahindra Ltd. 
* ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -39,6 +40,9 @@ import org.onap.cps.cpspath.parser.antlr4.CpsPathParser; @NoArgsConstructor(access = AccessLevel.PACKAGE) public class CpsPathUtil { + public static final String ROOT_NODE_XPATH = "/"; + public static final String NO_PARENT_PATH = ""; + /** * Returns a normalized xpath path query. * @@ -46,6 +50,9 @@ public class CpsPathUtil { * @return a normalized xpath String. */ public static String getNormalizedXpath(final String xpathSource) { + if (ROOT_NODE_XPATH.equals(xpathSource)) { + return NO_PARENT_PATH; + } return getCpsPathBuilder(xpathSource).build().getNormalizedXpath(); } diff --git a/cps-path-parser/src/test/groovy/org/onap/cps/cpspath/parser/CpsPathUtilSpec.groovy b/cps-path-parser/src/test/groovy/org/onap/cps/cpspath/parser/CpsPathUtilSpec.groovy index 29bb3c7b58..03aecc2acd 100644 --- a/cps-path-parser/src/test/groovy/org/onap/cps/cpspath/parser/CpsPathUtilSpec.groovy +++ b/cps-path-parser/src/test/groovy/org/onap/cps/cpspath/parser/CpsPathUtilSpec.groovy @@ -1,6 +1,7 @@ /* * ============LICENSE_START======================================================= * Copyright (C) 2022-2024 Nordix Foundation + * Modifications Copyright (C) 2025 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -24,6 +25,11 @@ import spock.lang.Specification class CpsPathUtilSpec extends Specification { + def 'Normalized xpath for root.'() { + expect: 'root node xpath is parsed' + assert CpsPathUtil.getNormalizedXpath('/') == '' + } + def 'Normalized xpaths for list index values using #scenario'() { when: 'xpath with #scenario is parsed' def result = CpsPathUtil.getNormalizedXpath(xpath) @@ -36,7 +42,7 @@ class CpsPathUtilSpec extends Specification { 'single quotes' | "/parent/child[@common-leaf-name='123']" } - def 'Normalized parent paths of absolute paths'() { + def 'Normalized parent paths of absolute paths.'() { when: 'a given cps path is parsed' def result = CpsPathUtil.getNormalizedParentXpath(cpsPath) then: 'the result is the expected parent path' @@ -54,7 +60,7 @@ class CpsPathUtilSpec extends Specification { '/parent/child/name[text()="value"]' || '/parent' } - def 'Normalized parent paths of descendant paths'() { + def 'Normalized parent paths of descendant paths.'() { when: 'a given cps path is parsed' def result = CpsPathUtil.getNormalizedParentXpath(cpsPath) then: 'the result is the expected parent path' @@ -72,7 +78,7 @@ class CpsPathUtilSpec extends Specification { '//parent/child/name[text()="value"]' || '//parent' } - def 'Get node ID sequence for given xpath'() { + def 'Get node ID sequence for given xpath with #scenario.'() { when: 'a given xpath with #scenario is parsed' def result = CpsPathUtil.getXpathNodeIdSequence(xpath) then: 'the result is the expected node ID sequence' @@ -89,7 +95,7 @@ class CpsPathUtilSpec extends Specification { 'does not include ancestor node' | '/parent/child/ancestor::grandparent' || ["parent","child"] } - def 'Recognizing (absolute) xpaths to List elements'() { + def 'Recognizing (absolute) xpaths to List elements.'() { expect: 'check for list returns the correct values' assert CpsPathUtil.isPathToListElement(xpath) == expectList where: 'the 
following xpaths are used' @@ -101,7 +107,7 @@ class CpsPathUtilSpec extends Specification { '/parent/ancestor::grandparent[@id=1]' || false } - def 'Parsing Exception'() { + def 'Parsing Exception.'() { when: 'a invalid xpath is parsed' CpsPathUtil.getNormalizedXpath('///') then: 'a path parsing exception is thrown' diff --git a/cps-rest/docs/openapi/components.yml b/cps-rest/docs/openapi/components.yml index 1a7e4308d9..43a311872a 100644 --- a/cps-rest/docs/openapi/components.yml +++ b/cps-rest/docs/openapi/components.yml @@ -1,7 +1,7 @@ # ============LICENSE_START======================================================= # Copyright (c) 2021-2022 Bell Canada. # Modifications Copyright (C) 2021-2023 Nordix Foundation -# Modifications Copyright (C) 2022-2024 TechMahindra Ltd. +# Modifications Copyright (C) 2022-2025 TechMahindra Ltd. # Modifications Copyright (C) 2022 Deutsche Telekom AG # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); @@ -157,7 +157,12 @@ components: name: "Funny" target-data: name: "Comic" - + NotificationSubscriptionsDataSample: + value: + cps-notification-subscriptions:dataspaces: + dataspace: + - name: dataspace01 + - name: dataspace02 parameters: dataspaceNameInQuery: name: dataspace-name @@ -236,6 +241,19 @@ components: value: /shops/bookstore list attributes xpath: value: /shops/bookstore/categories[@code=1] + notificationSubscriptionXpathInQuery: + name: xpath + in: query + description: For more details on xpath, please refer https://docs.onap.org/projects/onap-cps/en/latest/xpath.html + required: true + schema: + type: string + default: /dataspaces + examples: + subscription by dataspace xpath: + value: /dataspaces/dataspace[@name='dataspace01'] + subscription by anchor xpath: + value: /dataspaces/dataspace[@name='dataspace01']/anchors/anchor[@name='anchor01'] requiredXpathInQuery: name: xpath in: query diff --git a/cps-rest/docs/openapi/cpsAdmin.yml b/cps-rest/docs/openapi/cpsAdmin.yml index f394270dd5..6cfffa48f3 100644 --- a/cps-rest/docs/openapi/cpsAdmin.yml +++ b/cps-rest/docs/openapi/cpsAdmin.yml @@ -1,6 +1,6 @@ # ============LICENSE_START======================================================= # Copyright (c) 2021 Bell Canada. -# Modifications Copyright (C) 2021-2022 Nordix Foundation +# Modifications Copyright (C) 2021-2025 Nordix Foundation # Modifications Copyright (C) 2022 TechMahindra Ltd. 
# ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); @@ -230,3 +230,23 @@ adminDataspace: $ref: 'components.yml#/components/responses/Forbidden' '500': $ref: 'components.yml#/components/responses/InternalServerError' + +adminCleanDataspace: + post: + description: Clean the dataspace (remove orphaned schema sets and modules) + tags: + - cps-admin + summary: Clean the dataspace + operationId: cleanDataspace + parameters: + - $ref: 'components.yml#/components/parameters/apiVersionInPath' + - $ref: 'components.yml#/components/parameters/dataspaceNameInPath' + responses: + '204': + $ref: 'components.yml#/components/responses/NoContent' + '400': + $ref: 'components.yml#/components/responses/BadRequest' + '403': + $ref: 'components.yml#/components/responses/Forbidden' + '500': + $ref: 'components.yml#/components/responses/InternalServerError' diff --git a/cps-rest/docs/openapi/cpsAdminV2.yml b/cps-rest/docs/openapi/cpsAdminV2.yml index e501ad8b15..af2572a1f0 100644 --- a/cps-rest/docs/openapi/cpsAdminV2.yml +++ b/cps-rest/docs/openapi/cpsAdminV2.yml @@ -1,5 +1,5 @@ # ============LICENSE_START======================================================= -# Copyright (C) 2022 TechMahindra Ltd. +# Copyright (C) 2022-2025 TechMahindra Ltd. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -87,3 +87,75 @@ schemaSet: $ref: 'components.yml#/components/responses/Conflict' '500': $ref: 'components.yml#/components/responses/InternalServerError' + +notificationSubscription: + get: + description: Get cps notification subscription + tags: + - cps-admin + summary: Get cps notification subscription + operationId: getNotificationSubscription + parameters: + - $ref: 'components.yml#/components/parameters/notificationSubscriptionXpathInQuery' + responses: + '200': + description: OK + content: + application/json: + schema: + $ref: 'components.yml#/components/examples/NotificationSubscriptionsDataSample' + '400': + $ref: 'components.yml#/components/responses/BadRequest' + '403': + $ref: 'components.yml#/components/responses/Forbidden' + '409': + $ref: 'components.yml#/components/responses/Conflict' + '500': + $ref: 'components.yml#/components/responses/InternalServerError' + post: + description: Create cps notification subscription + tags: + - cps-admin + summary: Create cps notification subscription + operationId: createNotificationSubscription + parameters: + - $ref: 'components.yml#/components/parameters/notificationSubscriptionXpathInQuery' + requestBody: + required: true + content: + application/json: + schema: + type: object + examples: + dataSample: + $ref: 'components.yml#/components/examples/NotificationSubscriptionsDataSample' + responses: + '201': + $ref: 'components.yml#/components/responses/CreatedV2' + '400': + $ref: 'components.yml#/components/responses/BadRequest' + '403': + $ref: 'components.yml#/components/responses/Forbidden' + '409': + $ref: 'components.yml#/components/responses/Conflict' + '500': + $ref: 'components.yml#/components/responses/InternalServerError' + delete: + description: Delete cps notification subscription + tags: + - cps-admin + summary: Delete cps notification subscription + operationId: deleteNotificationSubscription + parameters: + - $ref: 'components.yml#/components/parameters/notificationSubscriptionXpathInQuery' + 
responses: + '204': + $ref: 'components.yml#/components/responses/NoContent' + '400': + $ref: 'components.yml#/components/responses/BadRequest' + '403': + $ref: 'components.yml#/components/responses/Forbidden' + '409': + $ref: 'components.yml#/components/responses/Conflict' + '500': + $ref: 'components.yml#/components/responses/InternalServerError'
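
The admin additions above (the new dataspace 'actions/clean' operation in cpsAdmin.yml and the notification-subscription operations in cpsAdminV2.yml) are plain REST endpoints. Below is a minimal client sketch, not part of this change set: it assumes a locally running CPS instance on localhost:8080, uses the /cps/api base path declared in openapi.yml, and mirrors the xpath query parameter and the NotificationSubscriptionsDataSample payload shape from components.yml.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class CpsAdminApiExamples {

    // Assumption: CPS is reachable locally; adjust host and port as needed.
    private static final String CPS_BASE = "http://localhost:8080/cps/api";

    public static void main(final String[] args) throws Exception {
        final HttpClient client = HttpClient.newHttpClient();

        // Clean a dataspace: removes orphaned schema sets and modules (new 'actions/clean' endpoint).
        final HttpRequest cleanRequest = HttpRequest.newBuilder()
                .uri(URI.create(CPS_BASE + "/v1/admin/dataspaces/my-dataspace/actions/clean"))
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();
        System.out.println(client.send(cleanRequest, HttpResponse.BodyHandlers.discarding()).statusCode()); // expect 204

        // Create a notification subscription; payload follows the NotificationSubscriptionsDataSample example.
        final String subscriptionJson = """
                {"cps-notification-subscriptions:dataspaces":{"dataspace":[{"name":"dataspace01"}]}}""";
        final HttpRequest createSubscription = HttpRequest.newBuilder()
                .uri(URI.create(CPS_BASE + "/v2/notification-subscription?xpath=/dataspaces"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(subscriptionJson))
                .build();
        System.out.println(client.send(createSubscription, HttpResponse.BodyHandlers.discarding()).statusCode()); // expect 201

        // Read the subscriptions back, then remove them again.
        final HttpRequest getSubscription = HttpRequest.newBuilder()
                .uri(URI.create(CPS_BASE + "/v2/notification-subscription?xpath=/dataspaces"))
                .GET()
                .build();
        System.out.println(client.send(getSubscription, HttpResponse.BodyHandlers.ofString()).body());

        final HttpRequest deleteSubscription = HttpRequest.newBuilder()
                .uri(URI.create(CPS_BASE + "/v2/notification-subscription?xpath=/dataspaces"))
                .DELETE()
                .build();
        System.out.println(client.send(deleteSubscription, HttpResponse.BodyHandlers.discarding()).statusCode()); // expect 204
    }
}

The xpath value '/dataspaces' matches the default declared for the notificationSubscriptionXpathInQuery parameter; a narrower subscription (per dataspace or per anchor) can be created by using the more specific xpath examples from components.yml.
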
\ No newline at end of file diff --git a/cps-rest/docs/openapi/cpsDataV2.yml b/cps-rest/docs/openapi/cpsDataV2.yml index 999c5b2c19..7afda705f7 100644 --- a/cps-rest/docs/openapi/cpsDataV2.yml +++ b/cps-rest/docs/openapi/cpsDataV2.yml @@ -1,5 +1,5 @@ # ============LICENSE_START======================================================= -# Copyright (c) 2022-2024 TechMahindra Ltd. +# Copyright (c) 2022-2025 TechMahindra Ltd. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -54,83 +54,3 @@ nodeByDataspaceAndAnchor: '500': $ref: 'components.yml#/components/responses/InternalServerError' x-codegen-request-body-name: xpath - -delta: - get: - description: Get delta between two anchors within a given dataspace - tags: - - cps-data - summary: Get delta between anchors in the same dataspace - operationId: getDeltaByDataspaceAndAnchors - parameters: - - $ref: 'components.yml#/components/parameters/dataspaceNameInPath' - - $ref: 'components.yml#/components/parameters/sourceAnchorNameInPath' - - $ref: 'components.yml#/components/parameters/targetAnchorNameInQuery' - - $ref: 'components.yml#/components/parameters/xpathInQuery' - - $ref: 'components.yml#/components/parameters/descendantsInQuery' - responses: - '200': - description: OK - content: - application/json: - schema: - type: object - examples: - dataSample: - $ref: 'components.yml#/components/examples/deltaReportSample' - '400': - $ref: 'components.yml#/components/responses/BadRequest' - '403': - $ref: 'components.yml#/components/responses/Forbidden' - '500': - $ref: 'components.yml#/components/responses/InternalServerError' - x-codegen-request-body-name: xpath - post: - description: Get delta between an anchor in a dataspace and JSON payload - tags: - - cps-data - summary: Get delta between an anchor and JSON payload - operationId: getDeltaByDataspaceAnchorAndPayload - parameters: - - $ref: 'components.yml#/components/parameters/dataspaceNameInPath' - - $ref: 'components.yml#/components/parameters/sourceAnchorNameInPath' - - $ref: 'components.yml#/components/parameters/xpathInQuery' - requestBody: - content: - multipart/form-data: - schema: - type: object - properties: - json: - type: object - example: - test:bookstore: - bookstore-name: Chapters - categories: - - code: 01 - name: SciFi - - code: 02 - name: kids - file: - type: string - format: binary - required: - - json - responses: - '200': - description: OK - content: - application/json: - schema: - type: object - examples: - dataSample: - $ref: 'components.yml#/components/examples/deltaReportSample' - '400': - $ref: 'components.yml#/components/responses/BadRequest' - '401': - $ref: 'components.yml#/components/responses/Unauthorized' - '403': - $ref: 'components.yml#/components/responses/Forbidden' - '500': - $ref: 'components.yml#/components/responses/InternalServerError'
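
The delta operations deleted from cpsDataV2.yml above are not removed from the API: they reappear unchanged in the new cpsDelta.yml below, under the dedicated cps-delta tag, and the /v2/.../delta path in openapi.yml is repointed accordingly. A hedged client sketch for the "delta between two anchors" operation follows; the host, base path and the 'target-anchor-name' query parameter name are assumptions (only 'xpath' and 'descendants' are visible in this change, and '-1' maps to include-all-descendants per the existing test data).

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class CpsDeltaApiExample {

    public static void main(final String[] args) throws Exception {
        final String dataspace = "my-dataspace";
        final String sourceAnchor = "anchor-before";
        final String targetAnchor = "anchor-after";
        final String xpath = URLEncoder.encode("/", StandardCharsets.UTF_8);

        // Path as declared in openapi.yml: /v2/dataspaces/{dataspace-name}/anchors/{source-anchor-name}/delta
        final URI deltaUri = URI.create("http://localhost:8080/cps/api/v2/dataspaces/" + dataspace
                + "/anchors/" + sourceAnchor + "/delta"
                + "?target-anchor-name=" + targetAnchor   // parameter name inferred, not shown in this hunk
                + "&xpath=" + xpath
                + "&descendants=-1");                     // '-1' = include all descendants (see test data)

        final HttpClient client = HttpClient.newHttpClient();
        final HttpRequest request = HttpRequest.newBuilder().uri(deltaUri).GET().build();
        final HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());

        // The body is a JSON array of delta report entries (see deltaReportSample in components.yml).
        System.out.println(response.statusCode());
        System.out.println(response.body());
    }
}
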
\ No newline at end of file diff --git a/cps-rest/docs/openapi/cpsDelta.yml b/cps-rest/docs/openapi/cpsDelta.yml new file mode 100644 index 0000000000..67535ce832 --- /dev/null +++ b/cps-rest/docs/openapi/cpsDelta.yml @@ -0,0 +1,97 @@ +# ============LICENSE_START======================================================= +# Copyright (c) 2025 TechMahindra Ltd. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 +# ============LICENSE_END========================================================= + +delta: + get: + description: Get delta between two anchors within a given dataspace + tags: + - cps-delta + summary: Get delta between anchors in the same dataspace + operationId: getDeltaByDataspaceAndAnchors + parameters: + - $ref: 'components.yml#/components/parameters/dataspaceNameInPath' + - $ref: 'components.yml#/components/parameters/sourceAnchorNameInPath' + - $ref: 'components.yml#/components/parameters/targetAnchorNameInQuery' + - $ref: 'components.yml#/components/parameters/xpathInQuery' + - $ref: 'components.yml#/components/parameters/descendantsInQuery' + responses: + '200': + description: OK + content: + application/json: + schema: + type: object + examples: + dataSample: + $ref: 'components.yml#/components/examples/deltaReportSample' + '400': + $ref: 'components.yml#/components/responses/BadRequest' + '403': + $ref: 'components.yml#/components/responses/Forbidden' + '500': + $ref: 'components.yml#/components/responses/InternalServerError' + x-codegen-request-body-name: xpath + post: + description: Get delta between an anchor in a dataspace and JSON payload + tags: + - cps-delta + summary: Get delta between an anchor and JSON payload + operationId: getDeltaByDataspaceAnchorAndPayload + parameters: + - $ref: 'components.yml#/components/parameters/dataspaceNameInPath' + - $ref: 'components.yml#/components/parameters/sourceAnchorNameInPath' + - $ref: 'components.yml#/components/parameters/xpathInQuery' + requestBody: + content: + multipart/form-data: + schema: + type: object + properties: + json: + type: object + example: + test:bookstore: + bookstore-name: Chapters + categories: + - code: 01 + name: SciFi + - code: 02 + name: kids + file: + type: string + format: binary + required: + - json + responses: + '200': + description: OK + content: + application/json: + schema: + type: object + examples: + dataSample: + $ref: 'components.yml#/components/examples/deltaReportSample' + '400': + $ref: 'components.yml#/components/responses/BadRequest' + '401': + $ref: 'components.yml#/components/responses/Unauthorized' + '403': + $ref: 'components.yml#/components/responses/Forbidden' + '500': + $ref: 'components.yml#/components/responses/InternalServerError' diff --git a/cps-rest/docs/openapi/openapi.yml b/cps-rest/docs/openapi/openapi.yml index f4eab61875..747531b30e 100644 --- a/cps-rest/docs/openapi/openapi.yml +++ b/cps-rest/docs/openapi/openapi.yml @@ -1,8 +1,8 @@ # 
============LICENSE_START======================================================= -# Copyright (C) 2021-2024 Nordix Foundation +# Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. # Modifications Copyright (C) 2021 Pantheon.tech # Modifications Copyright (C) 2021 Bell Canada. -# Modifications Copyright (C) 2022-2024 TechMahindra Ltd. +# Modifications Copyright (C) 2022-2025 TechMahindra Ltd. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -23,7 +23,7 @@ openapi: 3.0.3 info: title: ONAP Open API v3 Configuration Persistence Service description: Configuration Persistence Service is a Model Driven Generic Database - version: "3.6.0" + version: "3.6.2" contact: name: ONAP url: "https://onap.readthedocs.io" @@ -34,16 +34,13 @@ info: servers: - url: /cps/api -components: - securitySchemes: - basicAuth: - type: http - scheme: basic tags: - name: cps-admin description: cps Admin - name: cps-data description: cps Data + - name: cps-delta + description: CPS Delta paths: /v1/dataspaces: @@ -61,6 +58,9 @@ paths: /{apiVersion}/admin/dataspaces/{dataspace-name}: $ref: 'cpsAdmin.yml#/adminDataspace' + /{apiVersion}/admin/dataspaces/{dataspace-name}/actions/clean: + $ref: 'cpsAdmin.yml#/adminCleanDataspace' + /v1/dataspaces/{dataspace-name}/anchors: $ref: 'cpsAdminV1Deprecated.yml#/anchorsByDataspace' @@ -101,7 +101,7 @@ paths: $ref: 'cpsData.yml#/listElementByDataspaceAndAnchor' /v2/dataspaces/{dataspace-name}/anchors/{source-anchor-name}/delta: - $ref: 'cpsDataV2.yml#/delta' + $ref: 'cpsDelta.yml#/delta' /v1/dataspaces/{dataspace-name}/anchors/{anchor-name}/nodes/query: $ref: 'cpsQueryV1Deprecated.yml#/nodesByDataspaceAndAnchorAndCpsPath' @@ -112,5 +112,5 @@ paths: /v2/dataspaces/{dataspace-name}/nodes/query: $ref: 'cpsQueryV2.yml#/nodesByDataspaceAndCpsPath' -security: - - basicAuth: [] + /v2/notification-subscription: + $ref: 'cpsAdminV2.yml#/notificationSubscription' diff --git a/cps-rest/pom.xml b/cps-rest/pom.xml index b04daf03bd..8c2e472240 100644 --- a/cps-rest/pom.xml +++ b/cps-rest/pom.xml @@ -27,7 +27,7 @@ <parent> <groupId>org.onap.cps</groupId> <artifactId>cps-parent</artifactId> - <version>3.6.0-SNAPSHOT</version> + <version>3.6.3-SNAPSHOT</version> <relativePath>../cps-parent/pom.xml</relativePath> </parent> @@ -62,10 +62,6 @@ <artifactId>spring-boot-starter-jetty</artifactId> </dependency> <dependency> - <groupId>org.springframework.retry</groupId> - <artifactId>spring-retry</artifactId> - </dependency> - <dependency> <groupId>org.springframework</groupId> <artifactId>spring-aspects</artifactId> </dependency> diff --git a/cps-rest/src/main/java/org/onap/cps/rest/controller/AdminRestController.java b/cps-rest/src/main/java/org/onap/cps/rest/controller/AdminRestController.java index 675c0eaec4..01a9746af0 100755 --- a/cps-rest/src/main/java/org/onap/cps/rest/controller/AdminRestController.java +++ b/cps-rest/src/main/java/org/onap/cps/rest/controller/AdminRestController.java @@ -1,9 +1,9 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2020-2023 Nordix Foundation + * Copyright (C) 2020-2025 Nordix Foundation * Modifications Copyright (C) 2020-2021 Bell Canada. * Modifications Copyright (C) 2021 Pantheon.tech - * Modifications Copyright (C) 2022 TechMahindra Ltd. + * Modifications Copyright (C) 2022-2025 TechMahindra Ltd. 
* ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -31,11 +31,13 @@ import jakarta.validation.Valid; import jakarta.validation.constraints.NotNull; import java.util.Collection; import java.util.List; +import java.util.Map; import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; import org.onap.cps.api.CpsAnchorService; import org.onap.cps.api.CpsDataspaceService; import org.onap.cps.api.CpsModuleService; +import org.onap.cps.api.CpsNotificationService; import org.onap.cps.api.model.Anchor; import org.onap.cps.api.model.Dataspace; import org.onap.cps.api.model.SchemaSet; @@ -43,6 +45,7 @@ import org.onap.cps.rest.api.CpsAdminApi; import org.onap.cps.rest.model.AnchorDetails; import org.onap.cps.rest.model.DataspaceDetails; import org.onap.cps.rest.model.SchemaSetDetails; +import org.onap.cps.utils.JsonObjectMapper; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.RequestMapping; @@ -58,6 +61,8 @@ public class AdminRestController implements CpsAdminApi { private final CpsModuleService cpsModuleService; private final CpsRestInputMapper cpsRestInputMapper; private final CpsAnchorService cpsAnchorService; + private final CpsNotificationService cpsNotificationService; + private final JsonObjectMapper jsonObjectMapper; /** * Create a dataspace. @@ -176,6 +181,20 @@ public class AdminRestController implements CpsAdminApi { } /** + * Clean the given dataspace of any orphaned (module) data. + * + * @param apiVersion api version + * @param dataspaceName dataspace name + * + * @return a {@Link ResponseEntity} of {@link HttpStatus} NO_CONTENT + */ + @Override + public ResponseEntity<Void> cleanDataspace(final String apiVersion, final String dataspaceName) { + cpsModuleService.deleteAllUnusedYangModuleData(dataspaceName); + return new ResponseEntity<>(HttpStatus.NO_CONTENT); + } + + /** * Create a new anchor. 
* * @param dataspaceName dataspace name @@ -266,4 +285,25 @@ public class AdminRestController implements CpsAdminApi { final DataspaceDetails dataspaceDetails = cpsRestInputMapper.toDataspaceDetails(dataspace); return new ResponseEntity<>(dataspaceDetails, HttpStatus.OK); } + + @Override + public ResponseEntity<Void> createNotificationSubscription(final String xpath, + final Object notificationSubscriptionAsJson) { + cpsNotificationService.createNotificationSubscription( + jsonObjectMapper.asJsonString(notificationSubscriptionAsJson), xpath); + return new ResponseEntity<>(HttpStatus.CREATED); + } + + @Override + public ResponseEntity<Void> deleteNotificationSubscription(final String xpath) { + cpsNotificationService.deleteNotificationSubscription(xpath); + return new ResponseEntity<>(HttpStatus.NO_CONTENT); + } + + @Override + public ResponseEntity<Object> getNotificationSubscription(final String xpath) { + final List<Map<String, Object>> dataMaps = cpsNotificationService.getNotificationSubscription(xpath); + return new ResponseEntity<>(jsonObjectMapper.asJsonString(dataMaps), HttpStatus.OK); + } + } diff --git a/cps-rest/src/main/java/org/onap/cps/rest/controller/DataRestController.java b/cps-rest/src/main/java/org/onap/cps/rest/controller/DataRestController.java index be552ecc6a..90500f3955 100755 --- a/cps-rest/src/main/java/org/onap/cps/rest/controller/DataRestController.java +++ b/cps-rest/src/main/java/org/onap/cps/rest/controller/DataRestController.java @@ -2,8 +2,8 @@ * ============LICENSE_START======================================================= * Copyright (C) 2020-2022 Bell Canada. * Modifications Copyright (C) 2021 Pantheon.tech - * Modifications Copyright (C) 2021-2024 Nordix Foundation - * Modifications Copyright (C) 2022-2024 TechMahindra Ltd. + * Modifications Copyright (C) 2021-2025 Nordix Foundation + * Modifications Copyright (C) 2022-2025 TechMahindra Ltd. 
* Modifications Copyright (C) 2022 Deutsche Telekom AG * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -24,36 +24,25 @@ package org.onap.cps.rest.controller; -import static org.onap.cps.rest.utils.MultipartFileUtil.extractYangResourcesMap; - import io.micrometer.core.annotation.Timed; import jakarta.validation.ValidationException; import java.time.OffsetDateTime; import java.time.format.DateTimeFormatter; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Map; import lombok.RequiredArgsConstructor; import org.apache.commons.lang3.StringUtils; -import org.onap.cps.api.CpsAnchorService; import org.onap.cps.api.CpsDataService; -import org.onap.cps.api.model.Anchor; -import org.onap.cps.api.model.DataNode; -import org.onap.cps.api.model.DeltaReport; +import org.onap.cps.api.CpsFacade; import org.onap.cps.api.parameters.FetchDescendantsOption; import org.onap.cps.rest.api.CpsDataApi; import org.onap.cps.utils.ContentType; -import org.onap.cps.utils.DataMapUtils; import org.onap.cps.utils.JsonObjectMapper; -import org.onap.cps.utils.PrefixResolver; import org.onap.cps.utils.XmlFileUtils; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; -import org.springframework.web.multipart.MultipartFile; @RestController @RequestMapping("${rest.api.cps-base-path}") @@ -64,10 +53,9 @@ public class DataRestController implements CpsDataApi { private static final String ISO_TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; private static final DateTimeFormatter ISO_TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern(ISO_TIMESTAMP_FORMAT); + private final CpsFacade cpsFacade; private final CpsDataService cpsDataService; - private final CpsAnchorService cpsAnchorService; private final JsonObjectMapper jsonObjectMapper; - private final PrefixResolver prefixResolver; @Override public ResponseEntity<String> createNode(final String apiVersion, @@ -116,24 +104,20 @@ public class DataRestController implements CpsDataApi { } @Override - @Timed(value = "cps.data.controller.datanode.get.v1", - description = "Time taken to get data node") + @Timed(value = "cps.data.controller.datanode.get.v1", description = "Time taken to get data node") public ResponseEntity<Object> getNodeByDataspaceAndAnchor(final String dataspaceName, final String anchorName, final String xpath, final Boolean includeDescendants) { - final FetchDescendantsOption fetchDescendantsOption = Boolean.TRUE.equals(includeDescendants) - ? 
FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS : FetchDescendantsOption.OMIT_DESCENDANTS; - final DataNode dataNode = cpsDataService.getDataNodes(dataspaceName, anchorName, xpath, - fetchDescendantsOption).iterator().next(); - final Anchor anchor = cpsAnchorService.getAnchor(dataspaceName, anchorName); - final String prefix = prefixResolver.getPrefix(anchor, dataNode.getXpath()); - return new ResponseEntity<>(DataMapUtils.toDataMapWithIdentifier(dataNode, prefix), HttpStatus.OK); + final FetchDescendantsOption fetchDescendantsOption = + FetchDescendantsOption.getFetchDescendantsOption(includeDescendants); + final Map<String, Object> dataNodeAsMap = + cpsFacade.getFirstDataNodeByAnchor(dataspaceName, anchorName, xpath, fetchDescendantsOption); + return new ResponseEntity<>(dataNodeAsMap, HttpStatus.OK); } @Override - @Timed(value = "cps.data.controller.datanode.get.v2", - description = "Time taken to get data node") + @Timed(value = "cps.data.controller.datanode.get.v2", description = "Time taken to get data node") public ResponseEntity<Object> getNodeByDataspaceAndAnchorV2(final String dataspaceName, final String anchorName, final String xpath, final String fetchDescendantsOptionAsString, @@ -141,16 +125,9 @@ public class DataRestController implements CpsDataApi { final ContentType contentType = ContentType.fromString(contentTypeInHeader); final FetchDescendantsOption fetchDescendantsOption = FetchDescendantsOption.getFetchDescendantsOption(fetchDescendantsOptionAsString); - final Collection<DataNode> dataNodes = cpsDataService.getDataNodes(dataspaceName, anchorName, xpath, - fetchDescendantsOption); - final List<Map<String, Object>> dataMaps = new ArrayList<>(dataNodes.size()); - final Anchor anchor = cpsAnchorService.getAnchor(dataspaceName, anchorName); - for (final DataNode dataNode: dataNodes) { - final String prefix = prefixResolver.getPrefix(anchor, dataNode.getXpath()); - final Map<String, Object> dataMap = DataMapUtils.toDataMapWithIdentifier(dataNode, prefix); - dataMaps.add(dataMap); - } - return buildResponseEntity(dataMaps, contentType); + final List<Map<String, Object>> dataNodesAsMaps = + cpsFacade.getDataNodesByAnchor(dataspaceName, anchorName, xpath, fetchDescendantsOption); + return buildResponseEntity(dataNodesAsMaps, contentType); } @Override @@ -210,44 +187,6 @@ public class DataRestController implements CpsDataApi { return new ResponseEntity<>(HttpStatus.NO_CONTENT); } - @Override - public ResponseEntity<Object> getDeltaByDataspaceAnchorAndPayload(final String dataspaceName, - final String sourceAnchorName, - final Object jsonPayload, - final String xpath, - final MultipartFile multipartFile) { - final FetchDescendantsOption fetchDescendantsOption = FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS; - - final Map<String, String> yangResourceMap; - if (multipartFile == null) { - yangResourceMap = Collections.emptyMap(); - } else { - yangResourceMap = extractYangResourcesMap(multipartFile); - } - final Collection<DeltaReport> deltaReports = Collections.unmodifiableList( - cpsDataService.getDeltaByDataspaceAnchorAndPayload(dataspaceName, sourceAnchorName, - xpath, yangResourceMap, jsonPayload.toString(), fetchDescendantsOption)); - - return new ResponseEntity<>(jsonObjectMapper.asJsonString(deltaReports), HttpStatus.OK); - } - - @Override - @Timed(value = "cps.data.controller.get.delta", - description = "Time taken to get delta between anchors") - public ResponseEntity<Object> getDeltaByDataspaceAndAnchors(final String dataspaceName, - final String sourceAnchorName, - final 
String targetAnchorName, - final String xpath, - final String descendants) { - final FetchDescendantsOption fetchDescendantsOption = - FetchDescendantsOption.getFetchDescendantsOption(descendants); - - final List<DeltaReport> deltaBetweenAnchors = - cpsDataService.getDeltaByDataspaceAndAnchors(dataspaceName, sourceAnchorName, - targetAnchorName, xpath, fetchDescendantsOption); - return new ResponseEntity<>(jsonObjectMapper.asJsonString(deltaBetweenAnchors), HttpStatus.OK); - } - private ResponseEntity<Object> buildResponseEntity(final List<Map<String, Object>> dataMaps, final ContentType contentType) { final String responseData; diff --git a/cps-rest/src/main/java/org/onap/cps/rest/controller/DeltaRestController.java b/cps-rest/src/main/java/org/onap/cps/rest/controller/DeltaRestController.java new file mode 100644 index 0000000000..f27346cfa7 --- /dev/null +++ b/cps-rest/src/main/java/org/onap/cps/rest/controller/DeltaRestController.java @@ -0,0 +1,89 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 TechMahindra Ltd. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.rest.controller; + +import static org.onap.cps.rest.utils.MultipartFileUtil.extractYangResourcesMap; + +import io.micrometer.core.annotation.Timed; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import lombok.RequiredArgsConstructor; +import org.onap.cps.api.CpsDeltaService; +import org.onap.cps.api.model.DeltaReport; +import org.onap.cps.api.parameters.FetchDescendantsOption; +import org.onap.cps.rest.api.CpsDeltaApi; +import org.onap.cps.utils.JsonObjectMapper; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; +import org.springframework.web.multipart.MultipartFile; + +@RestController +@RequestMapping("${rest.api.cps-base-path}") +@RequiredArgsConstructor +public class DeltaRestController implements CpsDeltaApi { + + private final CpsDeltaService cpsDeltaService; + private final JsonObjectMapper jsonObjectMapper; + + + @Timed(value = "cps.delta.controller.get.delta", + description = "Time taken to get delta between anchors") + @Override + public ResponseEntity<Object> getDeltaByDataspaceAndAnchors(final String dataspaceName, + final String sourceAnchorName, + final String targetAnchorName, + final String xpath, + final String descendants) { + final FetchDescendantsOption fetchDescendantsOption = + FetchDescendantsOption.getFetchDescendantsOption(descendants); + final List<DeltaReport> deltaBetweenAnchors = + cpsDeltaService.getDeltaByDataspaceAndAnchors(dataspaceName, sourceAnchorName, + 
targetAnchorName, xpath, fetchDescendantsOption); + return new ResponseEntity<>(jsonObjectMapper.asJsonString(deltaBetweenAnchors), HttpStatus.OK); + } + + @Timed(value = "cps.delta.controller.get.delta", + description = "Time taken to get delta between anchors") + @Override + public ResponseEntity<Object> getDeltaByDataspaceAnchorAndPayload(final String dataspaceName, + final String sourceAnchorName, + final Object jsonPayload, + final String xpath, + final MultipartFile multipartFile) { + final FetchDescendantsOption fetchDescendantsOption = FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS; + + final Map<String, String> yangResourceMap; + if (multipartFile == null) { + yangResourceMap = Collections.emptyMap(); + } else { + yangResourceMap = extractYangResourcesMap(multipartFile); + } + final Collection<DeltaReport> deltaReports = Collections.unmodifiableList( + cpsDeltaService.getDeltaByDataspaceAnchorAndPayload(dataspaceName, sourceAnchorName, + xpath, yangResourceMap, jsonPayload.toString(), fetchDescendantsOption)); + return new ResponseEntity<>(jsonObjectMapper.asJsonString(deltaReports), HttpStatus.OK); + } + +} diff --git a/cps-rest/src/main/java/org/onap/cps/rest/controller/QueryRestController.java b/cps-rest/src/main/java/org/onap/cps/rest/controller/QueryRestController.java index f8833094cf..11713ad5e7 100644 --- a/cps-rest/src/main/java/org/onap/cps/rest/controller/QueryRestController.java +++ b/cps-rest/src/main/java/org/onap/cps/rest/controller/QueryRestController.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation + * Copyright (C) 2021-2025 Nordix Foundation * Modifications Copyright (C) 2022 Bell Canada. * Modifications Copyright (C) 2022-2024 TechMahindra Ltd. 
* ================================================================================ @@ -23,23 +23,15 @@ package org.onap.cps.rest.controller; import io.micrometer.core.annotation.Timed; -import java.util.ArrayList; -import java.util.Collection; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; -import org.onap.cps.api.CpsAnchorService; -import org.onap.cps.api.CpsQueryService; -import org.onap.cps.api.model.Anchor; -import org.onap.cps.api.model.DataNode; +import org.onap.cps.api.CpsFacade; import org.onap.cps.api.parameters.FetchDescendantsOption; import org.onap.cps.api.parameters.PaginationOption; import org.onap.cps.rest.api.CpsQueryApi; import org.onap.cps.utils.ContentType; -import org.onap.cps.utils.DataMapUtils; import org.onap.cps.utils.JsonObjectMapper; -import org.onap.cps.utils.PrefixResolver; import org.onap.cps.utils.XmlFileUtils; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; @@ -51,27 +43,24 @@ import org.springframework.web.bind.annotation.RestController; @RequiredArgsConstructor public class QueryRestController implements CpsQueryApi { - private final CpsQueryService cpsQueryService; - private final CpsAnchorService cpsAnchorService; + private final CpsFacade cpsFacade; private final JsonObjectMapper jsonObjectMapper; - private final PrefixResolver prefixResolver; @Override - @Timed(value = "cps.data.controller.datanode.query.v1", - description = "Time taken to query data nodes") + @Timed(value = "cps.data.controller.datanode.query.v1", description = "Time taken to query data nodes") public ResponseEntity<Object> getNodesByDataspaceAndAnchorAndCpsPath(final String dataspaceName, final String anchorName, final String cpsPath, final Boolean includeDescendants) { - final FetchDescendantsOption fetchDescendantsOption = Boolean.TRUE.equals(includeDescendants) - ? 
FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS : FetchDescendantsOption.OMIT_DESCENDANTS; - return executeNodesByDataspaceQueryAndCreateResponse(dataspaceName, anchorName, cpsPath, - fetchDescendantsOption, ContentType.JSON); + final FetchDescendantsOption fetchDescendantsOption = + FetchDescendantsOption.getFetchDescendantsOption(includeDescendants); + final List<Map<String, Object>> dataNodesAsMaps + = cpsFacade.executeAnchorQuery(dataspaceName, anchorName, cpsPath, fetchDescendantsOption); + return buildResponseEntity(dataNodesAsMaps, ContentType.JSON); } @Override - @Timed(value = "cps.data.controller.datanode.query.v2", - description = "Time taken to query data nodes") + @Timed(value = "cps.data.controller.datanode.query.v2", description = "Time taken to query data nodes") public ResponseEntity<Object> getNodesByDataspaceAndAnchorAndCpsPathV2(final String dataspaceName, final String anchorName, final String cpsPath, @@ -80,8 +69,9 @@ public class QueryRestController implements CpsQueryApi { final ContentType contentType = ContentType.fromString(contentTypeInHeader); final FetchDescendantsOption fetchDescendantsOption = FetchDescendantsOption.getFetchDescendantsOption(fetchDescendantsOptionAsString); - return executeNodesByDataspaceQueryAndCreateResponse(dataspaceName, anchorName, cpsPath, - fetchDescendantsOption, contentType); + final List<Map<String, Object>> dataNodesAsMaps + = cpsFacade.executeAnchorQuery(dataspaceName, anchorName, cpsPath, fetchDescendantsOption); + return buildResponseEntity(dataNodesAsMaps, contentType); } @Override @@ -96,65 +86,21 @@ public class QueryRestController implements CpsQueryApi { FetchDescendantsOption.getFetchDescendantsOption(fetchDescendantsOptionAsString); final PaginationOption paginationOption = (pageIndex == null || pageSize == null) ? 
PaginationOption.NO_PAGINATION : new PaginationOption(pageIndex, pageSize); - final Collection<DataNode> dataNodes = cpsQueryService.queryDataNodesAcrossAnchors(dataspaceName, - cpsPath, fetchDescendantsOption, paginationOption); - final List<Map<String, Object>> dataNodesAsListOfMaps = new ArrayList<>(dataNodes.size()); - String prefix = null; - final Map<String, List<DataNode>> dataNodesPerAnchor = groupDataNodesPerAnchor(dataNodes); - for (final Map.Entry<String, List<DataNode>> dataNodesPerAnchorEntry : dataNodesPerAnchor.entrySet()) { - final String anchorName = dataNodesPerAnchorEntry.getKey(); - if (prefix == null) { - final Anchor anchor = cpsAnchorService.getAnchor(dataspaceName, anchorName); - prefix = prefixResolver.getPrefix(anchor, dataNodesPerAnchorEntry.getValue().get(0).getXpath()); - } - final Map<String, Object> dataMap = DataMapUtils.toDataMapWithIdentifierAndAnchor( - dataNodesPerAnchorEntry.getValue(), anchorName, prefix); - dataNodesAsListOfMaps.add(dataMap); - } - final Integer totalPages = getTotalPages(dataspaceName, cpsPath, paginationOption); - return ResponseEntity.ok().header("total-pages", - totalPages.toString()).body(jsonObjectMapper.asJsonString(dataNodesAsListOfMaps)); - } + final List<Map<String, Object>> dataNodesAsMaps + = cpsFacade.executeDataspaceQuery(dataspaceName, cpsPath, fetchDescendantsOption, paginationOption); - private Integer getTotalPages(final String dataspaceName, final String cpsPath, - final PaginationOption paginationOption) { - if (paginationOption == PaginationOption.NO_PAGINATION) { - return 1; - } - final int totalAnchors = cpsQueryService.countAnchorsForDataspaceAndCpsPath(dataspaceName, cpsPath); - return totalAnchors <= paginationOption.getPageSize() ? 1 - : (int) Math.ceil((double) totalAnchors / paginationOption.getPageSize()); - } - - private static Map<String, List<DataNode>> groupDataNodesPerAnchor(final Collection<DataNode> dataNodes) { - return dataNodes.stream().collect(Collectors.groupingBy(DataNode::getAnchorName)); - } - - private ResponseEntity<Object> executeNodesByDataspaceQueryAndCreateResponse(final String dataspaceName, - final String anchorName, final String cpsPath, final FetchDescendantsOption fetchDescendantsOption, - final ContentType contentType) { - final Collection<DataNode> dataNodes = - cpsQueryService.queryDataNodes(dataspaceName, anchorName, cpsPath, fetchDescendantsOption); - final List<Map<String, Object>> dataNodesAsListOfMaps = new ArrayList<>(dataNodes.size()); - final Anchor anchor = cpsAnchorService.getAnchor(dataspaceName, anchorName); - String prefix = null; - for (final DataNode dataNode : dataNodes) { - if (prefix == null) { - prefix = prefixResolver.getPrefix(anchor, dataNode.getXpath()); - } - final Map<String, Object> dataMap = DataMapUtils.toDataMapWithIdentifier(dataNode, prefix); - dataNodesAsListOfMaps.add(dataMap); - } - return buildResponseEntity(dataNodesAsListOfMaps, contentType); + final int totalPages = cpsFacade.countAnchorsInDataspaceQuery(dataspaceName, cpsPath, paginationOption); + return ResponseEntity.ok().header("total-pages", String.valueOf(totalPages)) + .body(jsonObjectMapper.asJsonString(dataNodesAsMaps)); } - private ResponseEntity<Object> buildResponseEntity(final List<Map<String, Object>> dataNodesAsListOfMaps, + private ResponseEntity<Object> buildResponseEntity(final List<Map<String, Object>> dataNodesAsMaps, final ContentType contentType) { final String responseData; if (ContentType.XML.equals(contentType)) { - responseData = 
XmlFileUtils.convertDataMapsToXml(dataNodesAsListOfMaps); + responseData = XmlFileUtils.convertDataMapsToXml(dataNodesAsMaps); } else { - responseData = jsonObjectMapper.asJsonString(dataNodesAsListOfMaps); + responseData = jsonObjectMapper.asJsonString(dataNodesAsMaps); } return new ResponseEntity<>(responseData, HttpStatus.OK); } diff --git a/cps-rest/src/test/groovy/org/onap/cps/rest/controller/AdminRestControllerSpec.groovy b/cps-rest/src/test/groovy/org/onap/cps/rest/controller/AdminRestControllerSpec.groovy index 2335a5e770..6d1ca40cd9 100755 --- a/cps-rest/src/test/groovy/org/onap/cps/rest/controller/AdminRestControllerSpec.groovy +++ b/cps-rest/src/test/groovy/org/onap/cps/rest/controller/AdminRestControllerSpec.groovy @@ -2,8 +2,8 @@ * ============LICENSE_START======================================================= * Copyright (C) 2020-2021 Pantheon.tech * Modifications Copyright (C) 2020-2021 Bell Canada. - * Modifications Copyright (C) 2021-2022 Nordix Foundation - * Modifications Copyright (C) 2022 TechMahindra Ltd. + * Modifications Copyright (C) 2021-2025 Nordix Foundation + * Modifications Copyright (C) 2022-2025 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -23,22 +23,26 @@ package org.onap.cps.rest.controller -import org.onap.cps.api.CpsAnchorService +import com.fasterxml.jackson.databind.ObjectMapper import static org.onap.cps.api.parameters.CascadeDeleteAllowed.CASCADE_DELETE_PROHIBITED import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put import org.mapstruct.factory.Mappers +import org.onap.cps.api.CpsAnchorService import org.onap.cps.api.CpsDataspaceService import org.onap.cps.api.CpsModuleService +import org.onap.cps.api.CpsNotificationService import org.onap.cps.api.exceptions.AlreadyDefinedException import org.onap.cps.api.exceptions.SchemaSetInUseException import org.onap.cps.api.model.Anchor import org.onap.cps.api.model.Dataspace import org.onap.cps.api.model.SchemaSet +import org.onap.cps.utils.JsonObjectMapper import org.spockframework.spring.SpringBean import org.springframework.beans.factory.annotation.Autowired import org.springframework.beans.factory.annotation.Value @@ -51,6 +55,12 @@ import org.springframework.util.LinkedMultiValueMap import org.springframework.util.MultiValueMap import spock.lang.Specification +import static org.onap.cps.api.parameters.CascadeDeleteAllowed.CASCADE_DELETE_PROHIBITED +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post + @WebMvcTest(AdminRestController) class AdminRestControllerSpec extends Specification { @@ -64,8 +74,15 @@ class AdminRestControllerSpec extends Specification { CpsAnchorService mockCpsAnchorService = Mock() @SpringBean + 
CpsNotificationService mockCpsNotificationService = Mock() + + @SpringBean CpsRestInputMapper cpsRestInputMapper = Mappers.getMapper(CpsRestInputMapper) + @SpringBean + JsonObjectMapper jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) + + @Autowired MockMvc mvc @@ -79,7 +96,7 @@ class AdminRestControllerSpec extends Specification { def dataspace = new Dataspace(name: dataspaceName) def 'Create new dataspace with #scenario.'() { - when: 'post is invoked' + when: 'post is invoked on endpoint for creating a dataspace' def response = mvc.perform( post("/cps/api/${apiVersion}/dataspaces") @@ -97,7 +114,7 @@ class AdminRestControllerSpec extends Specification { } def 'Create dataspace over existing with same name.'() { - given: 'an endpoint' + given: 'the endpoint to create a dataspace' def createDataspaceEndpoint = "$basePath/v1/dataspaces" and: 'the service method throws an exception indicating the dataspace is already defined' def thrownException = new AlreadyDefinedException(dataspaceName, new RuntimeException()) @@ -115,7 +132,7 @@ class AdminRestControllerSpec extends Specification { def 'Get a dataspace.'() { given: 'service method returns a dataspace' mockCpsDataspaceService.getDataspace(dataspaceName) >> dataspace - and: 'an endpoint' + and: 'the endpoint for getting a dataspace by name' def getDataspaceEndpoint = "$basePath/v1/admin/dataspaces/$dataspaceName" when: 'get dataspace API is invoked' def response = mvc.perform(get(getDataspaceEndpoint)).andReturn().response @@ -124,6 +141,17 @@ class AdminRestControllerSpec extends Specification { response.getContentAsString().contains(dataspaceName) } + def 'Clean a dataspace.'() { + given: 'service method returns a dataspace' + mockCpsDataspaceService.getDataspace(dataspaceName) >> dataspace + and: 'the endpoint for cleaning a dataspace' + def postCleanDataspaceEndpoint = "$basePath/v1/admin/dataspaces/$dataspaceName/actions/clean" + when: 'post is invoked on the clean dataspace endpoint' + def response = mvc.perform(post(postCleanDataspaceEndpoint)).andReturn().response + then: 'no content is returned' + response.status == HttpStatus.NO_CONTENT.value() + } + def 'Get all dataspaces.'() { given: 'service method returns all dataspace' mockCpsDataspaceService.getAllDataspaces() >> [dataspace, new Dataspace(name: "dataspace-test2")] @@ -173,8 +201,7 @@ class AdminRestControllerSpec extends Specification { .param('schema-set-name', schemaSetName)) .andReturn().response then: 'associated service method is invoked with expected parameters' - 1 * mockCpsModuleService.createSchemaSet(dataspaceName, schemaSetName, _) >> - { args -> yangResourceMapCapture = args[2] } + 1 * mockCpsModuleService.createSchemaSet(dataspaceName, schemaSetName, _) >> { args -> yangResourceMapCapture = args[2] } yangResourceMapCapture['assembly.yang'] == "fake assembly content 1\n" yangResourceMapCapture['component.yang'] == "fake component content 1\n" and: 'response code indicates success' @@ -208,7 +235,7 @@ class AdminRestControllerSpec extends Specification { } def 'Create schema set from zip archive having #caseDescriptor.'() { - given: 'an endpoint' + given: 'the endpoint to create a schema set' def schemaSetEndpoint = "$basePath/v1/dataspaces/$dataspaceName/schema-sets" when: 'zip archive having #caseDescriptor is uploaded with create schema set request' def response = @@ -228,7 +255,7 @@ class AdminRestControllerSpec extends Specification { def 'Create schema set from file with unsupported filename extension.'() { given: 'file with unsupported filename 
extension (.doc)' def multipartFile = createMultipartFile("filename.doc", "content") - and: 'an endpoint' + and: 'the endpoint to create a schema set' def schemaSetEndpoint = "$basePath/v1/dataspaces/$dataspaceName/schema-sets" when: 'file uploaded with schema set create request' def response = @@ -242,7 +269,7 @@ class AdminRestControllerSpec extends Specification { } def 'Create schema set from #fileType file with IOException occurrence on processing.'() { - given: 'an endpoint' + given: 'the endpoint to create a schema set' def schemaSetEndpoint = "$basePath/v1/dataspaces/$dataspaceName/schema-sets" when: 'file uploaded with schema set create request' def multipartFile = createMultipartFileForIOException(fileType) @@ -259,7 +286,7 @@ class AdminRestControllerSpec extends Specification { } def 'Delete schema set.'() { - given: 'an endpoint' + given: 'the endpoint for deleting a schema set' def schemaSetEndpoint = "$basePath/v1/dataspaces/$dataspaceName/schema-sets/$schemaSetName" when: 'delete schema set endpoint is invoked' def response = mvc.perform(delete(schemaSetEndpoint)).andReturn().response @@ -274,7 +301,7 @@ class AdminRestControllerSpec extends Specification { def thrownException = new SchemaSetInUseException(dataspaceName, schemaSetName) mockCpsModuleService.deleteSchemaSet(dataspaceName, schemaSetName, CASCADE_DELETE_PROHIBITED) >> { throw thrownException } - and: 'an endpoint' + and: 'the endpoint for deleting a schema set' def schemaSetEndpoint = "$basePath/v1/dataspaces/$dataspaceName/schema-sets/$schemaSetName" when: 'delete schema set endpoint is invoked' def response = mvc.perform(delete(schemaSetEndpoint)).andReturn().response @@ -286,7 +313,7 @@ class AdminRestControllerSpec extends Specification { given: 'service method returns a new schema set' mockCpsModuleService.getSchemaSet(dataspaceName, schemaSetName) >> new SchemaSet(name: schemaSetName, dataspaceName: dataspaceName) - and: 'an endpoint' + and: 'the endpoint for getting a schema set' def schemaSetEndpoint = "$basePath/v1/dataspaces/$dataspaceName/schema-sets/$schemaSetName" when: 'get schema set API is invoked' def response = mvc.perform(get(schemaSetEndpoint)).andReturn().response @@ -300,7 +327,7 @@ class AdminRestControllerSpec extends Specification { mockCpsModuleService.getSchemaSets(dataspaceName) >> [new SchemaSet(name: schemaSetName, dataspaceName: dataspaceName), new SchemaSet(name: "test-schemaset", dataspaceName: dataspaceName)] - and: 'an endpoint' + and: 'the endpoint for getting all schema sets' def schemaSetEndpoint = "$basePath/v1/dataspaces/$dataspaceName/schema-sets" when: 'get schema sets API is invoked' def response = mvc.perform(get(schemaSetEndpoint)).andReturn().response @@ -315,7 +342,7 @@ class AdminRestControllerSpec extends Specification { def requestParams = new LinkedMultiValueMap<>() requestParams.add('schema-set-name', schemaSetName) requestParams.add('anchor-name', anchorName) - when: 'post is invoked' + when: 'post is invoked on the create anchors endpoint' def response = mvc.perform( post("/cps/api/${apiVersion}/dataspaces/my_dataspace/anchors") @@ -332,10 +359,10 @@ class AdminRestControllerSpec extends Specification { 'V2 API' | 'v2' || '' } - def 'Get existing anchor.'() { - given: 'service method returns a list of anchors' + def 'Get existing anchors.'() { + given: 'service method returns a list of (one) anchors' mockCpsAnchorService.getAnchors(dataspaceName) >> [anchor] - and: 'an endpoint' + and: 'the endpoint for getting all anchors' def anchorEndpoint = 
"$basePath/v1/dataspaces/$dataspaceName/anchors" when: 'get all anchors API is invoked' def response = mvc.perform(get(anchorEndpoint)).andReturn().response @@ -348,7 +375,7 @@ class AdminRestControllerSpec extends Specification { given: 'service method returns an anchor' mockCpsAnchorService.getAnchor(dataspaceName, anchorName) >> new Anchor(name: anchorName, dataspaceName: dataspaceName, schemaSetName: schemaSetName) - and: 'an endpoint' + and: 'the endpoint for getting an anchor' def anchorEndpoint = "$basePath/v1/dataspaces/$dataspaceName/anchors/$anchorName" when: 'get anchor API is invoked' def response = mvc.perform(get(anchorEndpoint)).andReturn().response @@ -361,7 +388,7 @@ class AdminRestControllerSpec extends Specification { } def 'Delete anchor.'() { - given: 'an endpoint' + given: 'the endpoint for deleting an anchor' def anchorEndpoint = "$basePath/v1/dataspaces/$dataspaceName/anchors/$anchorName" when: 'delete method is invoked on anchor endpoint' def response = mvc.perform(delete(anchorEndpoint)).andReturn().response @@ -372,7 +399,7 @@ class AdminRestControllerSpec extends Specification { } def 'Delete dataspace.'() { - given: 'an endpoint' + given: 'the endpoint for deleting a dataspace' def dataspaceEndpoint = "$basePath/v1/dataspaces" when: 'delete dataspace endpoint is invoked' def response = mvc.perform(delete(dataspaceEndpoint) @@ -384,6 +411,48 @@ class AdminRestControllerSpec extends Specification { response.status == HttpStatus.NO_CONTENT.value() } + def 'Add notification subscription'() { + given: 'an endpoint and its payload' + def notificationSubscriptionEndpoint = "$basePath/v2/notification-subscription" + def xpath = '/dataspaces' + def jsonPayload = '{"dataspace":[{"name":"ds01"}]}' + when: 'post request is performed' + def response = + mvc.perform( + post(notificationSubscriptionEndpoint) + .contentType(MediaType.APPLICATION_JSON) + .content(jsonPayload)) + .andReturn().response + then: 'notification service method is invoked with expected parameter' + 1 * mockCpsNotificationService.createNotificationSubscription(jsonPayload, xpath) + and: 'HTTP response code indicates success' + response.status == HttpStatus.CREATED.value() + } + + def 'delete notification subscription'() { + given: 'an endpoint and xpath' + def notificationSubscriptionEndpoint = "$basePath/v2/notification-subscription" + def xpath = '/dataspaces' + when: 'delete request is performed' + def response = mvc.perform(delete(notificationSubscriptionEndpoint).param('xpath', xpath)).andReturn().response + then: 'notification service method is invoked with expected parameter' + 1 * mockCpsNotificationService.deleteNotificationSubscription(xpath) + and: 'HTTP response code indicates success' + response.status == HttpStatus.NO_CONTENT.value() + } + + def 'Get notification subscription.'() { + given: 'an endpoint and xpath' + def notificationSubscriptionEndpoint = "$basePath/v2/notification-subscription" + def xpath = '/dataspaces' + when: 'get notification subscription is invoked' + def response = mvc.perform(get(notificationSubscriptionEndpoint).param('xpath', xpath)).andReturn().response + then: 'HTTP response code indicates success' + response.status == HttpStatus.OK.value() + and: 'notification service is called with proper parameters' + 1 * mockCpsNotificationService.getNotificationSubscription(xpath) + } + def createMultipartFile(filename, content) { return new MockMultipartFile("file", filename, "text/plain", content.getBytes()) } diff --git 
a/cps-rest/src/test/groovy/org/onap/cps/rest/controller/DataRestControllerSpec.groovy b/cps-rest/src/test/groovy/org/onap/cps/rest/controller/DataRestControllerSpec.groovy index ca89fafe83..ba5104acf9 100755 --- a/cps-rest/src/test/groovy/org/onap/cps/rest/controller/DataRestControllerSpec.groovy +++ b/cps-rest/src/test/groovy/org/onap/cps/rest/controller/DataRestControllerSpec.groovy @@ -1,10 +1,10 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation + * Copyright (C) 2021-2025 Nordix Foundation * Modifications Copyright (C) 2021 Pantheon.tech * Modifications Copyright (C) 2021-2022 Bell Canada. * Modifications Copyright (C) 2022 Deutsche Telekom AG - * Modifications Copyright (C) 2022-2024 TechMahindra Ltd. + * Modifications Copyright (C) 2022-2025 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,26 +25,18 @@ package org.onap.cps.rest.controller import com.fasterxml.jackson.databind.ObjectMapper -import groovy.json.JsonSlurper -import org.onap.cps.api.CpsAnchorService import org.onap.cps.api.CpsDataService -import org.onap.cps.api.parameters.FetchDescendantsOption -import org.onap.cps.api.model.DataNode -import org.onap.cps.api.model.DataNodeBuilder -import org.onap.cps.api.model.DeltaReportBuilder +import org.onap.cps.api.CpsFacade import org.onap.cps.utils.ContentType import org.onap.cps.utils.DateTimeUtility import org.onap.cps.utils.JsonObjectMapper -import org.onap.cps.utils.PrefixResolver import org.spockframework.spring.SpringBean import org.springframework.beans.factory.annotation.Autowired import org.springframework.beans.factory.annotation.Value import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest import org.springframework.http.HttpStatus import org.springframework.http.MediaType -import org.springframework.mock.web.MockMultipartFile import org.springframework.test.web.servlet.MockMvc -import org.springframework.web.multipart.MultipartFile import spock.lang.Shared import spock.lang.Specification @@ -52,7 +44,6 @@ import static org.onap.cps.api.parameters.FetchDescendantsOption.INCLUDE_ALL_DES import static org.onap.cps.api.parameters.FetchDescendantsOption.OMIT_DESCENDANTS import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put @@ -61,17 +52,14 @@ import static org.springframework.test.web.servlet.request.MockMvcRequestBuilder class DataRestControllerSpec extends Specification { @SpringBean - CpsDataService mockCpsDataService = Mock() + CpsFacade mockCpsFacade = Mock() @SpringBean - CpsAnchorService mockCpsAnchorService = Mock() + CpsDataService mockCpsDataService = Mock() @SpringBean JsonObjectMapper jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) - @SpringBean - PrefixResolver prefixResolver = Mock() - @Autowired MockMvc mvc @@ -96,22 +84,6 @@ class DataRestControllerSpec extends Specification { @Shared def 
expectedXmlData = '<?xml version=\'1.0\' encoding=\'UTF-8\'?>\n<bookstore xmlns="org:onap:ccsdk:sample">\n</bookstore>' - @Shared - static DataNode dataNodeWithLeavesNoChildren = new DataNodeBuilder().withXpath('/parent-1') - .withLeaves([leaf: 'value', leafList: ['leaveListElement1', 'leaveListElement2']]).build() - - @Shared - static DataNode dataNodeWithLeavesNoChildren2 = new DataNodeBuilder().withXpath('/parent-2') - .withLeaves([leaf: 'value']).build() - - @Shared - static DataNode dataNodeWithChild = new DataNodeBuilder().withXpath('/parent') - .withChildDataNodes([new DataNodeBuilder().withXpath("/parent/child").build()]).build() - - @Shared - static MultipartFile multipartYangFile = new MockMultipartFile("file", 'filename.yang', "text/plain", 'content'.getBytes()) - - def setup() { dataNodeBaseEndpointV1 = "$basePath/v1/dataspaces/$dataspaceName" dataNodeBaseEndpointV2 = "$basePath/v2/dataspaces/$dataspaceName" @@ -130,7 +102,7 @@ class DataRestControllerSpec extends Specification { ).andReturn().response then: 'a created response is returned' response.status == HttpStatus.CREATED.value() - then: 'the java API was called with the correct parameters' + then: 'the cps data service was called with the correct parameters' 1 * mockCpsDataService.saveData(dataspaceName, anchorName, expectedData, noTimestamp, expectedContentType) where: 'following xpath parameters are are used' scenario | parentNodeXpath | contentType | expectedContentType | requestBody | expectedData @@ -140,7 +112,7 @@ class DataRestControllerSpec extends Specification { 'XML content: xpath parameter point root' | '/' | MediaType.APPLICATION_XML | ContentType.XML | requestBodyXml | expectedXmlData } - def 'Create a node with observed-timestamp'() { + def 'Create a node with observed-timestamp.'() { given: 'endpoint to create a node' def endpoint = "$dataNodeBaseEndpointV1/anchors/$anchorName/nodes" when: 'post is invoked with datanode endpoint and json' @@ -154,7 +126,7 @@ class DataRestControllerSpec extends Specification { ).andReturn().response then: 'a created response is returned' response.status == expectedHttpStatus.value() - then: 'the java API was called with the correct parameters' + then: 'the cps data service was called with the correct parameters' expectedApiCount * mockCpsDataService.saveData(dataspaceName, anchorName, expectedData, { it == DateTimeUtility.toOffsetDateTime(observedTimestamp) }, expectedContentType) where: @@ -164,7 +136,7 @@ class DataRestControllerSpec extends Specification { 'with invalid observed-timestamp' | 'invalid' | MediaType.APPLICATION_JSON | requestBodyJson || 0 | HttpStatus.BAD_REQUEST | expectedJsonData | ContentType.JSON } - def 'Validate data using create a node API'() { + def 'Validate data using create a node API.'() { given: 'an endpoint to create a node' def endpoint = "$dataNodeBaseEndpointV1/anchors/$anchorName/nodes" def parentNodeXpath = '/' @@ -181,11 +153,11 @@ class DataRestControllerSpec extends Specification { ).andReturn().response then: 'a 200 OK response is returned' response.status == HttpStatus.OK.value() - then: 'the service was called with correct parameters' + then: 'the cps data service was called with correct parameters' 1 * mockCpsDataService.validateData(dataspaceName, anchorName, parentNodeXpath, requestBodyJson, ContentType.JSON) } - def 'Create a child node #scenario'() { + def 'Create a child node #scenario.'() { given: 'endpoint to create a node' def endpoint = "$dataNodeBaseEndpointV1/anchors/$anchorName/nodes" and: 'parent node xpath' @@ 
-201,7 +173,7 @@ class DataRestControllerSpec extends Specification { mvc.perform(postRequestBuilder).andReturn().response then: 'a created response is returned' response.status == HttpStatus.CREATED.value() - then: 'the java API was called with the correct parameters' + then: 'the cps data service was called with the correct parameters' 1 * mockCpsDataService.saveData(dataspaceName, anchorName, parentNodeXpath, expectedData, DateTimeUtility.toOffsetDateTime(observedTimestamp), expectedContentType) where: @@ -251,10 +223,10 @@ class DataRestControllerSpec extends Specification { def response = mvc.perform(postRequestBuilder).andReturn().response then: 'a created response is returned' response.status == expectedHttpStatus.value() - then: 'the java API was called with the correct parameters' + then: 'the cps data service was called with the correct parameters when needed' expectedApiCount * mockCpsDataService.saveListElements(dataspaceName, anchorName, parentNodeXpath, expectedData, { it == DateTimeUtility.toOffsetDateTime(observedTimestamp) }, expectedContentType) - where: + where: 'the following parameters are used' scenario | observedTimestamp | contentType | requestBody || expectedApiCount | expectedHttpStatus | expectedData | expectedContentType 'Content type JSON with observed-timestamp' | '2021-03-03T23:59:59.999-0400' | MediaType.APPLICATION_JSON | requestBodyJson || 1 | HttpStatus.CREATED | expectedJsonData | ContentType.JSON 'Content type JSON without observed-timestamp' | null | MediaType.APPLICATION_JSON | requestBodyJson || 1 | HttpStatus.CREATED | expectedJsonData | ContentType.JSON @@ -280,34 +252,14 @@ class DataRestControllerSpec extends Specification { ).andReturn().response then: 'a 200 OK response is returned' response.status == HttpStatus.OK.value() - then: 'the service was called with correct parameters' + then: 'the cps data service was called with correct parameters' 1 * mockCpsDataService.validateData(dataspaceName, anchorName, '/', requestBodyJson, ContentType.JSON) } - def 'Get data node with leaves'() { - given: 'the service returns data node leaves' - def xpath = 'parent-1' - def endpoint = "$dataNodeBaseEndpointV1/anchors/$anchorName/node" - mockCpsDataService.getDataNodes(dataspaceName, anchorName, xpath, OMIT_DESCENDANTS) >> [dataNodeWithLeavesNoChildren] - when: 'get request is performed through REST API' - def response = - mvc.perform(get(endpoint).param('xpath', xpath)) - .andReturn().response - then: 'a success response is returned' - response.status == HttpStatus.OK.value() - then: 'the response contains the the datanode in json format' - response.getContentAsString() == '{"parent-1":{"leaf":"value","leafList":["leaveListElement1","leaveListElement2"]}}' - and: 'response contains expected leaf and value' - response.contentAsString.contains('"leaf":"value"') - and: 'response contains expected leaf-list and values' - response.contentAsString.contains('"leafList":["leaveListElement1","leaveListElement2"]') - } - - def 'Get data node with #scenario.'() { + def 'Get data nodes [V1] with #scenario.'() { given: 'the service returns data node with #scenario' - def xpath = 'some xPath' + def xpath = 'my/path' def endpoint = "$dataNodeBaseEndpointV1/anchors/$anchorName/node" - mockCpsDataService.getDataNodes(dataspaceName, anchorName, xpath, expectedCpsDataServiceOption) >> [dataNode] when: 'get request is performed through REST API' def response = mvc.perform( @@ -315,177 +267,39 @@ class DataRestControllerSpec extends Specification { .param('xpath', xpath) 
.param('include-descendants', includeDescendantsOption)) .andReturn().response + then: 'the cps facade is called with the correct parameters' + 1 * mockCpsFacade.getFirstDataNodeByAnchor(dataspaceName, anchorName, xpath, expectedCpsDataServiceOption) >> [mocked:'result'] then: 'a success response is returned' response.status == HttpStatus.OK.value() - and: 'the response contains the root node identifier: #expectedRootidentifier' - response.contentAsString.contains(expectedRootidentifier) - and: 'the response contains child is #expectChildInResponse' - response.contentAsString.contains('"child"') == expectChildInResponse - where: - scenario | dataNode | includeDescendantsOption || expectedCpsDataServiceOption | expectChildInResponse | expectedRootidentifier - 'no descendants by default' | dataNodeWithLeavesNoChildren | '' || OMIT_DESCENDANTS | false | 'parent-1' - 'no descendant explicitly' | dataNodeWithLeavesNoChildren | 'false' || OMIT_DESCENDANTS | false | 'parent-1' - 'with descendants' | dataNodeWithChild | 'true' || INCLUDE_ALL_DESCENDANTS | true | 'parent' - } - - def 'Get all the data trees as json array with root node xPath using V2'() { - given: 'the service returns all data node leaves' - def xpath = '/' - def endpoint = "$dataNodeBaseEndpointV2/anchors/$anchorName/node" - mockCpsDataService.getDataNodes(dataspaceName, anchorName, xpath, OMIT_DESCENDANTS) >> [dataNodeWithLeavesNoChildren, dataNodeWithLeavesNoChildren2] - when: 'V2 of get request is performed through REST API' - def response = - mvc.perform(get(endpoint) - .contentType(MediaType.APPLICATION_JSON) - .param('xpath', xpath)) - .andReturn().response - then: 'a success response is returned' - response.status == HttpStatus.OK.value() - and: 'the response contains the datanode in json array format' - response.getContentAsString() == '[{"parent-1":{"leaf":"value","leafList":["leaveListElement1","leaveListElement2"]}},' + - '{"parent-2":{"leaf":"value"}}]' - and: 'the json array contains expected number of data trees' - def numberOfDataTrees = new JsonSlurper().parseText(response.getContentAsString()).iterator().size() - assert numberOfDataTrees == 2 - } - - def 'Get all the data trees using V2 without Content-Type defaults to json'() { - given: 'the service returns all data node leaves' - def xpath = '/' - def endpoint = "$dataNodeBaseEndpointV2/anchors/$anchorName/node" - mockCpsDataService.getDataNodes(dataspaceName, anchorName, xpath, OMIT_DESCENDANTS) >> [dataNodeWithLeavesNoChildren, dataNodeWithLeavesNoChildren2] - when: 'V2 of get request is performed through REST API without specifying content-type header' - def response = - mvc.perform(get(endpoint) - .param('xpath', xpath)) - .andReturn().response - then: 'a success response is returned' - response.status == HttpStatus.OK.value() - and: 'the response contains the datanode in json array format' - response.getContentAsString() == '[{"parent-1":{"leaf":"value","leafList":["leaveListElement1","leaveListElement2"]}},' + - '{"parent-2":{"leaf":"value"}}]' - } - - def 'Get all the data trees as XML with root node xPath using V2'() { - given: 'the service returns all data node leaves' - def xpath = '/' - def endpoint = "$dataNodeBaseEndpointV2/anchors/$anchorName/node" - mockCpsDataService.getDataNodes(dataspaceName, anchorName, xpath, OMIT_DESCENDANTS) >> [dataNodeWithLeavesNoChildren] - when: 'V2 of get request is performed through REST API with XML content type' - def response = - mvc.perform(get(endpoint).contentType(MediaType.APPLICATION_XML).param('xpath', 
xpath)) - .andReturn().response - then: 'a success response is returned' - response.status == HttpStatus.OK.value() - and: 'the response contains the datanode in XML format' - response.getContentAsString() == '<parent-1><leaf>value</leaf><leafList>leaveListElement1</leafList><leafList>leaveListElement2</leafList></parent-1>' + and: 'the response contains the facade result in json format' + response.getContentAsString() == '{"mocked":"result"}' + where: 'the following parameters are used' + scenario | includeDescendantsOption || expectedCpsDataServiceOption + 'no descendants (default) ' | '' || OMIT_DESCENDANTS + 'with descendants' | 'true' || INCLUDE_ALL_DESCENDANTS } - def 'Get data node with #scenario using V2.'() { + def 'Get data node with #scenario using V2. output type #scenario.'() { given: 'the service returns data nodes with #scenario' def xpath = 'some xPath' def endpoint = "$dataNodeBaseEndpointV2/anchors/$anchorName/node" - mockCpsDataService.getDataNodes(dataspaceName, anchorName, xpath, expectedCpsDataServiceOption) >> [dataNode] when: 'V2 of get request is performed through REST API' def response = - mvc.perform( - get(endpoint) - .contentType(MediaType.APPLICATION_JSON) - .param('xpath', xpath) - .param('descendants', includeDescendantsOption)) - .andReturn().response - then: 'a success response is returned' - response.status == HttpStatus.OK.value() - and: 'the response contains the root node identifier: #expectedRootidentifier' - response.contentAsString.contains(expectedRootidentifier) - and: 'the response contains child is #expectChildInResponse' - response.contentAsString.contains('"child"') == expectChildInResponse - where: - scenario | dataNode | includeDescendantsOption || expectedCpsDataServiceOption | expectChildInResponse | expectedRootidentifier - 'no descendants by default' | dataNodeWithLeavesNoChildren | '' || OMIT_DESCENDANTS | false | 'parent-1' - 'no descendant explicitly' | dataNodeWithLeavesNoChildren | '0' || OMIT_DESCENDANTS | false | 'parent-1' - 'with descendants' | dataNodeWithChild | '-1' || INCLUDE_ALL_DESCENDANTS | true | 'parent' - } - - def 'Get data node using v2 api'() { - given: 'the service returns data node' - def xpath = 'some xPath' - def endpoint = "$dataNodeBaseEndpointV2/anchors/$anchorName/node" - mockCpsDataService.getDataNodes(dataspaceName, anchorName, xpath, { descendantsOption -> { - assert descendantsOption.depth == 2}} as FetchDescendantsOption) >> [dataNodeWithChild] - when: 'get request is performed through REST API' - def response = - mvc.perform( - get(endpoint) - .contentType(MediaType.APPLICATION_JSON) - .param('xpath', xpath) - .param('descendants', '2')) - .andReturn().response - then: 'a success response is returned' - assert response.status == HttpStatus.OK.value() - and: 'the response contains the root node identifier' - assert response.contentAsString.contains('parent') - and: 'the response contains child is true' - assert response.contentAsString.contains('"child"') - } - - def 'Get delta between two anchors'() { - given: 'the service returns a list containing delta reports' - def deltaReports = new DeltaReportBuilder().actionReplace().withXpath('some xpath').withSourceData('some key': 'some value').withTargetData('some key': 'some value').build() - def xpath = 'some xpath' - def endpoint = "$dataNodeBaseEndpointV2/anchors/sourceAnchor/delta" - mockCpsDataService.getDeltaByDataspaceAndAnchors(dataspaceName, 'sourceAnchor', 'targetAnchor', xpath, OMIT_DESCENDANTS) >> [deltaReports] - when: 'get delta request is 
performed using REST API' - def response = mvc.perform(get(endpoint) - .param('target-anchor-name', 'targetAnchor') - .param('xpath', xpath)) + .contentType(contentType) + .param('xpath', xpath) + .param('descendants', 'all')) .andReturn().response - then: 'expected response code is returned' - assert response.status == HttpStatus.OK.value() - and: 'the response contains expected value' - assert response.contentAsString.contains("[{\"action\":\"replace\",\"xpath\":\"some xpath\",\"sourceData\":{\"some key\":\"some value\"},\"targetData\":{\"some key\":\"some value\"}}]") - } - - def 'Get delta between anchor and JSON payload with multipart file'() { - given: 'sample delta report, xpath, yang model file and json payload' - def deltaReports = new DeltaReportBuilder().actionCreate().withXpath('some xpath').build() - def xpath = 'some xpath' - def endpoint = "$dataNodeBaseEndpointV2/anchors/$anchorName/delta" - and: 'the service layer returns a list containing delta reports' - mockCpsDataService.getDeltaByDataspaceAnchorAndPayload(dataspaceName, anchorName, xpath, ['filename.yang':'content'], expectedJsonData, INCLUDE_ALL_DESCENDANTS) >> [deltaReports] - when: 'get delta request is performed using REST API' - def response = - mvc.perform(multipart(endpoint) - .file(multipartYangFile) - .param("json", requestBodyJson) - .param('xpath', xpath) - .contentType(MediaType.MULTIPART_FORM_DATA)) - .andReturn().response - then: 'expected response code is returned' - assert response.status == HttpStatus.OK.value() - and: 'the response contains expected value' - assert response.contentAsString.contains("[{\"action\":\"create\",\"xpath\":\"some xpath\"}]") - } - - def 'Get delta between anchor and JSON payload without multipart file'() { - given: 'sample delta report, xpath, and json payload' - def deltaReports = new DeltaReportBuilder().actionRemove().withXpath('some xpath').build() - def xpath = 'some xpath' - def endpoint = "$dataNodeBaseEndpointV2/anchors/$anchorName/delta" - and: 'the service layer returns a list containing delta reports' - mockCpsDataService.getDeltaByDataspaceAnchorAndPayload(dataspaceName, anchorName, xpath, [:], expectedJsonData, INCLUDE_ALL_DESCENDANTS) >> [deltaReports] - when: 'get delta request is performed using REST API' - def response = - mvc.perform(multipart(endpoint) - .param("json", requestBodyJson) - .param('xpath', xpath) - .contentType(MediaType.MULTIPART_FORM_DATA)) - .andReturn().response - then: 'expected response code is returned' + then: 'the cps service facade is called with the correct parameters and returns some data' + 1 * mockCpsFacade.getDataNodesByAnchor(dataspaceName, anchorName, xpath, INCLUDE_ALL_DESCENDANTS) >> [[mocked:'result1'], [mocked:'result2']] + and: 'a success response is returned' assert response.status == HttpStatus.OK.value() - and: 'the response contains expected value' - assert response.contentAsString.contains("[{\"action\":\"remove\",\"xpath\":\"some xpath\"}]") + and: 'the response is in the expected format' + assert response.contentAsString == expectedResult + where: 'the following content types are used' + scenario | contentType || expectedResult + 'XML' | MediaType.APPLICATION_XML || '<mocked>result1</mocked><mocked>result2</mocked>' + 'JSON' | MediaType.APPLICATION_JSON || '[{"mocked":"result1"},{"mocked":"result2"}]' } def 'Update data node leaves: #scenario.'() { @@ -499,7 +313,7 @@ class DataRestControllerSpec extends Specification { .content(requestBody) .param('xpath', inputXpath) ).andReturn().response - then: 'the service 
method is invoked with expected parameters' + then: 'the cps data service method is invoked with expected parameters' 1 * mockCpsDataService.updateNodeLeaves(dataspaceName, anchorName, xpathServiceParameter, expectedData, null, expectedContentType) and: 'response status indicates success' response.status == HttpStatus.OK.value() @@ -513,7 +327,7 @@ class DataRestControllerSpec extends Specification { 'XML content: some xpath by parent' | '/some/xpath' | MediaType.APPLICATION_XML || '/some/xpath' | requestBodyXml | expectedXmlData | ContentType.XML } - def 'Update data node leaves with observedTimestamp'() { + def 'Update data node leaves with observedTimestamp.'() { given: 'endpoint to update a node leaves ' def endpoint = "$dataNodeBaseEndpointV1/anchors/$anchorName/nodes" when: 'patch request is performed' @@ -525,7 +339,7 @@ class DataRestControllerSpec extends Specification { .param('xpath', '/') .param('observed-timestamp', observedTimestamp) ).andReturn().response - then: 'the service method is invoked with expected parameters' + then: 'the cps data service method is invoked with expected parameters' expectedApiCount * mockCpsDataService.updateNodeLeaves(dataspaceName, anchorName, '/', expectedJsonData, { it == DateTimeUtility.toOffsetDateTime(observedTimestamp) }, ContentType.JSON) and: 'response status indicates success' @@ -536,7 +350,7 @@ class DataRestControllerSpec extends Specification { 'with invalid observed-timestamp' | 'invalid' || 0 | HttpStatus.BAD_REQUEST } - def 'Validate data using Update a node API'() { + def 'Validate data using Update a node API.'() { given: 'endpoint to update a node leaves' def endpoint = "$dataNodeBaseEndpointV1/anchors/$anchorName/nodes" and: 'dryRunEnabled flag is set to true' @@ -552,7 +366,7 @@ class DataRestControllerSpec extends Specification { ).andReturn().response then: 'a 200 OK response is returned' response.status == HttpStatus.OK.value() - then: 'the service was called with correct parameters' + then: 'the cps data service was called with correct parameters' 1 * mockCpsDataService.validateData(dataspaceName, anchorName, '/', requestBodyJson, ContentType.JSON) } @@ -567,7 +381,7 @@ class DataRestControllerSpec extends Specification { .content(requestBody) .param('xpath', inputXpath)) .andReturn().response - then: 'the service method is invoked with expected parameters' + then: 'the cps data service method is invoked with expected parameters' 1 * mockCpsDataService.updateDataNodeAndDescendants(dataspaceName, anchorName, xpathServiceParameter, expectedData, noTimestamp, expectedContentType) and: 'response status indicates success' response.status == HttpStatus.OK.value() @@ -581,7 +395,7 @@ class DataRestControllerSpec extends Specification { 'XML content: some xpath by parent' | '/some/xpath' | MediaType.APPLICATION_XML || '/some/xpath' | requestBodyXml | expectedXmlData | ContentType.XML } - def 'Validate data using Replace data node API'() { + def 'Validate data using Replace data node API.'() { given: 'endpoint to replace node' def endpoint = "$dataNodeBaseEndpointV1/anchors/$anchorName/nodes" and: 'dryRunEnabled flag is set to true' @@ -597,7 +411,7 @@ class DataRestControllerSpec extends Specification { ).andReturn().response then: 'a 200 OK response is returned' response.status == HttpStatus.OK.value() - then: 'the service was called with correct parameters' + then: 'the cps data service was called with correct parameters' 1 * mockCpsDataService.validateData(dataspaceName, anchorName, '/', requestBodyJson, ContentType.JSON) } @@ 
-613,7 +427,7 @@ class DataRestControllerSpec extends Specification { .param('xpath', '') .param('observed-timestamp', observedTimestamp)) .andReturn().response - then: 'the service method is invoked with expected parameters' + then: 'the cps data service method is invoked with expected parameters' expectedApiCount * mockCpsDataService.updateDataNodeAndDescendants(dataspaceName, anchorName, '/', expectedJsonData, { it == DateTimeUtility.toOffsetDateTime(observedTimestamp) }, ContentType.JSON) and: 'response status indicates success' @@ -635,7 +449,7 @@ class DataRestControllerSpec extends Specification { def response = mvc.perform(putRequestBuilder).andReturn().response then: 'a success response is returned' response.status == expectedHttpStatus.value() - and: 'the java API was called with the correct parameters' + and: 'the cps data service was called with the correct parameters' expectedApiCount * mockCpsDataService.replaceListContent(dataspaceName, anchorName, 'parent xpath', expectedJsonData, { it == DateTimeUtility.toOffsetDateTime(observedTimestamp) }, ContentType.JSON) where: @@ -656,7 +470,7 @@ class DataRestControllerSpec extends Specification { def response = mvc.perform(putRequestBuilder).andReturn().response then: 'a success response is returned' response.status == expectedHttpStatus.value() - and: 'the java API was called with the correct parameters' + and: 'the cps data service was called with the correct parameters' expectedApiCount * mockCpsDataService.replaceListContent(dataspaceName, anchorName, 'parent xpath', expectedXmlData, { it == DateTimeUtility.toOffsetDateTime(observedTimestamp) }, ContentType.XML) where: @@ -666,7 +480,7 @@ class DataRestControllerSpec extends Specification { 'with invalid observed-timestamp' | 'invalid' || 0 | HttpStatus.BAD_REQUEST } - def 'Validate data using Replace list content API'() { + def 'Validate data using Replace list content API.'() { given: 'endpoint to replace list-nodes' def endpoint = "$dataNodeBaseEndpointV1/anchors/$anchorName/list-nodes" and: 'dryRunEnabled flag is set to true' @@ -682,7 +496,7 @@ class DataRestControllerSpec extends Specification { ).andReturn().response then: 'a 200 OK response is returned' response.status == HttpStatus.OK.value() - then: 'the service was called with correct parameters' + then: 'the cps data service was called with correct parameters' 1 * mockCpsDataService.validateData(dataspaceName, anchorName, '/', requestBodyJson, ContentType.JSON) } @@ -695,7 +509,7 @@ class DataRestControllerSpec extends Specification { def response = mvc.perform(deleteRequestBuilder).andReturn().response then: 'a success response is returned' response.status == expectedHttpStatus.value() - and: 'the java API was called with the correct parameters' + and: 'the cps data service was called with the correct parameters' expectedApiCount * mockCpsDataService.deleteListOrListElement(dataspaceName, anchorName, 'list element xpath', { it == DateTimeUtility.toOffsetDateTime(observedTimestamp) }) where: @@ -717,7 +531,7 @@ class DataRestControllerSpec extends Specification { def response = mvc.perform(deleteDataNodeRequest).andReturn().response then: 'a successful response is returned' response.status == expectedHttpStatus.value() - and: 'the api is called with the correct parameters' + and: 'the cps data service is called with the correct parameters' expectedApiCount * mockCpsDataService.deleteDataNode(dataspaceName, anchorName, dataNodeXpath, { it == DateTimeUtility.toOffsetDateTime(observedTimestamp) }) where: diff --git 
a/cps-rest/src/test/groovy/org/onap/cps/rest/controller/DeltaRestControllerSpec.groovy b/cps-rest/src/test/groovy/org/onap/cps/rest/controller/DeltaRestControllerSpec.groovy new file mode 100644 index 0000000000..18c0f1369e --- /dev/null +++ b/cps-rest/src/test/groovy/org/onap/cps/rest/controller/DeltaRestControllerSpec.groovy @@ -0,0 +1,129 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 TechMahindra Ltd. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.rest.controller + +import com.fasterxml.jackson.databind.ObjectMapper +import org.onap.cps.api.CpsDeltaService +import org.onap.cps.impl.DeltaReportBuilder +import org.onap.cps.utils.JsonObjectMapper +import org.spockframework.spring.SpringBean +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.beans.factory.annotation.Value +import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest +import org.springframework.http.HttpStatus +import org.springframework.http.MediaType +import org.springframework.mock.web.MockMultipartFile +import org.springframework.test.web.servlet.MockMvc +import org.springframework.web.multipart.MultipartFile +import spock.lang.Shared +import spock.lang.Specification + +import static org.onap.cps.api.parameters.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS +import static org.onap.cps.api.parameters.FetchDescendantsOption.OMIT_DESCENDANTS +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart + +@WebMvcTest(DeltaRestController) +class DeltaRestControllerSpec extends Specification { + + @SpringBean + CpsDeltaService mockCpsDeltaService = Mock() + + @SpringBean + JsonObjectMapper jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) + + @Autowired + MockMvc mvc + + @Value('${rest.api.cps-base-path}') + def basePath + + def dataNodeBaseEndpointV2 + def dataspaceName = 'my_dataspace' + def anchorName = 'my_anchor' + + @Shared + def requestBodyJson = '{"some-key":"some-value","categories":[{"books":[{"authors":["Iain M. Banks"]}]}]}' + @Shared + def expectedJsonData = '{"some-key":"some-value","categories":[{"books":[{"authors":["Iain M. 
Banks"]}]}]}' + @Shared + static MultipartFile multipartYangFile = new MockMultipartFile('file', 'filename.yang', 'text/plain', 'content'.getBytes()) + + def setup() { + dataNodeBaseEndpointV2 = "$basePath/v2/dataspaces/$dataspaceName/anchors/$anchorName/delta" + } + + def 'Get delta between two anchors'() { + given: 'the service returns a list containing delta reports' + def deltaReports = new DeltaReportBuilder().actionReplace().withXpath('some xpath').withSourceData('some key': 'some value').withTargetData('some key': 'some value').build() + def xpath = 'some xpath' + mockCpsDeltaService.getDeltaByDataspaceAndAnchors(dataspaceName, anchorName, 'targetAnchor', xpath, OMIT_DESCENDANTS) >> [deltaReports] + when: 'get delta request is performed using REST API' + def response = + mvc.perform(get(dataNodeBaseEndpointV2) + .param('target-anchor-name', 'targetAnchor') + .param('xpath', xpath)) + .andReturn().response + then: 'expected response code is returned' + assert response.status == HttpStatus.OK.value() + and: 'the response contains expected value' + assert response.contentAsString.contains('[{\"action\":\"replace\",\"xpath\":\"some xpath\",\"sourceData\":{\"some key\":\"some value\"},\"targetData\":{\"some key\":\"some value\"}}]') + } + + def 'Get delta between anchor and JSON payload with multipart file'() { + given: 'sample delta report, xpath, yang model file and json payload' + def deltaReports = new DeltaReportBuilder().actionCreate().withXpath('some xpath').build() + def xpath = 'some xpath' + and: 'the service layer returns a list containing delta reports' + mockCpsDeltaService.getDeltaByDataspaceAnchorAndPayload(dataspaceName, anchorName, xpath, ['filename.yang':'content'], expectedJsonData, INCLUDE_ALL_DESCENDANTS) >> [deltaReports] + when: 'get delta request is performed using REST API' + def response = + mvc.perform(multipart(dataNodeBaseEndpointV2) + .file(multipartYangFile) + .param('json', requestBodyJson) + .param('xpath', xpath) + .contentType(MediaType.MULTIPART_FORM_DATA)) + .andReturn().response + then: 'expected response code is returned' + assert response.status == HttpStatus.OK.value() + and: 'the response contains expected value' + assert response.contentAsString.contains('[{\"action\":\"create\",\"xpath\":\"some xpath\"}]') + } + + def 'Get delta between anchor and JSON payload without multipart file'() { + given: 'sample delta report, xpath, and json payload' + def deltaReports = new DeltaReportBuilder().actionRemove().withXpath('some xpath').build() + def xpath = 'some xpath' + and: 'the service layer returns a list containing delta reports' + mockCpsDeltaService.getDeltaByDataspaceAnchorAndPayload(dataspaceName, anchorName, xpath, [:], expectedJsonData, INCLUDE_ALL_DESCENDANTS) >> [deltaReports] + when: 'get delta request is performed using REST API' + def response = + mvc.perform(multipart(dataNodeBaseEndpointV2) + .param('json', requestBodyJson) + .param('xpath', xpath) + .contentType(MediaType.MULTIPART_FORM_DATA)) + .andReturn().response + then: 'expected response code is returned' + assert response.status == HttpStatus.OK.value() + and: 'the response contains expected value' + assert response.contentAsString.contains('[{\"action\":\"remove\",\"xpath\":\"some xpath\"}]') + } +} diff --git a/cps-rest/src/test/groovy/org/onap/cps/rest/controller/QueryRestControllerSpec.groovy b/cps-rest/src/test/groovy/org/onap/cps/rest/controller/QueryRestControllerSpec.groovy index f29654c99f..b49afb4798 100644 --- 
a/cps-rest/src/test/groovy/org/onap/cps/rest/controller/QueryRestControllerSpec.groovy +++ b/cps-rest/src/test/groovy/org/onap/cps/rest/controller/QueryRestControllerSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation + * Copyright (C) 2021-2025 Nordix Foundation * Modifications Copyright (C) 2021-2022 Bell Canada. * Modifications Copyright (C) 2021 Pantheon.tech * Modifications Copyright (C) 2022-2024 TechMahindra Ltd. @@ -24,12 +24,9 @@ package org.onap.cps.rest.controller import com.fasterxml.jackson.databind.ObjectMapper -import org.onap.cps.api.CpsAnchorService -import org.onap.cps.api.CpsQueryService +import org.onap.cps.api.CpsFacade import org.onap.cps.api.parameters.PaginationOption -import org.onap.cps.api.model.DataNodeBuilder import org.onap.cps.utils.JsonObjectMapper -import org.onap.cps.utils.PrefixResolver import org.spockframework.spring.SpringBean import org.springframework.beans.factory.annotation.Autowired import org.springframework.beans.factory.annotation.Value @@ -39,216 +36,115 @@ import org.springframework.http.MediaType import org.springframework.test.web.servlet.MockMvc import spock.lang.Specification -import static org.onap.cps.api.parameters.FetchDescendantsOption.DIRECT_CHILDREN_ONLY import static org.onap.cps.api.parameters.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS import static org.onap.cps.api.parameters.FetchDescendantsOption.OMIT_DESCENDANTS +import static org.onap.cps.api.parameters.PaginationOption.NO_PAGINATION import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get @WebMvcTest(QueryRestController) class QueryRestControllerSpec extends Specification { @SpringBean - CpsQueryService mockCpsQueryService = Mock() - - @SpringBean - CpsAnchorService mockCpsAnchorService = Mock() + CpsFacade mockCpsFacade = Mock() @SpringBean JsonObjectMapper jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) - @SpringBean - PrefixResolver prefixResolver = Mock() - @Autowired MockMvc mvc @Value('${rest.api.cps-base-path}') def basePath - def dataspaceName = 'my_dataspace' - def anchorName = 'my_anchor' - def cpsPath = 'some cps-path' - def dataNodeEndpointV2 + def dataNodeAsMap = ['prefixedPath':[path:[leaf:'value']]] - def setup() { - dataNodeEndpointV2 = "$basePath/v2/dataspaces/$dataspaceName/anchors/$anchorName/nodes/query" - } - - def 'Query data node by cps path for the given dataspace and anchor with #scenario.'() { - given: 'service method returns a list containing a data node' - def dataNode1 = new DataNodeBuilder().withXpath('/xpath') - .withLeaves([leaf: 'value', leafList: ['leaveListElement1', 'leaveListElement2']]).build() - mockCpsQueryService.queryDataNodes(dataspaceName, anchorName, cpsPath, expectedCpsDataServiceOption) >> [dataNode1, dataNode1] - and: 'the query endpoint' - def dataNodeEndpoint = "$basePath/v1/dataspaces/$dataspaceName/anchors/$anchorName/nodes/query" + def 'Query data node (v1) by cps path for the given dataspace and anchor with #scenario.'() { + given: 'the query endpoint' + def dataNodeEndpoint = "$basePath/v1/dataspaces/my_dataspace/anchors/my_anchor/nodes/query" when: 'query data nodes API is invoked' - def response = - mvc.perform( - get(dataNodeEndpoint) - .param('cps-path', cpsPath) - .param('include-descendants', includeDescendantsOption)) - .andReturn().response + def response = mvc.perform(get(dataNodeEndpoint).param('cps-path', 'my/path').param('include-descendants', 
includeDescendantsOption)) + .andReturn().response + then: 'the call is delegated to the cps service facade which returns a list containing one data node as a map' + 1 * mockCpsFacade.executeAnchorQuery('my_dataspace', 'my_anchor', 'my/path', expectedCpsDataServiceOption) >> [dataNodeAsMap] then: 'the response contains the the datanode in json format' - response.status == HttpStatus.OK.value() - response.getContentAsString().contains('{"xpath":{"leaf":"value","leafList":["leaveListElement1","leaveListElement2"]}}') + assert response.status == HttpStatus.OK.value() + assert response.getContentAsString() == '[{"prefixedPath":{"path":{"leaf":"value"}}}]' where: 'the following options for include descendants are provided in the request' scenario | includeDescendantsOption || expectedCpsDataServiceOption 'no descendants by default' | '' || OMIT_DESCENDANTS - 'no descendant explicitly' | 'false' || OMIT_DESCENDANTS 'descendants' | 'true' || INCLUDE_ALL_DESCENDANTS } - def 'Query data node v2 API by cps path for the given dataspace and anchor with #scenario and media type JSON'() { - given: 'service method returns a list containing a data node' - def dataNode = new DataNodeBuilder().withXpath('/xpath') - .withLeaves([leaf: 'value', leafList: ['leaveListElement1', 'leaveListElement2']]).build() - mockCpsQueryService.queryDataNodes(dataspaceName, anchorName, cpsPath, { descendantsOption -> - assert descendantsOption.depth == expectedDepth - }) >> [dataNode, dataNode] + def 'Query data node (v2) by cps path for given dataspace and anchor with #scenario'() { + given: 'the query endpoint' + def dataNodeEndpointV2 = "$basePath/v2/dataspaces/my_dataspace/anchors/my_anchor/nodes/query" when: 'query data nodes API is invoked' - def response = - mvc.perform( - get(dataNodeEndpointV2) - .contentType(MediaType.APPLICATION_JSON) - .param('cps-path', cpsPath) - .param('descendants', includeDescendantsOptionString)) + def response = mvc.perform(get(dataNodeEndpointV2).contentType(contentType).param('cps-path', 'my/path') .param('descendants', includeDescendantsOptionString)) .andReturn().response - then: 'the response contains the datanode in the expected JSON format' + then: 'the call is delegated to the cps service facade which returns a list containing one data node as a map' + 1 * mockCpsFacade.executeAnchorQuery('my_dataspace', 'my_anchor', 'my/path', + { descendantsOption -> assert descendantsOption.depth == expectedDepth }) >> [dataNodeAsMap] + and: 'the response contains the datanode in the expected format' assert response.status == HttpStatus.OK.value() - assert response.getContentAsString().contains('{"xpath":{"leaf":"value","leafList":["leaveListElement1","leaveListElement2"]}}') + assert response.getContentAsString() == expectedOutput where: 'the following options for include descendants are provided in the request' - scenario | includeDescendantsOptionString || expectedDepth - 'direct children' | 'direct' || 1 - 'descendants' | '2' || 2 + scenario | includeDescendantsOptionString | contentType || expectedDepth || expectedOutput + 'direct children JSON' | 'direct' | MediaType.APPLICATION_JSON || 1 || '[{"prefixedPath":{"path":{"leaf":"value"}}}]' + 'descendants JSON' | '2' | MediaType.APPLICATION_JSON || 2 || '[{"prefixedPath":{"path":{"leaf":"value"}}}]' + 'descendants XML' | '2' | MediaType.APPLICATION_XML || 2 || '<prefixedPath><path><leaf>value</leaf></path></prefixedPath>' } - def 'Query data node v2 API by cps path for the given dataspace and anchor with #scenario and media type XML'() { - 
given: 'service method returns a list containing a data node' - def dataNode = new DataNodeBuilder().withXpath('/xpath') - .withLeaves([leaf: 'value', leafList: ['leaveListElement1', 'leaveListElement2']]).build() - mockCpsQueryService.queryDataNodes(dataspaceName, anchorName, cpsPath, { descendantsOption -> - assert descendantsOption.depth == expectedDepth - }) >> [dataNode, dataNode] + def 'Query data node (v2) by cps path for given dataspace and anchor with attribute-axis and #scenario'() { + given: 'the query endpoint' + def dataNodeEndpointV2 = "$basePath/v2/dataspaces/my_dataspace/anchors/my_anchor/nodes/query" when: 'query data nodes API is invoked' - def response = - mvc.perform( - get(dataNodeEndpointV2) - .contentType(MediaType.APPLICATION_XML) - .param('cps-path', cpsPath) - .param('descendants', includeDescendantsOptionString)) + def response = mvc.perform(get(dataNodeEndpointV2).contentType(contentType).param('cps-path', '/my/path/@myAttribute').param('descendants', '0')) .andReturn().response - then: 'the response contains the datanode in the expected XML format' + then: 'the call is delegated to the cps service facade which returns a list containing two attributes as maps' + 1 * mockCpsFacade.executeAnchorQuery('my_dataspace', 'my_anchor', '/my/path/@myAttribute', OMIT_DESCENDANTS) >> [['myAttribute':'value1'], ['myAttribute':'value2']] + and: 'the response contains the datanode in the expected format' assert response.status == HttpStatus.OK.value() - assert response.getContentAsString().contains('<xpath><leaf>value</leaf><leafList>leaveListElement1</leafList><leafList>leaveListElement2</leafList></xpath>') - where: 'the following options for include descendants are provided in the request' - scenario | includeDescendantsOptionString || expectedDepth - 'direct children' | 'direct' || 1 - 'descendants' | '2' || 2 - } - - def 'Query data node by cps path for the given dataspace across all anchors with #scenario.'() { - given: 'service method returns a list containing a data node from different anchors' - def dataNode1 = new DataNodeBuilder().withXpath('/xpath') - .withAnchor('my_anchor') - .withLeaves([leaf: 'value', leafList: ['leaveListElement1', 'leaveListElement2']]).build() - def dataNode2 = new DataNodeBuilder().withXpath('/xpath') - .withAnchor('my_anchor_2') - .withLeaves([leaf: 'value', leafList: ['leaveListElement3', 'leaveListElement4']]).build() - and: 'second data node for the same anchor' - def dataNode3 = new DataNodeBuilder().withXpath('/xpath') - .withAnchor('my_anchor_2') - .withLeaves([leaf: 'value', leafList: ['leaveListElement5', 'leaveListElement6']]).build() - and: 'the query endpoint' - def dataspaceName = 'my_dataspace' - def cpsPath = 'some/cps/path' - def dataNodeEndpoint = "$basePath/v2/dataspaces/$dataspaceName/nodes/query" - mockCpsQueryService.queryDataNodesAcrossAnchors(dataspaceName, cpsPath, - expectedCpsDataServiceOption, PaginationOption.NO_PAGINATION) >> [dataNode1, dataNode2, dataNode3] - mockCpsQueryService.countAnchorsForDataspaceAndCpsPath(dataspaceName, cpsPath) >> 2 - when: 'query data nodes API is invoked' - def response = - mvc.perform( - get(dataNodeEndpoint) - .param('cps-path', cpsPath) - .param('descendants', includeDescendantsOptionString)) - .andReturn().response - then: 'the response contains the the datanode in json format' - response.status == HttpStatus.OK.value() - response.getContentAsString().contains('{"xpath":{"leaf":"value","leafList":["leaveListElement1","leaveListElement2"]}}') - 
response.getContentAsString().contains('{"xpath":{"leaf":"value","leafList":["leaveListElement3","leaveListElement4"]}}') - response.getContentAsString().contains('{"xpath":{"leaf":"value","leafList":["leaveListElement5","leaveListElement6"]}}') - where: 'the following options for include descendants are provided in the request' - scenario | includeDescendantsOptionString || expectedCpsDataServiceOption - 'no descendants by default' | '' || OMIT_DESCENDANTS - 'no descendant explicitly' | 'none' || OMIT_DESCENDANTS - 'descendants' | 'all' || INCLUDE_ALL_DESCENDANTS - 'direct children' | 'direct' || DIRECT_CHILDREN_ONLY + assert response.getContentAsString() == expectedOutput + where: 'the following options for content type are provided in the request' + scenario | contentType || expectedOutput + 'JSON' | MediaType.APPLICATION_JSON || '[{"myAttribute":"value1"},{"myAttribute":"value2"}]' + 'XML' | MediaType.APPLICATION_XML || '<myAttribute>value1</myAttribute><myAttribute>value2</myAttribute>' } - def 'Query data node by cps path for the given dataspace across all anchors with pagination #scenario.'() { - given: 'service method returns a list containing a data node from different anchors' - def dataNode1 = new DataNodeBuilder().withXpath('/xpath') - .withAnchor('my_anchor') - .withLeaves([leaf: 'value', leafList: ['leaveListElement1', 'leaveListElement2']]).build() - def dataNode2 = new DataNodeBuilder().withXpath('/xpath') - .withAnchor('my_anchor_2') - .withLeaves([leaf: 'value', leafList: ['leaveListElement3', 'leaveListElement4']]).build() - and: 'the query endpoint' - def dataspaceName = 'my_dataspace' - def cpsPath = 'some/cps/path' - def dataNodeEndpoint = "$basePath/v2/dataspaces/$dataspaceName/nodes/query" - mockCpsQueryService.queryDataNodesAcrossAnchors(dataspaceName, cpsPath, - INCLUDE_ALL_DESCENDANTS, new PaginationOption(pageIndex,pageSize)) >> [dataNode1, dataNode2] - mockCpsQueryService.countAnchorsForDataspaceAndCpsPath(dataspaceName, cpsPath) >> totalAnchors + def 'Query data node by cps path for given dataspace across all anchors'() { + given: 'the query endpoint' + def dataNodeEndpoint = "$basePath/v2/dataspaces/my_dataspace/nodes/query" + and: 'the cps service facade will say there are 123 pages ' + mockCpsFacade.countAnchorsInDataspaceQuery('my_dataspace', 'my/path', new PaginationOption(2,5) ) >> 123 when: 'query data nodes API is invoked' - def response = - mvc.perform( - get(dataNodeEndpoint) - .param('cps-path', cpsPath) - .param('descendants', "all") - .param('pageIndex', String.valueOf(pageIndex)) - .param('pageSize', String.valueOf(pageSize))) + def response = mvc.perform( + get(dataNodeEndpoint).param('cps-path', 'my/path').param('pageIndex', String.valueOf(2)).param('pageSize', String.valueOf(5))) .andReturn().response - then: 'the response contains the the datanode in json format' + then: 'the call is delegated to the cps service facade which returns a list containing one data node as a map' + 1 * mockCpsFacade.executeDataspaceQuery('my_dataspace', 'my/path', OMIT_DESCENDANTS, new PaginationOption(2,5)) >> [dataNodeAsMap] + then: 'the response is OK and contains the the datanode in json format' assert response.status == HttpStatus.OK.value() - assert Integer.valueOf(response.getHeaderValue("total-pages")) == expectedTotalPageSize - assert response.getContentAsString().contains('{"xpath":{"leaf":"value","leafList":["leaveListElement1","leaveListElement2"]}}') - assert 
response.getContentAsString().contains('{"xpath":{"leaf":"value","leafList":["leaveListElement3","leaveListElement4"]}}') - where: 'the following options for include descendants are provided in the request' - scenario | pageIndex | pageSize | totalAnchors || expectedTotalPageSize - '1st page with all anchors' | 1 | 3 | 3 || 1 - '1st page with less anchors' | 1 | 2 | 3 || 2 + assert response.getContentAsString() == '[{"prefixedPath":{"path":{"leaf":"value"}}}]' + and: 'the header indicates the correct number of pages' + assert response.getHeaderValue('total-pages') == '123' } - def 'Query data node across all anchors with pagination option with #scenario.'() { - given: 'service method returns a list containing a data node from different anchors' - def dataNode1 = new DataNodeBuilder().withXpath('/xpath') - .withAnchor('my_anchor') - .withLeaves([leaf: 'value', leafList: ['leaveListElement1', 'leaveListElement2']]).build() - def dataNode2 = new DataNodeBuilder().withXpath('/xpath') - .withAnchor('my_anchor_2') - .withLeaves([leaf: 'value', leafList: ['leaveListElement3', 'leaveListElement4']]).build() - and: 'the query endpoint' - def dataspaceName = 'my_dataspace' - def cpsPath = 'some/cps/path' - def dataNodeEndpoint = "$basePath/v2/dataspaces/$dataspaceName/nodes/query" - mockCpsQueryService.queryDataNodesAcrossAnchors(dataspaceName, cpsPath, - INCLUDE_ALL_DESCENDANTS, PaginationOption.NO_PAGINATION) >> [dataNode1, dataNode2] - mockCpsQueryService.countAnchorsForDataspaceAndCpsPath(dataspaceName, cpsPath) >> 2 + def 'Query data node across all anchors with pagination option with #scenario i.e. no pagination.'() { + given: 'the query endpoint' + def dataNodeEndpoint = "$basePath/v2/dataspaces/my_dataspace/nodes/query" + and: 'the cps service facade will say there is 1 page ' + mockCpsFacade.countAnchorsInDataspaceQuery('my_dataspace', 'my/path', NO_PAGINATION ) >> 1 when: 'query data nodes API is invoked' - def response = - mvc.perform( - get(dataNodeEndpoint) - .param('cps-path', cpsPath) - .param('descendants', "all") - .param(parameterName, "1")) - .andReturn().response - then: 'the response contains the the datanode in json format' + def response = mvc.perform(get(dataNodeEndpoint).param('cps-path', 'my/path').param(parameterName, '1')) + .andReturn().response + then: 'the call is delegated to the cps service facade which returns a list containing one data node as a map' + 1 * mockCpsFacade.executeDataspaceQuery('my_dataspace', 'my/path', OMIT_DESCENDANTS, PaginationOption.NO_PAGINATION) >> [dataNodeAsMap] + then: 'the response is OK and contains the datanode in json format' assert response.status == HttpStatus.OK.value() - assert Integer.valueOf(response.getHeaderValue("total-pages")) == 1 - assert response.getContentAsString().contains('{"xpath":{"leaf":"value","leafList":["leaveListElement1","leaveListElement2"]}}') - assert response.getContentAsString().contains('{"xpath":{"leaf":"value","leafList":["leaveListElement3","leaveListElement4"]}}') - where: + assert response.getContentAsString() == '[{"prefixedPath":{"path":{"leaf":"value"}}}]' + and: 'the header indicates the correct number of pages' + assert response.getHeaderValue('total-pages') == '1' + where: 'only the following rest parameter is used' scenario | parameterName 'only page size' | 'pageSize' 'only page index' | 'pageIndex' diff --git a/cps-rest/src/test/groovy/org/onap/cps/rest/exceptions/CpsRestExceptionHandlerSpec.groovy b/cps-rest/src/test/groovy/org/onap/cps/rest/exceptions/CpsRestExceptionHandlerSpec.groovy 
index f0fc4cca62..1d58197dcc 100644 --- a/cps-rest/src/test/groovy/org/onap/cps/rest/exceptions/CpsRestExceptionHandlerSpec.groovy +++ b/cps-rest/src/test/groovy/org/onap/cps/rest/exceptions/CpsRestExceptionHandlerSpec.groovy @@ -1,9 +1,9 @@ /* * ============LICENSE_START======================================================= * Copyright (C) 2020 Pantheon.tech - * Modifications Copyright (C) 2021-2023 Nordix Foundation + * Modifications Copyright (C) 2021-2025 Nordix Foundation * Modifications Copyright (C) 2021 Bell Canada. - * Modifications Copyright (C) 2022 TechMahindra Ltd. + * Modifications Copyright (C) 2022-2025 TechMahindra Ltd. * Modifications Copyright (C) 2022 Deutsche Telekom AG * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -26,22 +26,25 @@ package org.onap.cps.rest.exceptions import com.fasterxml.jackson.databind.ObjectMapper import groovy.json.JsonSlurper -import org.onap.cps.api.CpsDataspaceService import org.onap.cps.api.CpsAnchorService import org.onap.cps.api.CpsDataService +import org.onap.cps.api.CpsDataspaceService +import org.onap.cps.api.CpsDeltaService +import org.onap.cps.api.CpsFacade import org.onap.cps.api.CpsModuleService +import org.onap.cps.api.CpsNotificationService import org.onap.cps.api.CpsQueryService -import org.onap.cps.rest.controller.CpsRestInputMapper import org.onap.cps.api.exceptions.AlreadyDefinedException import org.onap.cps.api.exceptions.CpsException import org.onap.cps.api.exceptions.CpsPathException import org.onap.cps.api.exceptions.DataInUseException import org.onap.cps.api.exceptions.DataNodeNotFoundException import org.onap.cps.api.exceptions.DataValidationException +import org.onap.cps.api.exceptions.DataspaceInUseException import org.onap.cps.api.exceptions.ModelValidationException import org.onap.cps.api.exceptions.NotFoundInDataspaceException import org.onap.cps.api.exceptions.SchemaSetInUseException -import org.onap.cps.api.exceptions.DataspaceInUseException +import org.onap.cps.rest.controller.CpsRestInputMapper import org.onap.cps.utils.JsonObjectMapper import org.onap.cps.utils.PrefixResolver import org.spockframework.spring.SpringBean @@ -64,6 +67,9 @@ import static org.springframework.test.web.servlet.request.MockMvcRequestBuilder class CpsRestExceptionHandlerSpec extends Specification { @SpringBean + CpsFacade mockCpsFacade = Stub() + + @SpringBean CpsDataspaceService mockCpsAdminService = Stub() @SpringBean @@ -85,7 +91,13 @@ class CpsRestExceptionHandlerSpec extends Specification { CpsRestInputMapper cpsRestInputMapper = Stub() @SpringBean - PrefixResolver prefixResolver = Mock() + PrefixResolver prefixResolver = Stub() + + @SpringBean + CpsNotificationService mockCpsNotificationService = Stub() + + @SpringBean + CpsDeltaService cpsDeltaService = Stub() @Autowired MockMvc mvc diff --git a/cps-ri/pom.xml b/cps-ri/pom.xml index 2492cb837e..ae1dc89428 100644 --- a/cps-ri/pom.xml +++ b/cps-ri/pom.xml @@ -3,7 +3,7 @@ ============LICENSE_START=======================================================
Copyright (C) 2020-2021 Pantheon.tech
Modifications Copyright (C) 2020-2021 Bell Canada
- Modifications Copyright (C) 2020-2024 Nordix Foundation
+ Modifications Copyright (C) 2020-2025 Nordix Foundation
================================================================================
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -26,14 +26,14 @@ <parent>
<groupId>org.onap.cps</groupId>
<artifactId>cps-parent</artifactId>
- <version>3.6.0-SNAPSHOT</version>
+ <version>3.6.3-SNAPSHOT</version>
<relativePath>../cps-parent/pom.xml</relativePath>
</parent>
<artifactId>cps-ri</artifactId>
<properties>
- <minimum-coverage>0.29</minimum-coverage>
+ <minimum-coverage>0.31</minimum-coverage>
<!-- Additional coverage is provided by integration-test module -->
</properties>
@@ -56,10 +56,6 @@ <artifactId>spring-boot-starter-validation</artifactId>
</dependency>
<dependency>
- <groupId>org.springframework.retry</groupId>
- <artifactId>spring-retry</artifactId>
- </dependency>
- <dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-aspects</artifactId>
</dependency>
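Editor's note, not part of the change set: the controller-spec rework above repeats one pattern. The REST layer now delegates to CpsFacade, which already returns each data node as a prefixed-path map, so the specs stub the facade and assert on the serialised output instead of building DataNode objects and stubbing a PrefixResolver. A minimal, self-contained Spock sketch of that pattern, distilled from the QueryRestControllerSpec changes above (illustrative only; the class name FacadeDelegationExampleSpec is made up, all other names are taken from the diff):

/* Sketch only: the facade-delegation test pattern used by the reworked controller specs. */
package org.onap.cps.rest.controller

import com.fasterxml.jackson.databind.ObjectMapper
import org.onap.cps.api.CpsFacade
import org.onap.cps.utils.JsonObjectMapper
import org.spockframework.spring.SpringBean
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.beans.factory.annotation.Value
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest
import org.springframework.http.HttpStatus
import org.springframework.test.web.servlet.MockMvc
import spock.lang.Specification

import static org.onap.cps.api.parameters.FetchDescendantsOption.OMIT_DESCENDANTS
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get

@WebMvcTest(QueryRestController)
class FacadeDelegationExampleSpec extends Specification {

    @SpringBean
    CpsFacade mockCpsFacade = Mock()

    @SpringBean
    JsonObjectMapper jsonObjectMapper = new JsonObjectMapper(new ObjectMapper())

    @Autowired
    MockMvc mvc

    @Value('${rest.api.cps-base-path}')
    def basePath

    def 'Query endpoint delegates to the facade and serialises its map result.'() {
        given: 'the v1 query endpoint'
            def endpoint = "$basePath/v1/dataspaces/my_dataspace/anchors/my_anchor/nodes/query"
        when: 'a query request is performed without asking for descendants'
            def response = mvc.perform(get(endpoint).param('cps-path', 'my/path').param('include-descendants', ''))
                .andReturn().response
        then: 'the facade is called once and returns one data node as a map'
            1 * mockCpsFacade.executeAnchorQuery('my_dataspace', 'my_anchor', 'my/path', OMIT_DESCENDANTS) >> [['prefixedPath': ['path': ['leaf': 'value']]]]
        and: 'the response is the serialised list of maps'
            assert response.status == HttpStatus.OK.value()
            assert response.contentAsString == '[{"prefixedPath":{"path":{"leaf":"value"}}}]'
    }
}

The stubbed map stands in for what PrefixResolver and the DataNode-to-JSON mapping used to produce in the deleted tests, which is why those collaborators disappear from the specs' @SpringBean declarations in the diffs above.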
diff --git a/cps-ri/src/main/java/org/onap/cps/ri/CpsAdminPersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/ri/CpsAdminPersistenceServiceImpl.java index 494d6919da..588a639ab8 100755 --- a/cps-ri/src/main/java/org/onap/cps/ri/CpsAdminPersistenceServiceImpl.java +++ b/cps-ri/src/main/java/org/onap/cps/ri/CpsAdminPersistenceServiceImpl.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2020-2024 Nordix Foundation. + * Copyright (C) 2020-2025 Nordix Foundation. * Modifications Copyright (C) 2020-2022 Bell Canada. * Modifications Copyright (C) 2021 Pantheon.tech * Modifications Copyright (C) 2022 TechMahindra Ltd. @@ -91,10 +91,10 @@ public class CpsAdminPersistenceServiceImpl implements CpsAdminPersistenceServic @Override public void createAnchor(final String dataspaceName, final String schemaSetName, final String anchorName) { - final var dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final var schemaSetEntity = - schemaSetRepository.getByDataspaceAndName(dataspaceEntity, schemaSetName); - final var anchorEntity = AnchorEntity.builder() + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final SchemaSetEntity schemaSetEntity = schemaSetRepository + .getByDataspaceAndName(dataspaceEntity, schemaSetName); + final AnchorEntity anchorEntity = AnchorEntity.builder() .name(anchorName) .dataspace(dataspaceEntity) .schemaSet(schemaSetEntity) @@ -114,7 +114,7 @@ public class CpsAdminPersistenceServiceImpl implements CpsAdminPersistenceServic @Override public Collection<Anchor> getAnchors(final String dataspaceName) { - final var dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); final Collection<AnchorEntity> anchorEntities = anchorRepository.findAllByDataspace(dataspaceEntity); return anchorEntities.stream().map(CpsAdminPersistenceServiceImpl::toAnchor).collect(Collectors.toSet()); } @@ -154,14 +154,14 @@ public class CpsAdminPersistenceServiceImpl implements CpsAdminPersistenceServic @Transactional @Override public void deleteAnchor(final String dataspaceName, final String anchorName) { - final var anchorEntity = getAnchorEntity(dataspaceName, anchorName); + final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); anchorRepository.delete(anchorEntity); } @Transactional @Override public void deleteAnchors(final String dataspaceName, final Collection<String> anchorNames) { - final var dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); anchorRepository.deleteAllByDataspaceAndNameIn(dataspaceEntity, anchorNames); } @@ -178,7 +178,7 @@ public class CpsAdminPersistenceServiceImpl implements CpsAdminPersistenceServic } private AnchorEntity getAnchorEntity(final String dataspaceName, final String anchorName) { - final var dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); return anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName); } diff --git a/cps-ri/src/main/java/org/onap/cps/ri/CpsDataPersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/ri/CpsDataPersistenceServiceImpl.java index c43c8e2999..472da34833 100644 --- a/cps-ri/src/main/java/org/onap/cps/ri/CpsDataPersistenceServiceImpl.java +++ 
b/cps-ri/src/main/java/org/onap/cps/ri/CpsDataPersistenceServiceImpl.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation + * Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2021 Pantheon.tech * Modifications Copyright (C) 2020-2022 Bell Canada. * Modifications Copyright (C) 2022-2023 TechMahindra Ltd. @@ -23,6 +23,7 @@ package org.onap.cps.ri; +import static org.onap.cps.api.CpsQueryService.NO_LIMIT; import static org.onap.cps.api.parameters.PaginationOption.NO_PAGINATION; import com.google.common.collect.ImmutableSet; @@ -50,12 +51,12 @@ import org.onap.cps.api.exceptions.CpsPathException; import org.onap.cps.api.exceptions.DataNodeNotFoundException; import org.onap.cps.api.exceptions.DataNodeNotFoundExceptionBatch; import org.onap.cps.api.model.DataNode; -import org.onap.cps.api.model.DataNodeBuilder; import org.onap.cps.api.parameters.FetchDescendantsOption; import org.onap.cps.api.parameters.PaginationOption; import org.onap.cps.cpspath.parser.CpsPathQuery; import org.onap.cps.cpspath.parser.CpsPathUtil; import org.onap.cps.cpspath.parser.PathParsingException; +import org.onap.cps.impl.DataNodeBuilder; import org.onap.cps.ri.models.AnchorEntity; import org.onap.cps.ri.models.DataspaceEntity; import org.onap.cps.ri.models.FragmentEntity; @@ -222,14 +223,36 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService description = "Time taken to query data nodes") public List<DataNode> queryDataNodes(final String dataspaceName, final String anchorName, final String cpsPath, final FetchDescendantsOption fetchDescendantsOption) { + return queryDataNodes(dataspaceName, anchorName, cpsPath, fetchDescendantsOption, NO_LIMIT); + } + + @Override + public List<DataNode> queryDataNodes(final String dataspaceName, + final String anchorName, + final String cpsPath, + final FetchDescendantsOption fetchDescendantsOption, + final int queryResultLimit) { final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); final CpsPathQuery cpsPathQuery = getCpsPathQuery(cpsPath); final Collection<FragmentEntity> fragmentEntities = - fragmentRepository.findByAnchorAndCpsPath(anchorEntity, cpsPathQuery); + fragmentRepository.findByAnchorAndCpsPath(anchorEntity, cpsPathQuery, queryResultLimit); return createDataNodesFromFragmentEntities(fetchDescendantsOption, fragmentEntities); } @Override + public <T> Set<T> queryDataLeaf(final String dataspaceName, final String anchorName, final String cpsPath, + final int queryResultLimit, final Class<T> targetClass) { + final CpsPathQuery cpsPathQuery = getCpsPathQuery(cpsPath); + if (!cpsPathQuery.hasAttributeAxis()) { + throw new IllegalArgumentException( + "Only Cps Path Queries with attribute-axis are supported by queryDataLeaf"); + } + final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); + return fragmentRepository.findAttributeValuesByAnchorAndCpsPath(anchorEntity, cpsPathQuery, + cpsPathQuery.getAttributeAxisAttributeName(), queryResultLimit, targetClass); + } + + @Override @Timed(value = "cps.data.persistence.service.datanode.query.anchors", description = "Time taken to query data nodes across all anchors or list of anchors") public List<DataNode> queryDataNodesAcrossAnchors(final String dataspaceName, final String cpsPath, diff --git a/cps-ri/src/main/java/org/onap/cps/ri/CpsModulePersistenceServiceImpl.java 
b/cps-ri/src/main/java/org/onap/cps/ri/CpsModulePersistenceServiceImpl.java index cf9fb021a6..412c6f9c0b 100755 --- a/cps-ri/src/main/java/org/onap/cps/ri/CpsModulePersistenceServiceImpl.java +++ b/cps-ri/src/main/java/org/onap/cps/ri/CpsModulePersistenceServiceImpl.java @@ -24,6 +24,7 @@ package org.onap.cps.ri; import static com.google.common.base.Preconditions.checkNotNull; +import static org.opendaylight.yangtools.yang.common.YangConstants.RFC6020_YANG_FILE_EXTENSION; import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableSet; @@ -36,7 +37,6 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; @@ -70,10 +70,6 @@ import org.opendaylight.yangtools.yang.model.repo.api.YangTextSchemaSource; import org.opendaylight.yangtools.yang.parser.api.YangSyntaxErrorException; import org.opendaylight.yangtools.yang.parser.rfc7950.repo.YangModelDependencyInfo; import org.springframework.dao.DataIntegrityViolationException; -import org.springframework.retry.RetryContext; -import org.springframework.retry.annotation.Backoff; -import org.springframework.retry.annotation.Retryable; -import org.springframework.retry.support.RetrySynchronizationManager; import org.springframework.stereotype.Component; @Slf4j @@ -107,10 +103,9 @@ public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceServ @Override public Collection<ModuleReference> getYangResourceModuleReferences(final String dataspaceName) { - final Set<YangResourceModuleReference> yangResourceModuleReferenceList = + final Collection<YangResourceModuleReference> yangResourceModuleReferences = yangResourceRepository.findAllModuleReferencesByDataspace(dataspaceName); - return yangResourceModuleReferenceList.stream().map(CpsModulePersistenceServiceImpl::toModuleReference) - .collect(Collectors.toList()); + return yangResourceModuleReferences.stream().map(CpsModulePersistenceServiceImpl::toModuleReference).toList(); } @Override @@ -154,23 +149,24 @@ public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceServ @Override @Transactional - // A retry is made to store the schema set if it fails because of duplicated yang resource exception that - // can occur in case of specific concurrent requests. 
- @Retryable(retryFor = DuplicatedYangResourceException.class, maxAttempts = 5, backoff = - @Backoff(random = true, delay = 200, maxDelay = 2000, multiplier = 2)) - public void storeSchemaSet(final String dataspaceName, final String schemaSetName, - final Map<String, String> moduleReferenceNameToContentMap) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final Set<YangResourceEntity> yangResourceEntities = synchronizeYangResources(moduleReferenceNameToContentMap); - final SchemaSetEntity schemaSetEntity = new SchemaSetEntity(); - schemaSetEntity.setName(schemaSetName); - schemaSetEntity.setDataspace(dataspaceEntity); - schemaSetEntity.setYangResources(yangResourceEntities); - try { - schemaSetRepository.save(schemaSetEntity); - } catch (final DataIntegrityViolationException e) { - throw AlreadyDefinedException.forSchemaSet(schemaSetName, dataspaceName, e); - } + @Timed(value = "cps.module.persistence.schemaset.create", + description = "Time taken to store a schemaset (list of module references)") + public void createSchemaSet(final String dataspaceName, final String schemaSetName, + final Map<String, String> yangResourceContentPerName) { + final Set<YangResourceEntity> yangResourceEntities = synchronizeYangResources(yangResourceContentPerName); + createAndSaveSchemaSetEntity(dataspaceName, schemaSetName, yangResourceEntities); + } + + @Override + @Transactional + @Timed(value = "cps.module.persistence.schemaset.createFromNewAndExistingModules", + description = "Time taken to store a schemaset (from new and existing)") + public void createSchemaSetFromNewAndExistingModules(final String dataspaceName, final String schemaSetName, + final Map<String, String> newYangResourceContentPerName, + final Collection<ModuleReference> allModuleReferences) { + synchronizeYangResources(newYangResourceContentPerName); + final Set<YangResourceEntity> allYangResourceEntities = getYangResourceEntities(allModuleReferences); + createAndSaveSchemaSetEntity(dataspaceName, schemaSetName, allYangResourceEntities); } @Override @@ -189,26 +185,6 @@ public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceServ @Override @Transactional - // A retry is made to store the schema set if it fails because of duplicated yang resource exception that - // can occur in case of specific concurrent requests. 
- @Retryable(retryFor = DuplicatedYangResourceException.class, maxAttempts = 5, backoff = - @Backoff(random = true, delay = 200, maxDelay = 2000, multiplier = 2)) - @Timed(value = "cps.module.persistence.schemaset.store", - description = "Time taken to store a schemaset (list of module references") - public void storeSchemaSetFromModules(final String dataspaceName, final String schemaSetName, - final Map<String, String> newModuleNameToContentMap, - final Collection<ModuleReference> allModuleReferences) { - storeSchemaSet(dataspaceName, schemaSetName, newModuleNameToContentMap); - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final SchemaSetEntity schemaSetEntity = - schemaSetRepository.getByDataspaceAndName(dataspaceEntity, schemaSetName); - final List<Integer> allYangResourceIds = - yangResourceRepository.getResourceIdsByModuleReferences(allModuleReferences); - yangResourceRepository.insertSchemaSetIdYangResourceId(schemaSetEntity.getId(), allYangResourceIds); - } - - @Override - @Transactional public void deleteSchemaSet(final String dataspaceName, final String schemaSetName) { final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); final SchemaSetEntity schemaSetEntity = @@ -225,20 +201,23 @@ public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceServ @Override @Transactional - public void updateSchemaSetFromModules(final String dataspaceName, final String schemaSetName, - final Map<String, String> newModuleNameToContentMap, - final Collection<ModuleReference> allModuleReferences) { + public void updateSchemaSetFromNewAndExistingModules(final String dataspaceName, final String schemaSetName, + final Map<String, String> newYangResourcesPerName, + final Collection<ModuleReference> allModuleReferences) { final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); final SchemaSetEntity schemaSetEntity = schemaSetRepository.getByDataspaceAndName(dataspaceEntity, schemaSetName); - storeAndLinkNewModules(newModuleNameToContentMap, schemaSetEntity); - updateAllModuleReferences(allModuleReferences, schemaSetEntity.getId()); + synchronizeYangResources(newYangResourcesPerName); + final Set<YangResourceEntity> allYangResourceEntities = getYangResourceEntities(allModuleReferences); + schemaSetEntity.setYangResources(allYangResourceEntities); + schemaSetRepository.save(schemaSetEntity); } @Override @Transactional - public void deleteAllUnusedYangModuleData() { - schemaSetRepository.deleteOrphanedSchemaSets(); + public void deleteAllUnusedYangModuleData(final String dataspaceName) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + schemaSetRepository.deleteOrphanedSchemaSets(dataspaceEntity.getId()); yangResourceRepository.deleteOrphanedYangResources(); } @@ -248,56 +227,66 @@ public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceServ return moduleReferenceRepository.identifyNewModuleReferences(moduleReferencesToCheck); } - private Set<YangResourceEntity> synchronizeYangResources( - final Map<String, String> moduleReferenceNameToContentMap) { - final Map<String, YangResourceEntity> checksumToEntityMap = moduleReferenceNameToContentMap.entrySet().stream() - .map(entry -> { - final String checksum = DigestUtils.sha256Hex(entry.getValue().getBytes(StandardCharsets.UTF_8)); - final Map<String, String> moduleNameAndRevisionMap = createModuleNameAndRevisionMap(entry.getKey(), - entry.getValue()); - final YangResourceEntity 
yangResourceEntity = new YangResourceEntity(); - yangResourceEntity.setFileName(entry.getKey()); - yangResourceEntity.setContent(entry.getValue()); - yangResourceEntity.setModuleName(moduleNameAndRevisionMap.get("moduleName")); - yangResourceEntity.setRevision(moduleNameAndRevisionMap.get("revision")); - yangResourceEntity.setChecksum(checksum); - return yangResourceEntity; - }) - .collect(Collectors.toMap( - YangResourceEntity::getChecksum, - entity -> entity - )); + private Set<YangResourceEntity> synchronizeYangResources(final Map<String, String> yangResourceContentPerName) { + final Map<String, YangResourceEntity> yangResourceEntitiesPerChecksum = + getYangResourceEntityPerChecksum(yangResourceContentPerName); final List<YangResourceEntity> existingYangResourceEntities = - yangResourceRepository.findAllByChecksumIn(checksumToEntityMap.keySet()); - existingYangResourceEntities.forEach(yangFile -> checksumToEntityMap.remove(yangFile.getChecksum())); + yangResourceRepository.findAllByChecksumIn(yangResourceEntitiesPerChecksum.keySet()); + + existingYangResourceEntities.forEach(exist -> yangResourceEntitiesPerChecksum.remove(exist.getChecksum())); + final Collection<YangResourceEntity> newYangResourceEntities = yangResourceEntitiesPerChecksum.values(); - final Collection<YangResourceEntity> newYangResourceEntities = checksumToEntityMap.values(); if (!newYangResourceEntities.isEmpty()) { try { yangResourceRepository.saveAll(newYangResourceEntities); } catch (final DataIntegrityViolationException dataIntegrityViolationException) { - // Throw a CPS duplicated Yang resource exception if the cause of the error is a yang checksum - // database constraint violation. - // If it is not, then throw the original exception - final Optional<DuplicatedYangResourceException> convertedException = - convertToDuplicatedYangResourceException( - dataIntegrityViolationException, newYangResourceEntities); - convertedException.ifPresent( - e -> { - final RetryContext retryContext = RetrySynchronizationManager.getContext(); - int retryCount = retryContext == null ? 0 : retryContext.getRetryCount(); - log.warn("Cannot persist duplicated yang resource. System will attempt this method " - + "up to 5 times. Current retry count : {}", ++retryCount, e); - }); - throw convertedException.isPresent() ? 
convertedException.get() : dataIntegrityViolationException; + convertExceptionIfNeeded(dataIntegrityViolationException, newYangResourceEntities); } } + // return ALL yang resourceEntities return ImmutableSet.<YangResourceEntity>builder() - .addAll(existingYangResourceEntities) - .addAll(newYangResourceEntities) - .build(); + .addAll(existingYangResourceEntities) + .addAll(newYangResourceEntities) + .build(); + } + + private static Map<String, YangResourceEntity> getYangResourceEntityPerChecksum( + final Map<String, String> yangResourceContentPerName) { + return yangResourceContentPerName.entrySet().stream().map(entry -> { + final String checksum = DigestUtils.sha256Hex(entry.getValue().getBytes(StandardCharsets.UTF_8)); + final Map<String, String> moduleNameAndRevisionMap = createModuleNameAndRevisionMap(entry.getKey(), + entry.getValue()); + final YangResourceEntity yangResourceEntity = new YangResourceEntity(); + yangResourceEntity.setContent(entry.getValue()); + final String moduleName = moduleNameAndRevisionMap.get("moduleName"); + final String revision = moduleNameAndRevisionMap.get("revision"); + yangResourceEntity.setModuleName(moduleName); + yangResourceEntity.setRevision(revision); + yangResourceEntity.setFileName(moduleName + "@" + revision + RFC6020_YANG_FILE_EXTENSION); + yangResourceEntity.setChecksum(checksum); + return yangResourceEntity; + }) + .collect(Collectors.toMap( + YangResourceEntity::getChecksum, + entity -> entity + )); + } + + private void createAndSaveSchemaSetEntity(final String dataspaceName, + final String schemaSetName, + final Set<YangResourceEntity> yangResourceEntities) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final SchemaSetEntity schemaSetEntity = new SchemaSetEntity(); + schemaSetEntity.setName(schemaSetName); + schemaSetEntity.setDataspace(dataspaceEntity); + schemaSetEntity.setYangResources(yangResourceEntities); + try { + schemaSetRepository.save(schemaSetEntity); + } catch (final DataIntegrityViolationException e) { + throw AlreadyDefinedException.forSchemaSet(schemaSetName, dataspaceName, e); + } } private static Map<String, String> createModuleNameAndRevisionMap(final String sourceName, final String source) { @@ -342,6 +331,14 @@ public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceServ return RevisionSourceIdentifier.create(sourceName); } + private void convertExceptionIfNeeded(final DataIntegrityViolationException dataIntegrityViolationException, + final Collection<YangResourceEntity> newYangResourceEntities) { + final Optional<DuplicatedYangResourceException> convertedException = + convertToDuplicatedYangResourceException( + dataIntegrityViolationException, newYangResourceEntities); + throw convertedException.isPresent() ? convertedException.get() : dataIntegrityViolationException; + } + /** * Convert the specified data integrity violation exception into a CPS duplicated Yang resource exception * if the cause of the error is a yang checksum database constraint violation. 
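The javadoc above states the conversion rule: a DataIntegrityViolationException only becomes a CPS DuplicatedYangResourceException when its cause is the yang checksum unique-constraint violation. A minimal, illustrative sketch of that check follows; it assumes the constraint name yang_resource_checksum_key (the value used in this change's test data), and the class and method names are placeholders rather than the project's exact code.

    import org.hibernate.exception.ConstraintViolationException;
    import org.springframework.dao.DataIntegrityViolationException;

    // Sketch only: detects whether a data integrity violation was caused by the yang resource
    // checksum unique constraint, i.e. the same module content was persisted concurrently.
    class ChecksumConstraintClassifier {

        private static final String CHECKSUM_CONSTRAINT_NAME = "yang_resource_checksum_key";

        static boolean isDuplicatedYangResource(final DataIntegrityViolationException dataIntegrityViolationException) {
            return dataIntegrityViolationException.getCause() instanceof ConstraintViolationException constraintViolation
                    && CHECKSUM_CONSTRAINT_NAME.equals(constraintViolation.getConstraintName());
        }
    }

When such a check matches, convertExceptionIfNeeded (shown in the hunk above) rethrows the converted exception; otherwise the original DataIntegrityViolationException is propagated unchanged.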
@@ -391,6 +388,13 @@ public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceServ return "no checksum found"; } + private Set<YangResourceEntity> getYangResourceEntities(final Collection<ModuleReference> moduleReferences) { + return moduleReferences.stream().map(moduleReference -> + yangResourceRepository + .findByModuleNameAndRevision(moduleReference.getModuleName(), moduleReference.getRevision())) + .collect(Collectors.toSet()); + } + private static ModuleReference toModuleReference( final YangResourceModuleReference yangResourceModuleReference) { return ModuleReference.builder() @@ -411,20 +415,4 @@ public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceServ .dataspaceName(schemaSetEntity.getDataspace().getName()).build(); } - private void storeAndLinkNewModules(final Map<String, String> newModuleNameToContentMap, - final SchemaSetEntity schemaSetEntity) { - final Set<YangResourceEntity> yangResourceEntities - = new HashSet<>(synchronizeYangResources(newModuleNameToContentMap)); - schemaSetEntity.setYangResources(yangResourceEntities); - schemaSetRepository.save(schemaSetEntity); - } - - private void updateAllModuleReferences(final Collection<ModuleReference> allModuleReferences, - final Integer schemaSetEntityId) { - yangResourceRepository.deleteSchemaSetYangResourceForSchemaSetId(schemaSetEntityId); - final List<Integer> allYangResourceIds = - yangResourceRepository.getResourceIdsByModuleReferences(allModuleReferences); - yangResourceRepository.insertSchemaSetIdYangResourceId(schemaSetEntityId, allYangResourceIds); - } - } diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentPrefetchRepository.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentPrefetchRepository.java index 87d7697df4..75853319da 100644 --- a/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentPrefetchRepository.java +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentPrefetchRepository.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023 Nordix Foundation. + * Copyright (C) 2023 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentPrefetchRepositoryImpl.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentPrefetchRepositoryImpl.java index 6b95213a13..decd864611 100644 --- a/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentPrefetchRepositoryImpl.java +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentPrefetchRepositoryImpl.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023 Nordix Foundation. + * Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -52,8 +52,7 @@ public class FragmentPrefetchRepositoryImpl implements FragmentPrefetchRepositor return proxiedFragmentEntities; } - final List<Long> fragmentEntityIds = proxiedFragmentEntities.stream() - .map(FragmentEntity::getId).collect(Collectors.toList()); + final List<Long> fragmentEntityIds = proxiedFragmentEntities.stream().map(FragmentEntity::getId).toList(); final Map<Long, AnchorEntity> anchorEntityPerId = proxiedFragmentEntities.stream() .map(FragmentEntity::getAnchor) @@ -69,19 +68,20 @@ public class FragmentPrefetchRepositoryImpl implements FragmentPrefetchRepositor final Collection<Long> fragmentEntityIds, final Map<Long, AnchorEntity> anchorEntityPerId, final int maxDepth) { - final String sql - = "WITH RECURSIVE parent_search AS (" - + " SELECT id, 0 AS depth " - + " FROM fragment " - + " WHERE id = ANY (?) " - + " UNION " - + " SELECT child.id, depth + 1 " - + " FROM fragment child INNER JOIN parent_search parent ON child.parent_id = parent.id" - + " WHERE depth < ?" - + ") " - + "SELECT fragment.id, anchor_id AS anchorId, xpath, parent_id AS parentId, " - + " CAST(attributes AS TEXT) AS attributes " - + "FROM fragment INNER JOIN parent_search ON fragment.id = parent_search.id"; + final String sql = """ + WITH RECURSIVE fragment_hierarchy AS ( + SELECT id, anchor_id, xpath, parent_id, attributes, 0 AS depth + FROM fragment + WHERE id = ANY(?) + UNION + SELECT child.id, child.anchor_id, child.xpath, child.parent_id, child.attributes, depth + 1 + FROM fragment child + INNER JOIN fragment_hierarchy parent ON child.parent_id = parent.id + WHERE depth < ? + ) + SELECT id, anchor_id AS anchorId, xpath, parent_id AS parentId, attributes + FROM fragment_hierarchy; + """; final PreparedStatementSetter preparedStatementSetter = preparedStatement -> { final Connection connection = preparedStatement.getConnection(); diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentQueryBuilder.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentQueryBuilder.java index 3f3ca79900..3b88748545 100644 --- a/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentQueryBuilder.java +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentQueryBuilder.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2023 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -51,21 +51,33 @@ public class FragmentQueryBuilder { private EntityManager entityManager; /** - * Create a sql query to retrieve by anchor(id) and cps path. + * Create a sql query to retrieve by anchor(id) and cps path with an optional queryResultLimit on results. 
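The rewritten fragment_hierarchy query above walks from a set of fragment ids down to all of their descendants, stopping at a maximum depth. A simplified sketch of the same idea using Spring's JdbcTemplate is shown below; the fragment table and parent_id column follow the diff, while the class name, method name and returned columns are illustrative only, not the project's code.

    import java.sql.Array;
    import java.util.List;
    import java.util.Map;
    import org.springframework.jdbc.core.JdbcTemplate;

    // Sketch only: bounded-depth descendant fetch with a recursive CTE, in the spirit of the
    // fragment_hierarchy query above. Root rows report parentId as 0 because getLong maps SQL NULL to zero.
    class DescendantFetcher {

        private final JdbcTemplate jdbcTemplate;

        DescendantFetcher(final JdbcTemplate jdbcTemplate) {
            this.jdbcTemplate = jdbcTemplate;
        }

        List<Map<String, Object>> fetchDescendants(final Long[] rootFragmentIds, final int maxDepth) {
            final String sql = """
                WITH RECURSIVE hierarchy AS (
                    SELECT id, parent_id, 0 AS depth FROM fragment WHERE id = ANY(?)
                    UNION
                    SELECT child.id, child.parent_id, depth + 1
                    FROM fragment child INNER JOIN hierarchy parent ON child.parent_id = parent.id
                    WHERE depth < ?
                )
                SELECT id, parent_id FROM hierarchy
                """;
            return jdbcTemplate.query(sql, preparedStatement -> {
                final Array idArray = preparedStatement.getConnection().createArrayOf("bigint", rootFragmentIds);
                preparedStatement.setArray(1, idArray);
                preparedStatement.setInt(2, maxDepth);
            }, (resultSet, rowNumber) -> Map.of(
                "id", resultSet.getLong("id"),
                "parentId", resultSet.getLong("parent_id")));
        }
    }

The UNION in the recursive member also acts as a cycle guard, since already-visited rows are deduplicated rather than revisited.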
* * @param anchorEntity the anchor * @param cpsPathQuery the cps path query to be transformed into a sql query + * @param queryResultLimit maximum number of returned entities + * (if the queryResultLimit is less than 1 the method returns all related entities) + * + * @return an executable query object - public Query getQueryForAnchorAndCpsPath(final AnchorEntity anchorEntity, final CpsPathQuery cpsPathQuery) { + public Query getQueryForAnchorAndCpsPath(final AnchorEntity anchorEntity, + final CpsPathQuery cpsPathQuery, + final int queryResultLimit) { final StringBuilder sqlStringBuilder = new StringBuilder(); final Map<String, Object> queryParameters = new HashMap<>(); addSearchPrefix(cpsPathQuery, sqlStringBuilder); addWhereClauseForAnchor(anchorEntity, sqlStringBuilder, queryParameters); + if (cpsPathQuery.hasAttributeAxis() && !cpsPathQuery.hasAncestorAxis()) { + sqlStringBuilder.append(" AND jsonb_exists(fragment.attributes, :attributeName)"); + } addNodeSearchConditions(cpsPathQuery, sqlStringBuilder, queryParameters, false); addSearchSuffix(cpsPathQuery, sqlStringBuilder, queryParameters); - + addLimitClause(sqlStringBuilder, queryParameters, queryResultLimit); + if (cpsPathQuery.hasAttributeAxis()) { + queryParameters.put("attributeName", cpsPathQuery.getAttributeAxisAttributeName()); + return getQuery(sqlStringBuilder.toString(), queryParameters, String.class); + } return getQuery(sqlStringBuilder.toString(), queryParameters, FragmentEntity.class); } @@ -118,7 +130,7 @@ public class FragmentQueryBuilder { private Query getQuery(final String sql, final Map<String, Object> queryParameters, final Class<?> returnType) { final Query query = entityManager.createNativeQuery(sql, returnType); - setQueryParameters(query, queryParameters); + queryParameters.forEach(query::setParameter); return query; } @@ -219,6 +231,15 @@ public class FragmentQueryBuilder { } } + private static void addLimitClause(final StringBuilder sqlStringBuilder, + final Map<String, Object> queryParameters, + final int queryResultLimit) { + if (queryResultLimit > 0) { + sqlStringBuilder.append(" LIMIT :queryResultLimit"); + queryParameters.put("queryResultLimit", queryResultLimit); + } + } + private static Integer getTextValueAsInt(final CpsPathQuery cpsPathQuery) { try { return Integer.parseInt(cpsPathQuery.getTextFunctionConditionValue()); @@ -297,7 +318,10 @@ public class FragmentQueryBuilder { WHERE parentFragment.id IN ( SELECT parent_id FROM fragment"""); } else { - sqlStringBuilder.append("SELECT fragment.* FROM fragment"); + final String fieldsToSelect = cpsPathQuery.hasAttributeAxis() + ?
"DISTINCT (attributes -> :attributeName)" + : "fragment.*"; + sqlStringBuilder.append("SELECT ").append(fieldsToSelect).append(" FROM fragment"); } } @@ -312,9 +336,14 @@ public class FragmentQueryBuilder { FROM fragment JOIN ancestors ON ancestors.parent_id = fragment.id ) - SELECT * FROM ancestors - WHERE"""); - + """); + if (cpsPathQuery.hasAttributeAxis()) { + sqlStringBuilder.append(""" + SELECT DISTINCT (attributes -> :attributeName) FROM ancestors WHERE + jsonb_exists(ancestors.attributes, :attributeName) AND"""); + } else { + sqlStringBuilder.append("SELECT * FROM ancestors WHERE"); + } final String ancestorPath = DESCENDANT_PATH + cpsPathQuery.getAncestorSchemaNodeIdentifier(); final CpsPathQuery ancestorCpsPathQuery = CpsPathUtil.getCpsPathQuery(ancestorPath); addAncestorNodeSearchCondition(ancestorCpsPathQuery, sqlStringBuilder, queryParameters); @@ -338,10 +367,4 @@ public class FragmentQueryBuilder { return stringSubstitutor.replace(template); } - private static void setQueryParameters(final Query query, final Map<String, Object> queryParameters) { - for (final Map.Entry<String, Object> queryParameter : queryParameters.entrySet()) { - query.setParameter(queryParameter.getKey(), queryParameter.getValue()); - } - } - } diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepositoryCpsPathQuery.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepositoryCpsPathQuery.java index 9c1929eaf7..a24b280bf8 100644 --- a/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepositoryCpsPathQuery.java +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepositoryCpsPathQuery.java @@ -1,6 +1,6 @@ /*- * ============LICENSE_START======================================================= - * Copyright (C) 2021-2023 Nordix Foundation. + * Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2023 TechMahindra Ltd. 
* ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -22,6 +22,7 @@ package org.onap.cps.ri.repository; import java.util.List; +import java.util.Set; import org.onap.cps.api.parameters.PaginationOption; import org.onap.cps.cpspath.parser.CpsPathQuery; import org.onap.cps.ri.models.AnchorEntity; @@ -29,7 +30,12 @@ import org.onap.cps.ri.models.DataspaceEntity; import org.onap.cps.ri.models.FragmentEntity; public interface FragmentRepositoryCpsPathQuery { - List<FragmentEntity> findByAnchorAndCpsPath(AnchorEntity anchorEntity, CpsPathQuery cpsPathQuery); + + List<FragmentEntity> findByAnchorAndCpsPath(AnchorEntity anchorEntity, CpsPathQuery cpsPathQuery, + int queryResultLimit); + + <T> Set<T> findAttributeValuesByAnchorAndCpsPath(AnchorEntity anchorEntity, CpsPathQuery cpsPathQuery, + String attributeName, int queryResultLimit, Class<T> targetClass); List<FragmentEntity> findByDataspaceAndCpsPath(DataspaceEntity dataspaceEntity, CpsPathQuery cpsPathQuery, List<Long> anchorIds); diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepositoryCpsPathQueryImpl.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepositoryCpsPathQueryImpl.java index e8c2725670..cc8055d3c1 100644 --- a/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepositoryCpsPathQueryImpl.java +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepositoryCpsPathQueryImpl.java @@ -1,6 +1,6 @@ /*- * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation. + * Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2023 TechMahindra Ltd. 
* ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -24,6 +24,8 @@ package org.onap.cps.ri.repository; import jakarta.persistence.Query; import jakarta.transaction.Transactional; import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.onap.cps.api.parameters.PaginationOption; @@ -31,25 +33,47 @@ import org.onap.cps.cpspath.parser.CpsPathQuery; import org.onap.cps.ri.models.AnchorEntity; import org.onap.cps.ri.models.DataspaceEntity; import org.onap.cps.ri.models.FragmentEntity; +import org.onap.cps.utils.JsonObjectMapper; @RequiredArgsConstructor @Slf4j public class FragmentRepositoryCpsPathQueryImpl implements FragmentRepositoryCpsPathQuery { private final FragmentQueryBuilder fragmentQueryBuilder; + private final JsonObjectMapper jsonObjectMapper; @Override @Transactional public List<FragmentEntity> findByAnchorAndCpsPath(final AnchorEntity anchorEntity, - final CpsPathQuery cpsPathQuery) { - final Query query = fragmentQueryBuilder.getQueryForAnchorAndCpsPath(anchorEntity, cpsPathQuery); + final CpsPathQuery cpsPathQuery, + final int queryResultLimit) { + final Query query = fragmentQueryBuilder.getQueryForAnchorAndCpsPath(anchorEntity, cpsPathQuery, + queryResultLimit); final List<FragmentEntity> fragmentEntities = query.getResultList(); log.debug("Fetched {} fragment entities by anchor and cps path.", fragmentEntities.size()); + if (queryResultLimit > 0) { + log.debug("Result limited to {} entries", queryResultLimit); + } return fragmentEntities; } @Override @Transactional + public <T> Set<T> findAttributeValuesByAnchorAndCpsPath(final AnchorEntity anchorEntity, + final CpsPathQuery cpsPathQuery, + final String attributeName, + final int queryResultLimit, + final Class<T> targetClass) { + final Query query = fragmentQueryBuilder.getQueryForAnchorAndCpsPath(anchorEntity, cpsPathQuery, + queryResultLimit); + final List<String> jsonResultList = query.getResultList(); + return jsonResultList.stream() + .map(jsonValue -> jsonObjectMapper.convertJsonString(jsonValue, targetClass)) + .collect(Collectors.toSet()); + } + + @Override + @Transactional public List<FragmentEntity> findByDataspaceAndCpsPath(final DataspaceEntity dataspaceEntity, final CpsPathQuery cpsPathQuery, final List<Long> anchorIds) { final Query query = fragmentQueryBuilder.getQueryForDataspaceAndCpsPath( diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetRepository.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetRepository.java index b8dd7b755c..fdd72624ba 100644 --- a/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetRepository.java +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetRepository.java @@ -83,8 +83,8 @@ public interface SchemaSetRepository extends JpaRepository<SchemaSetEntity, Inte */ @Modifying @Query(value = """ - DELETE FROM schema_set WHERE NOT EXISTS - (SELECT 1 FROM anchor WHERE anchor.schema_set_id = schema_set.id) + DELETE FROM schema_set WHERE schema_set.dataspace_id = :dataspaceId AND + NOT EXISTS (SELECT 1 FROM anchor WHERE anchor.schema_set_id = schema_set.id) """, nativeQuery = true) - void deleteOrphanedSchemaSets(); + void deleteOrphanedSchemaSets(@Param("dataspaceId") final int dataspaceId); } diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetYangResourceRepositoryImpl.java 
b/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetYangResourceRepositoryImpl.java deleted file mode 100644 index 989809af5b..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetYangResourceRepositoryImpl.java +++ /dev/null @@ -1,58 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * Copyright (C) 2021-2023 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.ri.repository; - -import jakarta.persistence.EntityManager; -import jakarta.persistence.PersistenceContext; -import java.sql.PreparedStatement; -import java.util.List; -import org.hibernate.Session; -import org.springframework.transaction.annotation.Transactional; - -@Transactional -public class SchemaSetYangResourceRepositoryImpl implements SchemaSetYangResourceRepository { - - private static final int MAX_INSERT_BATCH_SIZE = 100; - - @PersistenceContext - private EntityManager entityManager; - - @Override - public void insertSchemaSetIdYangResourceId(final Integer schemaSetId, final List<Integer> yangResourceIds) { - final Session session = entityManager.unwrap(Session.class); - session.doWork(connection -> { - try (PreparedStatement preparedStatement = connection.prepareStatement( - "INSERT INTO SCHEMA_SET_YANG_RESOURCES (SCHEMA_SET_ID, YANG_RESOURCE_ID) VALUES ( ?, ?)")) { - int sqlQueryCount = 1; - for (final int yangResourceId : yangResourceIds) { - preparedStatement.setInt(1, schemaSetId); - preparedStatement.setInt(2, yangResourceId); - preparedStatement.addBatch(); - if (sqlQueryCount % MAX_INSERT_BATCH_SIZE == 0 || sqlQueryCount == yangResourceIds.size()) { - preparedStatement.executeBatch(); - } - sqlQueryCount++; - } - } - }); - } -} - diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceNativeRepositoryImpl.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceNativeRepositoryImpl.java deleted file mode 100644 index 34f1ee362a..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceNativeRepositoryImpl.java +++ /dev/null @@ -1,69 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.ri.repository; - -import jakarta.persistence.EntityManager; -import jakarta.persistence.PersistenceContext; -import jakarta.persistence.Query; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.StringJoiner; -import lombok.extern.slf4j.Slf4j; -import org.hibernate.type.StandardBasicTypes; -import org.onap.cps.api.model.ModuleReference; -import org.springframework.stereotype.Repository; -import org.springframework.transaction.annotation.Transactional; - -@Slf4j -@Repository -public class YangResourceNativeRepositoryImpl implements YangResourceNativeRepository { - - @PersistenceContext - private EntityManager entityManager; - - @Override - @Transactional - public List<Integer> getResourceIdsByModuleReferences(final Collection<ModuleReference> moduleReferences) { - if (moduleReferences.isEmpty()) { - return Collections.emptyList(); - } - final Query query = entityManager.createNativeQuery(getCombinedSelectSqlQuery(moduleReferences)) - .unwrap(org.hibernate.query.NativeQuery.class) - .addScalar("id", StandardBasicTypes.INTEGER); - final List<Integer> yangResourceIds = query.getResultList(); - if (yangResourceIds.size() != moduleReferences.size()) { - log.warn("ModuleReferences size : {} and QueryResult size : {}", moduleReferences.size(), - yangResourceIds.size()); - } - return yangResourceIds; - } - - private String getCombinedSelectSqlQuery(final Collection<ModuleReference> moduleReferences) { - final StringJoiner sqlQueryJoiner = new StringJoiner(" UNION ALL "); - moduleReferences.forEach(moduleReference -> - sqlQueryJoiner.add(String.format("SELECT id FROM yang_resource WHERE module_name='%s' and revision='%s'", - moduleReference.getModuleName(), - moduleReference.getRevision())) - ); - return sqlQueryJoiner.toString(); - } -} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceRepository.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceRepository.java index 628502f846..e36e376bc6 100644 --- a/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceRepository.java +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceRepository.java @@ -32,8 +32,10 @@ import org.springframework.data.repository.query.Param; import org.springframework.stereotype.Repository; @Repository -public interface YangResourceRepository extends JpaRepository<YangResourceEntity, Integer>, - YangResourceNativeRepository, SchemaSetYangResourceRepository { +public interface YangResourceRepository extends JpaRepository<YangResourceEntity, Integer> { + + YangResourceEntity findByModuleNameAndRevision(@Param("moduleName") String moduleName, + @Param("revision") String revision); List<YangResourceEntity> findAllByChecksumIn(Collection<String> checksums); @@ -88,10 +90,6 @@ public interface YangResourceRepository extends JpaRepository<YangResourceEntity @Param("moduleName") String moduleName, @Param("revision") String revision); @Modifying - @Query(value = "DELETE FROM schema_set_yang_resources WHERE schema_set_id = :schemaSetId", nativeQuery = true) - void deleteSchemaSetYangResourceForSchemaSetId(@Param("schemaSetId") int schemaSetId); - - @Modifying @Query(value = """ DELETE FROM yang_resource WHERE NOT EXISTS (SELECT 1 FROM schema_set_yang_resources WHERE 
schema_set_yang_resources.yang_resource_id = yang_resource.id) diff --git a/cps-ri/src/main/java/org/onap/cps/ri/utils/CpsSessionFactory.java b/cps-ri/src/main/java/org/onap/cps/ri/utils/CpsSessionFactory.java index c0291176f4..d4cd366157 100644 --- a/cps-ri/src/main/java/org/onap/cps/ri/utils/CpsSessionFactory.java +++ b/cps-ri/src/main/java/org/onap/cps/ri/utils/CpsSessionFactory.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022 Nordix Foundation + * Copyright (C) 2022-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,22 +20,20 @@ package org.onap.cps.ri.utils; +import lombok.RequiredArgsConstructor; import org.hibernate.HibernateException; import org.hibernate.Session; import org.hibernate.SessionFactory; -import org.onap.cps.ri.models.AnchorEntity; -import org.onap.cps.ri.models.DataspaceEntity; -import org.onap.cps.ri.models.SchemaSetEntity; -import org.onap.cps.ri.models.YangResourceEntity; import org.springframework.beans.factory.config.ConfigurableBeanFactory; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Component; @Component @Scope(ConfigurableBeanFactory.SCOPE_SINGLETON) +@RequiredArgsConstructor public class CpsSessionFactory { - private SessionFactory sessionFactory = null; + private final SessionFactory sessionFactory; /** * Open a session from session factory. @@ -44,7 +42,7 @@ public class CpsSessionFactory { * @throws HibernateException hibernate exception */ public Session openSession() throws HibernateException { - return getSessionFactory().openSession(); + return sessionFactory.openSession(); } /** @@ -53,18 +51,6 @@ public class CpsSessionFactory { * @throws HibernateException hibernate exception */ public void closeSessionFactory() throws HibernateException { - getSessionFactory().close(); - } - - private SessionFactory getSessionFactory() { - if (sessionFactory == null) { - sessionFactory = new org.hibernate.cfg.Configuration().configure("hibernate.cfg.xml") - .addAnnotatedClass(AnchorEntity.class) - .addAnnotatedClass(DataspaceEntity.class) - .addAnnotatedClass(SchemaSetEntity.class) - .addAnnotatedClass(YangResourceEntity.class) - .buildSessionFactory(); - } - return sessionFactory; + sessionFactory.close(); } } diff --git a/cps-ri/src/main/resources/changelog/changelog-master.yaml b/cps-ri/src/main/resources/changelog/changelog-master.yaml index 2011655c37..40a3e3e009 100644 --- a/cps-ri/src/main/resources/changelog/changelog-master.yaml +++ b/cps-ri/src/main/resources/changelog/changelog-master.yaml @@ -1,6 +1,6 @@ # ============LICENSE_START======================================================= # Copyright (c) 2021 Bell Canada. -# Modifications Copyright (C) 2022-2024 Nordix Foundation. +# Modifications Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -22,3 +22,9 @@ databaseChangeLog: file: changelog/db/changes/22-fragment-id-sequence.yaml - include: file: changelog/db/changes/23-yang-resource-index.yaml + - changeSet: + id: 24 + author: cps + changes: + - tagDatabase: + tag: 24-initial database tag diff --git a/cps-ri/src/main/resources/hibernate.cfg.xml b/cps-ri/src/main/resources/hibernate.cfg.xml deleted file mode 100644 index 1b822b9de9..0000000000 --- a/cps-ri/src/main/resources/hibernate.cfg.xml +++ /dev/null @@ -1,16 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<!DOCTYPE hibernate-configuration PUBLIC - "-//Hibernate/Hibernate Configuration DTD 3.0//EN" - "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd"> - -<hibernate-configuration> - <session-factory> - <property name="hibernate.connection.driver_class">org.postgresql.Driver</property> - <property name="hibernate.connection.url">jdbc:postgresql://${DB_HOST}:${DB_PORT:5432}/cpsdb</property> - <property name="hibernate.connection.username">${DB_USERNAME}</property> - <property name="hibernate.connection.password">${DB_PASSWORD}</property> - <property name="hibernate.dialect">org.hibernate.dialect.PostgreSQLDialect</property> - <property name="show_sql">true</property> - <property name="hibernate.hbm2ddl.auto">update</property> - </session-factory> -</hibernate-configuration>
\ No newline at end of file diff --git a/cps-ri/src/test/groovy/org/onap/cps/ri/CpsDataPersistenceServiceImplSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/ri/CpsDataPersistenceServiceImplSpec.groovy index c818f3ba1f..e927922acf 100644 --- a/cps-ri/src/test/groovy/org/onap/cps/ri/CpsDataPersistenceServiceImplSpec.groovy +++ b/cps-ri/src/test/groovy/org/onap/cps/ri/CpsDataPersistenceServiceImplSpec.groovy @@ -33,7 +33,7 @@ import org.onap.cps.api.parameters.FetchDescendantsOption import org.onap.cps.api.exceptions.ConcurrencyException import org.onap.cps.api.exceptions.DataValidationException import org.onap.cps.api.model.DataNode -import org.onap.cps.api.model.DataNodeBuilder +import org.onap.cps.impl.DataNodeBuilder import org.onap.cps.utils.JsonObjectMapper import org.springframework.dao.DataIntegrityViolationException import spock.lang.Specification diff --git a/cps-ri/src/test/groovy/org/onap/cps/ri/CpsModulePersistenceServiceConcurrencySpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/ri/CpsModulePersistenceServiceConcurrencySpec.groovy deleted file mode 100644 index 28a615b0e8..0000000000 --- a/cps-ri/src/test/groovy/org/onap/cps/ri/CpsModulePersistenceServiceConcurrencySpec.groovy +++ /dev/null @@ -1,145 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2022 Bell Canada. - * Modifications Copyright (C) 2021-2023 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ -package org.onap.cps.ri - -import org.hibernate.exception.ConstraintViolationException -import org.onap.cps.ri.models.DataspaceEntity -import org.onap.cps.ri.models.SchemaSetEntity -import org.onap.cps.ri.repository.DataspaceRepository -import org.onap.cps.ri.repository.ModuleReferenceRepository -import org.onap.cps.ri.repository.SchemaSetRepository -import org.onap.cps.ri.repository.YangResourceRepository -import org.onap.cps.spi.CpsAdminPersistenceService -import org.onap.cps.spi.CpsModulePersistenceService -import org.onap.cps.api.exceptions.DuplicatedYangResourceException -import org.onap.cps.api.model.ModuleReference -import org.spockframework.spring.SpringBean -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.boot.test.context.SpringBootTest -import org.springframework.dao.DataIntegrityViolationException -import org.springframework.retry.annotation.EnableRetry -import spock.lang.Specification - -import java.sql.SQLException - -@SpringBootTest(classes=[CpsModulePersistenceServiceImpl]) -@EnableRetry -class CpsModulePersistenceServiceConcurrencySpec extends Specification { - - @Autowired - CpsModulePersistenceService objectUnderTest - - @SpringBean - DataspaceRepository dataspaceRepository = Mock() - - @SpringBean - YangResourceRepository yangResourceRepository = Mock() - - @SpringBean - SchemaSetRepository schemaSetRepository = Mock() - - @SpringBean - CpsAdminPersistenceService cpsAdminPersistenceService = Mock() - - @SpringBean - ModuleReferenceRepository moduleReferenceRepository = Mock() - - def NEW_RESOURCE_NAME = 'some new resource' - def NEW_RESOURCE_CONTENT = 'module stores {\n' + - ' yang-version 1.1;\n' + - ' namespace "org:onap:ccsdk:sample";\n' + - '}' - - def newYangResourcesNameToContentMap = [(NEW_RESOURCE_NAME):NEW_RESOURCE_CONTENT] - - def yangResourceChecksum = 'b13faef573ed1374139d02c40d8ce09c80ea1dc70e63e464c1ed61568d48d539' - - def yangResourceChecksumDbConstraint = 'yang_resource_checksum_key' - - def sqlExceptionMessage = String.format('(checksum)=(%s)', yangResourceChecksum) - - def checksumIntegrityException = new DataIntegrityViolationException("checksum integrity exception", - new ConstraintViolationException('', new SQLException(sqlExceptionMessage), yangResourceChecksumDbConstraint)) - - def 'Store new schema set, maximum retries.'() { - given: 'no pre-existing schemaset in database' - dataspaceRepository.getByName(_) >> new DataspaceEntity() - yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList() - when: 'a new schemaset is stored' - objectUnderTest.storeSchemaSet('some dataspace', 'some new schema set', newYangResourcesNameToContentMap) - then: 'a duplicated yang resource exception is thrown ' - thrown(DuplicatedYangResourceException) - and: 'the system will attempt to save the data 5 times (because checksum integrity exception is thrown each time)' - 5 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException } - } - - def 'Store new schema set, succeed on third attempt.'() { - given: 'no pre-existing schemaset in database' - dataspaceRepository.getByName(_) >> new DataspaceEntity() - yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList() - when: 'a new schemaset is stored' - objectUnderTest.storeSchemaSet('some dataspace', 'some new schema set', newYangResourcesNameToContentMap) - then: 'no exception is thrown ' - 
noExceptionThrown() - and: 'the system will attempt to save the data 2 times with checksum integrity exception but then succeed' - 2 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException } - 1 * yangResourceRepository.saveAll(_) >> [] - } - - def 'Store schema set using modules, maximum retries.'() { - given: 'map of new modules, a list of existing modules, module reference' - def mapOfNewModules = [newModule1: 'module newmodule { yang-version 1.1; revision "2021-10-12" { } }'] - def moduleReferenceForExistingModule = new ModuleReference("test","2021-10-12") - def listOfExistingModulesModuleReference = [moduleReferenceForExistingModule] - and: 'no pre-existing schemaset in database' - dataspaceRepository.getByName(_) >> new DataspaceEntity() - yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList() - when: 'a new schemaset is stored from a module' - objectUnderTest.storeSchemaSetFromModules('some dataspace', 'some new schema set' , mapOfNewModules, listOfExistingModulesModuleReference) - then: 'a duplicated yang resource exception is thrown ' - thrown(DuplicatedYangResourceException) - and: 'the system will attempt to save the data 5 times (because checksum integrity exception is thrown each time)' - 5 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException } - } - - def 'Store schema set using modules, succeed on third attempt.'() { - given: 'map of new modules, a list of existing modules, module reference' - def mapOfNewModules = [newModule1: 'module newmodule { yang-version 1.1; revision "2021-10-12" { } }'] - def moduleReferenceForExistingModule = new ModuleReference("test","2021-10-12") - def listOfExistingModulesModuleReference = [moduleReferenceForExistingModule] - and: 'no pre-existing schemaset in database' - def dataspaceEntity = new DataspaceEntity() - dataspaceRepository.getByName(_) >> new DataspaceEntity() - yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList() - yangResourceRepository.getResourceIdsByModuleReferences(_) >> [] - and: 'can retrieve schemaset details after storing it' - def schemaSetEntity = new SchemaSetEntity() - schemaSetRepository.getByDataspaceAndName(dataspaceEntity, 'new schema set') >> schemaSetEntity - when: 'a new schemaset is stored from a module' - objectUnderTest.storeSchemaSetFromModules('some dataspace', 'new schema set' , mapOfNewModules, listOfExistingModulesModuleReference) - then: 'no exception is thrown ' - noExceptionThrown() - and: 'the system will attempt to save the data 2 times with checksum integrity exception but then succeed' - 2 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException } - 1 * yangResourceRepository.saveAll(_) >> [] - } - -} diff --git a/cps-ri/src/test/groovy/org/onap/cps/ri/CpsModulePersistenceServiceImplSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/ri/CpsModulePersistenceServiceImplSpec.groovy index 2915bc8e8c..4bf8c7ca09 100644 --- a/cps-ri/src/test/groovy/org/onap/cps/ri/CpsModulePersistenceServiceImplSpec.groovy +++ b/cps-ri/src/test/groovy/org/onap/cps/ri/CpsModulePersistenceServiceImplSpec.groovy @@ -1,7 +1,7 @@ /* * ============LICENSE_START======================================================= * Copyright (c) 2021 Bell Canada. 
- * Modifications Copyright (C) 2022-2023 Nordix Foundation + * Modifications Copyright (C) 2022-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,6 +20,7 @@ package org.onap.cps.ri import org.hibernate.exception.ConstraintViolationException +import org.onap.cps.ri.models.DataspaceEntity import org.onap.cps.ri.models.SchemaSetEntity import org.onap.cps.ri.repository.DataspaceRepository import org.onap.cps.ri.repository.ModuleReferenceRepository @@ -30,7 +31,6 @@ import org.onap.cps.api.exceptions.DuplicatedYangResourceException import org.onap.cps.api.model.ModuleReference import org.springframework.dao.DataIntegrityViolationException import spock.lang.Specification - import java.sql.SQLException /** @@ -78,8 +78,8 @@ class CpsModulePersistenceServiceImplSpec extends Specification { and: 'persisting yang resource raises db constraint exception (in case of concurrent requests for example)' mockYangResourceRepository.saveAll(_) >> { throw dbException } when: 'attempt to store schema set ' - def newYangResourcesNameToContentMap = [(yangResourceName):yangResourceContent] - objectUnderTest.storeSchemaSet('my-dataspace', 'my-schema-set', newYangResourcesNameToContentMap) + def newYangResourceContentPerName = [(yangResourceName):yangResourceContent] + objectUnderTest.createSchemaSet('my-dataspace', 'my-schema-set', newYangResourceContentPerName) then: 'an #expectedThrownException is thrown' def e = thrown(expectedThrownException) assert e.getMessage().contains(expectedThrownExceptionMessage) @@ -96,9 +96,37 @@ class CpsModulePersistenceServiceImplSpec extends Specification { def schemaSetEntity = new SchemaSetEntity(id: 1) mockSchemaSetRepository.getByDataspaceAndName(_, _) >> schemaSetEntity when: 'schema set update is requested' - objectUnderTest.updateSchemaSetFromModules('my-dataspace', 'my-schemaset', [:], [new ModuleReference('some module name', 'some revision name')]) + objectUnderTest.updateSchemaSetFromNewAndExistingModules('my-dataspace', 'my-schemaset', [:], [new ModuleReference('some module name', 'some revision name')]) then: 'no exception is thrown ' noExceptionThrown() } + def 'Get yang schema resources.' () { + given: 'mocked methods for dataspace and schema set repositories' + mockDataspaceRepository.getByName('someDataspaceName') >> new DataspaceEntity() + mockSchemaSetRepository.getByDataspaceAndName(_,_) >> new SchemaSetEntity(yangResources: []) + when: 'the get yang schema resources method is called' + def result = objectUnderTest.getYangSchemaResources('someDataspaceName', 'someSchemaSetName') + then: 'an empty map is returned' + assert result.isEmpty() + } + + def 'Get yang module references with just dataspace name.' () { + given: 'mocked method return yang resource repository' + mockYangResourceRepository.findAllModuleReferencesByDataspace('someDataspaceName') >> [] + when: 'the get yang resource module reference method is called with 1 parameter' + def result = objectUnderTest.getYangResourceModuleReferences('someDataspaceName') + then: 'an empty collection is returned' + assert result.isEmpty() + } + + def 'Get yang module references with dataspace name and anchor.' 
() { + given: 'mocked method return yang resource repository' + mockYangResourceRepository.findAllModuleReferencesByDataspaceAndAnchor('someDataspaceName', 'someAnchorName') >> [] + when: 'the get yang resource module reference method is called with 2 parameters' + def result = objectUnderTest.getYangResourceModuleReferences('someDataspaceName','someAnchorName') + then: 'an empty collection is returned' + assert result.isEmpty() + } + } diff --git a/cps-service/pom.xml b/cps-service/pom.xml index 7dda1327e0..7b468b5718 100644 --- a/cps-service/pom.xml +++ b/cps-service/pom.xml @@ -30,7 +30,7 @@ <parent> <groupId>org.onap.cps</groupId> <artifactId>cps-parent</artifactId> - <version>3.6.0-SNAPSHOT</version> + <version>3.6.3-SNAPSHOT</version> <relativePath>../cps-parent/pom.xml</relativePath> </parent> diff --git a/cps-service/src/main/java/org/onap/cps/api/CpsDataService.java b/cps-service/src/main/java/org/onap/cps/api/CpsDataService.java index 345bc8825b..5d48812d58 100644 --- a/cps-service/src/main/java/org/onap/cps/api/CpsDataService.java +++ b/cps-service/src/main/java/org/onap/cps/api/CpsDataService.java @@ -4,7 +4,7 @@ * Modifications Copyright (C) 2021 Pantheon.tech * Modifications Copyright (C) 2021-2022 Bell Canada * Modifications Copyright (C) 2022 Deutsche Telekom AG - * Modifications Copyright (C) 2023-2024 TechMahindra Ltd. + * Modifications Copyright (C) 2023-2025 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -26,10 +26,8 @@ package org.onap.cps.api; import java.time.OffsetDateTime; import java.util.Collection; -import java.util.List; import java.util.Map; import org.onap.cps.api.model.DataNode; -import org.onap.cps.api.model.DeltaReport; import org.onap.cps.api.parameters.FetchDescendantsOption; import org.onap.cps.utils.ContentType; @@ -294,38 +292,6 @@ public interface CpsDataService { void lockAnchor(String sessionID, String dataspaceName, String anchorName, Long timeoutInMilliseconds); /** - * Retrieves the delta between two anchors by xpath within a dataspace. - * - * @param dataspaceName dataspace name - * @param sourceAnchorName source anchor name - * @param targetAnchorName target anchor name - * @param xpath xpath - * @param fetchDescendantsOption defines the scope of data to fetch: either single node or all the descendant - * nodes (recursively) as well - * @return list containing {@link DeltaReport} objects - */ - List<DeltaReport> getDeltaByDataspaceAndAnchors(String dataspaceName, String sourceAnchorName, - String targetAnchorName, String xpath, - FetchDescendantsOption fetchDescendantsOption); - - /** - * Retrieves the delta between an anchor and JSON payload by xpath, using dataspace name and anchor name. 
- * - * @param dataspaceName source dataspace name - * @param sourceAnchorName source anchor name - * @param xpath xpath - * @param yangResourcesNameToContentMap YANG resources (files) map where key is a name and value is content - * @param targetData target data to be compared in JSON string format - * @param fetchDescendantsOption defines the scope of data to fetch: defaulted to INCLUDE_ALL_DESCENDANTS - * @return list containing {@link DeltaReport} objects - */ - List<DeltaReport> getDeltaByDataspaceAnchorAndPayload(String dataspaceName, String sourceAnchorName, String xpath, - Map<String, String> yangResourcesNameToContentMap, - String targetData, - FetchDescendantsOption fetchDescendantsOption); - - - /** * Validates JSON or XML data by parsing it using the schema associated to an anchor within the given dataspace. * Validation is performed without persisting the data. * diff --git a/cps-service/src/main/java/org/onap/cps/api/CpsDeltaService.java b/cps-service/src/main/java/org/onap/cps/api/CpsDeltaService.java index 67c4da63ac..671b1d60db 100644 --- a/cps-service/src/main/java/org/onap/cps/api/CpsDeltaService.java +++ b/cps-service/src/main/java/org/onap/cps/api/CpsDeltaService.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023 TechMahindra Ltd. + * Copyright (C) 2023-2025 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,23 +20,47 @@ package org.onap.cps.api; -import java.util.Collection; import java.util.List; -import org.onap.cps.api.model.DataNode; +import java.util.Map; import org.onap.cps.api.model.DeltaReport; +import org.onap.cps.api.parameters.FetchDescendantsOption; public interface CpsDeltaService { /** - * Retrieves delta between source data nodes and target data nodes. Source data nodes contain the data which acts as - * the point of reference for delta report, whereas target data nodes contain the data being compared against - * source data node. List of {@link DeltaReport}. Each Delta Report contains information such as action, xpath, - * source-payload and target-payload. + * Retrieves the delta between two anchors by xpath within a dataspace. Returns a List of {@link DeltaReport}. + * Each Delta Report contains following information: action, xpath, source-payload and/or target-payload. 
* - * @param sourceDataNodes collection of {@link DataNode} as source/reference for delta generation - * @param targetDataNodes collection of {@link DataNode} as target data for delta generation - * @return list of {@link DeltaReport} containing delta information + * @param dataspaceName dataspace name + * @param sourceAnchorName source anchor name + * @param targetAnchorName target anchor name + * @param xpath xpath + * @param fetchDescendantsOption defines the scope of data to fetch: either single node or all the descendant + * nodes (recursively) as well + * @return list containing {@link DeltaReport} objects */ - List<DeltaReport> getDeltaReports(Collection<DataNode> sourceDataNodes, - Collection<DataNode> targetDataNodes); + List<DeltaReport> getDeltaByDataspaceAndAnchors(String dataspaceName, String sourceAnchorName, + String targetAnchorName, String xpath, + FetchDescendantsOption fetchDescendantsOption); + + /** + * Retrieves the delta between an anchor and JSON payload by xpath, using dataspace name and anchor name. + * Returns a List of {@link DeltaReport}. Each Delta Report contains following information: action, xpath, + * source-payload and/or target-payload. + * + * @param dataspaceName source dataspace name + * @param sourceAnchorName source anchor name + * @param xpath xpath + * @param yangResourceContentPerName YANG resources (files) map where key is a name and value is content + * @param targetData target data to be compared in JSON string format + * @param fetchDescendantsOption defines the scope of data to fetch: defaulted to INCLUDE_ALL_DESCENDANTS + * + * @return list containing {@link DeltaReport} objects + */ + List<DeltaReport> getDeltaByDataspaceAnchorAndPayload(String dataspaceName, String sourceAnchorName, String xpath, + Map<String, String> yangResourceContentPerName, + String targetData, + FetchDescendantsOption fetchDescendantsOption); + + } diff --git a/cps-service/src/main/java/org/onap/cps/api/CpsFacade.java b/cps-service/src/main/java/org/onap/cps/api/CpsFacade.java new file mode 100644 index 0000000000..8933f02cb4 --- /dev/null +++ b/cps-service/src/main/java/org/onap/cps/api/CpsFacade.java @@ -0,0 +1,96 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.api; + +import java.util.List; +import java.util.Map; +import org.onap.cps.api.parameters.FetchDescendantsOption; +import org.onap.cps.api.parameters.PaginationOption; + +public interface CpsFacade { + + /** + * Get the first data node for a given dataspace, anchor and xpath. 
+ *
+ * @param dataspaceName the name of the dataspace
+ * @param anchorName the name of the anchor
+ * @param xpath the xpath
+ * @param fetchDescendantsOption control what level of descendants should be returned
+ * @return a map representing the data node and its descendants
+ */
+ Map<String, Object> getFirstDataNodeByAnchor(String dataspaceName,
+ String anchorName,
+ String xpath,
+ FetchDescendantsOption fetchDescendantsOption);
+
+ /**
+ * Get data nodes for a given dataspace, anchor and xpath.
+ *
+ * @param dataspaceName the name of the dataspace
+ * @param anchorName the name of the anchor
+ * @param xpath the xpath
+ * @param fetchDescendantsOption control what level of descendants should be returned
+ * @return a map representing the data nodes and their descendants
+ */
+ List<Map<String, Object>> getDataNodesByAnchor(String dataspaceName,
+ String anchorName,
+ String xpath,
+ FetchDescendantsOption fetchDescendantsOption);
+
+ /**
+ * Query the given anchor using a cps path expression.
+ *
+ * @param dataspaceName the name of the dataspace
+ * @param anchorName the name of the anchor
+ * @param cpsPath the xpath i.e. query
+ * @param fetchDescendantsOption control what level of descendants should be returned
+ * @return a map representing the data nodes and their descendants
+ */
+ List<Map<String, Object>> executeAnchorQuery(String dataspaceName,
+ String anchorName,
+ String cpsPath,
+ FetchDescendantsOption fetchDescendantsOption);
+
+ /**
+ * Query the given dataspace (all anchors) using a cps path expression.
+ *
+ * @param dataspaceName the name of the dataspace
+ * @param cpsPath the xpath i.e. query
+ * @param fetchDescendantsOption control what level of descendants should be returned
+ * @return a map representing the data nodes and their descendants
+ */
+ List<Map<String, Object>> executeDataspaceQuery(String dataspaceName,
+ String cpsPath,
+ FetchDescendantsOption fetchDescendantsOption,
+ PaginationOption paginationOption);
+
+ /**
+ * Query how many anchors will be returned for the given dataspace and a cps path query.
+ *
+ * @param dataspaceName the name of the dataspace
+ * @param cpsPath the xpath i.e. query
+ * @param paginationOption the options for pagination
+ * @return the number of anchors involved in the output
+ */
+ int countAnchorsInDataspaceQuery(String dataspaceName,
+ String cpsPath,
+ PaginationOption paginationOption);
+}
diff --git a/cps-service/src/main/java/org/onap/cps/api/CpsModuleService.java b/cps-service/src/main/java/org/onap/cps/api/CpsModuleService.java
index 81b6439efc..2494be4021 100644
--- a/cps-service/src/main/java/org/onap/cps/api/CpsModuleService.java
+++ b/cps-service/src/main/java/org/onap/cps/api/CpsModuleService.java
@@ -40,21 +40,21 @@
 *
 * @param dataspaceName dataspace name
 * @param schemaSetName schema set name
- * @param yangResourcesNameToContentMap yang resources (files) as a mep where key is resource name
+ * @param yangResourceContentPerName yang resources (files) as a map where key is resource name
 * and value is content
 */
 void createSchemaSet(String dataspaceName, String schemaSetName,
- Map<String, String> yangResourcesNameToContentMap);
+ Map<String, String> yangResourceContentPerName);
 /**
 * Create or upgrade a schema set from new modules and existing modules or only existing modules. 
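A minimal sketch of the renamed yangResourceContentPerName parameter in use (the createSchemaSetFromModules parameters continue below), assuming an injected CpsModuleService; the dataspace, schema set name and module content are placeholders:

    // Key = yang resource (file) name, value = its content.
    final Map<String, String> yangResourceContentPerName =
            Map.of("bookstore.yang", "module bookstore { /* revision, container bookstore, ... */ }");
    cpsModuleService.createSchemaSet("bookstore-dataspace", "bookstore-schema-set", yangResourceContentPerName);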
- * @param dataspaceName Dataspace name - * @param schemaSetName schema set name - * @param newModuleNameToContentMap YANG resources map where key is a module name and value is content - * @param allModuleReferences All YANG resource module references + * @param dataspaceName Dataspace name + * @param schemaSetName schema set name + * @param yangResourceContentPerName YANG resources map where key is a name and value is content + * @param allModuleReferences All YANG resource module references */ void createSchemaSetFromModules(String dataspaceName, String schemaSetName, - Map<String, String> newModuleNameToContentMap, + Map<String, String> yangResourceContentPerName, Collection<ModuleReference> allModuleReferences); /** @@ -164,8 +164,11 @@ public interface CpsModuleService { Collection<ModuleReference> identifyNewModuleReferences(Collection<ModuleReference> moduleReferencesToCheck); /** - * Remove any Yang Resource Modules and Schema Sets from the DB that are no longer referenced by any anchor. + * Remove any Yang Resource Modules and Schema Sets from the given dataspace that are no longer referenced + * by any anchor. + * + * @param dataspaceName dataspace name */ - void deleteAllUnusedYangModuleData(); + void deleteAllUnusedYangModuleData(String dataspaceName); } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/utils/Sleeper.java b/cps-service/src/main/java/org/onap/cps/api/CpsNotificationService.java index 7a02fa06e0..ae437753c0 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/utils/Sleeper.java +++ b/cps-service/src/main/java/org/onap/cps/api/CpsNotificationService.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2025 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,18 +18,18 @@ * ============LICENSE_END========================================================= */ -package org.onap.cps.ncmp.impl.utils; +package org.onap.cps.api; -import java.util.concurrent.TimeUnit; -import org.springframework.stereotype.Service; +import java.util.List; +import java.util.Map; -/** - * This class is to extract out sleep functionality so the interrupted exception handling can - * be covered with a test (e.g. using spy on Sleeper) and help to get to 100% code coverage. 
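The dataspace-scoped clean-up introduced in CpsModuleService above can then be invoked per dataspace; a one-line sketch with a hypothetical dataspace name:

    // Previously a no-argument call; housekeeping is now limited to one dataspace.
    cpsModuleService.deleteAllUnusedYangModuleData("bookstore-dataspace");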
- */ -@Service -public class Sleeper { - public void haveALittleRest(final long timeInMillis) throws InterruptedException { - TimeUnit.MILLISECONDS.sleep(timeInMillis); - } +public interface CpsNotificationService { + + void createNotificationSubscription(String notificationSubscriptionAsJson, String xpath); + + void deleteNotificationSubscription(String xpath); + + boolean isNotificationEnabled(String dataspaceName, String anchorName); + + List<Map<String, Object>> getNotificationSubscription(String xpath); } diff --git a/cps-service/src/main/java/org/onap/cps/api/CpsQueryService.java b/cps-service/src/main/java/org/onap/cps/api/CpsQueryService.java index d783b9ed0e..d6c1f7fc60 100644 --- a/cps-service/src/main/java/org/onap/cps/api/CpsQueryService.java +++ b/cps-service/src/main/java/org/onap/cps/api/CpsQueryService.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2020-2024 Nordix Foundation + * Copyright (C) 2020-2025 Nordix Foundation * Modifications Copyright (C) 2022-2023 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -32,6 +32,8 @@ import org.onap.cps.api.parameters.PaginationOption; */ public interface CpsQueryService { + int NO_LIMIT = 0; + /** * Get data nodes for the given dataspace and anchor by cps path. * @@ -45,6 +47,20 @@ public interface CpsQueryService { Collection<DataNode> queryDataNodes(String dataspaceName, String anchorName, String cpsPath, FetchDescendantsOption fetchDescendantsOption); + /** + * Retrieves a collection of data nodes based on the specified CPS path query. + * + * @param dataspaceName the name of the dataspace (must not be null or empty) + * @param anchorName the name of the anchor (must not be null or empty) + * @param cpsPath the CPS path used for querying (must not be null or empty) + * @param fetchDescendantsOption specifies whether to include descendant nodes in the output + * @param queryResultLimit the maximum number of data nodes to return; if less than 1, returns all matching nodes + * + * @return a collection of matching {@link DataNode} instances (can be empty if no nodes are found) + */ + Collection<DataNode> queryDataNodes(String dataspaceName, String anchorName, + String cpsPath, FetchDescendantsOption fetchDescendantsOption, + int queryResultLimit); /** * Get data leaf for the given dataspace and anchor by cps path. @@ -58,6 +74,19 @@ public interface CpsQueryService { <T> Set<T> queryDataLeaf(String dataspaceName, String anchorName, String cpsPath, Class<T> targetClass); /** + * Get data leaf for the given dataspace and anchor by cps path. + * + * @param dataspaceName dataspace name + * @param anchorName anchor name + * @param cpsPath cps path + * @param queryResultLimit the maximum number of data nodes to return; if less than 1, returns all matching nodes + * @param targetClass class of the expected data type + * @return a collection of data objects of expected type + */ + <T> Set<T> queryDataLeaf(String dataspaceName, String anchorName, String cpsPath, int queryResultLimit, + Class<T> targetClass); + + /** * Get data nodes for the given dataspace across all anchors by cps path. 
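A short sketch of the new limit-aware query overloads shown above, assuming an injected CpsQueryService; the dataspace, anchor and cps path values are illustrative:

    // At most 100 matching data nodes; CpsQueryService.NO_LIMIT (0) would return all matches.
    final Collection<DataNode> first100 = cpsQueryService.queryDataNodes(
            "bookstore-dataspace", "bookstore-anchor", "//books",
            FetchDescendantsOption.OMIT_DESCENDANTS, 100);

    // Leaf variant with the same limit semantics, returning only the selected leaf values.
    final Set<String> bookTitles = cpsQueryService.queryDataLeaf(
            "bookstore-dataspace", "bookstore-anchor", "//books/@title",
            CpsQueryService.NO_LIMIT, String.class);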
* * @param dataspaceName dataspace name diff --git a/cps-service/src/main/java/org/onap/cps/api/DataNodeFactory.java b/cps-service/src/main/java/org/onap/cps/api/DataNodeFactory.java new file mode 100644 index 0000000000..1e3410c7f4 --- /dev/null +++ b/cps-service/src/main/java/org/onap/cps/api/DataNodeFactory.java @@ -0,0 +1,83 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 TechMahindra Ltd. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.api; + +import java.util.Collection; +import java.util.Map; +import org.onap.cps.api.model.Anchor; +import org.onap.cps.api.model.DataNode; +import org.onap.cps.utils.ContentType; + +public interface DataNodeFactory { + + /** + * Create data nodes using an anchor, xpath, and JSON/XML string. + * + * @param anchor name of Anchor sharing same schema structure as the JSON/XML string + * @param xpath xpath of the data node + * @param nodeData JSON/XML data string + * @param contentType JSON or XML content type + * @return a collection of {@link DataNode} + */ + Collection<DataNode> createDataNodesWithAnchorXpathAndNodeData(Anchor anchor, String xpath, + String nodeData, ContentType contentType); + + /** + * Create data nodes using an anchor, parent data node xpath, and JSON/XML string. + * + * @param anchor name of Anchor sharing same schema structure as the JSON/XML string + * @param parentNodeXpath xpath of the parent data node + * @param nodeData JSON/XML data string + * @param contentType JSON or XML content type + * @return a collection of {@link DataNode} + */ + Collection<DataNode> createDataNodesWithAnchorParentXpathAndNodeData(Anchor anchor, + String parentNodeXpath, + String nodeData, + ContentType contentType); + + /** + * Create data nodes using a map of xpath to JSON/XML data, and anchor name. + * + * @param anchor name of Anchor sharing same schema structure as the JSON/XML string + * @param nodesData map of xpath and node JSON/XML data + * @param contentType JSON or XML content type + * @return a collection of {@link DataNode} + */ + Collection<DataNode> createDataNodesWithAnchorAndXpathToNodeData(Anchor anchor, + Map<String, String> nodesData, + ContentType contentType); + + /** + * Create data nodes using a map of YANG resource name to content, xpath, and JSON/XML string. 
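A hedged sketch of building data nodes through the new factory rather than private helpers (the parameter details of the last factory method continue below); it assumes injected DataNodeFactory and CpsAnchorService beans, and the names and JSON payload are placeholders:

    final Anchor anchor = cpsAnchorService.getAnchor("bookstore-dataspace", "bookstore-anchor");
    final String nodeData = "{\"bookstore\": {\"bookstore-name\": \"Easons\"}}";
    final Collection<DataNode> dataNodes = dataNodeFactory
            .createDataNodesWithAnchorParentXpathAndNodeData(anchor, "/", nodeData, ContentType.JSON);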
+ * + * @param yangResourcesNameToContentMap map of YANG resource name to content + * @param xpath xpath of the data node + * @param nodeData JSON/XML data string + * @param contentType JSON or XML content type + * @return a collection of {@link DataNode} + */ + Collection<DataNode> createDataNodesWithYangResourceXpathAndNodeData( + Map<String, String> yangResourcesNameToContentMap, + String xpath, String nodeData, + ContentType contentType); + +} diff --git a/cps-service/src/main/java/org/onap/cps/api/model/DataNode.java b/cps-service/src/main/java/org/onap/cps/api/model/DataNode.java index be80b636ad..6597aa3908 100644 --- a/cps-service/src/main/java/org/onap/cps/api/model/DataNode.java +++ b/cps-service/src/main/java/org/onap/cps/api/model/DataNode.java @@ -26,20 +26,19 @@ import java.io.Serializable; import java.util.Collection; import java.util.Collections; import java.util.Map; -import lombok.AccessLevel; import lombok.EqualsAndHashCode; import lombok.Getter; +import lombok.NoArgsConstructor; import lombok.Setter; -@Setter(AccessLevel.PROTECTED) +@Setter @Getter @EqualsAndHashCode +@NoArgsConstructor public class DataNode implements Serializable { private static final long serialVersionUID = 1482619410918597467L; - DataNode() {} - private String dataspace; private String schemaSetName; private String anchorName; diff --git a/cps-service/src/main/java/org/onap/cps/api/model/DeltaReport.java b/cps-service/src/main/java/org/onap/cps/api/model/DeltaReport.java index df642628d0..761c6ad01d 100644 --- a/cps-service/src/main/java/org/onap/cps/api/model/DeltaReport.java +++ b/cps-service/src/main/java/org/onap/cps/api/model/DeltaReport.java @@ -23,21 +23,20 @@ package org.onap.cps.api.model; import com.fasterxml.jackson.annotation.JsonInclude; import java.io.Serializable; import java.util.Map; -import lombok.AccessLevel; import lombok.Getter; +import lombok.NoArgsConstructor; import lombok.Setter; -@Setter(AccessLevel.PROTECTED) +@Setter @Getter @JsonInclude(JsonInclude.Include.NON_NULL) +@NoArgsConstructor public class DeltaReport { public static final String CREATE_ACTION = "create"; public static final String REMOVE_ACTION = "remove"; public static final String REPLACE_ACTION = "replace"; - DeltaReport() {} - private String action; private String xpath; private Map<String, Serializable> sourceData; diff --git a/cps-service/src/main/java/org/onap/cps/api/parameters/FetchDescendantsOption.java b/cps-service/src/main/java/org/onap/cps/api/parameters/FetchDescendantsOption.java index 46022ba46b..05fa366239 100644 --- a/cps-service/src/main/java/org/onap/cps/api/parameters/FetchDescendantsOption.java +++ b/cps-service/src/main/java/org/onap/cps/api/parameters/FetchDescendantsOption.java @@ -1,7 +1,7 @@ /* * ============LICENSE_START======================================================= * Copyright (C) 2021 Pantheon.tech - * Copyright (C) 2022-2023 Nordix Foundation + * Copyright (C) 2022-2025 Nordix Foundation * Modifications Copyright (C) 2023 TechMahindra Ltd. 
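With @Setter and @NoArgsConstructor replacing the protected setters and package-private constructors in DataNode and DeltaReport above, both model classes can now be populated directly, which is mainly convenient in tests; a small sketch:

    final DeltaReport deltaReport = new DeltaReport();
    deltaReport.setAction(DeltaReport.CREATE_ACTION);
    deltaReport.setXpath("/bookstore");

    final DataNode dataNode = new DataNode();
    dataNode.setAnchorName("bookstore-anchor");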
* ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -24,6 +24,7 @@ package org.onap.cps.api.parameters; import com.google.common.base.Strings; import java.util.regex.Matcher; import java.util.regex.Pattern; +import lombok.Getter; import lombok.RequiredArgsConstructor; import org.onap.cps.api.exceptions.DataValidationException; @@ -44,6 +45,12 @@ public class FetchDescendantsOption { private static final Pattern FETCH_DESCENDANTS_OPTION_PATTERN = Pattern.compile("^$|^all$|^none$|^direct$|^[0-9]+$|^-1$|^1$"); + /** + * Get depth. + * + * @return depth: -1 for all descendants, 0 for no descendants, or positive value for fixed level of descendants + */ + @Getter private final int depth; private final String optionName; @@ -76,15 +83,7 @@ public class FetchDescendantsOption { } /** - * Get depth. - * @return depth: -1 for all descendants, 0 for no descendants, or positive value for fixed level of descendants - */ - public int getDepth() { - return depth; - } - - /** - * get fetch descendants option for given descendant. + * Convert fetch descendants option from string to enum with depth. * * @param fetchDescendantsOptionAsString fetch descendants option string * @return fetch descendants option for given descendant @@ -99,11 +98,22 @@ public class FetchDescendantsOption { } else if ("1".equals(fetchDescendantsOptionAsString) || "direct".equals(fetchDescendantsOptionAsString)) { return FetchDescendantsOption.DIRECT_CHILDREN_ONLY; } else { - final Integer depth = Integer.valueOf(fetchDescendantsOptionAsString); + final int depth = Integer.parseInt(fetchDescendantsOptionAsString); return new FetchDescendantsOption(depth); } } + /** + * Convert include all-descendants boolean parameter to FetchDescendantsOption enum. + * + * @param includedDescendantsOptionAsBoolean fetch descendants option as Boolean + * @return fetch descendants option for given descendant + */ + public static FetchDescendantsOption getFetchDescendantsOption(final Boolean includedDescendantsOptionAsBoolean) { + return Boolean.TRUE.equals(includedDescendantsOptionAsBoolean) + ? FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS : FetchDescendantsOption.OMIT_DESCENDANTS; + } + @Override public String toString() { return optionName; diff --git a/cps-service/src/main/java/org/onap/cps/events/CpsDataUpdateEventsService.java b/cps-service/src/main/java/org/onap/cps/events/CpsDataUpdateEventsProducer.java index f1b5ff8d10..6d82245998 100644 --- a/cps-service/src/main/java/org/onap/cps/events/CpsDataUpdateEventsService.java +++ b/cps-service/src/main/java/org/onap/cps/events/CpsDataUpdateEventsProducer.java @@ -1,7 +1,7 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 TechMahindra Ltd. - * Copyright (C) 2024 Nordix Foundation. + * Copyright (C) 2024-2025 TechMahindra Ltd. + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
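The two static conversion helpers on FetchDescendantsOption above can be exercised as follows; a sketch, with illustrative input values:

    // String form used by query parameters: "", "all", "none", "direct" or a numeric depth.
    final FetchDescendantsOption fromString = FetchDescendantsOption.getFetchDescendantsOption("direct");

    // Boolean form: TRUE gives INCLUDE_ALL_DESCENDANTS, anything else gives OMIT_DESCENDANTS.
    final FetchDescendantsOption fromBoolean = FetchDescendantsOption.getFetchDescendantsOption(Boolean.TRUE);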
@@ -28,6 +28,7 @@ import java.util.HashMap; import java.util.Map; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; +import org.onap.cps.api.CpsNotificationService; import org.onap.cps.api.model.Anchor; import org.onap.cps.events.model.CpsDataUpdatedEvent; import org.onap.cps.events.model.Data; @@ -39,31 +40,33 @@ import org.springframework.stereotype.Service; @Slf4j @Service @RequiredArgsConstructor -public class CpsDataUpdateEventsService { +public class CpsDataUpdateEventsProducer { - private final EventsPublisher<CpsDataUpdatedEvent> eventsPublisher; + private final EventsProducer<CpsDataUpdatedEvent> eventsProducer; + + private final CpsNotificationService cpsNotificationService; @Value("${app.cps.data-updated.topic:cps-data-updated-events}") private String topicName; - @Value("${app.cps.data-updated.change-event-notifications-enabled:true}") + @Value("${app.cps.data-updated.change-event-notifications-enabled:false}") private boolean cpsChangeEventNotificationsEnabled; @Value("${notification.enabled:false}") private boolean notificationsEnabled; /** - * Publish the cps data update event with header to the public topic. + * Send the cps data update event with header to the public topic. * * @param anchor Anchor of the updated data * @param xpath xpath of the updated data * @param operation operation performed on the data * @param observedTimestamp timestamp when data was updated. */ - @Timed(value = "cps.dataupdate.events.publish", description = "Time taken to publish Data Update event") - public void publishCpsDataUpdateEvent(final Anchor anchor, final String xpath, - final Operation operation, final OffsetDateTime observedTimestamp) { - if (notificationsEnabled && cpsChangeEventNotificationsEnabled) { + @Timed(value = "cps.dataupdate.events.send", description = "Time taken to send Data Update event") + public void sendCpsDataUpdateEvent(final Anchor anchor, final String xpath, + final Operation operation, final OffsetDateTime observedTimestamp) { + if (notificationsEnabled && cpsChangeEventNotificationsEnabled && isNotificationEnabledForAnchor(anchor)) { final CpsDataUpdatedEvent cpsDataUpdatedEvent = createCpsDataUpdatedEvent(anchor, observedTimestamp, xpath, operation); final String updateEventId = anchor.getDataspaceName() + ":" + anchor.getName(); @@ -71,13 +74,17 @@ public class CpsDataUpdateEventsService { final CloudEvent cpsDataUpdatedEventAsCloudEvent = CpsEvent.builder().type(CpsDataUpdatedEvent.class.getTypeName()).data(cpsDataUpdatedEvent) .extensions(extensions).build().asCloudEvent(); - eventsPublisher.publishCloudEvent(topicName, updateEventId, cpsDataUpdatedEventAsCloudEvent); + eventsProducer.sendCloudEvent(topicName, updateEventId, cpsDataUpdatedEventAsCloudEvent); } else { log.debug("State of Overall Notifications : {} and Cps Change Event Notifications : {}", notificationsEnabled, cpsChangeEventNotificationsEnabled); } } + private boolean isNotificationEnabledForAnchor(final Anchor anchor) { + return cpsNotificationService.isNotificationEnabled(anchor.getDataspaceName(), anchor.getName()); + } + private CpsDataUpdatedEvent createCpsDataUpdatedEvent(final Anchor anchor, final OffsetDateTime observedTimestamp, final String xpath, final Operation rootNodeOperation) { diff --git a/cps-service/src/main/java/org/onap/cps/events/EventsPublisher.java b/cps-service/src/main/java/org/onap/cps/events/EventsProducer.java index 46384b5933..01e1ad183a 100644 --- a/cps-service/src/main/java/org/onap/cps/events/EventsPublisher.java +++ 
b/cps-service/src/main/java/org/onap/cps/events/EventsProducer.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,13 +34,13 @@ import org.springframework.stereotype.Service; import org.springframework.util.SerializationUtils; /** - * EventsPublisher to publish events. + * EventsProducer to send events. */ @Slf4j @Service @RequiredArgsConstructor -public class EventsPublisher<T> { +public class EventsProducer<T> { /** * KafkaTemplate for legacy (non-cloud) events. @@ -51,49 +51,49 @@ public class EventsPublisher<T> { private final KafkaTemplate<String, CloudEvent> cloudEventKafkaTemplate; /** - * Generic CloudEvent publisher. + * Generic CloudEvent sender. * * @param topicName valid topic name * @param eventKey message key * @param event message payload */ - public void publishCloudEvent(final String topicName, final String eventKey, final CloudEvent event) { + public void sendCloudEvent(final String topicName, final String eventKey, final CloudEvent event) { final CompletableFuture<SendResult<String, CloudEvent>> eventFuture = cloudEventKafkaTemplate.send(topicName, eventKey, event); eventFuture.whenComplete((result, e) -> { if (e == null) { - log.debug("Successfully published event to topic : {} , Event : {}", result.getRecordMetadata().topic(), + log.debug("Successfully sent event to topic : {} , Event : {}", result.getRecordMetadata().topic(), result.getProducerRecord().value()); } else { - log.error("Unable to publish event to topic : {} due to {}", topicName, e.getMessage()); + log.error("Unable to send event to topic : {} due to {}", topicName, e.getMessage()); } }); } /** - * Generic Event publisher. + * Generic Event sender. * Note: Cloud events should be used. This will be addressed as part of https://lf-onap.atlassian.net/browse/CPS-1717. * * @param topicName valid topic name * @param eventKey message key * @param event message payload */ - public void publishEvent(final String topicName, final String eventKey, final T event) { + public void sendEvent(final String topicName, final String eventKey, final T event) { final CompletableFuture<SendResult<String, T>> eventFuture = legacyKafkaEventTemplate.send(topicName, eventKey, event); handleLegacyEventCallback(topicName, eventFuture); } /** - * Generic Event Publisher with headers. + * Generic Event sender with headers. * * @param topicName valid topic name * @param eventKey message key * @param eventHeaders event headers * @param event message payload */ - public void publishEvent(final String topicName, final String eventKey, final Headers eventHeaders, final T event) { + public void sendEvent(final String topicName, final String eventKey, final Headers eventHeaders, final T event) { final ProducerRecord<String, T> producerRecord = new ProducerRecord<>(topicName, null, eventKey, event, eventHeaders); @@ -102,27 +102,27 @@ public class EventsPublisher<T> { } /** - * Generic Event Publisher with headers. + * Generic Event sender with headers. 
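For illustration, a hedged sketch of sending a CloudEvent through the renamed producer; it assumes the standard io.cloudevents CloudEventBuilder API and an injected EventsProducer, and the topic, event key and payload are placeholders:

    final CloudEvent cloudEvent = CloudEventBuilder.v1()
            .withId(UUID.randomUUID().toString())
            .withSource(URI.create("urn:cps:org.onap.cps"))
            .withType(CpsDataUpdatedEvent.class.getTypeName())
            .withData("{}".getBytes(StandardCharsets.UTF_8))
            .build();
    eventsProducer.sendCloudEvent("cps-data-updated-events", "bookstore-dataspace:bookstore-anchor", cloudEvent);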
* * @param topicName valid topic name * @param eventKey message key * @param eventHeaders map of event headers * @param event message payload */ - public void publishEvent(final String topicName, final String eventKey, final Map<String, Object> eventHeaders, - final T event) { + public void sendEvent(final String topicName, final String eventKey, final Map<String, Object> eventHeaders, + final T event) { - publishEvent(topicName, eventKey, convertToKafkaHeaders(eventHeaders), event); + sendEvent(topicName, eventKey, convertToKafkaHeaders(eventHeaders), event); } private void handleLegacyEventCallback(final String topicName, final CompletableFuture<SendResult<String, T>> eventFuture) { eventFuture.whenComplete((result, e) -> { if (e == null) { - log.debug("Successfully published event to topic : {} , Event : {}", result.getRecordMetadata().topic(), + log.debug("Successfully sent event to topic : {} , Event : {}", result.getRecordMetadata().topic(), result.getProducerRecord().value()); } else { - log.error("Unable to publish event to topic : {} due to {}", topicName, e.getMessage()); + log.error("Unable to send event to topic : {} due to {}", topicName, e.getMessage()); } }); } diff --git a/cps-service/src/main/java/org/onap/cps/impl/CpsAnchorServiceImpl.java b/cps-service/src/main/java/org/onap/cps/impl/CpsAnchorServiceImpl.java index fb22311128..f18ae74c73 100644 --- a/cps-service/src/main/java/org/onap/cps/impl/CpsAnchorServiceImpl.java +++ b/cps-service/src/main/java/org/onap/cps/impl/CpsAnchorServiceImpl.java @@ -24,9 +24,9 @@ import java.util.Collection; import lombok.RequiredArgsConstructor; import org.onap.cps.api.CpsAnchorService; import org.onap.cps.api.model.Anchor; -import org.onap.cps.impl.utils.CpsValidator; import org.onap.cps.spi.CpsAdminPersistenceService; import org.onap.cps.spi.CpsDataPersistenceService; +import org.onap.cps.utils.CpsValidator; import org.springframework.stereotype.Service; @Service diff --git a/cps-service/src/main/java/org/onap/cps/impl/CpsDataServiceImpl.java b/cps-service/src/main/java/org/onap/cps/impl/CpsDataServiceImpl.java index f2513173a6..045bba5074 100644 --- a/cps-service/src/main/java/org/onap/cps/impl/CpsDataServiceImpl.java +++ b/cps-service/src/main/java/org/onap/cps/impl/CpsDataServiceImpl.java @@ -1,9 +1,9 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation + * Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2020-2022 Bell Canada. * Modifications Copyright (C) 2021 Pantheon.tech - * Modifications Copyright (C) 2022-2024 TechMahindra Ltd. + * Modifications Copyright (C) 2022-2025 TechMahindra Ltd. 
* Modifications Copyright (C) 2022 Deutsche Telekom AG * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -24,38 +24,32 @@ package org.onap.cps.impl; +import static org.onap.cps.cpspath.parser.CpsPathUtil.NO_PARENT_PATH; +import static org.onap.cps.cpspath.parser.CpsPathUtil.ROOT_NODE_XPATH; +import static org.onap.cps.utils.ContentType.JSON; + import io.micrometer.core.annotation.Timed; import java.io.Serializable; import java.time.OffsetDateTime; -import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.onap.cps.api.CpsAnchorService; import org.onap.cps.api.CpsDataService; -import org.onap.cps.api.CpsDeltaService; -import org.onap.cps.api.exceptions.DataValidationException; +import org.onap.cps.api.DataNodeFactory; import org.onap.cps.api.model.Anchor; import org.onap.cps.api.model.DataNode; -import org.onap.cps.api.model.DataNodeBuilder; -import org.onap.cps.api.model.DeltaReport; import org.onap.cps.api.parameters.FetchDescendantsOption; import org.onap.cps.cpspath.parser.CpsPathUtil; -import org.onap.cps.events.CpsDataUpdateEventsService; +import org.onap.cps.events.CpsDataUpdateEventsProducer; import org.onap.cps.events.model.Data.Operation; -import org.onap.cps.impl.utils.CpsValidator; import org.onap.cps.spi.CpsDataPersistenceService; import org.onap.cps.utils.ContentType; -import org.onap.cps.utils.DataMapUtils; -import org.onap.cps.utils.JsonObjectMapper; -import org.onap.cps.utils.PrefixResolver; +import org.onap.cps.utils.CpsValidator; import org.onap.cps.utils.YangParser; -import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode; import org.springframework.stereotype.Service; @Service @@ -63,36 +57,29 @@ import org.springframework.stereotype.Service; @RequiredArgsConstructor public class CpsDataServiceImpl implements CpsDataService { - private static final String ROOT_NODE_XPATH = "/"; - private static final String PARENT_NODE_XPATH_FOR_ROOT_NODE_XPATH = ""; private static final long DEFAULT_LOCK_TIMEOUT_IN_MILLISECONDS = 300L; - private static final String NO_DATA_NODES = "No data nodes."; private final CpsDataPersistenceService cpsDataPersistenceService; - private final CpsDataUpdateEventsService cpsDataUpdateEventsService; + private final CpsDataUpdateEventsProducer cpsDataUpdateEventsProducer; private final CpsAnchorService cpsAnchorService; - + private final DataNodeFactory dataNodeFactory; private final CpsValidator cpsValidator; private final YangParser yangParser; - private final CpsDeltaService cpsDeltaService; - private final JsonObjectMapper jsonObjectMapper; - private final PrefixResolver prefixResolver; @Override public void saveData(final String dataspaceName, final String anchorName, final String nodeData, final OffsetDateTime observedTimestamp) { - saveData(dataspaceName, anchorName, nodeData, observedTimestamp, ContentType.JSON); + saveData(dataspaceName, anchorName, nodeData, observedTimestamp, JSON); } @Override - @Timed(value = "cps.data.service.datanode.root.save", - description = "Time taken to save a root data node") + @Timed(value = "cps.data.service.datanode.root.save", description = "Time taken to save a root data node") public void saveData(final String dataspaceName, final String anchorName, final String nodeData, final 
OffsetDateTime observedTimestamp, final ContentType contentType) { cpsValidator.validateNameCharacters(dataspaceName, anchorName); final Anchor anchor = cpsAnchorService.getAnchor(dataspaceName, anchorName); - final Collection<DataNode> dataNodes = - buildDataNodesWithParentNodeXpath(anchor, ROOT_NODE_XPATH, nodeData, contentType); + final Collection<DataNode> dataNodes = dataNodeFactory + .createDataNodesWithAnchorParentXpathAndNodeData(anchor, ROOT_NODE_XPATH, nodeData, contentType); cpsDataPersistenceService.storeDataNodes(dataspaceName, anchorName, dataNodes); sendDataUpdatedEvent(anchor, ROOT_NODE_XPATH, Operation.CREATE, observedTimestamp); } @@ -100,34 +87,32 @@ public class CpsDataServiceImpl implements CpsDataService { @Override public void saveData(final String dataspaceName, final String anchorName, final String parentNodeXpath, final String nodeData, final OffsetDateTime observedTimestamp) { - saveData(dataspaceName, anchorName, parentNodeXpath, nodeData, observedTimestamp, ContentType.JSON); + saveData(dataspaceName, anchorName, parentNodeXpath, nodeData, observedTimestamp, JSON); } @Override - @Timed(value = "cps.data.service.datanode.child.save", - description = "Time taken to save a child data node") + @Timed(value = "cps.data.service.datanode.child.save", description = "Time taken to save a child data node") public void saveData(final String dataspaceName, final String anchorName, final String parentNodeXpath, final String nodeData, final OffsetDateTime observedTimestamp, final ContentType contentType) { cpsValidator.validateNameCharacters(dataspaceName, anchorName); final Anchor anchor = cpsAnchorService.getAnchor(dataspaceName, anchorName); - final Collection<DataNode> dataNodes = - buildDataNodesWithParentNodeXpath(anchor, parentNodeXpath, nodeData, contentType); + final Collection<DataNode> dataNodes = dataNodeFactory + .createDataNodesWithAnchorParentXpathAndNodeData(anchor, parentNodeXpath, nodeData, contentType); cpsDataPersistenceService.addChildDataNodes(dataspaceName, anchorName, parentNodeXpath, dataNodes); sendDataUpdatedEvent(anchor, parentNodeXpath, Operation.CREATE, observedTimestamp); } @Override - @Timed(value = "cps.data.service.list.element.save", - description = "Time taken to save list elements") + @Timed(value = "cps.data.service.list.element.save", description = "Time taken to save list elements") public void saveListElements(final String dataspaceName, final String anchorName, final String parentNodeXpath, final String nodeData, final OffsetDateTime observedTimestamp, final ContentType contentType) { cpsValidator.validateNameCharacters(dataspaceName, anchorName); final Anchor anchor = cpsAnchorService.getAnchor(dataspaceName, anchorName); - final Collection<DataNode> listElementDataNodeCollection = - buildDataNodesWithParentNodeXpath(anchor, parentNodeXpath, nodeData, contentType); - if (isRootNodeXpath(parentNodeXpath)) { + final Collection<DataNode> listElementDataNodeCollection = dataNodeFactory + .createDataNodesWithAnchorParentXpathAndNodeData(anchor, parentNodeXpath, nodeData, contentType); + if (ROOT_NODE_XPATH.equals(parentNodeXpath)) { cpsDataPersistenceService.storeDataNodes(dataspaceName, anchorName, listElementDataNodeCollection); } else { cpsDataPersistenceService.addListElements(dataspaceName, anchorName, parentNodeXpath, @@ -137,8 +122,7 @@ public class CpsDataServiceImpl implements CpsDataService { } @Override - @Timed(value = "cps.data.service.datanode.get", - description = "Time taken to get data nodes for an xpath") + @Timed(value 
= "cps.data.service.datanode.get", description = "Time taken to get data nodes for an xpath") public Collection<DataNode> getDataNodes(final String dataspaceName, final String anchorName, final String xpath, final FetchDescendantsOption fetchDescendantsOption) { @@ -147,8 +131,7 @@ public class CpsDataServiceImpl implements CpsDataService { } @Override - @Timed(value = "cps.data.service.datanode.batch.get", - description = "Time taken to get a batch of data nodes") + @Timed(value = "cps.data.service.datanode.batch.get", description = "Time taken to get a batch of data nodes") public Collection<DataNode> getDataNodesForMultipleXpaths(final String dataspaceName, final String anchorName, final Collection<String> xpaths, final FetchDescendantsOption fetchDescendantsOption) { @@ -164,8 +147,8 @@ public class CpsDataServiceImpl implements CpsDataService { final String nodeData, final OffsetDateTime observedTimestamp, final ContentType contentType) { cpsValidator.validateNameCharacters(dataspaceName, anchorName); final Anchor anchor = cpsAnchorService.getAnchor(dataspaceName, anchorName); - final Collection<DataNode> dataNodesInPatch = - buildDataNodesWithParentNodeXpath(anchor, parentNodeXpath, nodeData, contentType); + final Collection<DataNode> dataNodesInPatch = dataNodeFactory + .createDataNodesWithAnchorParentXpathAndNodeData(anchor, parentNodeXpath, nodeData, contentType); final Map<String, Map<String, Serializable>> xpathToUpdatedLeaves = dataNodesInPatch.stream() .collect(Collectors.toMap(DataNode::getXpath, DataNode::getLeaves)); cpsDataPersistenceService.batchUpdateDataLeaves(dataspaceName, anchorName, xpathToUpdatedLeaves); @@ -181,8 +164,8 @@ public class CpsDataServiceImpl implements CpsDataService { final OffsetDateTime observedTimestamp) { cpsValidator.validateNameCharacters(dataspaceName, anchorName); final Anchor anchor = cpsAnchorService.getAnchor(dataspaceName, anchorName); - final Collection<DataNode> dataNodeUpdates = - buildDataNodesWithParentNodeXpath(anchor, parentNodeXpath, dataNodeUpdatesAsJson, ContentType.JSON); + final Collection<DataNode> dataNodeUpdates = dataNodeFactory + .createDataNodesWithAnchorParentXpathAndNodeData(anchor, parentNodeXpath, dataNodeUpdatesAsJson, JSON); for (final DataNode dataNodeUpdate : dataNodeUpdates) { processDataNodeUpdate(anchor, dataNodeUpdate); } @@ -211,45 +194,6 @@ public class CpsDataServiceImpl implements CpsDataService { } @Override - @Timed(value = "cps.data.service.get.delta", - description = "Time taken to get delta between anchors") - public List<DeltaReport> getDeltaByDataspaceAndAnchors(final String dataspaceName, - final String sourceAnchorName, - final String targetAnchorName, final String xpath, - final FetchDescendantsOption fetchDescendantsOption) { - - final Collection<DataNode> sourceDataNodes = getDataNodesForMultipleXpaths(dataspaceName, - sourceAnchorName, Collections.singletonList(xpath), fetchDescendantsOption); - final Collection<DataNode> targetDataNodes = getDataNodesForMultipleXpaths(dataspaceName, - targetAnchorName, Collections.singletonList(xpath), fetchDescendantsOption); - - return cpsDeltaService.getDeltaReports(sourceDataNodes, targetDataNodes); - } - - @Timed(value = "cps.data.service.get.deltaBetweenAnchorAndPayload", - description = "Time taken to get delta between anchor and a payload") - @Override - public List<DeltaReport> getDeltaByDataspaceAnchorAndPayload(final String dataspaceName, - final String sourceAnchorName, final String xpath, - final Map<String, String> 
yangResourcesNameToContentMap, - final String targetData, - final FetchDescendantsOption fetchDescendantsOption) { - - final Anchor sourceAnchor = cpsAnchorService.getAnchor(dataspaceName, sourceAnchorName); - - final Collection<DataNode> sourceDataNodes = getDataNodes(dataspaceName, - sourceAnchorName, xpath, fetchDescendantsOption); - - final Collection<DataNode> sourceDataNodesRebuilt = - new ArrayList<>(rebuildSourceDataNodes(xpath, sourceAnchor, sourceDataNodes)); - - final Collection<DataNode> targetDataNodes = - new ArrayList<>(buildTargetDataNodes(sourceAnchor, xpath, yangResourcesNameToContentMap, targetData)); - - return cpsDeltaService.getDeltaReports(sourceDataNodesRebuilt, targetDataNodes); - } - - @Override @Timed(value = "cps.data.service.datanode.descendants.update", description = "Time taken to update a data node and descendants") public void updateDataNodeAndDescendants(final String dataspaceName, final String anchorName, @@ -257,8 +201,8 @@ public class CpsDataServiceImpl implements CpsDataService { final OffsetDateTime observedTimestamp, final ContentType contentType) { cpsValidator.validateNameCharacters(dataspaceName, anchorName); final Anchor anchor = cpsAnchorService.getAnchor(dataspaceName, anchorName); - final Collection<DataNode> dataNodes = - buildDataNodesWithParentNodeXpath(anchor, parentNodeXpath, nodeData, contentType); + final Collection<DataNode> dataNodes = dataNodeFactory + .createDataNodesWithAnchorParentXpathAndNodeData(anchor, parentNodeXpath, nodeData, contentType); cpsDataPersistenceService.updateDataNodesAndDescendants(dataspaceName, anchorName, dataNodes); sendDataUpdatedEvent(anchor, parentNodeXpath, Operation.UPDATE, observedTimestamp); } @@ -267,31 +211,30 @@ public class CpsDataServiceImpl implements CpsDataService { @Timed(value = "cps.data.service.datanode.descendants.batch.update", description = "Time taken to update a batch of data nodes and descendants") public void updateDataNodesAndDescendants(final String dataspaceName, final String anchorName, - final Map<String, String> nodeDataPerXPath, + final Map<String, String> nodeDataPerParentNodeXPath, final OffsetDateTime observedTimestamp, final ContentType contentType) { cpsValidator.validateNameCharacters(dataspaceName, anchorName); final Anchor anchor = cpsAnchorService.getAnchor(dataspaceName, anchorName); - final Collection<DataNode> dataNodes = buildDataNodesWithParentNodeXpath(anchor, nodeDataPerXPath, contentType); + final Collection<DataNode> dataNodes = dataNodeFactory + .createDataNodesWithAnchorAndXpathToNodeData(anchor, nodeDataPerParentNodeXPath, contentType); cpsDataPersistenceService.updateDataNodesAndDescendants(dataspaceName, anchorName, dataNodes); - nodeDataPerXPath.keySet().forEach(nodeXpath -> + nodeDataPerParentNodeXPath.keySet().forEach(nodeXpath -> sendDataUpdatedEvent(anchor, nodeXpath, Operation.UPDATE, observedTimestamp)); } @Override - @Timed(value = "cps.data.service.list.update", - description = "Time taken to update a list") + @Timed(value = "cps.data.service.list.update", description = "Time taken to update a list") public void replaceListContent(final String dataspaceName, final String anchorName, final String parentNodeXpath, final String nodeData, final OffsetDateTime observedTimestamp, final ContentType contentType) { cpsValidator.validateNameCharacters(dataspaceName, anchorName); final Anchor anchor = cpsAnchorService.getAnchor(dataspaceName, anchorName); - final Collection<DataNode> newListElements = - buildDataNodesWithParentNodeXpath(anchor, 
parentNodeXpath, nodeData, contentType); + final Collection<DataNode> newListElements = dataNodeFactory + .createDataNodesWithAnchorParentXpathAndNodeData(anchor, parentNodeXpath, nodeData, contentType); replaceListContent(dataspaceName, anchorName, parentNodeXpath, newListElements, observedTimestamp); } @Override - @Timed(value = "cps.data.service.list.batch.update", - description = "Time taken to update a batch of lists") + @Timed(value = "cps.data.service.list.batch.update", description = "Time taken to update a batch of lists") public void replaceListContent(final String dataspaceName, final String anchorName, final String parentNodeXpath, final Collection<DataNode> dataNodes, final OffsetDateTime observedTimestamp) { cpsValidator.validateNameCharacters(dataspaceName, anchorName); @@ -301,8 +244,7 @@ public class CpsDataServiceImpl implements CpsDataService { } @Override - @Timed(value = "cps.data.service.datanode.delete", - description = "Time taken to delete a datanode") + @Timed(value = "cps.data.service.datanode.delete", description = "Time taken to delete a datanode") public void deleteDataNode(final String dataspaceName, final String anchorName, final String dataNodeXpath, final OffsetDateTime observedTimestamp) { cpsValidator.validateNameCharacters(dataspaceName, anchorName); @@ -312,8 +254,7 @@ public class CpsDataServiceImpl implements CpsDataService { } @Override - @Timed(value = "cps.data.service.datanode.batch.delete", - description = "Time taken to delete a batch of datanodes") + @Timed(value = "cps.data.service.datanode.batch.delete", description = "Time taken to delete a batch of datanodes") public void deleteDataNodes(final String dataspaceName, final String anchorName, final Collection<String> dataNodeXpaths, final OffsetDateTime observedTimestamp) { cpsValidator.validateNameCharacters(dataspaceName, anchorName); @@ -349,8 +290,7 @@ public class CpsDataServiceImpl implements CpsDataService { } @Override - @Timed(value = "cps.data.service.list.delete", - description = "Time taken to delete a list or list element") + @Timed(value = "cps.data.service.list.delete", description = "Time taken to delete a list or list element") public void deleteListOrListElement(final String dataspaceName, final String anchorName, final String listNodeXpath, final OffsetDateTime observedTimestamp) { cpsValidator.validateNameCharacters(dataspaceName, anchorName); @@ -363,158 +303,11 @@ public class CpsDataServiceImpl implements CpsDataService { public void validateData(final String dataspaceName, final String anchorName, final String parentNodeXpath, final String nodeData, final ContentType contentType) { final Anchor anchor = cpsAnchorService.getAnchor(dataspaceName, anchorName); - final String xpath = ROOT_NODE_XPATH.equals(parentNodeXpath) ? PARENT_NODE_XPATH_FOR_ROOT_NODE_XPATH : + final String xpath = ROOT_NODE_XPATH.equals(parentNodeXpath) ? 
NO_PARENT_PATH : CpsPathUtil.getNormalizedXpath(parentNodeXpath); yangParser.validateData(contentType, nodeData, anchor, xpath); } - private Collection<DataNode> rebuildSourceDataNodes(final String xpath, final Anchor sourceAnchor, - final Collection<DataNode> sourceDataNodes) { - - final Collection<DataNode> sourceDataNodesRebuilt = new ArrayList<>(); - if (sourceDataNodes != null) { - final String sourceDataNodesAsJson = getDataNodesAsJson(sourceAnchor, sourceDataNodes); - sourceDataNodesRebuilt.addAll( - buildDataNodesWithAnchorAndXpath(sourceAnchor, xpath, sourceDataNodesAsJson, ContentType.JSON)); - } - return sourceDataNodesRebuilt; - } - - private Collection<DataNode> buildTargetDataNodes(final Anchor sourceAnchor, final String xpath, - final Map<String, String> yangResourcesNameToContentMap, - final String targetData) { - if (yangResourcesNameToContentMap.isEmpty()) { - return buildDataNodesWithAnchorAndXpath(sourceAnchor, xpath, targetData, ContentType.JSON); - } else { - return buildDataNodesWithYangResourceAndXpath(yangResourcesNameToContentMap, xpath, - targetData, ContentType.JSON); - } - } - - private String getDataNodesAsJson(final Anchor anchor, final Collection<DataNode> dataNodes) { - - final List<Map<String, Object>> prefixToDataNodes = prefixResolver(anchor, dataNodes); - final Map<String, Object> targetDataAsJsonObject = getNodeDataAsJsonString(prefixToDataNodes); - return jsonObjectMapper.asJsonString(targetDataAsJsonObject); - } - - private Map<String, Object> getNodeDataAsJsonString(final List<Map<String, Object>> prefixToDataNodes) { - final Map<String, Object> nodeDataAsJson = new HashMap<>(); - for (final Map<String, Object> prefixToDataNode : prefixToDataNodes) { - nodeDataAsJson.putAll(prefixToDataNode); - } - return nodeDataAsJson; - } - - private List<Map<String, Object>> prefixResolver(final Anchor anchor, final Collection<DataNode> dataNodes) { - final List<Map<String, Object>> prefixToDataNodes = new ArrayList<>(dataNodes.size()); - for (final DataNode dataNode: dataNodes) { - final String prefix = prefixResolver.getPrefix(anchor, dataNode.getXpath()); - final Map<String, Object> prefixToDataNode = DataMapUtils.toDataMapWithIdentifier(dataNode, prefix); - prefixToDataNodes.add(prefixToDataNode); - } - return prefixToDataNodes; - } - - private Collection<DataNode> buildDataNodesWithParentNodeXpath(final Anchor anchor, - final Map<String, String> nodesJsonData, - final ContentType contentType) { - final Collection<DataNode> dataNodes = new ArrayList<>(); - for (final Map.Entry<String, String> nodeJsonData : nodesJsonData.entrySet()) { - dataNodes.addAll(buildDataNodesWithParentNodeXpath(anchor, nodeJsonData.getKey(), - nodeJsonData.getValue(), contentType)); - } - return dataNodes; - } - - private Collection<DataNode> buildDataNodesWithParentNodeXpath(final Anchor anchor, final String parentNodeXpath, - final String nodeData, final ContentType contentType) { - - if (ROOT_NODE_XPATH.equals(parentNodeXpath)) { - final ContainerNode containerNode = yangParser.parseData(contentType, nodeData, - anchor, PARENT_NODE_XPATH_FOR_ROOT_NODE_XPATH); - final Collection<DataNode> dataNodes = new DataNodeBuilder() - .withContainerNode(containerNode) - .buildCollection(); - if (dataNodes.isEmpty()) { - throw new DataValidationException(NO_DATA_NODES, "No data nodes provided"); - } - return dataNodes; - } - final String normalizedParentNodeXpath = CpsPathUtil.getNormalizedXpath(parentNodeXpath); - final ContainerNode containerNode = - yangParser.parseData(contentType, 
nodeData, anchor, normalizedParentNodeXpath); - final Collection<DataNode> dataNodes = new DataNodeBuilder() - .withParentNodeXpath(normalizedParentNodeXpath) - .withContainerNode(containerNode) - .buildCollection(); - if (dataNodes.isEmpty()) { - throw new DataValidationException(NO_DATA_NODES, "No data nodes provided"); - } - return dataNodes; - } - - private Collection<DataNode> buildDataNodesWithParentNodeXpath( - final Map<String, String> yangResourcesNameToContentMap, final String xpath, - final String nodeData, final ContentType contentType) { - - if (isRootNodeXpath(xpath)) { - final ContainerNode containerNode = yangParser.parseData(contentType, nodeData, - yangResourcesNameToContentMap, PARENT_NODE_XPATH_FOR_ROOT_NODE_XPATH); - final Collection<DataNode> dataNodes = new DataNodeBuilder() - .withContainerNode(containerNode) - .buildCollection(); - if (dataNodes.isEmpty()) { - throw new DataValidationException(NO_DATA_NODES, "Data nodes were not found under the xpath " + xpath); - } - return dataNodes; - } - final String normalizedParentNodeXpath = CpsPathUtil.getNormalizedXpath(xpath); - final ContainerNode containerNode = - yangParser.parseData(contentType, nodeData, yangResourcesNameToContentMap, normalizedParentNodeXpath); - final Collection<DataNode> dataNodes = new DataNodeBuilder() - .withParentNodeXpath(normalizedParentNodeXpath) - .withContainerNode(containerNode) - .buildCollection(); - if (dataNodes.isEmpty()) { - throw new DataValidationException(NO_DATA_NODES, "Data nodes were not found under the xpath " + xpath); - } - return dataNodes; - } - - private Collection<DataNode> buildDataNodesWithAnchorAndXpath(final Anchor anchor, final String xpath, - final String nodeData, - final ContentType contentType) { - - if (!isRootNodeXpath(xpath)) { - final String parentNodeXpath = CpsPathUtil.getNormalizedParentXpath(xpath); - if (parentNodeXpath.isEmpty()) { - return buildDataNodesWithParentNodeXpath(anchor, ROOT_NODE_XPATH, nodeData, contentType); - } - return buildDataNodesWithParentNodeXpath(anchor, parentNodeXpath, nodeData, contentType); - } - return buildDataNodesWithParentNodeXpath(anchor, xpath, nodeData, contentType); - } - - private Collection<DataNode> buildDataNodesWithYangResourceAndXpath( - final Map<String, String> yangResourcesNameToContentMap, final String xpath, - final String nodeData, final ContentType contentType) { - if (!isRootNodeXpath(xpath)) { - final String parentNodeXpath = CpsPathUtil.getNormalizedParentXpath(xpath); - if (parentNodeXpath.isEmpty()) { - return buildDataNodesWithParentNodeXpath(yangResourcesNameToContentMap, ROOT_NODE_XPATH, - nodeData, contentType); - } - return buildDataNodesWithParentNodeXpath(yangResourcesNameToContentMap, parentNodeXpath, - nodeData, contentType); - } - return buildDataNodesWithParentNodeXpath(yangResourcesNameToContentMap, xpath, nodeData, contentType); - } - - private static boolean isRootNodeXpath(final String xpath) { - return ROOT_NODE_XPATH.equals(xpath); - } - private void processDataNodeUpdate(final Anchor anchor, final DataNode dataNodeUpdate) { cpsDataPersistenceService.batchUpdateDataLeaves(anchor.getDataspaceName(), anchor.getName(), Collections.singletonMap(dataNodeUpdate.getXpath(), dataNodeUpdate.getLeaves())); @@ -524,10 +317,12 @@ public class CpsDataServiceImpl implements CpsDataService { } } - private void sendDataUpdatedEvent(final Anchor anchor, final String xpath, - final Operation operation, final OffsetDateTime observedTimestamp) { + private void sendDataUpdatedEvent(final Anchor anchor, 
+ final String xpath, + final Operation operation, + final OffsetDateTime observedTimestamp) { try { - cpsDataUpdateEventsService.publishCpsDataUpdateEvent(anchor, xpath, operation, observedTimestamp); + cpsDataUpdateEventsProducer.sendCpsDataUpdateEvent(anchor, xpath, operation, observedTimestamp); } catch (final Exception exception) { log.error("Failed to send message to notification service", exception); } diff --git a/cps-service/src/main/java/org/onap/cps/impl/CpsDataspaceServiceImpl.java b/cps-service/src/main/java/org/onap/cps/impl/CpsDataspaceServiceImpl.java index 15caa2276d..ac55b81bdc 100644 --- a/cps-service/src/main/java/org/onap/cps/impl/CpsDataspaceServiceImpl.java +++ b/cps-service/src/main/java/org/onap/cps/impl/CpsDataspaceServiceImpl.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2020-2023 Nordix Foundation + * Copyright (C) 2020-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2020-2022 Bell Canada. * Modifications Copyright (C) 2021 Pantheon.tech * Modifications Copyright (C) 2022 TechMahindra Ltd. @@ -27,8 +27,8 @@ import java.util.Collection; import lombok.RequiredArgsConstructor; import org.onap.cps.api.CpsDataspaceService; import org.onap.cps.api.model.Dataspace; -import org.onap.cps.impl.utils.CpsValidator; import org.onap.cps.spi.CpsAdminPersistenceService; +import org.onap.cps.utils.CpsValidator; import org.springframework.stereotype.Service; @Service diff --git a/cps-service/src/main/java/org/onap/cps/impl/CpsDeltaServiceImpl.java b/cps-service/src/main/java/org/onap/cps/impl/CpsDeltaServiceImpl.java index 7a9d142506..650aa99b84 100644 --- a/cps-service/src/main/java/org/onap/cps/impl/CpsDeltaServiceImpl.java +++ b/cps-service/src/main/java/org/onap/cps/impl/CpsDeltaServiceImpl.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023 TechMahindra Ltd. + * Copyright (C) 2023-2025 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,6 +20,9 @@ package org.onap.cps.impl; +import static org.onap.cps.utils.ContentType.JSON; + +import io.micrometer.core.annotation.Timed; import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; @@ -28,20 +31,69 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; +import org.onap.cps.api.CpsAnchorService; +import org.onap.cps.api.CpsDataService; import org.onap.cps.api.CpsDeltaService; +import org.onap.cps.api.DataNodeFactory; +import org.onap.cps.api.model.Anchor; import org.onap.cps.api.model.DataNode; import org.onap.cps.api.model.DeltaReport; -import org.onap.cps.api.model.DeltaReportBuilder; +import org.onap.cps.api.parameters.FetchDescendantsOption; +import org.onap.cps.utils.DataMapper; +import org.onap.cps.utils.JsonObjectMapper; import org.springframework.stereotype.Service; @Slf4j @Service +@RequiredArgsConstructor public class CpsDeltaServiceImpl implements CpsDeltaService { + private final CpsAnchorService cpsAnchorService; + private final CpsDataService cpsDataService; + private final DataNodeFactory dataNodeFactory; + private final DataMapper dataMapper; + private final JsonObjectMapper jsonObjectMapper; + @Override - public List<DeltaReport> getDeltaReports(final Collection<DataNode> sourceDataNodes, - final Collection<DataNode> targetDataNodes) { + @Timed(value = "cps.delta.service.get.delta", + description = "Time taken to get delta between anchors") + public List<DeltaReport> getDeltaByDataspaceAndAnchors(final String dataspaceName, + final String sourceAnchorName, + final String targetAnchorName, + final String xpath, + final FetchDescendantsOption fetchDescendantsOption) { + + final Collection<DataNode> sourceDataNodes = cpsDataService.getDataNodesForMultipleXpaths(dataspaceName, + sourceAnchorName, Collections.singletonList(xpath), fetchDescendantsOption); + final Collection<DataNode> targetDataNodes = cpsDataService.getDataNodesForMultipleXpaths(dataspaceName, + targetAnchorName, Collections.singletonList(xpath), fetchDescendantsOption); + return getDeltaReports(sourceDataNodes, targetDataNodes); + } + + @Timed(value = "cps.delta.service.get.delta", + description = "Time taken to get delta between anchor and a payload") + @Override + public List<DeltaReport> getDeltaByDataspaceAnchorAndPayload(final String dataspaceName, + final String sourceAnchorName, + final String xpath, + final Map<String, String> yangResourceContentPerName, + final String targetData, + final FetchDescendantsOption fetchDescendantsOption) { + + final Anchor sourceAnchor = cpsAnchorService.getAnchor(dataspaceName, sourceAnchorName); + final Collection<DataNode> sourceDataNodes = cpsDataService.getDataNodesForMultipleXpaths(dataspaceName, + sourceAnchorName, Collections.singletonList(xpath), fetchDescendantsOption); + final Collection<DataNode> sourceDataNodesRebuilt = + rebuildSourceDataNodes(xpath, sourceAnchor, sourceDataNodes); + final Collection<DataNode> targetDataNodes = new ArrayList<>( + buildTargetDataNodes(sourceAnchor, xpath, yangResourceContentPerName, targetData)); + return getDeltaReports(sourceDataNodesRebuilt, targetDataNodes); + } + + private List<DeltaReport> getDeltaReports(final Collection<DataNode> sourceDataNodes, + final Collection<DataNode> targetDataNodes) { final List<DeltaReport> deltaReport = new ArrayList<>(); @@ -49,7 +101,6 @@ public class CpsDeltaServiceImpl implements CpsDeltaService { final Map<String, 
DataNode> xpathToTargetDataNodes = convertToXPathToDataNodesMap(targetDataNodes); deltaReport.addAll(getRemovedAndUpdatedDeltaReports(xpathToSourceDataNodes, xpathToTargetDataNodes)); - deltaReport.addAll(getAddedDeltaReports(xpathToSourceDataNodes, xpathToTargetDataNodes)); return Collections.unmodifiableList(deltaReport); @@ -166,7 +217,6 @@ public class CpsDeltaServiceImpl implements CpsDeltaService { } } else if (sourceLeaf == null) { targetDataInDeltaReport.put(key, targetLeaf); - } else { sourceDataInDeltaReport.put(key, sourceLeaf); } @@ -200,4 +250,30 @@ public class CpsDeltaServiceImpl implements CpsDeltaService { } return addedDeltaReportEntries; } + + private Collection<DataNode> rebuildSourceDataNodes(final String xpath, + final Anchor sourceAnchor, + final Collection<DataNode> sourceDataNodes) { + final Collection<DataNode> sourceDataNodesRebuilt = new ArrayList<>(); + if (sourceDataNodes != null) { + final Map<String, Object> sourceDataNodesAsMap = dataMapper.toFlatDataMap(sourceAnchor, sourceDataNodes); + final String sourceDataNodesAsJson = jsonObjectMapper.asJsonString(sourceDataNodesAsMap); + final Collection<DataNode> dataNodes = dataNodeFactory + .createDataNodesWithAnchorXpathAndNodeData(sourceAnchor, xpath, sourceDataNodesAsJson, JSON); + sourceDataNodesRebuilt.addAll(dataNodes); + } + return sourceDataNodesRebuilt; + } + + private Collection<DataNode> buildTargetDataNodes(final Anchor sourceAnchor, final String xpath, + final Map<String, String> yangResourceContentPerName, + final String targetData) { + if (yangResourceContentPerName.isEmpty()) { + return dataNodeFactory + .createDataNodesWithAnchorXpathAndNodeData(sourceAnchor, xpath, targetData, JSON); + } else { + return dataNodeFactory + .createDataNodesWithYangResourceXpathAndNodeData(yangResourceContentPerName, xpath, targetData, JSON); + } + } } diff --git a/cps-service/src/main/java/org/onap/cps/impl/CpsFacadeImpl.java b/cps-service/src/main/java/org/onap/cps/impl/CpsFacadeImpl.java new file mode 100644 index 0000000000..35a03685b6 --- /dev/null +++ b/cps-service/src/main/java/org/onap/cps/impl/CpsFacadeImpl.java @@ -0,0 +1,107 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.impl; + +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; +import lombok.RequiredArgsConstructor; +import org.onap.cps.api.CpsDataService; +import org.onap.cps.api.CpsFacade; +import org.onap.cps.api.CpsQueryService; +import org.onap.cps.api.model.DataNode; +import org.onap.cps.api.parameters.FetchDescendantsOption; +import org.onap.cps.api.parameters.PaginationOption; +import org.onap.cps.cpspath.parser.CpsPathQuery; +import org.onap.cps.cpspath.parser.CpsPathUtil; +import org.onap.cps.utils.DataMapper; +import org.springframework.stereotype.Service; + +@RequiredArgsConstructor +@Service +public class CpsFacadeImpl implements CpsFacade { + + private final CpsDataService cpsDataService; + private final CpsQueryService cpsQueryService; + private final DataMapper dataMapper; + + @Override + public Map<String, Object> getFirstDataNodeByAnchor(final String dataspaceName, + final String anchorName, + final String xpath, + final FetchDescendantsOption fetchDescendantsOption) { + final DataNode dataNode = cpsDataService.getDataNodes(dataspaceName, anchorName, xpath, + fetchDescendantsOption).iterator().next(); + return dataMapper.toDataMap(dataspaceName, anchorName, dataNode); + } + + @Override + public List<Map<String, Object>> getDataNodesByAnchor(final String dataspaceName, + final String anchorName, + final String xpath, + final FetchDescendantsOption fetchDescendantsOption) { + final Collection<DataNode> dataNodes = cpsDataService.getDataNodes(dataspaceName, anchorName, xpath, + fetchDescendantsOption); + return dataMapper.toDataMaps(dataspaceName, anchorName, dataNodes); + } + + @Override + public List<Map<String, Object>> executeAnchorQuery(final String dataspaceName, + final String anchorName, + final String cpsPath, + final FetchDescendantsOption fetchDescendantsOption) { + final CpsPathQuery cpsPathQuery = CpsPathUtil.getCpsPathQuery(cpsPath); + if (cpsPathQuery.hasAttributeAxis()) { + final String attributeName = cpsPathQuery.getAttributeAxisAttributeName(); + final Set<Object> attributeValues = + cpsQueryService.queryDataLeaf(dataspaceName, anchorName, cpsPath, Object.class); + return dataMapper.toAttributeMaps(attributeName, attributeValues); + } + final Collection<DataNode> dataNodes = + cpsQueryService.queryDataNodes(dataspaceName, anchorName, cpsPath, fetchDescendantsOption); + return dataMapper.toDataMaps(dataspaceName, anchorName, dataNodes); + } + + @Override + public List<Map<String, Object>> executeDataspaceQuery(final String dataspaceName, + final String cpsPath, + final FetchDescendantsOption fetchDescendantsOption, + final PaginationOption paginationOption) { + final Collection<DataNode> dataNodes = cpsQueryService.queryDataNodesAcrossAnchors(dataspaceName, + cpsPath, fetchDescendantsOption, paginationOption); + return dataMapper.toDataMaps(dataspaceName, dataNodes); + } + + @Override + public int countAnchorsInDataspaceQuery(final String dataspaceName, + final String cpsPath, + final PaginationOption paginationOption) { + if (paginationOption == PaginationOption.NO_PAGINATION) { + return 1; + } + final int totalAnchors = cpsQueryService.countAnchorsForDataspaceAndCpsPath(dataspaceName, cpsPath); + return totalAnchors <= paginationOption.getPageSize() ? 
1 + : (int) Math.ceil((double) totalAnchors / paginationOption.getPageSize()); + } + +} + diff --git a/cps-service/src/main/java/org/onap/cps/impl/CpsModuleServiceImpl.java b/cps-service/src/main/java/org/onap/cps/impl/CpsModuleServiceImpl.java index 7622ba5fe2..e50325c739 100644 --- a/cps-service/src/main/java/org/onap/cps/impl/CpsModuleServiceImpl.java +++ b/cps-service/src/main/java/org/onap/cps/impl/CpsModuleServiceImpl.java @@ -36,8 +36,8 @@ import org.onap.cps.api.model.ModuleDefinition; import org.onap.cps.api.model.ModuleReference; import org.onap.cps.api.model.SchemaSet; import org.onap.cps.api.parameters.CascadeDeleteAllowed; -import org.onap.cps.impl.utils.CpsValidator; import org.onap.cps.spi.CpsModulePersistenceService; +import org.onap.cps.utils.CpsValidator; import org.onap.cps.yang.TimedYangTextSchemaSourceSetBuilder; import org.onap.cps.yang.YangTextSchemaSourceSet; import org.springframework.stereotype.Service; @@ -57,21 +57,21 @@ public class CpsModuleServiceImpl implements CpsModuleService { @Timed(value = "cps.module.service.schemaset.create", description = "Time taken to create (and store) a schemaset") public void createSchemaSet(final String dataspaceName, final String schemaSetName, - final Map<String, String> yangResourcesNameToContentMap) { + final Map<String, String> yangResourceContentPerName) { cpsValidator.validateNameCharacters(dataspaceName); - cpsModulePersistenceService.storeSchemaSet(dataspaceName, schemaSetName, yangResourcesNameToContentMap); + cpsModulePersistenceService.createSchemaSet(dataspaceName, schemaSetName, yangResourceContentPerName); final YangTextSchemaSourceSet yangTextSchemaSourceSet = - timedYangTextSchemaSourceSetBuilder.getYangTextSchemaSourceSet(yangResourcesNameToContentMap); + timedYangTextSchemaSourceSetBuilder.getYangTextSchemaSourceSet(yangResourceContentPerName); yangTextSchemaSourceSetCache.updateCache(dataspaceName, schemaSetName, yangTextSchemaSourceSet); } @Override public void createSchemaSetFromModules(final String dataspaceName, final String schemaSetName, - final Map<String, String> newModuleNameToContentMap, + final Map<String, String> yangResourceContentPerName, final Collection<ModuleReference> allModuleReferences) { cpsValidator.validateNameCharacters(dataspaceName); - cpsModulePersistenceService.storeSchemaSetFromModules(dataspaceName, schemaSetName, - newModuleNameToContentMap, allModuleReferences); + cpsModulePersistenceService.createSchemaSetFromNewAndExistingModules(dataspaceName, schemaSetName, + yangResourceContentPerName, allModuleReferences); } @Override @@ -83,7 +83,7 @@ public class CpsModuleServiceImpl implements CpsModuleService { @Override public SchemaSet getSchemaSet(final String dataspaceName, final String schemaSetName) { cpsValidator.validateNameCharacters(dataspaceName); - final var yangTextSchemaSourceSet = yangTextSchemaSourceSetCache + final YangTextSchemaSourceSet yangTextSchemaSourceSet = yangTextSchemaSourceSetCache .get(dataspaceName, schemaSetName); return SchemaSet.builder().name(schemaSetName).dataspaceName(dataspaceName) .moduleReferences(yangTextSchemaSourceSet.getModuleReferences()).build(); @@ -130,15 +130,14 @@ public class CpsModuleServiceImpl implements CpsModuleService { @Override public void upgradeSchemaSetFromModules(final String dataspaceName, final String schemaSetName, - final Map<String, String> newModuleNameToContentMap, + final Map<String, String> newYangResourceContentPerModule, final Collection<ModuleReference> allModuleReferences) { 
cpsValidator.validateNameCharacters(dataspaceName); - cpsModulePersistenceService.updateSchemaSetFromModules(dataspaceName, schemaSetName, - newModuleNameToContentMap, allModuleReferences); + cpsModulePersistenceService.updateSchemaSetFromNewAndExistingModules(dataspaceName, schemaSetName, + newYangResourceContentPerModule, allModuleReferences); yangTextSchemaSourceSetCache.removeFromCache(dataspaceName, schemaSetName); } - @Override public Collection<ModuleReference> getYangResourceModuleReferences(final String dataspaceName) { cpsValidator.validateNameCharacters(dataspaceName); @@ -175,8 +174,9 @@ public class CpsModuleServiceImpl implements CpsModuleService { } @Override - public void deleteAllUnusedYangModuleData() { - cpsModulePersistenceService.deleteAllUnusedYangModuleData(); + public void deleteAllUnusedYangModuleData(final String dataspaceName) { + cpsValidator.validateNameCharacters(dataspaceName); + cpsModulePersistenceService.deleteAllUnusedYangModuleData(dataspaceName); } private boolean isCascadeDeleteProhibited(final CascadeDeleteAllowed cascadeDeleteAllowed) { diff --git a/cps-service/src/main/java/org/onap/cps/impl/CpsNotificationServiceImpl.java b/cps-service/src/main/java/org/onap/cps/impl/CpsNotificationServiceImpl.java new file mode 100644 index 0000000000..dc293b26e2 --- /dev/null +++ b/cps-service/src/main/java/org/onap/cps/impl/CpsNotificationServiceImpl.java @@ -0,0 +1,128 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 TechMahindra Ltd. + * Modifications Copyright (C) 2025 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.impl; + +import static org.onap.cps.api.parameters.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS; + +import java.util.Collection; +import java.util.List; +import java.util.Map; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.onap.cps.api.CpsAnchorService; +import org.onap.cps.api.CpsNotificationService; +import org.onap.cps.api.exceptions.DataNodeNotFoundException; +import org.onap.cps.api.model.Anchor; +import org.onap.cps.api.model.DataNode; +import org.onap.cps.cpspath.parser.CpsPathUtil; +import org.onap.cps.spi.CpsDataPersistenceService; +import org.onap.cps.utils.ContentType; +import org.onap.cps.utils.DataMapper; +import org.onap.cps.utils.YangParser; +import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode; +import org.springframework.stereotype.Service; + +@Service +@Slf4j +@RequiredArgsConstructor +public class CpsNotificationServiceImpl implements CpsNotificationService { + + private final CpsAnchorService cpsAnchorService; + + private final CpsDataPersistenceService cpsDataPersistenceService; + + private final YangParser yangParser; + + private final DataMapper dataMapper; + + private static final String ADMIN_DATASPACE = "CPS-Admin"; + private static final String CPS_SUBSCRIPTION_ANCHOR_NAME = "cps-notification-subscriptions"; + private static final String DATASPACE_SUBSCRIPTION_XPATH_FORMAT = "/dataspaces/dataspace[@name='%s']"; + private static final String ANCHORS_SUBSCRIPTION_XPATH_FORMAT = "/dataspaces/dataspace[@name='%s']/anchors"; + private static final String ANCHOR_SUBSCRIPTION_XPATH_FORMAT = + "/dataspaces/dataspace[@name='%s']/anchors/anchor[@name='%s']"; + + @Override + public void createNotificationSubscription(final String notificationSubscriptionAsJson, final String xpath) { + + final Anchor anchor = cpsAnchorService.getAnchor(ADMIN_DATASPACE, CPS_SUBSCRIPTION_ANCHOR_NAME); + final Collection<DataNode> dataNodes = + buildDataNodesWithParentNodeXpath(anchor, xpath, notificationSubscriptionAsJson); + cpsDataPersistenceService.addListElements(ADMIN_DATASPACE, CPS_SUBSCRIPTION_ANCHOR_NAME, xpath, dataNodes); + } + + @Override + public void deleteNotificationSubscription(final String xpath) { + cpsDataPersistenceService.deleteDataNode(ADMIN_DATASPACE, CPS_SUBSCRIPTION_ANCHOR_NAME, xpath); + } + + @Override + public List<Map<String, Object>> getNotificationSubscription(final String xpath) { + final Collection<DataNode> dataNodes = cpsDataPersistenceService + .getDataNodes(ADMIN_DATASPACE, CPS_SUBSCRIPTION_ANCHOR_NAME, xpath, INCLUDE_ALL_DESCENDANTS); + return dataMapper.toDataMaps(ADMIN_DATASPACE, CPS_SUBSCRIPTION_ANCHOR_NAME, dataNodes); + } + + @Override + public boolean isNotificationEnabled(final String dataspaceName, final String anchorName) { + return (isNotificationEnabledForAnchor(dataspaceName, anchorName) + || notificationEnabledForAllAnchors(dataspaceName)); + } + + private boolean isNotificationEnabledForAnchor(final String dataspaceName, final String anchorName) { + final String xpath = String.format(ANCHOR_SUBSCRIPTION_XPATH_FORMAT, dataspaceName, anchorName); + return isNotificationEnabledForXpath(xpath); + } + + private boolean isNotificationEnabledForXpath(final String xpath) { + try { + cpsDataPersistenceService + .getDataNodes(ADMIN_DATASPACE, CPS_SUBSCRIPTION_ANCHOR_NAME, xpath, INCLUDE_ALL_DESCENDANTS); + } catch (final 
DataNodeNotFoundException e) { + return false; + } + return true; + } + + private boolean notificationEnabledForAllAnchors(final String dataspaceName) { + final String dataspaceSubscriptionXpath = String.format(DATASPACE_SUBSCRIPTION_XPATH_FORMAT, dataspaceName); + return isNotificationEnabledForXpath(dataspaceSubscriptionXpath) + && noIndividualAnchorEnabledInDataspace(dataspaceName); + } + + private boolean noIndividualAnchorEnabledInDataspace(final String dataspaceName) { + final String xpathForAnchors = String.format(ANCHORS_SUBSCRIPTION_XPATH_FORMAT, dataspaceName); + return !isNotificationEnabledForXpath(xpathForAnchors); + } + + private Collection<DataNode> buildDataNodesWithParentNodeXpath(final Anchor anchor, + final String parentNodeXpath, + final String nodeData) { + final String normalizedParentNodeXpath = CpsPathUtil.getNormalizedXpath(parentNodeXpath); + final ContainerNode containerNode = + yangParser.parseData(ContentType.JSON, nodeData, anchor, normalizedParentNodeXpath); + return new DataNodeBuilder() + .withParentNodeXpath(normalizedParentNodeXpath) + .withContainerNode(containerNode) + .buildCollection(); + } +} diff --git a/cps-service/src/main/java/org/onap/cps/impl/CpsQueryServiceImpl.java b/cps-service/src/main/java/org/onap/cps/impl/CpsQueryServiceImpl.java index e534e0aea1..d61caf2d27 100644 --- a/cps-service/src/main/java/org/onap/cps/impl/CpsQueryServiceImpl.java +++ b/cps-service/src/main/java/org/onap/cps/impl/CpsQueryServiceImpl.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation + * Copyright (C) 2021-2025 Nordix Foundation * Modifications Copyright (C) 2022-2023 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -29,8 +29,8 @@ import org.onap.cps.api.CpsQueryService; import org.onap.cps.api.model.DataNode; import org.onap.cps.api.parameters.FetchDescendantsOption; import org.onap.cps.api.parameters.PaginationOption; -import org.onap.cps.impl.utils.CpsValidator; import org.onap.cps.spi.CpsDataPersistenceService; +import org.onap.cps.utils.CpsValidator; import org.springframework.stereotype.Service; @Service @@ -46,15 +46,36 @@ public class CpsQueryServiceImpl implements CpsQueryService { public Collection<DataNode> queryDataNodes(final String dataspaceName, final String anchorName, final String cpsPath, final FetchDescendantsOption fetchDescendantsOption) { + return queryDataNodes(dataspaceName, anchorName, cpsPath, fetchDescendantsOption, NO_LIMIT); + } + + @Override + @Timed(value = "cps.data.service.datanode.query", + description = "Time taken to query data nodes with a limit on results") + public Collection<DataNode> queryDataNodes(final String dataspaceName, final String anchorName, + final String cpsPath, + final FetchDescendantsOption fetchDescendantsOption, + final int queryResultLimit) { cpsValidator.validateNameCharacters(dataspaceName, anchorName); - return cpsDataPersistenceService.queryDataNodes(dataspaceName, anchorName, cpsPath, fetchDescendantsOption); + return cpsDataPersistenceService.queryDataNodes(dataspaceName, + anchorName, + cpsPath, + fetchDescendantsOption, + queryResultLimit); } @Override public <T> Set<T> queryDataLeaf(final String dataspaceName, final String anchorName, final String cpsPath, final Class<T> targetClass) { + return queryDataLeaf(dataspaceName, anchorName, cpsPath, NO_LIMIT, targetClass); + } + + 
@Override + public <T> Set<T> queryDataLeaf(final String dataspaceName, final String anchorName, final String cpsPath, + final int queryResultLimit, final Class<T> targetClass) { cpsValidator.validateNameCharacters(dataspaceName, anchorName); - throw new UnsupportedOperationException("Query by attribute-axis not implemented yet!"); + return cpsDataPersistenceService.queryDataLeaf(dataspaceName, anchorName, cpsPath, + queryResultLimit, targetClass); } @Override diff --git a/cps-service/src/main/java/org/onap/cps/api/model/DataNodeBuilder.java b/cps-service/src/main/java/org/onap/cps/impl/DataNodeBuilder.java index d509f53525..a78f3d9826 100644 --- a/cps-service/src/main/java/org/onap/cps/api/model/DataNodeBuilder.java +++ b/cps-service/src/main/java/org/onap/cps/impl/DataNodeBuilder.java @@ -20,7 +20,7 @@ * ============LICENSE_END========================================================= */ -package org.onap.cps.api.model; +package org.onap.cps.impl; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -33,6 +33,7 @@ import java.util.Set; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.onap.cps.api.exceptions.DataValidationException; +import org.onap.cps.api.model.DataNode; import org.onap.cps.utils.YangUtils; import org.opendaylight.yangtools.yang.common.Ordering; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier; diff --git a/cps-service/src/main/java/org/onap/cps/impl/DataNodeFactoryImpl.java b/cps-service/src/main/java/org/onap/cps/impl/DataNodeFactoryImpl.java new file mode 100644 index 0000000000..76db887c8e --- /dev/null +++ b/cps-service/src/main/java/org/onap/cps/impl/DataNodeFactoryImpl.java @@ -0,0 +1,107 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 TechMahindra Ltd. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.impl; + +import static org.onap.cps.cpspath.parser.CpsPathUtil.NO_PARENT_PATH; +import static org.onap.cps.cpspath.parser.CpsPathUtil.ROOT_NODE_XPATH; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Map; +import lombok.RequiredArgsConstructor; +import org.onap.cps.api.DataNodeFactory; +import org.onap.cps.api.exceptions.DataValidationException; +import org.onap.cps.api.model.Anchor; +import org.onap.cps.api.model.DataNode; +import org.onap.cps.cpspath.parser.CpsPathUtil; +import org.onap.cps.utils.ContentType; +import org.onap.cps.utils.YangParser; +import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode; +import org.springframework.stereotype.Service; + +@Service +@RequiredArgsConstructor +public class DataNodeFactoryImpl implements DataNodeFactory { + + private final YangParser yangParser; + + @Override + public Collection<DataNode> createDataNodesWithAnchorAndXpathToNodeData(final Anchor anchor, + final Map<String, String> nodesDataPerParentNodeXpath, + final ContentType contentType) { + final Collection<DataNode> dataNodes = new ArrayList<>(); + for (final Map.Entry<String, String> nodeDataToParentNodeXpath : nodesDataPerParentNodeXpath.entrySet()) { + dataNodes.addAll(createDataNodesWithAnchorParentXpathAndNodeData(anchor, nodeDataToParentNodeXpath.getKey(), + nodeDataToParentNodeXpath.getValue(), contentType)); + } + return dataNodes; + } + + @Override + public Collection<DataNode> createDataNodesWithAnchorXpathAndNodeData(final Anchor anchor, final String xpath, + final String nodeData, + final ContentType contentType) { + final String xpathToBuildNodes = isRootNodeXpath(xpath) ? NO_PARENT_PATH : + CpsPathUtil.getNormalizedParentXpath(xpath); + final ContainerNode containerNode = yangParser.parseData(contentType, nodeData, anchor, xpathToBuildNodes); + return convertToDataNodes(xpathToBuildNodes, containerNode); + } + + @Override + public Collection<DataNode> createDataNodesWithAnchorParentXpathAndNodeData(final Anchor anchor, + final String parentNodeXpath, + final String nodeData, + final ContentType contentType) { + + final String normalizedParentNodeXpath = CpsPathUtil.getNormalizedXpath(parentNodeXpath); + final ContainerNode containerNode = + yangParser.parseData(contentType, nodeData, anchor, normalizedParentNodeXpath); + return convertToDataNodes(normalizedParentNodeXpath, containerNode); + } + + @Override + public Collection<DataNode> createDataNodesWithYangResourceXpathAndNodeData( + final Map<String, String> yangResourceContentPerName, + final String xpath, final String nodeData, + final ContentType contentType) { + final String normalizedParentNodeXpath = isRootNodeXpath(xpath) ? 
NO_PARENT_PATH : + CpsPathUtil.getNormalizedParentXpath(xpath); + final ContainerNode containerNode = + yangParser.parseData(contentType, nodeData, yangResourceContentPerName, normalizedParentNodeXpath); + return convertToDataNodes(normalizedParentNodeXpath, containerNode); + } + + private static Collection<DataNode> convertToDataNodes(final String normalizedParentNodeXpath, + final ContainerNode containerNode) { + final Collection<DataNode> dataNodes = new DataNodeBuilder() + .withParentNodeXpath(normalizedParentNodeXpath) + .withContainerNode(containerNode) + .buildCollection(); + if (dataNodes.isEmpty()) { + throw new DataValidationException("No Data Nodes", "The request did not return any data nodes for xpath " + + normalizedParentNodeXpath); + } + return dataNodes; + } + + private static boolean isRootNodeXpath(final String xpath) { + return ROOT_NODE_XPATH.equals(xpath); + } +} diff --git a/cps-service/src/main/java/org/onap/cps/api/model/DeltaReportBuilder.java b/cps-service/src/main/java/org/onap/cps/impl/DeltaReportBuilder.java index a8e922f3df..fdc2e939d6 100644 --- a/cps-service/src/main/java/org/onap/cps/api/model/DeltaReportBuilder.java +++ b/cps-service/src/main/java/org/onap/cps/impl/DeltaReportBuilder.java @@ -18,11 +18,12 @@ * ============LICENSE_END========================================================= */ -package org.onap.cps.api.model; +package org.onap.cps.impl; import java.io.Serializable; import java.util.Map; import lombok.extern.slf4j.Slf4j; +import org.onap.cps.api.model.DeltaReport; @Slf4j public class DeltaReportBuilder { diff --git a/cps-service/src/main/java/org/onap/cps/impl/YangTextSchemaSourceSetCache.java b/cps-service/src/main/java/org/onap/cps/impl/YangTextSchemaSourceSetCache.java index 688669c941..e7e7b1c5ce 100644 --- a/cps-service/src/main/java/org/onap/cps/impl/YangTextSchemaSourceSetCache.java +++ b/cps-service/src/main/java/org/onap/cps/impl/YangTextSchemaSourceSetCache.java @@ -27,8 +27,8 @@ import io.micrometer.core.instrument.Metrics; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import lombok.RequiredArgsConstructor; -import org.onap.cps.impl.utils.CpsValidator; import org.onap.cps.spi.CpsModulePersistenceService; +import org.onap.cps.utils.CpsValidator; import org.onap.cps.yang.YangTextSchemaSourceSet; import org.onap.cps.yang.YangTextSchemaSourceSetBuilder; import org.springframework.cache.annotation.CacheConfig; diff --git a/cps-service/src/main/java/org/onap/cps/init/AbstractModelLoader.java b/cps-service/src/main/java/org/onap/cps/init/AbstractModelLoader.java index e864633f25..df068c68a6 100644 --- a/cps-service/src/main/java/org/onap/cps/init/AbstractModelLoader.java +++ b/cps-service/src/main/java/org/onap/cps/init/AbstractModelLoader.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 Nordix Foundation * Modifications Copyright (C) 2024 TechMahindra Ltd. 
* ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -34,6 +34,7 @@ import org.onap.cps.api.CpsDataService; import org.onap.cps.api.CpsDataspaceService; import org.onap.cps.api.CpsModuleService; import org.onap.cps.api.exceptions.AlreadyDefinedException; +import org.onap.cps.api.exceptions.DuplicatedYangResourceException; import org.onap.cps.api.exceptions.ModelOnboardingException; import org.onap.cps.api.parameters.CascadeDeleteAllowed; import org.onap.cps.utils.JsonObjectMapper; @@ -57,10 +58,10 @@ public abstract class AbstractModelLoader implements ModelLoader { public void onApplicationEvent(final ApplicationStartedEvent applicationStartedEvent) { try { onboardOrUpgradeModel(); - } catch (final Exception modelOnboardUpException) { + } catch (final Exception exception) { log.error("Exiting application due to failure in onboarding model: {} ", - modelOnboardUpException.getMessage()); - SpringApplication.exit(applicationStartedEvent.getApplicationContext(), () -> EXIT_CODE_ON_ERROR); + exception.getMessage()); + exitApplication(applicationStartedEvent); } } @@ -76,8 +77,10 @@ public abstract class AbstractModelLoader implements ModelLoader { cpsModuleService.createSchemaSet(dataspaceName, schemaSetName, yangResourcesContentByResourceName); } catch (final AlreadyDefinedException alreadyDefinedException) { log.warn("Creating new schema set failed as schema set already exists"); + } catch (final DuplicatedYangResourceException duplicatedYangResourceException) { + log.warn("Ignoring yang resource duplication exception. Assuming model was created by another instance"); } catch (final Exception exception) { - log.error("Creating schema set failed: {} ", exception.getMessage()); + log.error("Creating schema set {} failed: {} ", schemaSetName, exception.getMessage()); throw new ModelOnboardingException("Creating schema set failed", exception.getMessage()); } } @@ -180,4 +183,8 @@ public abstract class AbstractModelLoader implements ModelLoader { throw new ModelOnboardingException(message, exception.getMessage()); } } + + private void exitApplication(final ApplicationStartedEvent applicationStartedEvent) { + SpringApplication.exit(applicationStartedEvent.getApplicationContext(), () -> EXIT_CODE_ON_ERROR); + } } diff --git a/cps-service/src/main/java/org/onap/cps/init/CpsNotificationSubscriptionModelLoader.java b/cps-service/src/main/java/org/onap/cps/init/CpsNotificationSubscriptionModelLoader.java index 0b7d1609ff..bf60f8d49a 100644 --- a/cps-service/src/main/java/org/onap/cps/init/CpsNotificationSubscriptionModelLoader.java +++ b/cps-service/src/main/java/org/onap/cps/init/CpsNotificationSubscriptionModelLoader.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 TechMahindra Ltd. + * Copyright (C) 2024-2025 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
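To make the yangResourceContentPerName maps used throughout this change concrete, here is a minimal caller-side sketch of CpsModuleService.createSchemaSet, whose yang resource parameter is renamed to yangResourceContentPerName in this change; the dataspace name, schema set name, file name and YANG module text below are illustrative assumptions only.

import java.util.Map;
import org.onap.cps.api.CpsModuleService;

// Hypothetical helper, not part of the change: shows the expected shape of yangResourceContentPerName.
class SchemaSetCreationSketch {

    private final CpsModuleService cpsModuleService;

    SchemaSetCreationSketch(final CpsModuleService cpsModuleService) {
        this.cpsModuleService = cpsModuleService;
    }

    void createBookstoreSchemaSet() {
        // Key: yang resource (file) name, value: its source text; both are made-up examples.
        final Map<String, String> yangResourceContentPerName = Map.of(
            "bookstore@2025-01-01.yang",
            "module bookstore { yang-version 1.1; namespace \"org:example:bookstore\"; prefix bs; "
                + "revision \"2025-01-01\"; container bookstore { leaf name { type string; } } }");
        cpsModuleService.createSchemaSet("my-dataspace", "my-schema-set", yangResourceContentPerName);
    }
}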
diff --git a/cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java b/cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java index 5be5b1e2e0..138fc34ca1 100644 --- a/cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java +++ b/cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2020-2024 Nordix Foundation. + * Copyright (C) 2020-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2021 Pantheon.tech * Modifications Copyright (C) 2022 Bell Canada * Modifications Copyright (C) 2022-2023 TechMahindra Ltd. @@ -27,6 +27,7 @@ import java.io.Serializable; import java.util.Collection; import java.util.List; import java.util.Map; +import java.util.Set; import org.onap.cps.api.model.DataNode; import org.onap.cps.api.parameters.FetchDescendantsOption; import org.onap.cps.api.parameters.PaginationOption; @@ -185,6 +186,36 @@ public interface CpsDataPersistenceService { String cpsPath, FetchDescendantsOption fetchDescendantsOption); /** + * Get a datanode by cps path. + * + * @param dataspaceName dataspace name + * @param anchorName anchor name + * @param cpsPath cps path + * @param fetchDescendantsOption defines whether the descendants of the node(s) found by the query should be + * included in the output + * @param queryResultLimit limits the number of returned entities (if less than 1 returns all) + * + * @return the data nodes found i.e. 0 or more data nodes + */ + List<DataNode> queryDataNodes(String dataspaceName, + String anchorName, + String cpsPath, FetchDescendantsOption fetchDescendantsOption, + int queryResultLimit); + + /** + * Get data leaf for the given dataspace and anchor by cps path. + * + * @param dataspaceName dataspace name + * @param anchorName anchor name + * @param cpsPath cps path + * @param queryResultLimit limits the number of returned entities (if less than 1 returns all) + * @param targetClass class of the expected data type + * @return a collection of data objects of expected type + */ + <T> Set<T> queryDataLeaf(String dataspaceName, String anchorName, String cpsPath, int queryResultLimit, + Class<T> targetClass); + + /** * Get a datanode by dataspace name and cps path across all anchors. * * @param dataspaceName dataspace name diff --git a/cps-service/src/main/java/org/onap/cps/spi/CpsModulePersistenceService.java b/cps-service/src/main/java/org/onap/cps/spi/CpsModulePersistenceService.java index b1f8aad88f..02e1b6c754 100755 --- a/cps-service/src/main/java/org/onap/cps/spi/CpsModulePersistenceService.java +++ b/cps-service/src/main/java/org/onap/cps/spi/CpsModulePersistenceService.java @@ -36,34 +36,37 @@ public interface CpsModulePersistenceService { /** * Stores Schema Set. 
* - * @param dataspaceName dataspace name - * @param schemaSetName schema set name - * @param yangResourcesNameToContentMap YANG resources (files) map where key is a name and value is content + * @param dataspaceName dataspace name + * @param schemaSetName schema set name + * @param yangResourceContentPerName a map of YANG resources where the key is a name and the value is content */ - void storeSchemaSet(String dataspaceName, String schemaSetName, Map<String, String> yangResourcesNameToContentMap); + void createSchemaSet(String dataspaceName, String schemaSetName, Map<String, String> yangResourceContentPerName); /** * Stores a new schema set from new modules and existing modules. * - * @param dataspaceName Dataspace name - * @param schemaSetName Schema set name - * @param newModuleNameToContentMap YANG resources map where key is a module name and value is content - * @param allModuleReferences All YANG resources module references + * @param dataspaceName dataspace name + * @param schemaSetName Schema set name + * @param newYangResourceContentPerName a map of only the new YANG resources + * the key is a name and value is its content + * @param allModuleReferences all YANG resources module references */ - void storeSchemaSetFromModules(String dataspaceName, String schemaSetName, - Map<String, String> newModuleNameToContentMap, Collection<ModuleReference> allModuleReferences); + void createSchemaSetFromNewAndExistingModules(String dataspaceName, String schemaSetName, + Map<String, String> newYangResourceContentPerName, + Collection<ModuleReference> allModuleReferences); /** * Update an existing schema set from new modules and existing modules. * - * @param dataspaceName Dataspace name + * @param dataspaceName dataspace name * @param schemaSetName Schema set name - * @param newModuleNameToContentMap YANG resources map where key is a module name and value is content - * @param allModuleReferences All YANG resources module references + * @param newYangResourcesPerName a map of only the new YANG resources + * the key is a module name and value is its content + * @param allModuleReferences all YANG resources module references */ - void updateSchemaSetFromModules(final String dataspaceName, final String schemaSetName, - final Map<String, String> newModuleNameToContentMap, - final Collection<ModuleReference> allModuleReferences); + void updateSchemaSetFromNewAndExistingModules(String dataspaceName, String schemaSetName, + Map<String, String> newYangResourcesPerName, + Collection<ModuleReference> allModuleReferences); /** * Checks whether a schema set exists in the specified dataspace. @@ -146,9 +149,11 @@ String moduleName, String moduleRevision); /** - * Remove any unused Yang Resource Modules and Schema Sets. + * Remove any unused Yang Resource Modules and Schema Sets from the given dataspace. + * + * @param dataspaceName dataspace name */ - void deleteAllUnusedYangModuleData(); + void deleteAllUnusedYangModuleData(String dataspaceName); /** * Identify new module references from those returned by a node compared to what is in CPS already. 
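In the same illustrative spirit, a sketch of the result-limited query overloads introduced in this change (queryDataNodes with a queryResultLimit, and queryDataLeaf), assuming CpsQueryService declares the same signatures that CpsQueryServiceImpl implements earlier in the diff; the dataspace, anchor and cps path values are made up, and per the persistence-service javadoc above a limit below 1 returns all matches.

import java.util.Collection;
import java.util.Set;
import org.onap.cps.api.CpsQueryService;
import org.onap.cps.api.model.DataNode;
import org.onap.cps.api.parameters.FetchDescendantsOption;

// Hypothetical caller, not part of the change: demonstrates the limit-aware query signatures.
class LimitedQuerySketch {

    private final CpsQueryService cpsQueryService;

    LimitedQuerySketch(final CpsQueryService cpsQueryService) {
        this.cpsQueryService = cpsQueryService;
    }

    void queryWithLimits() {
        // At most 100 matching data nodes, including all their descendants.
        final Collection<DataNode> dataNodes = cpsQueryService.queryDataNodes("my-dataspace", "my-anchor",
            "/bookstore/categories", FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS, 100);

        // Attribute-axis query returning at most 10 leaf values typed as String (path syntax assumed).
        final Set<String> categoryNames = cpsQueryService.queryDataLeaf("my-dataspace", "my-anchor",
            "/bookstore/categories/@name", 10, String.class);
    }
}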
diff --git a/cps-service/src/main/java/org/onap/cps/impl/utils/CpsValidator.java b/cps-service/src/main/java/org/onap/cps/utils/CpsValidator.java index 75bcf126a4..2244ea729b 100644 --- a/cps-service/src/main/java/org/onap/cps/impl/utils/CpsValidator.java +++ b/cps-service/src/main/java/org/onap/cps/utils/CpsValidator.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation + * Copyright (C) 2022-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,23 +18,34 @@ * ============LICENSE_END========================================================= */ -package org.onap.cps.impl.utils; +package org.onap.cps.utils; +import org.onap.cps.api.exceptions.DataValidationException; import org.onap.cps.api.parameters.PaginationOption; public interface CpsValidator { /** + * Validate characters in name within cps. + * + * @param name name to be validated + * @return true if name is valid + */ + boolean isValidName(final String name); + + /** * Validate characters in names within cps. * - * @param names names of data to be validated + * @param names names to be validated + * @throws DataValidationException for any name that is not valid. */ void validateNameCharacters(final String... names); /** * Validate characters in names within cps. * - * @param names names of data to be validated + * @param names names to be validated + * @throws DataValidationException for any name that is not valid. */ void validateNameCharacters(final Iterable<String> names); diff --git a/cps-ri/src/main/java/org/onap/cps/ri/utils/CpsValidatorImpl.java b/cps-service/src/main/java/org/onap/cps/utils/CpsValidatorImpl.java index fa9feee1e7..e1c81bfaf5 100644 --- a/cps-ri/src/main/java/org/onap/cps/ri/utils/CpsValidatorImpl.java +++ b/cps-service/src/main/java/org/onap/cps/utils/CpsValidatorImpl.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation + * Copyright (C) 2022-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,24 +18,32 @@ * ============LICENSE_END========================================================= */ -package org.onap.cps.ri.utils; +package org.onap.cps.utils; import com.google.common.collect.Lists; import java.util.Arrays; import java.util.Collection; import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; import org.onap.cps.api.exceptions.DataValidationException; import org.onap.cps.api.parameters.PaginationOption; -import org.onap.cps.impl.utils.CpsValidator; import org.springframework.stereotype.Component; -@Slf4j @Component @RequiredArgsConstructor public class CpsValidatorImpl implements CpsValidator { - private static final char[] UNSUPPORTED_NAME_CHARACTERS = "!\" #$%&'()*+,./\\:;<=>?@[]^`{|}~".toCharArray(); + private static final char[] UNSUPPORTED_NAME_CHARACTERS = "=!\" #$%&'()*+,./\\:;<>?@[]^`{|}~".toCharArray(); + + @Override + public boolean isValidName(final String name) { + final Collection<Character> charactersOfName = Lists.charactersOf(name); + for (final char unsupportedCharacter : UNSUPPORTED_NAME_CHARACTERS) { + if (charactersOfName.contains(unsupportedCharacter)) { + return false; + } + } + return true; + } @Override public void validateNameCharacters(final String... names) { diff --git a/cps-service/src/main/java/org/onap/cps/utils/DataMapper.java b/cps-service/src/main/java/org/onap/cps/utils/DataMapper.java new file mode 100644 index 0000000000..29d61ffcc4 --- /dev/null +++ b/cps-service/src/main/java/org/onap/cps/utils/DataMapper.java @@ -0,0 +1,144 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.utils; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.RequiredArgsConstructor; +import org.onap.cps.api.CpsAnchorService; +import org.onap.cps.api.model.Anchor; +import org.onap.cps.api.model.DataNode; +import org.springframework.stereotype.Service; + +@Service +@RequiredArgsConstructor +public class DataMapper { + + private final CpsAnchorService cpsAnchorService; + private final PrefixResolver prefixResolver; + + /** + * Convert a data node to a data map. 
+ * + * @param dataspaceName the name of the dataspace + * @param anchorName the name of the anchor + * @param dataNode the data node to convert + * @return the data node represented as a map of key value pairs + */ + public Map<String, Object> toDataMap(final String dataspaceName, final String anchorName, final DataNode dataNode) { + final Anchor anchor = cpsAnchorService.getAnchor(dataspaceName, anchorName); + final String prefix = prefixResolver.getPrefix(anchor, dataNode.getXpath()); + return DataMapUtils.toDataMapWithIdentifier(dataNode, prefix); + } + + /** + * Convert a collection of data nodes to a list of data maps. + * + * @param dataspaceName the name of the dataspace + * @param anchorName the name of the anchor + * @param dataNodes the data nodes to convert + * @return a list of maps representing the data nodes + */ + public List<Map<String, Object>> toDataMaps(final String dataspaceName, final String anchorName, + final Collection<DataNode> dataNodes) { + final Anchor anchor = cpsAnchorService.getAnchor(dataspaceName, anchorName); + return toDataMaps(anchor, dataNodes); + } + + /** + * Convert a collection of data nodes to a list of data maps. + * + * @param anchor the anchor + * @param dataNodes the data nodes to convert + * @return a list of maps representing the data nodes + */ + public List<Map<String, Object>> toDataMaps(final Anchor anchor, final Collection<DataNode> dataNodes) { + final List<Map<String, Object>> dataMaps = new ArrayList<>(dataNodes.size()); + for (final DataNode dataNode : dataNodes) { + final String prefix = prefixResolver.getPrefix(anchor, dataNode.getXpath()); + final Map<String, Object> dataMap = DataMapUtils.toDataMapWithIdentifier(dataNode, prefix); + dataMaps.add(dataMap); + } + return dataMaps; + } + + /** + * Convert a collection of data nodes (belonging to multiple anchors) to a list of data maps. + * + * @param dataspaceName the name of the dataspace + * @param dataNodes the data nodes to convert + * @return a list of maps representing the data nodes + */ + public List<Map<String, Object>> toDataMaps(final String dataspaceName, final Collection<DataNode> dataNodes) { + final List<Map<String, Object>> dataNodesAsMaps = new ArrayList<>(dataNodes.size()); + final Map<String, List<DataNode>> dataNodesPerAnchor = groupDataNodesPerAnchor(dataNodes); + for (final Map.Entry<String, List<DataNode>> dataNodesPerAnchorEntry : dataNodesPerAnchor.entrySet()) { + final String anchorName = dataNodesPerAnchorEntry.getKey(); + final Anchor anchor = cpsAnchorService.getAnchor(dataspaceName, anchorName); + final DataNode dataNode = dataNodesPerAnchorEntry.getValue().get(0); + final String prefix = prefixResolver.getPrefix(anchor, dataNode.getXpath()); + final Map<String, Object> dataNodeAsMap = DataMapUtils.toDataMapWithIdentifierAndAnchor( + dataNodesPerAnchorEntry.getValue(), anchorName, prefix); + dataNodesAsMaps.add(dataNodeAsMap); + } + return dataNodesAsMaps; + } + + /** + * Converts a list of attribute values to a list of data maps. + * @param attributeName attribute name + * @param attributeValues attribute values + * @return a list of maps representing the attribute values + */ + public List<Map<String, Object>> toAttributeMaps(final String attributeName, + final Collection<Object> attributeValues) { + return attributeValues.stream().map(attributeValue -> Map.of(attributeName, attributeValue)).toList(); + } + + /** + * Convert a collection of data nodes to a data map. 
+ * + * @param anchor the anchor + * @param dataNodes the data nodes to convert + * @return a map representing the data nodes + */ + public Map<String, Object> toFlatDataMap(final Anchor anchor, final Collection<DataNode> dataNodes) { + final List<Map<String, Object>> dataNodesAsMaps = toDataMaps(anchor, dataNodes); + return flattenDataNodesMaps(dataNodesAsMaps); + } + + private Map<String, Object> flattenDataNodesMaps(final List<Map<String, Object>> dataNodesAsMaps) { + final Map<String, Object> dataNodesAsFlatMap = new HashMap<>(); + for (final Map<String, Object> dataNodeAsMap : dataNodesAsMaps) { + dataNodesAsFlatMap.putAll(dataNodeAsMap); + } + return dataNodesAsFlatMap; + } + + private static Map<String, List<DataNode>> groupDataNodesPerAnchor(final Collection<DataNode> dataNodes) { + return dataNodes.stream().collect(Collectors.groupingBy(DataNode::getAnchorName)); + } + +} diff --git a/cps-service/src/main/java/org/onap/cps/utils/PrefixResolver.java b/cps-service/src/main/java/org/onap/cps/utils/PrefixResolver.java index bd348a25d1..e59029f916 100644 --- a/cps-service/src/main/java/org/onap/cps/utils/PrefixResolver.java +++ b/cps-service/src/main/java/org/onap/cps/utils/PrefixResolver.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation. + * Copyright (C) 2022-2025 Nordix Foundation. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -47,25 +47,29 @@ public class PrefixResolver { * @return the prefix of the module the top level element of given xpath */ public String getPrefix(final Anchor anchor, final String xpath) { + return getPrefix(anchor.getDataspaceName(), anchor.getSchemaSetName(), xpath); + } + + private String getPrefix(final String dataspaceName, final String schemaSetName, final String xpath) { final CpsPathQuery cpsPathQuery = CpsPathUtil.getCpsPathQuery(xpath); if (cpsPathQuery.getCpsPathPrefixType() != CpsPathPrefixType.ABSOLUTE) { return ""; } - final String topLevelContainerName = cpsPathQuery.getContainerNames().get(0); + final String topLevelContainerName = cpsPathQuery.getContainerNames().get(0); final YangTextSchemaSourceSet yangTextSchemaSourceSet = - yangTextSchemaSourceSetCache.get(anchor.getDataspaceName(), anchor.getSchemaSetName()); + yangTextSchemaSourceSetCache.get(dataspaceName, schemaSetName); final SchemaContext schemaContext = yangTextSchemaSourceSet.getSchemaContext(); return schemaContext.getChildNodes().stream() - .filter(DataNodeContainer.class::isInstance) - .map(SchemaNode::getQName) - .filter(qname -> qname.getLocalName().equals(topLevelContainerName)) - .findFirst() - .map(QName::getModule) - .flatMap(schemaContext::findModule) - .map(Module::getPrefix) - .orElse(""); + .filter(DataNodeContainer.class::isInstance) + .map(SchemaNode::getQName) + .filter(qname -> qname.getLocalName().equals(topLevelContainerName)) + .findFirst() + .map(QName::getModule) + .flatMap(schemaContext::findModule) + .map(Module::getPrefix) + .orElse(""); } } diff --git a/cps-service/src/main/java/org/onap/cps/utils/YangParser.java b/cps-service/src/main/java/org/onap/cps/utils/YangParser.java index 08f450e2f1..5dfeb2fb3f 100644 --- a/cps-service/src/main/java/org/onap/cps/utils/YangParser.java +++ b/cps-service/src/main/java/org/onap/cps/utils/YangParser.java @@ -1,6 +1,6 @@ /* * 
============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation. + * Copyright (C) 2024-2025 Nordix Foundation. * Modifications Copyright (C) 2024 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -71,17 +71,17 @@ public class YangParser { /** * Parses data into (normalized) ContainerNode according to schema context for the given yang resource. * - * @param nodeData data string - * @param yangResourcesNameToContentMap yang resource to content map - * @return the NormalizedNode object + * @param nodeData data string + * @param yangResourceContentPerName yang resource content per name + * @return the NormalizedNode object */ @Timed(value = "cps.utils.yangparser.nodedata.with.parent.with.yangResourceMap.parse", description = "Time taken to parse node data with a parent") public ContainerNode parseData(final ContentType contentType, final String nodeData, - final Map<String, String> yangResourcesNameToContentMap, + final Map<String, String> yangResourceContentPerName, final String parentNodeXpath) { - final SchemaContext schemaContext = getSchemaContext(yangResourcesNameToContentMap); + final SchemaContext schemaContext = getSchemaContext(yangResourceContentPerName); return yangParserHelper.parseData(contentType, nodeData, schemaContext, parentNodeXpath, VALIDATE_AND_PARSE); } @@ -114,8 +114,8 @@ public class YangParser { anchor.getSchemaSetName()).getSchemaContext(); } - private SchemaContext getSchemaContext(final Map<String, String> yangResourcesNameToContentMap) { - return timedYangTextSchemaSourceSetBuilder.getYangTextSchemaSourceSet(yangResourcesNameToContentMap) + private SchemaContext getSchemaContext(final Map<String, String> yangResourceContentPerName) { + return timedYangTextSchemaSourceSetBuilder.getYangTextSchemaSourceSet(yangResourceContentPerName) .getSchemaContext(); } diff --git a/cps-service/src/main/java/org/onap/cps/yang/TimedYangTextSchemaSourceSetBuilder.java b/cps-service/src/main/java/org/onap/cps/yang/TimedYangTextSchemaSourceSetBuilder.java index 013faff0c8..9b2ac944dc 100644 --- a/cps-service/src/main/java/org/onap/cps/yang/TimedYangTextSchemaSourceSetBuilder.java +++ b/cps-service/src/main/java/org/onap/cps/yang/TimedYangTextSchemaSourceSetBuilder.java @@ -30,8 +30,8 @@ public class TimedYangTextSchemaSourceSetBuilder { @Timed(value = "cps.yangtextschemasourceset.build", description = "Time taken to build a yang text schema source set") public YangTextSchemaSourceSet getYangTextSchemaSourceSet( - final Map<String, String> yangResourcesNameToContentMap) { - return YangTextSchemaSourceSetBuilder.of(yangResourcesNameToContentMap); + final Map<String, String> yangResourceContentPerName) { + return YangTextSchemaSourceSetBuilder.of(yangResourceContentPerName); } } diff --git a/cps-service/src/main/java/org/onap/cps/yang/YangTextSchemaSourceSetBuilder.java b/cps-service/src/main/java/org/onap/cps/yang/YangTextSchemaSourceSetBuilder.java index ab7a095572..04b491692a 100644 --- a/cps-service/src/main/java/org/onap/cps/yang/YangTextSchemaSourceSetBuilder.java +++ b/cps-service/src/main/java/org/onap/cps/yang/YangTextSchemaSourceSetBuilder.java @@ -33,6 +33,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import lombok.NoArgsConstructor; @@ 
-77,7 +78,7 @@ public final class YangTextSchemaSourceSetBuilder { * @return the YangTextSchemaSourceSet */ public YangTextSchemaSourceSet build() { - final var schemaContext = generateSchemaContext(yangModelMap.build()); + final SchemaContext schemaContext = generateSchemaContext(yangModelMap.build()); return new YangTextSchemaSourceSetImpl(schemaContext); } @@ -113,9 +114,7 @@ public final class YangTextSchemaSourceSetBuilder { @Override public List<ModuleReference> getModuleReferences() { - return schemaContext.getModules().stream() - .map(YangTextSchemaSourceSetImpl::toModuleReference) - .collect(Collectors.toList()); + return schemaContext.getModules().stream().map(YangTextSchemaSourceSetImpl::toModuleReference).toList(); } private static ModuleReference toModuleReference(final Module module) { @@ -164,12 +163,11 @@ public final class YangTextSchemaSourceSetBuilder { private static List<YangTextSchemaSource> forResources(final Map<String, String> yangResourceNameToContent) { return yangResourceNameToContent.entrySet().stream() - .map(entry -> toYangTextSchemaSource(entry.getKey(), entry.getValue())) - .collect(Collectors.toList()); + .map(entry -> toYangTextSchemaSource(entry.getKey(), entry.getValue())).toList(); } private static YangTextSchemaSource toYangTextSchemaSource(final String sourceName, final String source) { - final var revisionSourceIdentifier = + final RevisionSourceIdentifier revisionSourceIdentifier = createIdentifierFromSourceName(checkNotNull(sourceName)); return new YangTextSchemaSource(revisionSourceIdentifier) { @@ -192,7 +190,7 @@ public final class YangTextSchemaSourceSetBuilder { } private static RevisionSourceIdentifier createIdentifierFromSourceName(final String sourceName) { - final var matcher = RFC6020_RECOMMENDED_FILENAME_PATTERN.matcher(sourceName); + final Matcher matcher = RFC6020_RECOMMENDED_FILENAME_PATTERN.matcher(sourceName); if (matcher.matches()) { return RevisionSourceIdentifier.create(matcher.group(1), Revision.of(matcher.group(2))); } diff --git a/cps-service/src/main/resources/logback-spring.xml b/cps-service/src/main/resources/logback-spring.xml index 03076023b3..6f7ba4d0fb 100644 --- a/cps-service/src/main/resources/logback-spring.xml +++ b/cps-service/src/main/resources/logback-spring.xml @@ -1,6 +1,6 @@ <!-- ============LICENSE_START======================================================= - Copyright (C) 2021-2022 Nordix Foundation + Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. ================================================================================ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -24,7 +24,6 @@ <include resource="org/springframework/boot/logging/logback/console-appender.xml" /> <springProperty scope="context" name="springAppName" source="spring.application.name"/> - <springProperty scope="context" name="username" source="security.auth.username"/> <springProperty scope="context" name="loggingFormat" source="logging.format"/> <property name="currentTimeStamp" value="%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX,UTC}"/> diff --git a/cps-service/src/test/groovy/org/onap/cps/api/parameters/FetchDescendantsOptionSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/api/parameters/FetchDescendantsOptionSpec.groovy index 126e5b197b..508178b419 100644 --- a/cps-service/src/test/groovy/org/onap/cps/api/parameters/FetchDescendantsOptionSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/api/parameters/FetchDescendantsOptionSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation + * Copyright (C) 2022-2025 Nordix Foundation * Modifications Copyright (C) 2023 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -25,6 +25,10 @@ package org.onap.cps.api.parameters import org.onap.cps.api.exceptions.DataValidationException import spock.lang.Specification +import static org.onap.cps.api.parameters.FetchDescendantsOption.DIRECT_CHILDREN_ONLY +import static org.onap.cps.api.parameters.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS +import static org.onap.cps.api.parameters.FetchDescendantsOption.OMIT_DESCENDANTS + class FetchDescendantsOptionSpec extends Specification { def 'Has next descendant for fetch descendant option: #scenario'() { @@ -105,11 +109,22 @@ class FetchDescendantsOptionSpec extends Specification { expect: 'each fetch descendant option has the correct String value' assert fetchDescendantsOption.toString() == expectedStringValue where: 'the following option is used' - fetchDescendantsOption || expectedStringValue - FetchDescendantsOption.OMIT_DESCENDANTS || 'OmitDescendants' - FetchDescendantsOption.DIRECT_CHILDREN_ONLY || 'DirectChildrenOnly' - FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS || 'IncludeAllDescendants' - new FetchDescendantsOption(2) || 'Depth=2' + fetchDescendantsOption || expectedStringValue + OMIT_DESCENDANTS || 'OmitDescendants' + DIRECT_CHILDREN_ONLY || 'DirectChildrenOnly' + INCLUDE_ALL_DESCENDANTS || 'IncludeAllDescendants' + new FetchDescendantsOption(2) || 'Depth=2' + } + + def 'Convert include-descendants boolean to fetch descendants option with : #includeDescendants'() { + when: 'convert boolean #includeDescendants' + def result = FetchDescendantsOption.getFetchDescendantsOption(includeDescendants) + then: 'result is the expected option' + assert result == expectedFetchDescendantsOption + where: 'following parameters are used' + includeDescendants || expectedFetchDescendantsOption + true || INCLUDE_ALL_DESCENDANTS + false || OMIT_DESCENDANTS } } diff --git a/cps-service/src/test/groovy/org/onap/cps/events/CpsDataUpdateEventsServiceSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/events/CpsDataUpdateEventsProducerSpec.groovy index 5dee8fc28b..07ab2a3613 100644 --- a/cps-service/src/test/groovy/org/onap/cps/events/CpsDataUpdateEventsServiceSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/events/CpsDataUpdateEventsProducerSpec.groovy @@ -1,7 +1,7 @@ /* * 
============LICENSE_START======================================================= - * Copyright (C) 2024 TechMahindra Ltd. - * Copyright (C) 2024 Nordix Foundation. + * Copyright (C) 2024-2025 TechMahindra Ltd. + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,30 +21,37 @@ package org.onap.cps.events -import static org.onap.cps.events.model.Data.Operation.CREATE -import static org.onap.cps.events.model.Data.Operation.DELETE -import static org.onap.cps.events.model.Data.Operation.UPDATE - import com.fasterxml.jackson.databind.ObjectMapper import io.cloudevents.CloudEvent import io.cloudevents.core.CloudEventUtils import io.cloudevents.jackson.PojoCloudEventDataMapper -import org.onap.cps.events.model.CpsDataUpdatedEvent +import org.onap.cps.api.CpsNotificationService import org.onap.cps.api.model.Anchor +import org.onap.cps.events.model.CpsDataUpdatedEvent import org.onap.cps.utils.JsonObjectMapper import org.springframework.test.context.ContextConfiguration import spock.lang.Specification import java.time.OffsetDateTime +import static org.onap.cps.events.model.Data.Operation.CREATE +import static org.onap.cps.events.model.Data.Operation.DELETE +import static org.onap.cps.events.model.Data.Operation.UPDATE + @ContextConfiguration(classes = [ObjectMapper, JsonObjectMapper]) -class CpsDataUpdateEventsServiceSpec extends Specification { - def mockEventsPublisher = Mock(EventsPublisher) +class CpsDataUpdateEventsProducerSpec extends Specification { + def mockEventsProducer = Mock(EventsProducer) def objectMapper = new ObjectMapper(); + def mockCpsNotificationService = Mock(CpsNotificationService) - def objectUnderTest = new CpsDataUpdateEventsService(mockEventsPublisher) + def objectUnderTest = new CpsDataUpdateEventsProducer(mockEventsProducer, mockCpsNotificationService) + + def setup() { + mockCpsNotificationService.isNotificationEnabled('dataspace01', 'anchor01') >> true + objectUnderTest.topicName = 'cps-core-event' + } - def 'Create and Publish cps update event where events are #scenario'() { + def 'Create and send cps update event where events are #scenario.'() { given: 'an anchor, operation and observed timestamp' def anchor = new Anchor('anchor01', 'dataspace01', 'schema01'); def operation = operationInRequest @@ -53,11 +60,10 @@ class CpsDataUpdateEventsServiceSpec extends Specification { objectUnderTest.notificationsEnabled = true and: 'cpsChangeEventNotificationsEnabled is also true' objectUnderTest.cpsChangeEventNotificationsEnabled = true - when: 'service is called to publish data update event' - objectUnderTest.topicName = "cps-core-event" - objectUnderTest.publishCpsDataUpdateEvent(anchor, xpath, operation, observedTimestamp) + when: 'service is called to send data update event' + objectUnderTest.sendCpsDataUpdateEvent(anchor, xpath, operation, observedTimestamp) then: 'the event contains the required attributes' - 1 * mockEventsPublisher.publishCloudEvent('cps-core-event', 'dataspace01:anchor01', _) >> { + 1 * mockEventsProducer.sendCloudEvent('cps-core-event', 'dataspace01:anchor01', _) >> { args -> { def cpsDataUpdatedEvent = (args[2] as CloudEvent) @@ -80,42 +86,39 @@ class CpsDataUpdateEventsServiceSpec extends Specification { 'non root node xpath and delete operation' | '/test/path' | DELETE || UPDATE } - def 'publish cps 
update event when #scenario'() { - given: 'an anchor, operation and observed timestamp' - def anchor = new Anchor('anchor01', 'dataspace01', 'schema01'); - def operation = CREATE - def observedTimestamp = OffsetDateTime.now() - and: 'notificationsEnabled is #notificationsEnabled' - objectUnderTest.notificationsEnabled = notificationsEnabled - and: 'cpsChangeEventNotificationsEnabled is #cpsChangeEventNotificationsEnabled' - objectUnderTest.cpsChangeEventNotificationsEnabled = cpsChangeEventNotificationsEnabled - when: 'service is called to publish data update event' - objectUnderTest.topicName = "cps-core-event" - objectUnderTest.publishCpsDataUpdateEvent(anchor, '/', operation, observedTimestamp) - then: 'the event contains the required attributes' - expectedCallToPublisher * mockEventsPublisher.publishCloudEvent('cps-core-event', 'dataspace01:anchor01', _) - where: 'below scenarios are present' - scenario | notificationsEnabled | cpsChangeEventNotificationsEnabled || expectedCallToPublisher - 'both notifications enabled' | true | true || 1 - 'both notifications disabled' | false | false || 0 - 'only CPS change event notification enabled' | false | true || 0 - 'only overall notification enabled' | true | false || 0 - - } - - def 'publish cps update event when no timestamp provided'() { + def 'Send cps update event when no timestamp provided.'() { given: 'an anchor, operation and null timestamp' def anchor = new Anchor('anchor01', 'dataspace01', 'schema01'); - def operation = CREATE def observedTimestamp = null and: 'notificationsEnabled is true' objectUnderTest.notificationsEnabled = true and: 'cpsChangeEventNotificationsEnabled is true' objectUnderTest.cpsChangeEventNotificationsEnabled = true - when: 'service is called to publish data update event' - objectUnderTest.topicName = "cps-core-event" - objectUnderTest.publishCpsDataUpdateEvent(anchor, '/', operation, observedTimestamp) - then: 'the event is published' - 1 * mockEventsPublisher.publishCloudEvent('cps-core-event', 'dataspace01:anchor01', _) + when: 'service is called to send data update event' + objectUnderTest.sendCpsDataUpdateEvent(anchor, '/', CREATE, observedTimestamp) + then: 'the event is sent' + 1 * mockEventsProducer.sendCloudEvent('cps-core-event', 'dataspace01:anchor01', _) } + + def 'Enabling and disabling sending cps update events.'() { + given: 'a different anchor' + def anchor = new Anchor('anchor02', 'some dataspace', 'some schema'); + and: 'notificationsEnabled is #notificationsEnabled' + objectUnderTest.notificationsEnabled = notificationsEnabled + and: 'cpsChangeEventNotificationsEnabled is #cpsChangeEventNotificationsEnabled' + objectUnderTest.cpsChangeEventNotificationsEnabled = cpsChangeEventNotificationsEnabled + and: 'notification service enabled is: #cpsNotificationServiceisNotificationEnabled' + mockCpsNotificationService.isNotificationEnabled(_, 'anchor02') >> cpsNotificationServiceisNotificationEnabled + when: 'service is called to send data update event' + objectUnderTest.sendCpsDataUpdateEvent(anchor, '/', CREATE, null) + then: 'the event is only sent when all related flags are true' + expectedCallsToProducer * mockEventsProducer.sendCloudEvent(*_) + where: 'the following flags are used' + notificationsEnabled | cpsChangeEventNotificationsEnabled | cpsNotificationServiceisNotificationEnabled || expectedCallsToProducer + false | true | true || 0 + true | false | true || 0 + true | true | false || 0 + true | true | true || 1 + } + } diff --git 
a/cps-service/src/test/groovy/org/onap/cps/events/EventsPublisherSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/events/EventsProducerSpec.groovy index fe67287dec..e36d09387a 100644 --- a/cps-service/src/test/groovy/org/onap/cps/events/EventsPublisherSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/events/EventsProducerSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -31,8 +31,6 @@ import org.apache.kafka.common.TopicPartition import org.apache.kafka.common.header.Headers import org.apache.kafka.common.header.internals.RecordHeader import org.apache.kafka.common.header.internals.RecordHeaders -import org.junit.jupiter.api.AfterEach -import org.junit.jupiter.api.BeforeEach import org.slf4j.LoggerFactory import org.springframework.kafka.core.KafkaTemplate import org.springframework.kafka.support.SendResult @@ -41,27 +39,27 @@ import spock.lang.Specification import java.util.concurrent.CompletableFuture -class EventsPublisherSpec extends Specification { +class EventsProducerSpec extends Specification { def legacyKafkaTemplateMock = Mock(KafkaTemplate) def mockCloudEventKafkaTemplate = Mock(KafkaTemplate) def logger = Spy(ListAppender<ILoggingEvent>) void setup() { - def setupLogger = ((Logger) LoggerFactory.getLogger(EventsPublisher.class)) + def setupLogger = ((Logger) LoggerFactory.getLogger(EventsProducer.class)) setupLogger.setLevel(Level.DEBUG) setupLogger.addAppender(logger) logger.start() } void cleanup() { - ((Logger) LoggerFactory.getLogger(EventsPublisher.class)).detachAndStopAllAppenders() + ((Logger) LoggerFactory.getLogger(EventsProducer.class)).detachAndStopAllAppenders() } - def objectUnderTest = new EventsPublisher(legacyKafkaTemplateMock, mockCloudEventKafkaTemplate) + def objectUnderTest = new EventsProducer(legacyKafkaTemplateMock, mockCloudEventKafkaTemplate) - def 'Publish Cloud Event'() { - given: 'a successfully published event' + def 'Send Cloud Event'() { + given: 'a successfully sent event' def eventFuture = CompletableFuture.completedFuture( new SendResult( new ProducerRecord('some-topic', 'some-value'), @@ -70,30 +68,30 @@ class EventsPublisherSpec extends Specification { ) def someCloudEvent = Mock(CloudEvent) 1 * mockCloudEventKafkaTemplate.send('some-topic', 'some-event-key', someCloudEvent) >> eventFuture - when: 'publishing the cloud event' - objectUnderTest.publishCloudEvent('some-topic', 'some-event-key', someCloudEvent) + when: 'sending the cloud event' + objectUnderTest.sendCloudEvent('some-topic', 'some-event-key', someCloudEvent) then: 'the correct debug message is logged' def lastLoggingEvent = logger.list[0] assert lastLoggingEvent.level == Level.DEBUG - assert lastLoggingEvent.formattedMessage.contains('Successfully published event') + assert lastLoggingEvent.formattedMessage.contains('Successfully sent event') } - def 'Publish Cloud Event with Exception'() { + def 'Send Cloud Event with Exception'() { given: 'a failed event' def eventFutureWithFailure = new CompletableFuture<SendResult<String, String>>() eventFutureWithFailure.completeExceptionally(new RuntimeException('some exception')) def someCloudEvent = Mock(CloudEvent) 1 * 
mockCloudEventKafkaTemplate.send('some-topic', 'some-event-key', someCloudEvent) >> eventFutureWithFailure - when: 'publishing the cloud event' - objectUnderTest.publishCloudEvent('some-topic', 'some-event-key', someCloudEvent) + when: 'sending the cloud event' + objectUnderTest.sendCloudEvent('some-topic', 'some-event-key', someCloudEvent) then: 'the correct error message is logged' def lastLoggingEvent = logger.list[0] assert lastLoggingEvent.level == Level.ERROR - assert lastLoggingEvent.formattedMessage.contains('Unable to publish event') + assert lastLoggingEvent.formattedMessage.contains('Unable to send event') } - def 'Publish Legacy Event'() { - given: 'a successfully published event' + def 'Send Legacy Event'() { + given: 'a successfully sent event' def eventFuture = CompletableFuture.completedFuture( new SendResult( new ProducerRecord('some-topic', 'some-value'), @@ -102,16 +100,16 @@ class EventsPublisherSpec extends Specification { ) def someEvent = Mock(Object) 1 * legacyKafkaTemplateMock.send('some-topic', 'some-event-key', someEvent) >> eventFuture - when: 'publishing the cloud event' - objectUnderTest.publishEvent('some-topic', 'some-event-key', someEvent) + when: 'sending the cloud event' + objectUnderTest.sendEvent('some-topic', 'some-event-key', someEvent) then: 'the correct debug message is logged' def lastLoggingEvent = logger.list[0] assert lastLoggingEvent.level == Level.DEBUG - assert lastLoggingEvent.formattedMessage.contains('Successfully published event') + assert lastLoggingEvent.formattedMessage.contains('Successfully sent event') } - def 'Publish Legacy Event with Headers as Map'() { - given: 'a successfully published event' + def 'Send Legacy Event with Headers as Map'() { + given: 'a successfully sent event' def sampleEventHeaders = ['k1': SerializationUtils.serialize('v1')] def eventFuture = CompletableFuture.completedFuture( new SendResult( @@ -120,18 +118,18 @@ class EventsPublisherSpec extends Specification { ) ) def someEvent = Mock(Object.class) - when: 'publishing the legacy event' - objectUnderTest.publishEvent('some-topic', 'some-event-key', sampleEventHeaders, someEvent) - then: 'event is published' + when: 'sending the legacy event' + objectUnderTest.sendEvent('some-topic', 'some-event-key', sampleEventHeaders, someEvent) + then: 'event is sent' 1 * legacyKafkaTemplateMock.send(_) >> eventFuture and: 'the correct debug message is logged' def lastLoggingEvent = logger.list[0] assert lastLoggingEvent.level == Level.DEBUG - assert lastLoggingEvent.formattedMessage.contains('Successfully published event') + assert lastLoggingEvent.formattedMessage.contains('Successfully sent event') } - def 'Publish Legacy Event with Record Headers'() { - given: 'a successfully published event' + def 'Send Legacy Event with Record Headers'() { + given: 'a successfully sent event' def sampleEventHeaders = new RecordHeaders([new RecordHeader('k1', SerializationUtils.serialize('v1'))]) def sampleProducerRecord = new ProducerRecord('some-topic', null, 'some-key', 'some-value', sampleEventHeaders) def eventFuture = CompletableFuture.completedFuture( @@ -141,18 +139,18 @@ class EventsPublisherSpec extends Specification { ) ) def someEvent = Mock(Object.class) - when: 'publishing the legacy event' - objectUnderTest.publishEvent('some-topic', 'some-event-key', sampleEventHeaders, someEvent) - then: 'event is published' + when: 'sending the legacy event' + objectUnderTest.sendEvent('some-topic', 'some-event-key', sampleEventHeaders, someEvent) + then: 'event is sent' 1 * 
legacyKafkaTemplateMock.send(_) >> eventFuture and: 'the correct debug message is logged' def lastLoggingEvent = logger.list[0] assert lastLoggingEvent.level == Level.DEBUG - assert lastLoggingEvent.formattedMessage.contains('Successfully published event') + assert lastLoggingEvent.formattedMessage.contains('Successfully sent event') } def 'Handle Legacy Event Callback'() { - given: 'an event is successfully published' + given: 'an event is successfully sent' def eventFuture = CompletableFuture.completedFuture( new SendResult( new ProducerRecord('some-topic', 'some-value'), @@ -164,11 +162,11 @@ class EventsPublisherSpec extends Specification { then: 'the correct debug message is logged' def lastLoggingEvent = logger.list[0] assert lastLoggingEvent.level == Level.DEBUG - assert lastLoggingEvent.formattedMessage.contains('Successfully published event') + assert lastLoggingEvent.formattedMessage.contains('Successfully sent event') } def 'Handle Legacy Event Callback with Exception'() { - given: 'a failure to publish an event' + given: 'a failure to send an event' def eventFutureWithFailure = new CompletableFuture<SendResult<String, String>>() eventFutureWithFailure.completeExceptionally(new RuntimeException('some exception')) when: 'handling legacy event callback' @@ -176,7 +174,7 @@ class EventsPublisherSpec extends Specification { then: 'the correct error message is logged' def lastLoggingEvent = logger.list[0] assert lastLoggingEvent.level == Level.ERROR - assert lastLoggingEvent.formattedMessage.contains('Unable to publish event') + assert lastLoggingEvent.formattedMessage.contains('Unable to send event') } def 'Convert to kafka headers'() { diff --git a/cps-service/src/test/groovy/org/onap/cps/impl/CpsAnchorServiceImplSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/impl/CpsAnchorServiceImplSpec.groovy index 22f5c9f83e..a21a17fabd 100644 --- a/cps-service/src/test/groovy/org/onap/cps/impl/CpsAnchorServiceImplSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/impl/CpsAnchorServiceImplSpec.groovy @@ -20,8 +20,7 @@ package org.onap.cps.impl - -import org.onap.cps.impl.utils.CpsValidator +import org.onap.cps.utils.CpsValidator import org.onap.cps.spi.CpsAdminPersistenceService import org.onap.cps.spi.CpsDataPersistenceService import org.onap.cps.api.exceptions.ModuleNamesNotFoundException diff --git a/cps-service/src/test/groovy/org/onap/cps/impl/CpsDataServiceImplSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/impl/CpsDataServiceImplSpec.groovy index 3ea859ae6d..53325bc81c 100644 --- a/cps-service/src/test/groovy/org/onap/cps/impl/CpsDataServiceImplSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/impl/CpsDataServiceImplSpec.groovy @@ -1,9 +1,9 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation + * Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2021 Pantheon.tech * Modifications Copyright (C) 2021-2022 Bell Canada. - * Modifications Copyright (C) 2022-2024 TechMahindra Ltd. + * Modifications Copyright (C) 2022-2025 TechMahindra Ltd. * Modifications Copyright (C) 2022 Deutsche Telekom AG * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -26,24 +26,19 @@ package org.onap.cps.impl import ch.qos.logback.classic.Level import ch.qos.logback.classic.Logger import ch.qos.logback.core.read.ListAppender -import com.fasterxml.jackson.databind.ObjectMapper import org.onap.cps.TestUtils import org.onap.cps.api.CpsAnchorService -import org.onap.cps.api.CpsDeltaService -import org.onap.cps.events.CpsDataUpdateEventsService -import org.onap.cps.impl.utils.CpsValidator -import org.onap.cps.spi.CpsDataPersistenceService -import org.onap.cps.api.parameters.FetchDescendantsOption import org.onap.cps.api.exceptions.ConcurrencyException import org.onap.cps.api.exceptions.DataNodeNotFoundExceptionBatch import org.onap.cps.api.exceptions.DataValidationException import org.onap.cps.api.exceptions.SessionManagerException import org.onap.cps.api.exceptions.SessionTimeoutException import org.onap.cps.api.model.Anchor -import org.onap.cps.api.model.DataNodeBuilder +import org.onap.cps.api.parameters.FetchDescendantsOption +import org.onap.cps.events.CpsDataUpdateEventsProducer +import org.onap.cps.spi.CpsDataPersistenceService import org.onap.cps.utils.ContentType -import org.onap.cps.utils.JsonObjectMapper -import org.onap.cps.utils.PrefixResolver +import org.onap.cps.utils.CpsValidator import org.onap.cps.utils.YangParser import org.onap.cps.utils.YangParserHelper import org.onap.cps.yang.TimedYangTextSchemaSourceSetBuilder @@ -65,13 +60,11 @@ class CpsDataServiceImplSpec extends Specification { def mockCpsValidator = Mock(CpsValidator) def mockTimedYangTextSchemaSourceSetBuilder = Mock(TimedYangTextSchemaSourceSetBuilder) def yangParser = new YangParser(new YangParserHelper(), mockYangTextSchemaSourceSetCache, mockTimedYangTextSchemaSourceSetBuilder) - def mockCpsDeltaService = Mock(CpsDeltaService); - def mockDataUpdateEventsService = Mock(CpsDataUpdateEventsService) - def jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) - def mockPrefixResolver = Mock(PrefixResolver) + def mockCpsDataUpdateEventsProducer = Mock(CpsDataUpdateEventsProducer) + def dataNodeFactory = new DataNodeFactoryImpl(yangParser) - def objectUnderTest = new CpsDataServiceImpl(mockCpsDataPersistenceService, mockDataUpdateEventsService, mockCpsAnchorService, - mockCpsValidator, yangParser, mockCpsDeltaService, jsonObjectMapper, mockPrefixResolver) + def objectUnderTest = new CpsDataServiceImpl(mockCpsDataPersistenceService, mockCpsDataUpdateEventsProducer, mockCpsAnchorService, + dataNodeFactory, mockCpsValidator, yangParser) def logger = (Logger) LoggerFactory.getLogger(objectUnderTest.class) def loggingListAppender @@ -108,8 +101,9 @@ class CpsDataServiceImplSpec extends Specification { def 'Saving #scenario data.'() { given: 'schema set for given anchor and dataspace references test-tree model' setupSchemaSetMocks('test-tree.yang') - when: 'save data method is invoked with test-tree #scenario data' + and: 'JSON/XML data is fetched from resource file' def data = TestUtils.getResourceFileContent(dataFile) + when: 'save data method is invoked with test-tree #scenario data' objectUnderTest.saveData(dataspaceName, anchorName, data, observedTimestamp, contentType) then: 'the persistence service method is invoked with correct parameters' 1 * mockCpsDataPersistenceService.storeDataNodes(dataspaceName, anchorName, @@ -132,7 +126,7 @@ class CpsDataServiceImplSpec extends Specification { assert exceptionThrown.message.startsWith(expectedMessage) where: 'given parameters' scenario | invalidData | contentType || expectedMessage - 'no data nodes' | '{}' | ContentType.JSON || 
'No data nodes' + 'no data nodes' | '{}' | ContentType.JSON || 'No Data Nodes' 'invalid json' | '{invalid json' | ContentType.JSON || 'Data Validation Failed' 'invalid xml' | '<invalid xml' | ContentType.XML || 'Data Validation Failed' } @@ -140,8 +134,9 @@ class CpsDataServiceImplSpec extends Specification { def 'Saving list element data fragment under Root node.'() { given: 'schema set for given anchor and dataspace references bookstore model' setupSchemaSetMocks('bookstore.yang') - when: 'save data method is invoked with list element json data' + and: 'JSON data associated with bookstore model' def jsonData = '{"bookstore-address":[{"bookstore-name":"Easons","address":"Dublin,Ireland","postal-code":"D02HA21"}]}' + when: 'save data method is invoked with list element json data' objectUnderTest.saveListElements(dataspaceName, anchorName, '/', jsonData, observedTimestamp, ContentType.JSON) then: 'the persistence service method is invoked with correct parameters' 1 * mockCpsDataPersistenceService.storeDataNodes(dataspaceName, anchorName, @@ -160,8 +155,8 @@ class CpsDataServiceImplSpec extends Specification { def 'Saving child data fragment under existing node.'() { given: 'schema set for given anchor and dataspace references test-tree model' setupSchemaSetMocks('test-tree.yang') - when: 'save data method is invoked with test-tree json data' def jsonData = '{"branch": [{"name": "New"}]}' + when: 'save data method is invoked with test-tree json data' objectUnderTest.saveData(dataspaceName, anchorName, '/test-tree', jsonData, observedTimestamp) then: 'the persistence service method is invoked with correct parameters' 1 * mockCpsDataPersistenceService.addChildDataNodes(dataspaceName, anchorName, '/test-tree', @@ -170,7 +165,7 @@ class CpsDataServiceImplSpec extends Specification { 1 * mockCpsValidator.validateNameCharacters(dataspaceName, anchorName) } - def 'Saving list element data fragment under existing JSON/XML node.'() { + def 'Saving list element data fragment under existing #scenario .'() { given: 'schema set for given anchor and dataspace references test-tree model' setupSchemaSetMocks('test-tree.yang') when: 'save data method is invoked with list element data' @@ -188,12 +183,13 @@ class CpsDataServiceImplSpec extends Specification { and: 'the CpsValidator is called on the dataspaceName and AnchorName' 1 * mockCpsValidator.validateNameCharacters(dataspaceName, anchorName) where: - data | contentType - '{"branch": [{"name": "A"}, {"name": "B"}]}' | ContentType.JSON - '<test-tree xmlns="org:onap:cps:test:test-tree"><branch><name>A</name></branch><branch><name>B</name></branch></test-tree>' | ContentType.XML + scenario | data | contentType + 'JSON data' | '{"branch": [{"name": "A"}, {"name": "B"}]}' | ContentType.JSON + 'XML data' | '<test-tree xmlns="org:onap:cps:test:test-tree"><branch><name>A</name></branch><branch><name>B</name></branch></test-tree>' | ContentType.XML + } - def 'Saving empty list element data fragment for JSON/XML data.'() { + def 'Saving empty list element data fragment for #scenario.'() { given: 'schema set for given anchor and dataspace references test-tree model' setupSchemaSetMocks('test-tree.yang') when: 'save data method is invoked with an empty list' @@ -201,9 +197,9 @@ class CpsDataServiceImplSpec extends Specification { then: 'invalid data exception is thrown' thrown(DataValidationException) where: - data | contentType - '{"branch": []}' | ContentType.JSON - '<test-tree><branch></branch></test-tree>' | ContentType.XML + scenario | data | contentType + 
'JSON data' | '{"branch": []}' | ContentType.JSON + 'XML data' | '<test-tree><branch></branch></test-tree>' | ContentType.XML } def 'Get all data nodes #scenario.'() { @@ -231,76 +227,6 @@ class CpsDataServiceImplSpec extends Specification { fetchDescendantsOption << [FetchDescendantsOption.OMIT_DESCENDANTS, FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS] } - def 'Get delta between 2 anchors'() { - given: 'some xpath, source and target data nodes' - def xpath = '/xpath' - def sourceDataNodes = [new DataNodeBuilder().withXpath(xpath).build()] - def targetDataNodes = [new DataNodeBuilder().withXpath(xpath).build()] - when: 'attempt to get delta between 2 anchors' - objectUnderTest.getDeltaByDataspaceAndAnchors(dataspaceName, ANCHOR_NAME_1, ANCHOR_NAME_2, xpath, FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) - then: 'the dataspace and anchor names are validated' - 2 * mockCpsValidator.validateNameCharacters(_) - and: 'data nodes are fetched using appropriate persistence layer method' - mockCpsDataPersistenceService.getDataNodesForMultipleXpaths(dataspaceName, ANCHOR_NAME_1, [xpath], FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) >> sourceDataNodes - mockCpsDataPersistenceService.getDataNodesForMultipleXpaths(dataspaceName, ANCHOR_NAME_2, [xpath], FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) >> targetDataNodes - and: 'appropriate delta service method is invoked once with correct source and target data nodes' - 1 * mockCpsDeltaService.getDeltaReports(sourceDataNodes, targetDataNodes) - } - - def 'Get delta between anchor and payload with user provided schema #scenario'() { - given: 'user provided schema set ' - def yangResourcesNameToContentMap = TestUtils.getYangResourcesAsMap('bookstore.yang') - setupSchemaSetMocksForDelta(yangResourcesNameToContentMap) - when: 'attempt to get delta between an anchor and a JSON payload' - objectUnderTest.getDeltaByDataspaceAnchorAndPayload(dataspaceName, anchorName, xpath, yangResourcesNameToContentMap, jsonData, FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) - then: 'dataspacename and anchor names are validated' - 1 * mockCpsValidator.validateNameCharacters(['some-dataspace', 'some-anchor']) - and: 'source data nodes are fetched using appropriate persistence layer method' - 1 * mockCpsDataPersistenceService.getDataNodes(dataspaceName, anchorName, xpath, FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) >> sourceDataNodes - and: 'appropriate delta service method is invoked once with correct source and target data nodes' - 1 * mockCpsDeltaService.getDeltaReports({sourceDataNodesRebuilt -> sourceDataNodesRebuilt.xpath[0] == expectedNodeXpath}, {targetDataNodes -> targetDataNodes.xpath[0] == expectedNodeXpath}) - where: 'following data was used' - scenario | xpath | sourceDataNodes | jsonData || expectedNodeXpath - 'root node xpath' | '/' | [new DataNodeBuilder().withXpath('/bookstore').build()] | '{"bookstore":{"bookstore-name":"Easons"}}' || '/bookstore' - 'parent xpath' | '/bookstore' | [new DataNodeBuilder().withXpath('/bookstore').build()] | '{"bookstore":{"bookstore-name":"Easons"}}' || '/bookstore' - 'non-root xpath' | '/bookstore/categories[@code="02"]' | [new DataNodeBuilder().withXpath('/bookstore/categories[@code="02"]').withLeaves(["code":"02"]).build()] | '{"categories":[{"name":"kids","code":"02"}]}' || '/bookstore/categories[@code=\'02\']' - } - - def 'Get delta between anchor and payload by using schema from anchor #scenario'() { - given: 'schema set for a given dataspace and anchor' - setupSchemaSetMocks("bookstore.yang") - when: 'attempt to 
get delta between an anchor and a JSON payload' - objectUnderTest.getDeltaByDataspaceAnchorAndPayload(dataspaceName, anchorName, xpath, [:], jsonData, FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) - then: 'dataspacename and anchor names are validated' - 1 * mockCpsValidator.validateNameCharacters(['some-dataspace', 'some-anchor']) - and: 'source data nodes are fetched using appropriate persistence layer method' - 1 * mockCpsDataPersistenceService.getDataNodes(dataspaceName, anchorName, xpath, FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) >> sourceDataNodes - and: 'appropriate delta service method is invoked once with correct source and target data nodes' - 1 * mockCpsDeltaService.getDeltaReports({sourceDataNodesRebuilt -> sourceDataNodesRebuilt.xpath[0] == expectedNodeXpath}, {targetDataNodes -> targetDataNodes.xpath[0] == expectedNodeXpath}) - where: 'following data was used' - scenario | xpath | sourceDataNodes | jsonData || expectedNodeXpath - 'root node xpath' | '/' | [new DataNodeBuilder().withXpath('/bookstore').build()] | '{"bookstore":{"bookstore-name":"Easons"}}' || '/bookstore' - 'parent xpath' | '/bookstore' | [new DataNodeBuilder().withXpath('/bookstore').build()] | '{"bookstore":{"bookstore-name":"Easons"}}' || '/bookstore' - 'non-root xpath' | '/bookstore/categories[@code="02"]' | [new DataNodeBuilder().withXpath('/bookstore/categories[@code="02"]').withLeaves(["code":"02"]).build()] | '{"categories":[{"name":"kids","code":"02"}]}' || '/bookstore/categories[@code=\'02\']' - } - - def 'Delta between anchor and payload error scenario #scenario'() { - given: 'schema set for given anchor and dataspace references bookstore model' - def yangResourcesNameToContentMap = TestUtils.getYangResourcesAsMap('bookstore.yang') - setupSchemaSetMocksForDelta(yangResourcesNameToContentMap) - when: 'attempt to get delta between anchor and payload' - objectUnderTest.getDeltaByDataspaceAnchorAndPayload(dataspaceName, anchorName, xpath, yangResourcesNameToContentMap, jsonData, FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) - then: 'expected exception is thrown' - thrown(DataValidationException) - where: 'following parameters were used' - scenario | xpath | jsonData - 'invalid json data with root node xpath' | '/' | '{"some-key": "some-value"' - 'empty json data with root node xpath' | '/' | '{}' - 'invalid json data with parent node xpath' | '/bookstore' | '{"some-key": "some-value"' - 'empty json data with parent node xpath' | '/bookstore' | '{}' - 'empty json data with xpath' | "/bookstore/categories[@code='02']" | '{}' - } - def 'Update data node leaves: #scenario.'() { given: 'schema set for given anchor and dataspace references test-tree model' setupSchemaSetMocks('test-tree.yang') @@ -572,8 +498,8 @@ class CpsDataServiceImplSpec extends Specification { and: 'the persistence service method is invoked with the correct parameters' 1 * mockCpsDataPersistenceService.deleteDataNodes(dataspaceName, _ as Collection<String>) and: 'a data update event is sent for each anchor' - 1 * mockDataUpdateEventsService.publishCpsDataUpdateEvent(anchor1, '/', DELETE, observedTimestamp) - 1 * mockDataUpdateEventsService.publishCpsDataUpdateEvent(anchor2, '/', DELETE, observedTimestamp) + 1 * mockCpsDataUpdateEventsProducer.sendCpsDataUpdateEvent(anchor1, '/', DELETE, observedTimestamp) + 1 * mockCpsDataUpdateEventsProducer.sendCpsDataUpdateEvent(anchor2, '/', DELETE, observedTimestamp) } def "Validating #scenario when dry run is enabled."() { @@ -634,11 +560,11 @@ class CpsDataServiceImplSpec extends 
Specification { 1 * mockCpsDataPersistenceService.lockAnchor('some-sessionId', 'some-dataspaceName', 'some-anchorName', 250L) } - def 'Exception is thrown while publishing the notification.'(){ + def 'Exception is thrown while sending the notification.'(){ given: 'schema set for given anchor and dataspace references test-tree model' setupSchemaSetMocks('test-tree.yang') - when: 'publisher set to throw an exception' - mockDataUpdateEventsService.publishCpsDataUpdateEvent(_, _, _, _) >> { throw new Exception("publishing failed")} + when: 'producer throws an exception while sending event' + mockCpsDataUpdateEventsProducer.sendCpsDataUpdateEvent(_, _, _, _) >> { throw new Exception("Sending failed")} and: 'an update event is performed' objectUnderTest.updateNodeLeaves(dataspaceName, anchorName, '/', '{"test-tree": {"branch": []}}', observedTimestamp, ContentType.JSON) then: 'the exception is not bubbled up' @@ -647,6 +573,7 @@ class CpsDataServiceImplSpec extends Specification { def logs = loggingListAppender.list.toString() assert logs.contains('Failed to send message to notification service') } + def setupSchemaSetMocks(String... yangResources) { def mockYangTextSchemaSourceSet = Mock(YangTextSchemaSourceSet) mockYangTextSchemaSourceSetCache.get(dataspaceName, schemaSetName) >> mockYangTextSchemaSourceSet @@ -655,12 +582,4 @@ class CpsDataServiceImplSpec extends Specification { mockYangTextSchemaSourceSet.getSchemaContext() >> schemaContext } - def setupSchemaSetMocksForDelta(Map<String, String> yangResourcesNameToContentMap) { - def mockYangTextSchemaSourceSet = Mock(YangTextSchemaSourceSet) - mockTimedYangTextSchemaSourceSetBuilder.getYangTextSchemaSourceSet(yangResourcesNameToContentMap) >> mockYangTextSchemaSourceSet - mockYangTextSchemaSourceSetCache.get(_, _) >> mockYangTextSchemaSourceSet - def schemaContext = YangTextSchemaSourceSetBuilder.of(yangResourcesNameToContentMap).getSchemaContext() - mockYangTextSchemaSourceSet.getSchemaContext() >> schemaContext - } - } diff --git a/cps-service/src/test/groovy/org/onap/cps/impl/CpsDataspaceServiceImplSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/impl/CpsDataspaceServiceImplSpec.groovy index 468fe76d41..97f6fba4d3 100644 --- a/cps-service/src/test/groovy/org/onap/cps/impl/CpsDataspaceServiceImplSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/impl/CpsDataspaceServiceImplSpec.groovy @@ -21,7 +21,7 @@ package org.onap.cps.impl -import org.onap.cps.impl.utils.CpsValidator +import org.onap.cps.utils.CpsValidator import org.onap.cps.spi.CpsAdminPersistenceService import org.onap.cps.api.model.Dataspace import spock.lang.Specification diff --git a/cps-service/src/test/groovy/org/onap/cps/impl/CpsDeltaServiceImplSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/impl/CpsDeltaServiceImplSpec.groovy index d3dfcf8826..a1bfbb06c9 100644 --- a/cps-service/src/test/groovy/org/onap/cps/impl/CpsDeltaServiceImplSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/impl/CpsDeltaServiceImplSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023 TechMahindra Ltd. + * Copyright (C) 2023-2025 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,13 +20,50 @@ package org.onap.cps.impl +import ch.qos.logback.classic.Level +import ch.qos.logback.classic.Logger +import ch.qos.logback.core.read.ListAppender +import com.fasterxml.jackson.databind.ObjectMapper +import org.onap.cps.TestUtils +import org.onap.cps.api.CpsAnchorService +import org.onap.cps.api.CpsDataService +import org.onap.cps.api.exceptions.DataValidationException +import org.onap.cps.api.model.Anchor import org.onap.cps.api.model.DataNode +import org.onap.cps.api.parameters.FetchDescendantsOption +import org.onap.cps.utils.ContentType +import org.onap.cps.utils.DataMapper +import org.onap.cps.utils.JsonObjectMapper +import org.onap.cps.utils.PrefixResolver +import org.onap.cps.utils.YangParser +import org.onap.cps.utils.YangParserHelper +import org.onap.cps.yang.TimedYangTextSchemaSourceSetBuilder +import org.onap.cps.yang.YangTextSchemaSourceSet +import org.onap.cps.yang.YangTextSchemaSourceSetBuilder +import org.slf4j.LoggerFactory +import org.springframework.context.annotation.AnnotationConfigApplicationContext +import spock.lang.Shared import spock.lang.Specification -class CpsDeltaServiceImplSpec extends Specification{ +class CpsDeltaServiceImplSpec extends Specification { - def objectUnderTest = new CpsDeltaServiceImpl() + def mockCpsAnchorService = Mock(CpsAnchorService) + def mockCpsDataService = Mock(CpsDataService) + def mockYangTextSchemaSourceSetCache = Mock(YangTextSchemaSourceSetCache) + def mockTimedYangTextSchemaSourceSetBuilder = Mock(TimedYangTextSchemaSourceSetBuilder) + def yangParser = new YangParser(new YangParserHelper(), mockYangTextSchemaSourceSetCache, mockTimedYangTextSchemaSourceSetBuilder) + def dataNodeFactory = new DataNodeFactoryImpl(yangParser) + def mockPrefixResolver = Mock(PrefixResolver) + def dataMapper = new DataMapper(mockCpsAnchorService, mockPrefixResolver) + def jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) + def objectUnderTest = new CpsDeltaServiceImpl(mockCpsAnchorService, mockCpsDataService, dataNodeFactory, dataMapper, jsonObjectMapper) + static def bookstoreDataNodeWithParentXpath = [new DataNode(xpath: '/bookstore', leaves: ['bookstore-name': 'Easons'])] + static def bookstoreDataNodeWithChildXpath = [new DataNode(xpath: '/bookstore/categories[@code=\'02\']', leaves: ['code': '02', 'name': 'Kids'])] + static def bookstoreDataAsMapForParentNode = [bookstore: ['bookstore-name': 'Easons']] + static def bookstoreDataAsMapForChildNode = [categories: ['code': '02', 'name': 'Kids']] + static def bookstoreJsonForParentNode = '{"bookstore":{"bookstore-name":"My Store"}}' + static def bookstoreJsonForChildNode = '{"categories":[{"name":"Child","code":"02"}]}' static def sourceDataNodeWithLeafData = [new DataNode(xpath: '/parent', leaves: ['parent-leaf': 'parent-payload-in-source'])] static def sourceDataNodeWithoutLeafData = [new DataNode(xpath: '/parent')] @@ -35,72 +72,193 @@ class CpsDeltaServiceImplSpec extends Specification{ static def sourceDataNodeWithMultipleLeaves = [new DataNode(xpath: '/parent', leaves: ['leaf-1': 'leaf-1-in-source', 'leaf-2': 'leaf-2-in-source'])] static def targetDataNodeWithMultipleLeaves = [new DataNode(xpath: '/parent', leaves: ['leaf-1': 'leaf-1-in-target', 'leaf-2': 'leaf-2-in-target'])] - def 'Get delta between data nodes for REMOVED data'() { - when: 'attempt to get delta between 2 data nodes' - def result = objectUnderTest.getDeltaReports(sourceDataNodeWithLeafData, []) - then: 'the delta report contains expected "remove" action' - assert result[0].action.equals('remove') - 
and : 'the delta report contains the expected xpath' - assert result[0].xpath == '/parent' - and: 'the delta report contains expected source data' - assert result[0].sourceData == ['parent-leaf': 'parent-payload-in-source'] - and: 'the delta report contains no target data' - assert result[0].targetData == null + def logger = (Logger) LoggerFactory.getLogger(objectUnderTest.class) + def loggingListAppender + def applicationContext = new AnnotationConfigApplicationContext() + + @Shared + static def ANCHOR_NAME_1 = 'some-anchor-1' + static def ANCHOR_NAME_2 = 'some-anchor-2' + static def INCLUDE_ALL_DESCENDANTS = FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS + def dataspaceName = 'some-dataspace' + def schemaSetName = 'some-schema-set' + def anchor1 = Anchor.builder().name(ANCHOR_NAME_1).dataspaceName(dataspaceName).schemaSetName(schemaSetName).build() + def anchor2 = Anchor.builder().name(ANCHOR_NAME_2).dataspaceName(dataspaceName).schemaSetName(schemaSetName).build() + + def setup() { + mockCpsAnchorService.getAnchor(dataspaceName, ANCHOR_NAME_1) >> anchor1 + mockCpsAnchorService.getAnchor(dataspaceName, ANCHOR_NAME_2) >> anchor2 + logger.setLevel(Level.DEBUG) + loggingListAppender = new ListAppender() + logger.addAppender(loggingListAppender) + loggingListAppender.start() + applicationContext.refresh() } - def 'Get delta between data nodes for ADDED data'() { - when: 'attempt to get delta between 2 data nodes' - def result = objectUnderTest.getDeltaReports([], targetDataNodeWithLeafData) - then: 'the delta report contains expected "create" action' - assert result[0].action.equals('create') - and: 'the delta report contains expected xpath' - assert result[0].xpath == '/parent' - and: 'the delta report contains no source data' - assert result[0].sourceData == null - and: 'the delta report contains expected target data' - assert result[0].targetData == ['parent-leaf': 'parent-payload-in-target'] + void cleanup() { + ((Logger) LoggerFactory.getLogger(CpsDataServiceImpl.class)).detachAndStopAllAppenders() + applicationContext.close() } - def 'Delta Report between leaves for parent and child nodes'() { - given: 'Two data nodes' + def 'Get Delta between 2 anchors for #scenario'() { + given: 'xpath to get delta' + def xpath = '/' + when: 'attempt to get delta between 2 anchors' + def deltaReport = objectUnderTest.getDeltaByDataspaceAndAnchors(dataspaceName, ANCHOR_NAME_1, ANCHOR_NAME_2, xpath, INCLUDE_ALL_DESCENDANTS) + then: 'cps data service is invoked and returns source data nodes' + mockCpsDataService.getDataNodesForMultipleXpaths(dataspaceName, ANCHOR_NAME_1, [xpath], INCLUDE_ALL_DESCENDANTS) >> sourceDataNodes + and: 'cps data service is invoked again to return target data nodes' + mockCpsDataService.getDataNodesForMultipleXpaths(dataspaceName, ANCHOR_NAME_2, [xpath], INCLUDE_ALL_DESCENDANTS) >> targetDataNodes + and: 'the delta report contains the expected information' + deltaReport.size() == 1 + deltaReport[0].action.equals(expectedAction) + deltaReport[0].xpath.equals('/parent') + deltaReport[0].sourceData == expectedSourceData + deltaReport[0].targetData == expectedTargetData + where: 'following data was used' + scenario | sourceDataNodes | targetDataNodes || expectedAction | expectedSourceData | expectedTargetData + 'Data node is added' | [] | targetDataNodeWithLeafData || 'create' | null | ['parent-leaf': 'parent-payload-in-target'] + 'Data node is removed' | sourceDataNodeWithLeafData | [] || 'remove' | ['parent-leaf': 'parent-payload-in-source'] | null + 'Data node is updated' | 
sourceDataNodeWithLeafData | targetDataNodeWithLeafData || 'replace' | ['parent-leaf': 'parent-payload-in-source'] |['parent-leaf': 'parent-payload-in-target'] + } + + def 'Delta Report between parent nodes containing child nodes'() { + given: 'Two data nodes and xpath' + def xpath = '/' def sourceDataNode = [new DataNode(xpath: '/parent', leaves: ['parent-leaf': 'parent-payload'], childDataNodes: [new DataNode(xpath: '/parent/child', leaves: ['child-leaf': 'child-payload'])])] def targetDataNode = [new DataNode(xpath: '/parent', leaves: ['parent-leaf': 'parent-payload-updated'], childDataNodes: [new DataNode(xpath: '/parent/child', leaves: ['child-leaf': 'child-payload-updated'])])] - when: 'attempt to get delta between 2 data nodes' - def result = objectUnderTest.getDeltaReports(sourceDataNode, targetDataNode) - then: 'the delta report contains expected details for parent node' - assert result[0].action.equals('replace') - assert result[0].xpath == '/parent' - assert result[0].sourceData == ['parent-leaf': 'parent-payload'] - assert result[0].targetData == ['parent-leaf': 'parent-payload-updated'] + when: 'attempt to get delta between 2 anchors' + def deltaReport = objectUnderTest.getDeltaByDataspaceAndAnchors(dataspaceName, ANCHOR_NAME_1, ANCHOR_NAME_2, xpath, INCLUDE_ALL_DESCENDANTS) + then: 'cps data service is invoked and returns source data nodes' + mockCpsDataService.getDataNodesForMultipleXpaths(dataspaceName, ANCHOR_NAME_1, [xpath], INCLUDE_ALL_DESCENDANTS) >> sourceDataNode + and: 'cps data service is invoked again to return target data nodes' + mockCpsDataService.getDataNodesForMultipleXpaths(dataspaceName, ANCHOR_NAME_2, [xpath], INCLUDE_ALL_DESCENDANTS) >> targetDataNode + and: 'the delta report contains expected details for parent node' + assert deltaReport[0].action.equals('replace') + assert deltaReport[0].xpath == '/parent' + assert deltaReport[0].sourceData == ['parent-leaf': 'parent-payload'] + assert deltaReport[0].targetData == ['parent-leaf': 'parent-payload-updated'] and: 'the delta report contains expected details for child node' - assert result[1].action.equals('replace') - assert result[1].xpath == '/parent/child' - assert result[1].sourceData == ['child-leaf': 'child-payload'] - assert result[1].targetData == ['child-leaf': 'child-payload-updated'] + assert deltaReport[1].action.equals('replace') + assert deltaReport[1].xpath == '/parent/child' + assert deltaReport[1].sourceData == ['child-leaf': 'child-payload'] + assert deltaReport[1].targetData == ['child-leaf': 'child-payload-updated'] } def 'Delta report between leaves, #scenario'() { - when: 'attempt to get delta between 2 data nodes' - def result = objectUnderTest.getDeltaReports(sourceDataNode, targetDataNode) - then: 'the delta report contains expected "replace" action' - assert result[0].action.equals('replace') - and: 'the delta report contains expected xpath' - assert result[0].xpath == '/parent' - and: 'the delta report contains expected source and target data' - assert result[0].sourceData == expectedSourceData - assert result[0].targetData == expectedTargetData - where: 'the following data was used' - scenario | sourceDataNode | targetDataNode || expectedSourceData | expectedTargetData - 'source and target data nodes have leaves' | sourceDataNodeWithLeafData | targetDataNodeWithLeafData || ['parent-leaf': 'parent-payload-in-source'] | ['parent-leaf': 'parent-payload-in-target'] - 'only source data node has leaves' | sourceDataNodeWithLeafData | targetDataNodeWithoutLeafData || ['parent-leaf': 
'parent-payload-in-source'] | null - 'only target data node has leaves' | sourceDataNodeWithoutLeafData | targetDataNodeWithLeafData || null | ['parent-leaf': 'parent-payload-in-target'] - 'source and target dsta node with multiple leaves' | sourceDataNodeWithMultipleLeaves | targetDataNodeWithMultipleLeaves || ['leaf-1': 'leaf-1-in-source', 'leaf-2': 'leaf-2-in-source'] | ['leaf-1': 'leaf-1-in-target', 'leaf-2': 'leaf-2-in-target'] + given: 'xpath to fetch delta between two anchors' + def xpath = '/' + when: 'attempt to get delta between 2 anchors' + def deltaReport = objectUnderTest.getDeltaByDataspaceAndAnchors(dataspaceName, ANCHOR_NAME_1, ANCHOR_NAME_2, xpath, INCLUDE_ALL_DESCENDANTS) + then: 'cps data service is invoked and returns source data nodes' + mockCpsDataService.getDataNodesForMultipleXpaths(dataspaceName, ANCHOR_NAME_1, [xpath], INCLUDE_ALL_DESCENDANTS) >> sourceDataNode + and: 'cps data service is invoked again to return target data nodes' + mockCpsDataService.getDataNodesForMultipleXpaths(dataspaceName, ANCHOR_NAME_2, [xpath], INCLUDE_ALL_DESCENDANTS) >> targetDataNode + and: 'the delta report contains expected "replace" action' + assert deltaReport[0].action.equals('replace') + and: 'the delta report contains expected xpath' + assert deltaReport[0].xpath == '/parent' + and: 'the delta report contains expected source and target data' + assert deltaReport[0].sourceData == expectedSourceData + assert deltaReport[0].targetData == expectedTargetData + where: 'the following data was used' + scenario | sourceDataNode | targetDataNode || expectedSourceData | expectedTargetData + 'source and target data nodes have leaves' | sourceDataNodeWithLeafData | targetDataNodeWithLeafData || ['parent-leaf': 'parent-payload-in-source'] | ['parent-leaf': 'parent-payload-in-target'] + 'only source data node has leaves' | sourceDataNodeWithLeafData | targetDataNodeWithoutLeafData || ['parent-leaf': 'parent-payload-in-source'] | null + 'only target data node has leaves' | sourceDataNodeWithoutLeafData | targetDataNodeWithLeafData || null | ['parent-leaf': 'parent-payload-in-target'] + 'source and target dsta node with multiple leaves' | sourceDataNodeWithMultipleLeaves | targetDataNodeWithMultipleLeaves || ['leaf-1': 'leaf-1-in-source', 'leaf-2': 'leaf-2-in-source'] | ['leaf-1': 'leaf-1-in-target', 'leaf-2': 'leaf-2-in-target'] } def 'Get delta between data nodes for updated data, where source and target data nodes have no leaves '() { + given: 'xpath to get delta between anchors' + def xpath = '/' when: 'attempt to get delta between 2 data nodes' - def result = objectUnderTest.getDeltaReports(sourceDataNodeWithoutLeafData, targetDataNodeWithoutLeafData) + def deltaReport = objectUnderTest.getDeltaByDataspaceAndAnchors(dataspaceName, ANCHOR_NAME_1, ANCHOR_NAME_2, xpath, INCLUDE_ALL_DESCENDANTS) + then: 'cps data service is invoked and returns source data nodes' + mockCpsDataService.getDataNodesForMultipleXpaths(dataspaceName, ANCHOR_NAME_1, [xpath], INCLUDE_ALL_DESCENDANTS) >> sourceDataNodeWithoutLeafData + and: 'cps data service is invoked again to return target data nodes' + mockCpsDataService.getDataNodesForMultipleXpaths(dataspaceName, ANCHOR_NAME_2, [xpath], INCLUDE_ALL_DESCENDANTS) >> targetDataNodeWithoutLeafData then: 'the delta report is empty' - assert result.isEmpty() + assert deltaReport.isEmpty() + } + + def 'Get delta between anchor and payload with user provided schema #scenario'() { + given: 'user provided schema set ' + def yangResourceContentPerName = 
TestUtils.getYangResourcesAsMap('bookstore.yang') + setupSchemaSetMocksForDelta(yangResourceContentPerName) + when: 'attempt to get delta between an anchor and a JSON payload' + def deltaReport = objectUnderTest.getDeltaByDataspaceAnchorAndPayload(dataspaceName, ANCHOR_NAME_1, xpath, yangResourceContentPerName, jsonData, INCLUDE_ALL_DESCENDANTS) + then: 'cps data service is invoked and returns source data nodes' + mockCpsDataService.getDataNodesForMultipleXpaths(dataspaceName, ANCHOR_NAME_1, [xpath], INCLUDE_ALL_DESCENDANTS) >> sourceDataNodes + and: 'source data nodes are rebuilt (to match the data type with target data nodes)' + dataNodeFactory.createDataNodesWithAnchorXpathAndNodeData(anchor1, xpath, jsonObjectMapper.asJsonString(sourceDataNodesAsMap), ContentType.JSON) + and: 'data node factory method is invoked to build target data nodes using user provided schema' + dataNodeFactory.createDataNodesWithYangResourceXpathAndNodeData(yangResourceContentPerName, xpath, jsonData, ContentType.JSON) + and: 'delta report contains expected xpath, action, source and target data' + deltaReport[0].getXpath() == expectedNodeXpath + deltaReport[0].getAction().equals('replace') + deltaReport[0].getSourceData().equals(expectedSourceData) + deltaReport[0].getTargetData().equals(expectedTargetData) + where: 'following data was used' + scenario | xpath | sourceDataNodes | sourceDataNodesAsMap | jsonData || expectedNodeXpath | expectedSourceData | expectedTargetData + 'root node xpath' | '/' | bookstoreDataNodeWithParentXpath | bookstoreDataAsMapForParentNode | bookstoreJsonForParentNode || '/bookstore' | ['bookstore-name':'Easons'] | ['bookstore-name':'My Store'] + 'parent xpath' | '/bookstore' | bookstoreDataNodeWithParentXpath | bookstoreDataAsMapForParentNode | bookstoreJsonForParentNode || '/bookstore' | ['bookstore-name':'Easons'] | ['bookstore-name':'My Store'] + 'non-root xpath' | '/bookstore/categories[@code=\'02\']' | bookstoreDataNodeWithChildXpath | bookstoreDataAsMapForChildNode | bookstoreJsonForChildNode || '/bookstore/categories[@code=\'02\']'| ['name':'Kids'] | ['name':'Child'] + } + + def 'Get delta between anchor and payload by using schema from anchor #scenario'() { + given: 'schema set for a given dataspace and anchor' + setupSchemaSetMocks('bookstore.yang') + when: 'attempt to get delta between an anchor and a JSON payload' + def deltaReport = objectUnderTest.getDeltaByDataspaceAnchorAndPayload(dataspaceName, ANCHOR_NAME_1, xpath, [:], jsonData, INCLUDE_ALL_DESCENDANTS) + then: 'cps data service is invoked and returns source data nodes' + mockCpsDataService.getDataNodesForMultipleXpaths(dataspaceName, ANCHOR_NAME_1, [xpath], INCLUDE_ALL_DESCENDANTS) >> sourceDataNodes + and: 'source data nodes are rebuilt (to match the data type with target data nodes)' + dataNodeFactory.createDataNodesWithAnchorXpathAndNodeData(anchor1, xpath, jsonObjectMapper.asJsonString(sourceDataNodesAsMap), ContentType.JSON) + and: 'data node factory method is invoked to build target data nodes using schema details fetched from anchor name' + dataNodeFactory.createDataNodesWithAnchorXpathAndNodeData(anchor1, xpath, jsonData, ContentType.JSON) + and: 'delta report contains expected xpath, action, source and target data' + deltaReport[0].getXpath() == expectedNodeXpath + deltaReport[0].getAction().equals('replace') + deltaReport[0].getSourceData().equals(expectedSourceData) + deltaReport[0].getTargetData().equals(expectedTargetData) + where: 'following data was used' + scenario | xpath | sourceDataNodes | 
sourceDataNodesAsMap | jsonData || expectedNodeXpath | expectedSourceData | expectedTargetData + 'root node xpath' | '/' | bookstoreDataNodeWithParentXpath | bookstoreDataAsMapForParentNode | bookstoreJsonForParentNode || '/bookstore' | ['bookstore-name':'Easons'] | ['bookstore-name':'My Store'] + 'parent xpath' | '/bookstore' | bookstoreDataNodeWithParentXpath | bookstoreDataAsMapForParentNode | bookstoreJsonForParentNode || '/bookstore' | ['bookstore-name':'Easons'] | ['bookstore-name':'My Store'] + 'non-root xpath' | '/bookstore/categories[@code=\'02\']' | bookstoreDataNodeWithChildXpath | bookstoreDataAsMapForChildNode | bookstoreJsonForChildNode || '/bookstore/categories[@code=\'02\']' | ['name':'Kids'] | ['name':'Child'] + } + + def 'Delta between anchor and payload error scenario #scenario'() { + given: 'schema set for given anchor and dataspace references bookstore model' + def yangResourceContentPerName = TestUtils.getYangResourcesAsMap('bookstore.yang') + setupSchemaSetMocksForDelta(yangResourceContentPerName) + when: 'attempt to get delta between anchor and payload' + objectUnderTest.getDeltaByDataspaceAnchorAndPayload(dataspaceName, ANCHOR_NAME_1, xpath, yangResourceContentPerName, jsonData, FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) + then: 'expected exception is thrown' + thrown(DataValidationException) + where: 'following parameters were used' + scenario | xpath | jsonData + 'invalid json data with root node xpath' | '/' | '{"some-key": "some-value"' + 'empty json data with root node xpath' | '/' | '{}' + 'invalid json data with parent node xpath' | '/bookstore' | '{"some-key": "some-value"' + 'empty json data with parent node xpath' | '/bookstore' | '{}' + 'empty json data with xpath' | '/bookstore/categories[@code=\'02\']' | '{}' + } + + def setupSchemaSetMocks(String... yangResources) { + def mockYangTextSchemaSourceSet = Mock(YangTextSchemaSourceSet) + mockYangTextSchemaSourceSetCache.get(dataspaceName, schemaSetName) >> mockYangTextSchemaSourceSet + def yangResourceNameToContent = TestUtils.getYangResourcesAsMap(yangResources) + def schemaContext = YangTextSchemaSourceSetBuilder.of(yangResourceNameToContent).getSchemaContext() + mockYangTextSchemaSourceSet.getSchemaContext() >> schemaContext + } + + def setupSchemaSetMocksForDelta(Map<String, String> yangResourceContentPerName) { + def mockYangTextSchemaSourceSet = Mock(YangTextSchemaSourceSet) + mockTimedYangTextSchemaSourceSetBuilder.getYangTextSchemaSourceSet(yangResourceContentPerName) >> mockYangTextSchemaSourceSet + mockYangTextSchemaSourceSetCache.get(_, _) >> mockYangTextSchemaSourceSet + def schemaContext = YangTextSchemaSourceSetBuilder.of(yangResourceContentPerName).getSchemaContext() + mockYangTextSchemaSourceSet.getSchemaContext() >> schemaContext } } diff --git a/cps-service/src/test/groovy/org/onap/cps/impl/CpsFacadeImplSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/impl/CpsFacadeImplSpec.groovy new file mode 100644 index 0000000000..36cee6d627 --- /dev/null +++ b/cps-service/src/test/groovy/org/onap/cps/impl/CpsFacadeImplSpec.groovy @@ -0,0 +1,125 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.impl + +import static org.onap.cps.api.parameters.FetchDescendantsOption.OMIT_DESCENDANTS +import static org.onap.cps.api.parameters.PaginationOption.NO_PAGINATION + +import org.onap.cps.api.CpsAnchorService +import org.onap.cps.api.CpsDataService +import org.onap.cps.api.CpsQueryService +import org.onap.cps.api.model.DataNode +import org.onap.cps.api.parameters.PaginationOption +import org.onap.cps.utils.DataMapper +import org.onap.cps.utils.PrefixResolver +import spock.lang.Specification + +class CpsFacadeImplSpec extends Specification { + + def mockCpsDataService = Mock(CpsDataService) + def mockCpsQueryService = Mock(CpsQueryService) + def mockCpsAnchorService = Mock(CpsAnchorService) + def mockPrefixResolver = Mock(PrefixResolver) + def dataMapper = new DataMapper(mockCpsAnchorService, mockPrefixResolver) + + def myFetchDescendantsOption = OMIT_DESCENDANTS + def myPaginationOption = NO_PAGINATION + + def objectUnderTest = new CpsFacadeImpl(mockCpsDataService, mockCpsQueryService , dataMapper) + + def dataNode1 = new DataNode(xpath:'/path1', anchorName: 'my anchor') + def dataNode2 = new DataNode(xpath:'/path2', anchorName: 'my anchor') + def dataNode3 = new DataNode(xpath:'/path3', anchorName: 'other anchor') + + def setup() { + mockCpsDataService.getDataNodes('my dataspace', 'my anchor', 'my path', myFetchDescendantsOption) >> [ dataNode1, dataNode2] + mockPrefixResolver.getPrefix(_, '/path1') >> 'prefix1' + mockPrefixResolver.getPrefix(_, '/path2') >> 'prefix2' + mockPrefixResolver.getPrefix(_, '/path3') >> 'prefix3' + } + + def 'Get one data node.'() { + when: 'get data node by dataspace and anchor' + def result = objectUnderTest.getFirstDataNodeByAnchor('my dataspace', 'my anchor', 'my path', myFetchDescendantsOption) + then: 'only the first node (from the data service result) is returned' + assert result.size() == 1 + assert result.keySet()[0] == 'prefix1:path1' + } + + def 'Get multiple data nodes.'() { + when: 'get data node by dataspace and anchor' + def result = objectUnderTest.getDataNodesByAnchor('my dataspace', 'my anchor', 'my path', myFetchDescendantsOption) + then: 'all nodes (from the data service result) are returned' + assert result.size() == 2 + assert result[0].keySet()[0] == 'prefix1:path1' + assert result[1].keySet()[0] == 'prefix2:path2' + } + + def 'Execute anchor query.'() { + given: 'the cps query service returns two data nodes' + mockCpsQueryService.queryDataNodes('my dataspace', 'my anchor', '/my/path', myFetchDescendantsOption) >> [ dataNode1, dataNode2] + when: 'get data node by dataspace and anchor' + def result = objectUnderTest.executeAnchorQuery('my dataspace', 'my anchor', '/my/path', myFetchDescendantsOption) + then: 'all nodes (from the query service result) are returned' + assert result.size() == 2 + assert result[0].keySet()[0] == 'prefix1:path1' + assert result[1].keySet()[0] == 'prefix2:path2' + } + + def 'Execute anchor query with attribute-axis.'() { + given: 'the cps 
query service returns two attribute values' + mockCpsQueryService.queryDataLeaf('my dataspace', 'my anchor', '/my/path/@myAttribute', Object) >> ['value1', 'value2'] + when: 'get data using attribute axis' + def result = objectUnderTest.executeAnchorQuery('my dataspace', 'my anchor', '/my/path/@myAttribute', myFetchDescendantsOption) + then: 'attribute values (from the query service result) are returned' + assert result.size() == 2 + assert result[0] == ['myAttribute': 'value1'] + assert result[1] == ['myAttribute': 'value2'] + } + + def 'Execute dataspace query.'() { + given: 'the cps query service returns three data nodes (on two different anchors)' + mockCpsQueryService.queryDataNodesAcrossAnchors('my dataspace', 'my cps path', myFetchDescendantsOption, myPaginationOption) >> [ dataNode1, dataNode2, dataNode3 ] + when: 'get data node by dataspace and anchor' + def result = objectUnderTest.executeDataspaceQuery('my dataspace', 'my cps path', myFetchDescendantsOption, myPaginationOption) + then: 'all nodes (from the query service result) are returned, grouped by anchor' + assert result.size() == 2 + assert result[0].toString() == '{anchorName=my anchor, dataNodes=[{prefix1:path1={}}, {prefix1:path2={}}]}' + assert result[1].toString() == '{anchorName=other anchor, dataNodes=[{prefix3:path3={}}]}' + } + + def 'How many pages (anchors) could be in the output with #scenario.'() { + given: 'the query service says there are 10 anchors for the given query' + mockCpsQueryService.countAnchorsForDataspaceAndCpsPath('my dataspace', 'my cps path') >> 10 + expect: 'the correct number of pages is returned' + assert objectUnderTest.countAnchorsInDataspaceQuery('my dataspace', 'my cps path', paginationOption) == expectedNumberOfPages + where: 'the following pagination options are used' + scenario | paginationOption || expectedNumberOfPages + 'no pagination' | NO_PAGINATION || 1 + '1 anchor per page' | new PaginationOption(1,1) || 10 + '1 anchor per page, start at 2' | new PaginationOption(2,1) || 10 + '2 anchors per page' | new PaginationOption(1,2) || 5 + '3 anchors per page' | new PaginationOption(1,3) || 4 + '10 anchors per page' | new PaginationOption(1,10) || 1 + '100 anchors per page' | new PaginationOption(1,100) || 1 + } + +} diff --git a/cps-service/src/test/groovy/org/onap/cps/impl/CpsModuleServiceImplSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/impl/CpsModuleServiceImplSpec.groovy index ce871621e5..48db53c882 100644 --- a/cps-service/src/test/groovy/org/onap/cps/impl/CpsModuleServiceImplSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/impl/CpsModuleServiceImplSpec.groovy @@ -25,7 +25,7 @@ package org.onap.cps.impl import org.onap.cps.TestUtils import org.onap.cps.api.CpsAnchorService -import org.onap.cps.impl.utils.CpsValidator +import org.onap.cps.utils.CpsValidator import org.onap.cps.spi.CpsModulePersistenceService import org.onap.cps.api.exceptions.DuplicatedYangResourceException import org.onap.cps.api.exceptions.ModelValidationException @@ -56,7 +56,7 @@ class CpsModuleServiceImplSpec extends Specification { when: 'Create schema set method is invoked' objectUnderTest.createSchemaSet('someDataspace', 'schemaSetName@with Special!Characters', [:]) then: 'Parameters are validated and processing is delegated to persistence service' - 1 * mockCpsModulePersistenceService.storeSchemaSet('someDataspace', 'schemaSetName@with Special!Characters', [:]) + 1 * mockCpsModulePersistenceService.createSchemaSet('someDataspace', 'schemaSetName@with Special!Characters', [:]) and: 'the
CpsValidator is called on the dataspaceName' 1 * mockCpsValidator.validateNameCharacters('someDataspace') } @@ -68,16 +68,16 @@ class CpsModuleServiceImplSpec extends Specification { when: 'create schema set from modules method is invoked' objectUnderTest.createSchemaSetFromModules('someDataspaceName', 'someSchemaSetName', [newModule: 'newContent'], listOfExistingModulesModuleReference) then: 'processing is delegated to persistence service' - 1 * mockCpsModulePersistenceService.storeSchemaSetFromModules('someDataspaceName', 'someSchemaSetName', [newModule: 'newContent'], listOfExistingModulesModuleReference) + 1 * mockCpsModulePersistenceService.createSchemaSetFromNewAndExistingModules('someDataspaceName', 'someSchemaSetName', [newModule: 'newContent'], listOfExistingModulesModuleReference) and: 'the CpsValidator is called on the dataspaceName' 1 * mockCpsValidator.validateNameCharacters('someDataspaceName') } def 'Create schema set from invalid resources'() { given: 'Invalid yang resource as name-to-content map' - def yangResourcesNameToContentMap = TestUtils.getYangResourcesAsMap('invalid.yang') + def yangResourceContentPerName = TestUtils.getYangResourcesAsMap('invalid.yang') when: 'Create schema set method is invoked' - objectUnderTest.createSchemaSet('someDataspace', 'someSchemaSet', yangResourcesNameToContentMap) + objectUnderTest.createSchemaSet('someDataspace', 'someSchemaSet', yangResourceContentPerName) then: 'Model validation exception is thrown' thrown(ModelValidationException) } @@ -85,7 +85,7 @@ class CpsModuleServiceImplSpec extends Specification { def 'Create schema set with duplicate yang resource exception in persistence layer.'() { given: 'the persistence layer throws an duplicated yang resource exception' def originalException = new DuplicatedYangResourceException('name', '123', null) - mockCpsModulePersistenceService.storeSchemaSet(*_) >> { throw originalException } + mockCpsModulePersistenceService.createSchemaSet(*_) >> { throw originalException } when: 'attempt to create schema set' objectUnderTest.createSchemaSet('someDataspace', 'someSchemaSet', [:]) then: 'the same duplicated yang resource exception is thrown (up)' @@ -98,9 +98,9 @@ class CpsModuleServiceImplSpec extends Specification { def 'Get schema set by name and dataspace.'() { given: 'an already present schema set' - def yangResourcesNameToContentMap = TestUtils.getYangResourcesAsMap('bookstore.yang') + def yangResourceContentPerName = TestUtils.getYangResourcesAsMap('bookstore.yang') and: 'yang resource cache returns the expected schema set' - mockYangTextSchemaSourceSetCache.get('someDataspace', 'schemaSetName@with Special!Characters') >> YangTextSchemaSourceSetBuilder.of(yangResourcesNameToContentMap) + mockYangTextSchemaSourceSetCache.get('someDataspace', 'schemaSetName@with Special!Characters') >> YangTextSchemaSourceSetBuilder.of(yangResourceContentPerName) when: 'get schema set method is invoked' def result = objectUnderTest.getSchemaSet('someDataspace', 'schemaSetName@with Special!Characters') then: 'the correct schema set is returned' @@ -252,11 +252,11 @@ class CpsModuleServiceImplSpec extends Specification { 1 * mockCpsValidator.validateNameCharacters('some-dataspace-name', 'some-anchor-name') } - def 'Delete all unused yang module data.'() { + def 'Delete unused yang module data for a dataspace.'() { when: 'deleting unused yang module data' - objectUnderTest.deleteAllUnusedYangModuleData() - then: 'it is delegated to the module persistence service' - 1 * 
mockCpsModulePersistenceService.deleteAllUnusedYangModuleData() + objectUnderTest.deleteAllUnusedYangModuleData('some-dataspace-name') + then: 'it is delegated to the module persistence service with the correct parameters' + 1 * mockCpsModulePersistenceService.deleteAllUnusedYangModuleData('some-dataspace-name') } def 'Schema set exists.'() { diff --git a/cps-service/src/test/groovy/org/onap/cps/impl/CpsNotificationServiceImplSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/impl/CpsNotificationServiceImplSpec.groovy new file mode 100644 index 0000000000..ab7853c8e6 --- /dev/null +++ b/cps-service/src/test/groovy/org/onap/cps/impl/CpsNotificationServiceImplSpec.groovy @@ -0,0 +1,191 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 TechMahindra Ltd. + * Modifications Copyright (C) 2025 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.impl + +import com.fasterxml.jackson.databind.ObjectMapper +import org.onap.cps.TestUtils +import org.onap.cps.api.CpsAnchorService +import org.onap.cps.api.exceptions.DataNodeNotFoundException +import org.onap.cps.api.exceptions.DataValidationException +import org.onap.cps.api.model.Anchor +import org.onap.cps.api.parameters.FetchDescendantsOption +import org.onap.cps.spi.CpsDataPersistenceService +import org.onap.cps.utils.DataMapper +import org.onap.cps.utils.JsonObjectMapper +import org.onap.cps.utils.PrefixResolver +import org.onap.cps.utils.YangParser +import org.onap.cps.utils.YangParserHelper +import org.onap.cps.yang.TimedYangTextSchemaSourceSetBuilder +import org.onap.cps.yang.YangTextSchemaSourceSet +import org.onap.cps.yang.YangTextSchemaSourceSetBuilder +import org.springframework.test.context.ContextConfiguration +import spock.lang.Specification + +@ContextConfiguration(classes = [ObjectMapper, JsonObjectMapper]) +class CpsNotificationServiceImplSpec extends Specification { + + def dataspaceName = 'CPS-Admin' + def anchorName = 'cps-notification-subscriptions' + def schemaSetName = 'cps-notification-subscriptions' + def anchor = new Anchor(anchorName, dataspaceName, schemaSetName) + def someDataNode = new DataNodeBuilder().withXpath('/xpath-1').build() + + def mockCpsDataPersistenceService = Mock(CpsDataPersistenceService) + def mockCpsAnchorService = Mock(CpsAnchorService) + def mockYangTextSchemaSourceSetCache = Mock(YangTextSchemaSourceSetCache) + def mockTimedYangTextSchemaSourceSetBuilder = Mock(TimedYangTextSchemaSourceSetBuilder) + def yangParser = new YangParser(new YangParserHelper(), mockYangTextSchemaSourceSetCache, mockTimedYangTextSchemaSourceSetBuilder) + def dataMapper = new DataMapper(mockCpsAnchorService, Mock(PrefixResolver)) + + def objectUnderTest = new 
CpsNotificationServiceImpl(mockCpsAnchorService, mockCpsDataPersistenceService, yangParser, dataMapper) + + def 'add notification subscription for list of dataspaces'() { + given: 'details for notification subscription and subscription root node xpath' + def notificationSubscriptionAsjson = '{"dataspace":[{"name":"ds01"},{"name":"ds02"}]}' + def xpath = '/dataspaces' + and: 'schema set for given anchor and dataspace references notification subscription model' + setupSchemaSetMocks('cps-notification-subscriptions@2024-07-03.yang') + and: 'anchor is provided' + mockCpsAnchorService.getAnchor(dataspaceName, anchorName) >> anchor + when: 'create notification subscription is called' + objectUnderTest.createNotificationSubscription(notificationSubscriptionAsjson, xpath) + then: 'the persistence service is called once with the correct parameters' + 1 * mockCpsDataPersistenceService.addListElements('CPS-Admin', 'cps-notification-subscriptions', xpath, { dataNodeCollection -> + { + assert dataNodeCollection.size() == 2 + assert dataNodeCollection.collect { it.getXpath() } + .containsAll(['/dataspaces/dataspace[@name=\'ds01\']', '/dataspaces/dataspace[@name=\'ds02\']']) + } + }) + } + + def 'add notification subscription fails with exception'() { + given: 'details for notification subscription' + def jsonData = '{"dataspace":[{"name":"ds01"},{"name":"ds02"}]}' + and: 'schema set for given anchor and dataspace references invalid data model' + setupSchemaSetMocks('test-tree.yang') + and: 'anchor is provided' + mockCpsAnchorService.getAnchor(dataspaceName, anchorName) >> anchor + when: 'create notification subscription is called' + objectUnderTest.createNotificationSubscription(jsonData, '/somepath') + then: 'data validation exception is thrown' + thrown(DataValidationException) + } + + def 'delete notification subscription for given xpath'() { + given: 'details for notification subscription' + def xpath = '/some/path' + when: 'delete notification subscription is called' + objectUnderTest.deleteNotificationSubscription(xpath) + then: 'the persistence service is called once with the correct parameters' + 1 * mockCpsDataPersistenceService.deleteDataNode(dataspaceName, anchorName, xpath) + } + + def 'get notification subscription for given xpath'() { + given: 'details for notification subscription' + def xpath = '/some/path' + and: 'persistence service returns data nodes for subscribed data' + mockCpsDataPersistenceService.getDataNodes(dataspaceName, anchorName, + xpath, FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) >> + [new DataNodeBuilder().withXpath('/some/path').withLeaves([leaf: 'dataspace', leafList: ['ds01', 'ds02']]).build()] + when: 'get notification subscription is called' + def result = objectUnderTest.getNotificationSubscription(xpath) + then: 'the result is a json representation of the data node(s) returned by the data persistence service' + assert result.get(0).toString() == '{path={leaf=dataspace, leafList=[ds01, ds02]}}' + } + + def 'is notification enabled for given anchor'() { + given: 'data nodes available for given anchor' + mockCpsDataPersistenceService.getDataNodes(dataspaceName, anchorName, "/dataspaces", FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) >> [someDataNode] + when: 'is notification enabled is called' + boolean isNotificationEnabled = objectUnderTest.isNotificationEnabled(dataspaceName, anchorName) + then: 'the notification is enabled' + assert isNotificationEnabled + } + + def 'is notification disabled for given anchor'() { + given: 'data nodes not available for
given anchor' + mockCpsDataPersistenceService.getDataNodes(dataspaceName, anchorName, "/dataspaces/dataspace[@name='ds01']/anchors/anchor[@name='anchor-01']", FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) >> + { throw new DataNodeNotFoundException(dataspaceName, anchorName) } + when: 'is notification enabled is called' + boolean isNotificationEnabled = objectUnderTest.isNotificationEnabled('ds01', 'anchor-01') + then: 'the notification is disabled' + assert !isNotificationEnabled + } + + def 'is notification enabled for given anchor because all anchors are enabled'() { + given: 'data nodes not available for given anchor' + mockCpsDataPersistenceService.getDataNodes(dataspaceName, anchorName, "/dataspaces/dataspace[@name='ds01']/anchors/anchor[@name='anchor-01']", FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) >> + { throw new DataNodeNotFoundException(dataspaceName, anchorName) } + and: 'data nodes not available for any specific anchor' + mockCpsDataPersistenceService.getDataNodes(dataspaceName, anchorName, "/dataspaces/dataspace[@name='ds01']/anchors", FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) >> + { throw new DataNodeNotFoundException(dataspaceName, anchorName) } + when: 'is notification enabled is called' + boolean isNotificationEnabled = objectUnderTest.isNotificationEnabled('ds01', 'anchor-01') + then: 'the notification is enabled' + assert isNotificationEnabled + } + + def 'is notification enabled for all anchors in a dataspace'() { + given: 'data nodes available for given dataspace' + mockCpsDataPersistenceService.getDataNodes(dataspaceName, anchorName, "/dataspaces/dataspace[@name='ds01']", FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) >> + [someDataNode] + and: 'data nodes not available for any specific anchor' + mockCpsDataPersistenceService.getDataNodes(dataspaceName, anchorName, "/dataspaces/dataspace[@name='ds01']/anchors", FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) >> + { throw new DataNodeNotFoundException(dataspaceName, anchorName) } + when: 'is notification enabled is called' + boolean isNotificationEnabled = objectUnderTest.notificationEnabledForAllAnchors('ds01') + then: 'the notification is enabled' + assert isNotificationEnabled + } + + def 'is notification disabled for a dataspace'() { + given: 'No data nodes available for given dataspace' + mockCpsDataPersistenceService.getDataNodes(dataspaceName, anchorName, "/dataspaces/dataspace[@name='ds01']", FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) >> + { throw new DataNodeNotFoundException(dataspaceName, anchorName) } + when: 'is notification enabled is called' + boolean isNotificationEnabled = objectUnderTest.notificationEnabledForAllAnchors('ds01') + then: 'the notification is disabled' + assert !isNotificationEnabled + } + + def 'is notification disabled for some anchors in a dataspace'() { + given: 'data nodes available for given dataspace' + mockCpsDataPersistenceService.getDataNodes(dataspaceName, anchorName, "/dataspaces/dataspace[@name='ds01']", FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) >> + [someDataNode] + and: 'data nodes also available for any specific anchor' + mockCpsDataPersistenceService.getDataNodes(dataspaceName, anchorName, "/dataspaces/dataspace[@name='ds01']/anchors", FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) >> + [someDataNode] + when: 'is notification enabled is called' + boolean isNotificationEnabled = objectUnderTest.notificationEnabledForAllAnchors('ds01') + then: 'the notification is disabled' + assert !isNotificationEnabled + } + + def setupSchemaSetMocks(String... 
yangResources) { + def mockYangTextSchemaSourceSet = Mock(YangTextSchemaSourceSet) + mockYangTextSchemaSourceSetCache.get(dataspaceName, schemaSetName) >> mockYangTextSchemaSourceSet + def yangResourceNameToContent = TestUtils.getYangResourcesAsMap(yangResources) + def schemaContext = YangTextSchemaSourceSetBuilder.of(yangResourceNameToContent).getSchemaContext() + mockYangTextSchemaSourceSet.getSchemaContext() >> schemaContext + } +} diff --git a/cps-service/src/test/groovy/org/onap/cps/impl/CpsQueryServiceImplSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/impl/CpsQueryServiceImplSpec.groovy index 80db83b27a..d581727e40 100644 --- a/cps-service/src/test/groovy/org/onap/cps/impl/CpsQueryServiceImplSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/impl/CpsQueryServiceImplSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2023 Nordix Foundation + * Copyright (C) 2021-2025 Nordix Foundation * Modifications Copyright (C) 2023 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -21,8 +21,8 @@ package org.onap.cps.impl - -import org.onap.cps.impl.utils.CpsValidator +import org.onap.cps.api.CpsQueryService +import org.onap.cps.utils.CpsValidator import org.onap.cps.spi.CpsDataPersistenceService import org.onap.cps.api.parameters.FetchDescendantsOption import org.onap.cps.api.parameters.PaginationOption @@ -42,7 +42,7 @@ class CpsQueryServiceImplSpec extends Specification { when: 'queryDataNodes is invoked' objectUnderTest.queryDataNodes(dataspaceName, anchorName, cpsPath, fetchDescendantsOption) then: 'the persistence service is called once with the correct parameters' - 1 * mockCpsDataPersistenceService.queryDataNodes(dataspaceName, anchorName, cpsPath, fetchDescendantsOption) + 1 * mockCpsDataPersistenceService.queryDataNodes(dataspaceName, anchorName, cpsPath, fetchDescendantsOption, CpsQueryService.NO_LIMIT) and: 'the CpsValidator is called on the dataspaceName, schemaSetName and anchorName' 1 * mockCpsValidator.validateNameCharacters(dataspaceName, anchorName) where: 'all fetch descendants options are supported' @@ -50,6 +50,21 @@ class CpsQueryServiceImplSpec extends Specification { FetchDescendantsOption.DIRECT_CHILDREN_ONLY, new FetchDescendantsOption(10)] } + def 'Query data nodes by cps path with limit.'() { + given: 'a dataspace name, an anchor name and a cps path' + def dataspaceName = 'some-dataspace' + def anchorName = 'some-anchor' + def cpsPath = '/cps-path' + def fetchDescendantsOption = FetchDescendantsOption.OMIT_DESCENDANTS + def myLimit = 123 + when: 'queryDataNodes (with limit) is invoked' + objectUnderTest.queryDataNodes(dataspaceName, anchorName, cpsPath, fetchDescendantsOption, myLimit) + then: 'the persistence service is called once with the correct parameters' + 1 * mockCpsDataPersistenceService.queryDataNodes(dataspaceName, anchorName, cpsPath, fetchDescendantsOption, myLimit) + and: 'the CpsValidator is called on the dataspaceName, schemaSetName and anchorName' + 1 * mockCpsValidator.validateNameCharacters(dataspaceName, anchorName) + } + def 'Query data nodes across all anchors by cps path with #fetchDescendantsOption.'() { given: 'a dataspace name, an anchor name and a cps path' def dataspaceName = 'some-dataspace' @@ -62,8 +77,8 @@ class CpsQueryServiceImplSpec extends Specification { and: 'the CpsValidator is called on the dataspaceName, 
schemaSetName and anchorName' 1 * mockCpsValidator.validateNameCharacters(dataspaceName) where: 'all fetch descendants options are supported' - fetchDescendantsOption << [FetchDescendantsOption.OMIT_DESCENDANTS, FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS, - FetchDescendantsOption.DIRECT_CHILDREN_ONLY, new FetchDescendantsOption(10)] + fetchDescendantsOption << [FetchDescendantsOption.OMIT_DESCENDANTS, FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS, + FetchDescendantsOption.DIRECT_CHILDREN_ONLY, new FetchDescendantsOption(10)] } def 'Query total anchors for dataspace and cps path.'() { @@ -73,11 +88,10 @@ class CpsQueryServiceImplSpec extends Specification { 1 * mockCpsDataPersistenceService.countAnchorsForDataspaceAndCpsPath("some-dataspace", "/cps-path") } - // TODO will be implemented in CPS-2416 def 'Query data leaf.'() { when: 'a query for a specific leaf is executed' objectUnderTest.queryDataLeaf('some-dataspace', 'some-anchor', '/cps-path/@id', Object.class) - then: 'solution is not implemented yet' - thrown(UnsupportedOperationException) + then: 'the persistence service is called once with the correct parameters' + 1 * mockCpsDataPersistenceService.queryDataLeaf('some-dataspace', 'some-anchor', '/cps-path/@id', 0, Object.class) } } diff --git a/cps-service/src/test/groovy/org/onap/cps/api/model/DataNodeBuilderSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/impl/DataNodeBuilderSpec.groovy index 24c78864a5..1597d45761 100644 --- a/cps-service/src/test/groovy/org/onap/cps/api/model/DataNodeBuilderSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/impl/DataNodeBuilderSpec.groovy @@ -19,10 +19,11 @@ * ============LICENSE_END========================================================= */ -package org.onap.cps.api.model +package org.onap.cps.impl import org.onap.cps.TestUtils import org.onap.cps.api.exceptions.DataValidationException +import org.onap.cps.api.model.DataNode import org.onap.cps.utils.ContentType import org.onap.cps.utils.DataMapUtils import org.onap.cps.utils.YangParserHelper diff --git a/cps-service/src/test/groovy/org/onap/cps/impl/DataNodeFactorySpec.groovy b/cps-service/src/test/groovy/org/onap/cps/impl/DataNodeFactorySpec.groovy new file mode 100644 index 0000000000..082fb33a61 --- /dev/null +++ b/cps-service/src/test/groovy/org/onap/cps/impl/DataNodeFactorySpec.groovy @@ -0,0 +1,196 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 TechMahindra Ltd. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.impl + +import ch.qos.logback.classic.Level +import ch.qos.logback.classic.Logger +import ch.qos.logback.core.read.ListAppender +import org.onap.cps.TestUtils +import org.onap.cps.api.CpsAnchorService +import org.onap.cps.api.exceptions.DataValidationException +import org.onap.cps.api.model.Anchor +import org.onap.cps.utils.ContentType +import org.onap.cps.utils.YangParser +import org.onap.cps.utils.YangParserHelper +import org.onap.cps.yang.TimedYangTextSchemaSourceSetBuilder +import org.onap.cps.yang.YangTextSchemaSourceSet +import org.onap.cps.yang.YangTextSchemaSourceSetBuilder +import org.slf4j.LoggerFactory +import org.springframework.context.annotation.AnnotationConfigApplicationContext +import spock.lang.Specification + +class DataNodeFactorySpec extends Specification { + + def mockCpsAnchorService = Mock(CpsAnchorService) + def mockYangTextSchemaSourceSetCache = Mock(YangTextSchemaSourceSetCache) + def mockTimedYangTextSchemaSourceSetBuilder = Mock(TimedYangTextSchemaSourceSetBuilder) + def yangParser = new YangParser(new YangParserHelper(), mockYangTextSchemaSourceSetCache, mockTimedYangTextSchemaSourceSetBuilder) + def objectUnderTest = new DataNodeFactoryImpl(yangParser) + + def logger = (Logger) LoggerFactory.getLogger(objectUnderTest.class) + def loggingListAppender + def applicationContext = new AnnotationConfigApplicationContext() + + def dataspaceName = 'some-dataspace' + def anchorName = 'some-anchor' + def schemaSetName = 'some-schema-set' + def anchor = Anchor.builder().name(anchorName).dataspaceName(dataspaceName).schemaSetName(schemaSetName).build() + + def setup() { + mockCpsAnchorService.getAnchor(dataspaceName, anchorName) >> anchor + logger.setLevel(Level.DEBUG) + loggingListAppender = new ListAppender() + logger.addAppender(loggingListAppender) + loggingListAppender.start() + applicationContext.refresh() + } + + void cleanup() { + ((Logger) LoggerFactory.getLogger(DataNodeFactoryImpl.class)).detachAndStopAllAppenders() + applicationContext.close() + } + + def 'Create data nodes using anchor and map of xpath to #scenario'() { + given:'schema set for given anchor and dataspace references test-tree model' + setupSchemaSetMocks('test-tree.yang') + when: 'attempt to create data nodes' + def dataNodes = objectUnderTest.createDataNodesWithAnchorAndXpathToNodeData(anchor, xpathToNodeData, contentType) + then: 'expected number of data nodes are created' + dataNodes.size() == expectedDataNodes + and: 'data nodes have expected xpaths' + dataNodes.stream().map { it.getXpath() }.toList().containsAll(expectedXpaths) + where: 'the following data was used' + scenario | xpathToNodeData | contentType || expectedDataNodes | expectedXpaths + 'JSON Data' | ['/' : "{'test-tree': {'branch': []}}", '/test-tree' : "{'branch': [{'name':'Name'}]}"] | ContentType.JSON || 2 | ['/test-tree', "/test-tree/branch[@name='Name']"] + 'XML Data' | ['/test-tree' : '<branch><name>Name</name></branch>'] | ContentType.XML || 1 | ["/test-tree/branch[@name='Name']"] + } + + def 'Create data nodes using anchor, xpath and #scenario string'() { + given:'xpath, json string and schema set for given anchor and dataspace references test-tree model' + def xpath = '/' + def nodeData = TestUtils.getResourceFileContent(data) + setupSchemaSetMocks('test-tree.yang') + when: 'attempt to create data nodes' + def dataNodes = 
objectUnderTest.createDataNodesWithAnchorXpathAndNodeData(anchor, xpath, nodeData, contentType) + then: 'expected number of data nodes are created' + dataNodes.size() == 1 + and: 'data nodes have expected xpaths' + dataNodes[0].getXpath() == '/test-tree' + where: 'the following data was used' + scenario | data | contentType + 'JSON' | 'test-tree.json' | ContentType.JSON + 'XML' | 'test-tree.xml' | ContentType.XML + } + + def 'Building data nodes using anchor, xpath and #scenario'() { + given:'xpath, invalid json string and schema set for given anchor and dataspace references test-tree model' + setupSchemaSetMocks('test-tree.yang') + when: 'attempt to create data nodes' + objectUnderTest.createDataNodesWithAnchorXpathAndNodeData(anchor, '/test-tree', invalidData, contentType) + then: 'a data validation exception is thrown' + def exceptionThrown = thrown(DataValidationException) + assert exceptionThrown.message.startsWith(expectedMessage) + where: + scenario | invalidData | contentType || expectedMessage + 'no data nodes' | '{}' | ContentType.JSON || 'No Data Nodes' + 'invalid json' | '{invalid json' | ContentType.JSON || 'Data Validation Failed' + 'invalid xml' | '<invalid xml' | ContentType.XML || 'Data Validation Failed' + } + + def 'Create data nodes using anchor, parent node xpath and #scenario string'() { + given:'parent node xpath, json string and schema set for given anchor and dataspace references test-tree model' + def parentXpath = '/test-tree' + setupSchemaSetMocks('test-tree.yang') + when: 'attempt to create data nodes' + def dataNodes = objectUnderTest.createDataNodesWithAnchorParentXpathAndNodeData(anchor, parentXpath, nodeData, contentType) + then: 'expected number of data nodes are created' + dataNodes.size() == 1 + and: 'data nodes have expected xpaths' + dataNodes[0].getXpath() == "/test-tree/branch[@name='A']" + where: 'the following data was used' + scenario | nodeData | contentType + 'JSON' | '{"branch": [{"name": "A"}]}' | ContentType.JSON + 'XML' | '<test-tree xmlns="org:onap:cps:test:test-tree"><branch><name>A</name></branch></test-tree>' | ContentType.XML + } + + def 'Create data nodes using anchor, parent node xpath and invalid #scenario string'() { + given:'parent node xpath, invalid json string and schema set for given anchor and dataspace references test-tree model' + def parentXpath = '/test-tree' + setupSchemaSetMocks('test-tree.yang') + when: 'attempt to create data nodes' + objectUnderTest.createDataNodesWithAnchorParentXpathAndNodeData(anchor, parentXpath, invalidData, contentType) + then: 'a data validation exception is thrown' + def exceptionThrown = thrown(DataValidationException) + assert exceptionThrown.message.startsWith(expectedMessage) + where: + scenario | invalidData | contentType || expectedMessage + 'no data nodes' | '{"branch": []}' | ContentType.JSON || 'No Data Nodes' + 'invalid json' | '<test-tree><branch></branch></test-tree>' | ContentType.JSON || 'Data Validation Failed' + } + + def 'Create data nodes using schema, xpath and #scenario string'() { + given:'xpath, json string and schema set for given anchor and dataspace references bookstore model' + def yangResourcesNameToContentMap = TestUtils.getYangResourcesAsMap('bookstore.yang') + setupSchemaSetMocksForDelta(yangResourcesNameToContentMap) + when: 'attempt to create data nodes' + def dataNodes = objectUnderTest.createDataNodesWithYangResourceXpathAndNodeData(yangResourcesNameToContentMap, '/', nodeData, contentType) + then: 'expected number of data nodes are created' + 
dataNodes.size() == 1 + and: 'data nodes have expected xpath' + dataNodes[0].getXpath() == '/bookstore' + where: 'the following data was used' + scenario | nodeData | contentType + 'JSON' | '{"bookstore":{"bookstore-name":"Easons"}}' | ContentType.JSON + 'XML' | "<bookstore xmlns=\"org:onap:ccsdk:sample\"><bookstore-name>Easons</bookstore-name></bookstore>" | ContentType.XML + } + + def 'Create data nodes using schema, xpath and invalid #scenario string'() { + given:'xpath, invalid json string and schema set for given anchor and dataspace references bookstore model' + def yangResourcesNameToContentMap = TestUtils.getYangResourcesAsMap('bookstore.yang') + setupSchemaSetMocksForDelta(yangResourcesNameToContentMap) + when: 'attempt to create data nodes' + objectUnderTest.createDataNodesWithYangResourceXpathAndNodeData(yangResourcesNameToContentMap, '/', invalidData, contentType) + then: 'a data validation exception is thrown' + def exceptionThrown = thrown(DataValidationException) + assert exceptionThrown.message.startsWith(expectedMessage) + where: + scenario | invalidData | contentType || expectedMessage + 'no json nodes' | '{}' | ContentType.JSON || 'No Data Nodes' + 'no xml nodes' | '"<bookstore xmlns=\"org:onap:ccsdk:sample\"/>' | ContentType.XML || 'Data Validation Failed' + 'invalid json' | '{invalid' | ContentType.JSON || 'Data Validation Failed' + 'invalid xml' | '<invalid' | ContentType.XML || 'Data Validation Failed' + } + + def setupSchemaSetMocks(String... yangResources) { + def mockYangTextSchemaSourceSet = Mock(YangTextSchemaSourceSet) + mockYangTextSchemaSourceSetCache.get(dataspaceName, schemaSetName) >> mockYangTextSchemaSourceSet + def yangResourceNameToContent = TestUtils.getYangResourcesAsMap(yangResources) + def schemaContext = YangTextSchemaSourceSetBuilder.of(yangResourceNameToContent).getSchemaContext() + mockYangTextSchemaSourceSet.getSchemaContext() >> schemaContext + } + + def setupSchemaSetMocksForDelta(Map<String, String> yangResourcesNameToContentMap) { + def mockYangTextSchemaSourceSet = Mock(YangTextSchemaSourceSet) + mockTimedYangTextSchemaSourceSetBuilder.getYangTextSchemaSourceSet(yangResourcesNameToContentMap) >> mockYangTextSchemaSourceSet + mockYangTextSchemaSourceSetCache.get(_, _) >> mockYangTextSchemaSourceSet + def schemaContext = YangTextSchemaSourceSetBuilder.of(yangResourcesNameToContentMap).getSchemaContext() + mockYangTextSchemaSourceSet.getSchemaContext() >> schemaContext + } +} diff --git a/cps-service/src/test/groovy/org/onap/cps/api/model/DeltaReportBuilderSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/impl/DeltaReportBuilderSpec.groovy index 94e3ed5c26..2decefff21 100644 --- a/cps-service/src/test/groovy/org/onap/cps/api/model/DeltaReportBuilderSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/impl/DeltaReportBuilderSpec.groovy @@ -18,7 +18,7 @@ * ============LICENSE_END========================================================= */ -package org.onap.cps.api.model +package org.onap.cps.impl import spock.lang.Specification diff --git a/cps-service/src/test/groovy/org/onap/cps/impl/E2ENetworkSliceSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/impl/E2ENetworkSliceSpec.groovy index 4ab71f7228..4713283c9b 100755 --- a/cps-service/src/test/groovy/org/onap/cps/impl/E2ENetworkSliceSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/impl/E2ENetworkSliceSpec.groovy @@ -1,9 +1,9 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation. 
+ * Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2021-2022 Bell Canada. * Modifications Copyright (C) 2021 Pantheon.tech - * Modifications Copyright (C) 2022-2024 TechMahindra Ltd. + * Modifications Copyright (C) 2022-2025 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -23,18 +23,14 @@ package org.onap.cps.impl -import com.fasterxml.jackson.databind.ObjectMapper import org.onap.cps.TestUtils import org.onap.cps.api.CpsAnchorService -import org.onap.cps.api.CpsDeltaService -import org.onap.cps.events.CpsDataUpdateEventsService -import org.onap.cps.impl.utils.CpsValidator +import org.onap.cps.api.model.Anchor +import org.onap.cps.events.CpsDataUpdateEventsProducer import org.onap.cps.spi.CpsDataPersistenceService import org.onap.cps.spi.CpsModulePersistenceService -import org.onap.cps.api.model.Anchor import org.onap.cps.utils.ContentType -import org.onap.cps.utils.JsonObjectMapper -import org.onap.cps.utils.PrefixResolver +import org.onap.cps.utils.CpsValidator import org.onap.cps.utils.YangParser import org.onap.cps.utils.YangParserHelper import org.onap.cps.yang.TimedYangTextSchemaSourceSetBuilder @@ -42,23 +38,17 @@ import org.onap.cps.yang.YangTextSchemaSourceSetBuilder import spock.lang.Specification class E2ENetworkSliceSpec extends Specification { - def mockModuleStoreService = Mock(CpsModulePersistenceService) - def mockDataStoreService = Mock(CpsDataPersistenceService) + def mockCpsModulePersistenceService = Mock(CpsModulePersistenceService) + def mockCpsDataPersistenceService = Mock(CpsDataPersistenceService) def mockCpsAnchorService = Mock(CpsAnchorService) def mockYangTextSchemaSourceSetCache = Mock(YangTextSchemaSourceSetCache) def mockCpsValidator = Mock(CpsValidator) def timedYangTextSchemaSourceSetBuilder = new TimedYangTextSchemaSourceSetBuilder() def yangParser = new YangParser(new YangParserHelper(), mockYangTextSchemaSourceSetCache, timedYangTextSchemaSourceSetBuilder) - def mockCpsDeltaService = Mock(CpsDeltaService) - def jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) - def mockPrefixResolver = Mock(PrefixResolver) - - def cpsModuleServiceImpl = new CpsModuleServiceImpl(mockModuleStoreService, - mockYangTextSchemaSourceSetCache, mockCpsAnchorService, mockCpsValidator,timedYangTextSchemaSourceSetBuilder) - - def mockDataUpdateEventsService = Mock(CpsDataUpdateEventsService) - def cpsDataServiceImpl = new CpsDataServiceImpl(mockDataStoreService, mockDataUpdateEventsService, mockCpsAnchorService, mockCpsValidator, - yangParser, mockCpsDeltaService, jsonObjectMapper, mockPrefixResolver) + def cpsModuleServiceImpl = new CpsModuleServiceImpl(mockCpsModulePersistenceService, mockYangTextSchemaSourceSetCache, mockCpsAnchorService, mockCpsValidator,timedYangTextSchemaSourceSetBuilder) + def mockCpsDataUpdateEventsProducer = Mock(CpsDataUpdateEventsProducer) + def dataNodeFactory = new DataNodeFactoryImpl(yangParser) + def cpsDataServiceImpl = new CpsDataServiceImpl(mockCpsDataPersistenceService, mockCpsDataUpdateEventsProducer, mockCpsAnchorService, dataNodeFactory, mockCpsValidator, yangParser) def dataspaceName = 'someDataspace' def anchorName = 'someAnchor' def schemaSetName = 'someSchemaSet' @@ -66,32 +56,32 @@ class E2ENetworkSliceSpec extends Specification { def 'E2E model can be parsed by CPS.'() { given: 'Valid 
yang resource as name-to-content map' - def yangResourcesNameToContentMap = TestUtils.getYangResourcesAsMap( + def yangResourceContentPerName = TestUtils.getYangResourcesAsMap( 'ietf/ietf-inet-types@2013-07-15.yang', 'ietf/ietf-yang-types@2013-07-15.yang', 'e2e/basic/ran-network2020-08-06.yang' ) when: 'Create schema set method is invoked' - cpsModuleServiceImpl.createSchemaSet(dataspaceName, schemaSetName, yangResourcesNameToContentMap) + cpsModuleServiceImpl.createSchemaSet(dataspaceName, schemaSetName, yangResourceContentPerName) then: 'Parameters are validated and processing is delegated to persistence service' - 1 * mockModuleStoreService.storeSchemaSet(dataspaceName, schemaSetName, yangResourcesNameToContentMap) + 1 * mockCpsModulePersistenceService.createSchemaSet(dataspaceName, schemaSetName, yangResourceContentPerName) } def 'E2E Coverage Area-Tracking Area & TA-Cell mapping model can be parsed by CPS.'() { given: 'Valid yang resource as name-to-content map' - def yangResourcesNameToContentMap = TestUtils.getYangResourcesAsMap( + def yangResourceContentPerName = TestUtils.getYangResourcesAsMap( 'e2e/basic/cps-cavsta-onap-internal2021-01-28.yang') when: 'Create schema set method is invoked' - cpsModuleServiceImpl.createSchemaSet(dataspaceName, schemaSetName, yangResourcesNameToContentMap) + cpsModuleServiceImpl.createSchemaSet(dataspaceName, schemaSetName, yangResourceContentPerName) then: 'Parameters are validated and processing is delegated to persistence service' - 1 * mockModuleStoreService.storeSchemaSet(dataspaceName, schemaSetName, yangResourcesNameToContentMap) + 1 * mockCpsModulePersistenceService.createSchemaSet(dataspaceName, schemaSetName, yangResourceContentPerName) } def 'E2E Coverage Area-Tracking Area & TA-Cell mapping data can be parsed by CPS.'() { given: 'Valid yang resource as name-to-content map' - def yangResourcesNameToContentMap = TestUtils.getYangResourcesAsMap( + def yangResourceContentPerName = TestUtils.getYangResourcesAsMap( 'e2e/basic/cps-cavsta-onap-internal2021-01-28.yang') - def schemaContext = YangTextSchemaSourceSetBuilder.of(yangResourcesNameToContentMap).getSchemaContext() + def schemaContext = YangTextSchemaSourceSetBuilder.of(yangResourceContentPerName).getSchemaContext() def dataNodeStored and : 'a valid json is provided for the model' def jsonData = TestUtils.getResourceFileContent('e2e/basic/cps-Cavsta-Data.txt') @@ -99,45 +89,41 @@ class E2ENetworkSliceSpec extends Specification { mockCpsAnchorService.getAnchor(dataspaceName, anchorName) >> new Anchor().builder().name(anchorName).schemaSetName(schemaSetName).dataspaceName(dataspaceName).build() mockYangTextSchemaSourceSetCache.get(dataspaceName, schemaSetName) >> - YangTextSchemaSourceSetBuilder.of(yangResourcesNameToContentMap) - mockModuleStoreService.getYangSchemaResources(dataspaceName, schemaSetName) >> schemaContext + YangTextSchemaSourceSetBuilder.of(yangResourceContentPerName) + mockCpsModulePersistenceService.getYangSchemaResources(dataspaceName, schemaSetName) >> schemaContext when: 'saveData method is invoked' cpsDataServiceImpl.saveData(dataspaceName, anchorName, jsonData, noTimestamp) then: 'Parameters are validated and processing is delegated to persistence service' - 1 * mockDataStoreService.storeDataNodes('someDataspace', 'someAnchor', _) >> + 1 * mockCpsDataPersistenceService.storeDataNodes('someDataspace', 'someAnchor', _) >> { args -> dataNodeStored = args[2]} def child = dataNodeStored[0].childDataNodes[0] assert child.childDataNodes.size() == 1 and: 'list of Tracking 
Area for a Coverage Area are stored with correct xpath and child nodes ' def listOfTAForCoverageArea = child.childDataNodes[0] - listOfTAForCoverageArea.xpath == '/ran-coverage-area/pLMNIdList[@mcc=\'310\' and @mnc=\'410\']/' + - 'coverage-area[@coverageArea=\'Washington\']' - listOfTAForCoverageArea.childDataNodes[0].leaves.get('nRTAC') == 234 + listOfTAForCoverageArea.xpath == '/ran-coverage-area/pLMNIdList[@mcc=\'310\' and @mnc=\'410\']/coverage-area[@coverageArea=\'Washington\']' + assert listOfTAForCoverageArea.childDataNodes[0].leaves.get('nRTAC') == 234 and: 'list of cells in a tracking area are stored with correct xpath and child nodes ' def listOfCellsInTrackingArea = listOfTAForCoverageArea.childDataNodes[0] - listOfCellsInTrackingArea.xpath == '/ran-coverage-area/pLMNIdList[@mcc=\'310\' and @mnc=\'410\']/' + - 'coverage-area[@coverageArea=\'Washington\']/coverageAreaTAList[@nRTAC=\'234\']' + listOfCellsInTrackingArea.xpath == '/ran-coverage-area/pLMNIdList[@mcc=\'310\' and @mnc=\'410\']/coverage-area[@coverageArea=\'Washington\']/coverageAreaTAList[@nRTAC=\'234\']' listOfCellsInTrackingArea.childDataNodes[0].leaves.get('cellLocalId') == 15709 } def 'E2E Coverage Area-Tracking Area & TA-Cell mapping data can be parsed for RAN inventory.'() { def dataNodeStored given: 'valid yang resource as name-to-content map' - def yangResourcesNameToContentMap = TestUtils.getYangResourcesAsMap( - 'e2e/basic/cps-ran-inventory@2021-01-28.yang') - def schemaContext = YangTextSchemaSourceSetBuilder.of(yangResourcesNameToContentMap).getSchemaContext() + def yangResourceContentPerName = TestUtils.getYangResourcesAsMap('e2e/basic/cps-ran-inventory@2021-01-28.yang') + def schemaContext = YangTextSchemaSourceSetBuilder.of(yangResourceContentPerName).getSchemaContext() and : 'a valid json is provided for the model' def jsonData = TestUtils.getResourceFileContent('e2e/basic/cps-ran-inventory-data.json') and : 'all the further dependencies are mocked ' mockCpsAnchorService.getAnchor('someDataspace', 'someAnchor') >> new Anchor().builder().name('someAnchor').schemaSetName('someSchemaSet').dataspaceName(dataspaceName).build() - mockYangTextSchemaSourceSetCache.get('someDataspace', 'someSchemaSet') >> YangTextSchemaSourceSetBuilder.of(yangResourcesNameToContentMap) - mockModuleStoreService.getYangSchemaResources('someDataspace', 'someSchemaSet') >> schemaContext + mockYangTextSchemaSourceSetCache.get('someDataspace', 'someSchemaSet') >> YangTextSchemaSourceSetBuilder.of(yangResourceContentPerName) + mockCpsModulePersistenceService.getYangSchemaResources('someDataspace', 'someSchemaSet') >> schemaContext when: 'saveData method is invoked' cpsDataServiceImpl.saveData('someDataspace', 'someAnchor', jsonData, noTimestamp) then: 'parameters are validated and processing is delegated to persistence service' - 1 * mockDataStoreService.storeDataNodes('someDataspace', 'someAnchor', _) >> - { args -> dataNodeStored = args[2]} + 1 * mockCpsDataPersistenceService.storeDataNodes('someDataspace', 'someAnchor', _) >> { args -> dataNodeStored = args[2]} and: 'the size of the tree is correct' def cpsRanInventory = TestUtils.getFlattenMapByXpath(dataNodeStored[0]) assert cpsRanInventory.size() == 4 @@ -146,22 +132,21 @@ class E2ENetworkSliceSpec extends Specification { def ranSlices = cpsRanInventory.get('/ran-inventory/ran-slices[@rannfnssiid=\'14559ead-f4fe-4c1c-a94c-8015fad3ea35\']') def sliceProfilesList = 
cpsRanInventory.get('/ran-inventory/ran-slices[@rannfnssiid=\'14559ead-f4fe-4c1c-a94c-8015fad3ea35\']/sliceProfilesList[@sliceProfileId=\'f33a9dd8-ae51-4acf-8073-c9390c25f6f1\']') def pLMNIdList = cpsRanInventory.get('/ran-inventory/ran-slices[@rannfnssiid=\'14559ead-f4fe-4c1c-a94c-8015fad3ea35\']/sliceProfilesList[@sliceProfileId=\'f33a9dd8-ae51-4acf-8073-c9390c25f6f1\']/pLMNIdList[@mcc=\'310\' and @mnc=\'410\']') - ranInventory.getChildDataNodes().size() == 1 - ranInventory.getChildDataNodes().find( {it.xpath == ranSlices.xpath}) + assert ranInventory.getChildDataNodes().size() == 1 + assert ranInventory.getChildDataNodes().find( {it.xpath == ranSlices.xpath}) and: 'ranSlices contains the correct child node' - ranSlices.getChildDataNodes().size() == 1 - ranSlices.getChildDataNodes().find( {it.xpath == sliceProfilesList.xpath}) + assert ranSlices.getChildDataNodes().size() == 1 + assert ranSlices.getChildDataNodes().find( {it.xpath == sliceProfilesList.xpath}) and: 'sliceProfilesList contains the correct child node' - sliceProfilesList.getChildDataNodes().size() == 1 - sliceProfilesList.getChildDataNodes().find( {it.xpath == pLMNIdList.xpath}) + assert sliceProfilesList.getChildDataNodes().size() == 1 + assert sliceProfilesList.getChildDataNodes().find( {it.xpath == pLMNIdList.xpath}) and: 'pLMNIdList contains no children' - pLMNIdList.getChildDataNodes().size() == 0 - + assert pLMNIdList.getChildDataNodes().size() == 0 } def 'E2E RAN Schema Model.'(){ given: 'yang resources' - def yangResourcesNameToContentMap = TestUtils.getYangResourcesAsMap( + def yangResourceContentPerName = TestUtils.getYangResourcesAsMap( 'ietf/ietf-inet-types@2013-07-15.yang', 'ietf/ietf-yang-types@2013-07-15.yang', 'e2e/basic/cps-ran-schema-model@2021-05-19.yang' @@ -169,7 +154,7 @@ class E2ENetworkSliceSpec extends Specification { and : 'json data' def jsonData = TestUtils.getResourceFileContent('e2e/basic/cps-ran-schema-model-data-v4.json') expect: 'schema context is built with no exception indicating the schema set being valid ' - def schemaContext = YangTextSchemaSourceSetBuilder.of(yangResourcesNameToContentMap).getSchemaContext() + def schemaContext = YangTextSchemaSourceSetBuilder.of(yangResourceContentPerName).getSchemaContext() and: 'data is parsed with no exception indicating the model match' new YangParserHelper().parseData(ContentType.JSON, jsonData, schemaContext, '', false) != null } diff --git a/cps-service/src/test/groovy/org/onap/cps/impl/YangTextSchemaSourceSetCacheSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/impl/YangTextSchemaSourceSetCacheSpec.groovy index fe49d04f3c..e71782ce4f 100644 --- a/cps-service/src/test/groovy/org/onap/cps/impl/YangTextSchemaSourceSetCacheSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/impl/YangTextSchemaSourceSetCacheSpec.groovy @@ -22,7 +22,7 @@ package org.onap.cps.impl import org.onap.cps.TestUtils -import org.onap.cps.impl.utils.CpsValidator +import org.onap.cps.utils.CpsValidator import org.onap.cps.spi.CpsModulePersistenceService import org.onap.cps.yang.YangTextSchemaSourceSet import org.onap.cps.yang.YangTextSchemaSourceSetBuilder @@ -65,13 +65,13 @@ class YangTextSchemaSourceSetCacheSpec extends Specification { given: 'cache is empty' yangResourceCacheImpl.clear() and: 'a schema set exists' - def yangResourcesNameToContentMap = TestUtils.getYangResourcesAsMap('bookstore.yang') - def expectedYangTextSchemaSourceSet = YangTextSchemaSourceSetBuilder.of(yangResourcesNameToContentMap) + def yangResourceContentPerName = 
TestUtils.getYangResourcesAsMap('bookstore.yang') + def expectedYangTextSchemaSourceSet = YangTextSchemaSourceSetBuilder.of(yangResourceContentPerName) when: 'schema-set information is asked' def result = objectUnderTest.get('my-dataspace', 'my-schemaset') then: 'information fetched from cps module persistence' 1 * mockModuleStoreService.getYangSchemaResources('my-dataspace', 'my-schemaset') - >> yangResourcesNameToContentMap + >> yangResourceContentPerName and: 'stored in the cache' def cachedValue = getCachedValue('my-dataspace', 'my-schemaset') assert cachedValue.getModuleReferences() == expectedYangTextSchemaSourceSet.getModuleReferences() @@ -83,8 +83,8 @@ class YangTextSchemaSourceSetCacheSpec extends Specification { def 'Cache Hit: Respond from cache'() { given: 'a schema set exists' - def yangResourcesNameToContentMap = TestUtils.getYangResourcesAsMap('bookstore.yang') - def expectedYangTextSchemaSourceSet = YangTextSchemaSourceSetBuilder.of(yangResourcesNameToContentMap) + def yangResourceContentPerName = TestUtils.getYangResourcesAsMap('bookstore.yang') + def expectedYangTextSchemaSourceSet = YangTextSchemaSourceSetBuilder.of(yangResourceContentPerName) and: 'stored in cache' yangResourceCacheImpl.put(getCacheKey('my-dataspace', 'my-schemaset'), expectedYangTextSchemaSourceSet) when: 'schema-set information is asked' @@ -97,8 +97,8 @@ class YangTextSchemaSourceSetCacheSpec extends Specification { def 'Cache Update: when no data exist in the cache'() { given: 'a schema set exists' - def yangResourcesNameToContentMap = TestUtils.getYangResourcesAsMap('bookstore.yang') - def yangTextSchemaSourceSet = YangTextSchemaSourceSetBuilder.of(yangResourcesNameToContentMap) + def yangResourceContentPerName = TestUtils.getYangResourcesAsMap('bookstore.yang') + def yangTextSchemaSourceSet = YangTextSchemaSourceSetBuilder.of(yangResourceContentPerName) when: 'cache is updated' objectUnderTest.updateCache('my-dataspace', 'my-schemaset', yangTextSchemaSourceSet) then: 'cached value is same as expected' @@ -110,8 +110,8 @@ class YangTextSchemaSourceSetCacheSpec extends Specification { def 'Cache Evict:with invalid #scenario'() { given: 'a schema set exists in cache' - def yangResourcesNameToContentMap = TestUtils.getYangResourcesAsMap('bookstore.yang') - def yangTextSchemaSourceSet = YangTextSchemaSourceSetBuilder.of(yangResourcesNameToContentMap) + def yangResourceContentPerName = TestUtils.getYangResourcesAsMap('bookstore.yang') + def yangTextSchemaSourceSet = YangTextSchemaSourceSetBuilder.of(yangResourceContentPerName) yangResourceCacheImpl.put(getCacheKey('my-dataspace', 'my-schemaset'), yangTextSchemaSourceSet) def cachedValue = getCachedValue('my-dataspace', 'my-schemaset') assert cachedValue.getModuleReferences() == yangTextSchemaSourceSet.getModuleReferences() diff --git a/cps-service/src/test/groovy/org/onap/cps/init/AbstractModelLoaderSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/init/AbstractModelLoaderSpec.groovy index 0618cad951..c3cb4f205b 100644 --- a/cps-service/src/test/groovy/org/onap/cps/init/AbstractModelLoaderSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/init/AbstractModelLoaderSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 Nordix Foundation * Modification Copyright (C) 2024 TechMahindra Ltd. 
* ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -28,6 +28,7 @@ import org.onap.cps.api.CpsAnchorService import org.onap.cps.api.CpsDataService import org.onap.cps.api.CpsDataspaceService import org.onap.cps.api.CpsModuleService +import org.onap.cps.api.exceptions.DuplicatedYangResourceException import org.onap.cps.api.exceptions.ModelOnboardingException import org.onap.cps.api.parameters.CascadeDeleteAllowed import org.onap.cps.api.exceptions.AlreadyDefinedException @@ -117,6 +118,16 @@ class AbstractModelLoaderSpec extends Specification { 1 * mockCpsModuleService.createSchemaSet('some dataspace','new name',_) } + def 'Creating a schema set handles duplicated yang resource exception'() { + given: 'module service throws duplicated yang resource exception' + mockCpsModuleService.createSchemaSet(*_) >> { throw new DuplicatedYangResourceException('my-yang-resource', 'my-yang-resource-checksum', null) } + when: 'attempt to create a schema set' + objectUnderTest.createSchemaSet('some dataspace','some schema set','cps-notification-subscriptions@2024-07-03.yang') + then: 'exception is ignored, and correct exception message is logged' + noExceptionThrown() + assertLogContains('Ignoring yang resource duplication exception. Assuming model was created by another instance') + } + def 'Creating a schema set handles already defined exception.'() { given: 'the module service throws an already defined exception' mockCpsModuleService.createSchemaSet(*_) >> { throw AlreadyDefinedException.forSchemaSet('name','context',null) } diff --git a/cps-service/src/test/groovy/org/onap/cps/init/CpsNotificationSubscriptionModelLoaderSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/init/CpsNotificationSubscriptionModelLoaderSpec.groovy index 0d515f90ac..1e2dc54424 100644 --- a/cps-service/src/test/groovy/org/onap/cps/init/CpsNotificationSubscriptionModelLoaderSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/init/CpsNotificationSubscriptionModelLoaderSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 TechMahindra Ltd. + * Copyright (C) 2024-2025 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -78,10 +78,4 @@ class CpsNotificationSubscriptionModelLoaderSpec extends Specification { and: 'the data service to create a top level datanode is called once' 1 * mockCpsDataService.saveData(CPS_DATASPACE_NAME, ANCHOR_NAME, '{"dataspaces":{}}', _) } - - private void assertLogContains(String message) { - def logs = loggingListAppender.list.toString() - assert logs.contains(message) - } - } diff --git a/cps-ri/src/test/groovy/org/onap/cps/ri/utils/CpsValidatorImplSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/utils/CpsValidatorImplSpec.groovy index f7c4798772..df257c9028 100644 --- a/cps-ri/src/test/groovy/org/onap/cps/ri/utils/CpsValidatorImplSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/utils/CpsValidatorImplSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation + * Copyright (C) 2022-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,11 +18,10 @@ * ============LICENSE_END========================================================= */ -package org.onap.cps.ri.utils +package org.onap.cps.utils - -import org.onap.cps.api.parameters.PaginationOption import org.onap.cps.api.exceptions.DataValidationException +import org.onap.cps.api.parameters.PaginationOption import spock.lang.Specification class CpsValidatorImplSpec extends Specification { @@ -60,19 +59,38 @@ class CpsValidatorImplSpec extends Specification { def 'Validating a list of names with invalid names.'() { given: 'a list of names with an invalid name' - def names = ['valid-name', 'name with spaces'] + def names = ['valid-name', 'invalid name with spaces'] when: 'a list of strings is validated' objectUnderTest.validateNameCharacters(names) then: 'a data validation exception is thrown' thrown(DataValidationException) } - def 'Validate Pagination option with invalid page index and size.'() { + def 'Validate valid pagination options'() { + when: 'the pagination option is validated' + objectUnderTest.validatePaginationOption(option) + then: 'no exception occurs' + noExceptionThrown() + where: 'the following pagination options are used' + option << [null, new PaginationOption(1,2)] + } + + def 'Validate invalid pagination.'() { when: 'the pagination option is validated using invalid options' objectUnderTest.validatePaginationOption(new PaginationOption(-5, -2)) then: 'a data validation exception is thrown' def exceptionThrown = thrown(DataValidationException) and: 'the error was encountered at the following index in #scenario' - assert exceptionThrown.getDetails().contains("Invalid page index or size") + assert exceptionThrown.getDetails().contains('Invalid page index or size') + } + + + def 'Validation with boolean result.'() { + expect: 'validation returns expected boolean result' + assert objectUnderTest.isValidName(name) == expectedResult + where: 'following names are used' + name || expectedResult + 'valid-name' || true + 'invalid name' || false } } diff --git a/cps-service/src/test/groovy/org/onap/cps/utils/DataMapUtilsSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/utils/DataMapUtilsSpec.groovy index bb0f5b0911..6ff41c128f 100644 --- a/cps-service/src/test/groovy/org/onap/cps/utils/DataMapUtilsSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/utils/DataMapUtilsSpec.groovy @@ -22,7 +22,7 @@ package 
org.onap.cps.utils -import org.onap.cps.api.model.DataNodeBuilder +import org.onap.cps.impl.DataNodeBuilder import spock.lang.Specification class DataMapUtilsSpec extends Specification { diff --git a/cps-service/src/test/groovy/org/onap/cps/utils/YangParserSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/utils/YangParserSpec.groovy index a2fadb7e9f..cb7a16cb84 100644 --- a/cps-service/src/test/groovy/org/onap/cps/utils/YangParserSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/utils/YangParserSpec.groovy @@ -40,7 +40,7 @@ class YangParserSpec extends Specification { def objectUnderTest = new YangParser(mockYangParserHelper, mockYangTextSchemaSourceSetCache, mockTimedYangTextSchemaSourceSetBuilder) def anchor = new Anchor(dataspaceName: 'my dataspace', schemaSetName: 'my schema') - def yangResourcesNameToContentMap = TestUtils.getYangResourcesAsMap('bookstore.yang') + def yangResourceContentPerName = TestUtils.getYangResourcesAsMap('bookstore.yang') def mockYangTextSchemaSourceSet = Mock(YangTextSchemaSourceSet) def mockSchemaContext = Mock(SchemaContext) def containerNodeFromYangUtils = Mock(ContainerNode) @@ -91,9 +91,9 @@ class YangParserSpec extends Specification { def 'Parsing data with yang resource to context map.'() { given: 'the schema source set for the yang resource map is returned' - mockTimedYangTextSchemaSourceSetBuilder.getYangTextSchemaSourceSet(yangResourcesNameToContentMap) >> mockYangTextSchemaSourceSet + mockTimedYangTextSchemaSourceSetBuilder.getYangTextSchemaSourceSet(yangResourceContentPerName) >> mockYangTextSchemaSourceSet when: 'parsing some json data' - def result = objectUnderTest.parseData(ContentType.JSON, 'some json', yangResourcesNameToContentMap, noParent) + def result = objectUnderTest.parseData(ContentType.JSON, 'some json', yangResourceContentPerName, noParent) then: 'the yang parser helper always returns a container node' 1 * mockYangParserHelper.parseData(ContentType.JSON, 'some json', mockSchemaContext, noParent, validateAndParse) >> containerNodeFromYangUtils and: 'the result is the same container node as return from yang utils' diff --git a/cps-service/src/test/resources/cps-notification-subscriptions@2024-07-03.yang b/cps-service/src/test/resources/cps-notification-subscriptions@2024-07-03.yang new file mode 100644 index 0000000000..1cab7923ea --- /dev/null +++ b/cps-service/src/test/resources/cps-notification-subscriptions@2024-07-03.yang @@ -0,0 +1,48 @@ +module cps-notification-subscriptions { + yang-version 1.1; + namespace "org:onap:cps"; + + prefix cps-notification-subscriptions; + + revision "2024-08-05" { + description + "First release of cps notification subscriptions model"; + } + container dataspaces { + + list dataspace { + key "name"; + + leaf name { + type string; + } + + container anchors { + + list anchor { + key "name"; + + leaf name { + type string; + } + + container xpaths { + + list xpath { + key "path"; + leaf path { + type string; + } + } + } + } + } + leaf-list subscriptionIds { + type string; + } + leaf topic { + type string; + } + } + } +}
\ No newline at end of file diff --git a/csit/install-deps.sh b/csit/install-deps.sh index ef0b96a799..1b4a0ae895 100755 --- a/csit/install-deps.sh +++ b/csit/install-deps.sh @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright 2024 Nordix Foundation. +# Copyright 2024-2025 OpenInfra Foundation Europe. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -28,7 +28,7 @@ export PATH="$(pwd)/bin:$PATH" # Download docker-compose. if [ ! -x bin/docker-compose ]; then echo " Downloading docker-compose" - curl -s -L https://github.com/docker/compose/releases/download/v2.29.2/docker-compose-linux-x86_64 > bin/docker-compose + curl -s -L https://github.com/docker/compose/releases/download/v2.35.1/docker-compose-linux-x86_64 > bin/docker-compose chmod +x bin/docker-compose else echo " docker-compose already installed" diff --git a/csit/install-robotframework.sh b/csit/install-robotframework.sh index 54dbce400a..867ef3c5bb 100644 --- a/csit/install-robotframework.sh +++ b/csit/install-robotframework.sh @@ -1,3 +1,19 @@ +# +# Copyright 2025 OpenInfra Foundation Europe. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + set -exu ROBOT3_VENV=$(mktemp -d --suffix=robot_venv) @@ -8,7 +24,6 @@ echo "Python version is: $(python3 --version)" python3 -m venv "${ROBOT3_VENV}" source "${ROBOT3_VENV}/bin/activate" - # Make sure pip3 itself us up-to-date. python3 -m pip install --upgrade pip diff --git a/csit/plans/cps/pnfsim/docker-compose.yml b/csit/plans/cps/pnfsim/docker-compose.yml deleted file mode 100644 index 869df22789..0000000000 --- a/csit/plans/cps/pnfsim/docker-compose.yml +++ /dev/null @@ -1,27 +0,0 @@ -# ============LICENSE_START======================================================= -# Modifications Copyright (C) 2022-2024 Nordix Foundation -# ================================================================================ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============LICENSE_END========================================================= - -services: - netconf-pnp-simulator: - image: blueonap/netconf-pnp-simulator:v2.8.6 - container_name: netconf-simulator - restart: always - ports: - - "831:830" - - "6512:6513" - volumes: - - ./netconf-config:/config/modules/stores - - ./tls:/config/tls diff --git a/csit/plans/cps/sdnc/check_sdnc_mount_node.sh b/csit/plans/cps/sdnc/check_sdnc_mount_node.sh deleted file mode 100644 index e92cec717f..0000000000 --- a/csit/plans/cps/sdnc/check_sdnc_mount_node.sh +++ /dev/null @@ -1,82 +0,0 @@ -# ============LICENSE_START======================================================= -# Copyright (C) 2023-2024 Nordix Foundation -# ================================================================================ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============LICENSE_END========================================================= - -# WAIT 10 minutes maximum and test every 30 seconds if SDNC is up using HealthCheck API -TIME_OUT=600 -INTERVAL=30 -TIME=0 -while [ "$TIME" -lt "$TIME_OUT" ]; do - response=$(curl --write-out '%{http_code}' --silent --output /dev/null -H "Authorization: Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ==" -X POST -H "X-FromAppId: csit-sdnc" -H "X-TransactionId: csit-sdnc" -H "Accept: application/json" -H "Content-Type: application/json" http://$SDNC_HOST:$SDNC_PORT/restconf/operations/SLI-API:healthcheck ); - echo $response - - if [ "$response" == "200" ]; then - echo SDNC started in $TIME seconds - break; - fi - - echo Sleep: $INTERVAL seconds before testing if SDNC is up. Total wait time up now is: $TIME seconds. Timeout is: $TIME_OUT seconds - sleep $INTERVAL - TIME=$(($TIME+$INTERVAL)) -done - -if [ "$TIME" -ge "$TIME_OUT" ]; then - echo TIME OUT: karaf session not started in $TIME_OUT seconds... Could cause problems for testing activities... 
-fi - -###################### mount pnf-sim as PNFDemo ########################## -SDNC_TIME_OUT=250 -SDNC_INTERVAL=10 -SDNC_TIME=0 - -while [ "$SDNC_TIME" -le "$SDNC_TIME_OUT" ]; do - - # Mount netconf node - curl --location --request PUT 'http://'$SDNC_HOST:$SDNC_PORT'/restconf/config/network-topology:network-topology/topology/topology-netconf/node/ietfYang-PNFDemo' \ - --header 'Authorization: Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ==' \ - --header 'Content-Type: application/json' \ - --data-raw '{ - "node": [ - { - "node-id": "ietfYang-PNFDemo", - "netconf-node-topology:protocol": { - "name": "TLS" - }, - "netconf-node-topology:host": "'$LOCAL_IP'", - "netconf-node-topology:key-based": { - "username": "netconf", - "key-id": "ODL_private_key_0" - }, - "netconf-node-topology:port": 6512, - "netconf-node-topology:tcp-only": false, - "netconf-node-topology:max-connection-attempts": 5 - } - ] - }' - - # Verify node has been mounted - - RESPONSE=$( curl --location --request GET 'http://'$SDNC_HOST:$SDNC_PORT'/restconf/config/network-topology:network-topology/topology/topology-netconf' --header 'Authorization: basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ==') - - if [[ "$RESPONSE" == *"ietfYang-PNFDemo"* ]]; then - echo "Node mounted in $SDNC_TIME" - sleep 10 - break; - fi - - sleep $SDNC_INTERVAL - SDNC_TIME=$((SDNC_TIME + SDNC_INTERVAL)) - -done
\ No newline at end of file diff --git a/csit/plans/cps/sdnc/docker-compose.yml b/csit/plans/cps/sdnc/docker-compose.yml deleted file mode 100644 index 29e8293fde..0000000000 --- a/csit/plans/cps/sdnc/docker-compose.yml +++ /dev/null @@ -1,71 +0,0 @@ -# ============LICENSE_START======================================================= -# Modifications Copyright (C) 2022 Nordix Foundation -# ================================================================================ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============LICENSE_END========================================================= - -services: - mariadb: - image: mariadb:10.5 - ports: - - "3306:3306" - environment: - - MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD:-password} - - MYSQL_ROOT_HOST=% - - MYSQL_USER=${MYSQL_USER:-sdnc} - - MYSQL_PASSWORD=${MYSQL_PASSWORD:-password} - - MYSQL_DATABASE=${MYSQL_DATABASE:-sdncdb} - logging: - driver: "json-file" - options: - max-size: "30m" - max-file: "5" - - sdnc: - image: onap/sdnc-image:${VERSION:-2.2.3} - container_name: sdnc - depends_on : - - mariadb - entrypoint: ["/opt/onap/sdnc/bin/startODL.sh"] - ports: - - "8282:8181" - hostname: - sdnc - links: - - mariadb:dbhost - - mariadb:sdnctldb01 - - mariadb:sdnctldb02 - environment: - - MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD:-password} - - MYSQL_USER=${MYSQL_USER:-sdnc} - - MYSQL_PASSWORD=${MYSQL_PASSWORD:-password} - - MYSQL_DATABASE=${MYSQL_DATABASE:-sdncdb} - - SDNC_CONFIG_DIR=/opt/onap/sdnc/data/properties - - SDNC_BIN=/opt/onap/sdnc/bin - - ODL_CERT_DIR=/opt/opendaylight/certs - - ODL_ADMIN_USERNAME=${ODL_USER:-admin} - - ODL_ADMIN_PASSWORD=${ODL_PASSWORD:-Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U} - - SDNC_DB_INIT=true - - SQL_CRYPTKEY=${SQL_CRYPTKEY:-fakECryptKey} - - volumes: - - ./certs/certs.properties:/opt/opendaylight/certs/certs.properties - - ./certs/keys0.zip:/opt/opendaylight/certs/keys0.zip - - dns: - - ${DNS_IP_ADDR-10.0.100.1} - logging: - driver: "json-file" - options: - max-size: "30m" - max-file: "5"
\ No newline at end of file diff --git a/csit/plans/cps/sdnc/sdnc_setup.sh b/csit/plans/cps/sdnc/sdnc_setup.sh deleted file mode 100644 index 61c61fc289..0000000000 --- a/csit/plans/cps/sdnc/sdnc_setup.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -# -# ============LICENSE_START======================================================= -# Copyright (C) 2021-2022 Nordix Foundation. -# ================================================================================ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# SPDX-License-Identifier: Apache-2.0 -# ============LICENSE_END========================================================= - -# @author Rahul Tyagi (rahul.tyagi@est.tech) -# setup sdnc - -export SDNC_CERT_PATH=$WORKSPACE/plans/cps/sdnc/certs - -#start SDNC containers with docker compose and configuration from docker-compose.yml -docker-compose -f $WORKSPACE/plans/cps/sdnc/docker-compose.yml up -d
\ No newline at end of file diff --git a/csit/plans/cps/setup.sh b/csit/plans/cps/setup.sh index 00ed52a7ef..5591e474c7 100755 --- a/csit/plans/cps/setup.sh +++ b/csit/plans/cps/setup.sh @@ -1,6 +1,11 @@ #!/bin/bash # # Copyright 2016-2017 Huawei Technologies Co., Ltd. +# Modifications copyright (c) 2017 AT&T Intellectual Property +# Modifications copyright (c) 2020-2021 Samsung Electronics Co., Ltd. +# Modifications Copyright (C) 2021 Pantheon.tech +# Modifications Copyright (C) 2021 Bell Canada. +# Modifications Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,43 +19,12 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# Modifications copyright (c) 2017 AT&T Intellectual Property -# Modifications copyright (c) 2020-2021 Samsung Electronics Co., Ltd. -# Modifications Copyright (C) 2021 Pantheon.tech -# Modifications Copyright (C) 2021 Bell Canada. -# Modifications Copyright (C) 2021-2024 Nordix Foundation. -# # Branched from ccsdk/distribution to this repository Feb 23, 2021 # -check_health() -{ - TIME_OUT=120 - INTERVAL=5 - TICKER=0 - - while [ "$TICKER" -le "$TIME_OUT" ]; do - - RESPONSE=$(curl --location --request GET 'http://'$1'/actuator/health/readiness') - - if [[ "$RESPONSE" == *"UP"* ]]; then - echo "$2 started in $TICKER" - break; - fi - - sleep $INTERVAL - TICKER=$((TICKER + INTERVAL)) - - done - - if [ "$TICKER" -ge "$TIME_OUT" ]; then - echo TIME OUT: $2 session not started in $TIME_OUT seconds... Could cause problems for testing activities... - fi -} - ###################### setup env ############################ # Set env variables for docker compose -export LOCAL_IP=$((ip -4 addr show docker0 | grep -Po 'inet \K[\d.]+') || hostname -I | awk '{print $1}') +export LOCAL_IP=localhost source $WORKSPACE/plans/cps/test.properties export $(cut -d= -f1 $WORKSPACE/plans/cps/test.properties) @@ -58,23 +32,18 @@ export $(cut -d= -f1 $WORKSPACE/plans/cps/test.properties) ###################### setup cps-ncmp ############################ cd $CPS_HOME/docker-compose -# start CPS/NCMP, DMI Plugin, and PostgreSQL containers with docker compose -docker-compose --profile dmi-service up -d - -###################### setup sdnc ####################################### -source $WORKSPACE/plans/cps/sdnc/sdnc_setup.sh - -###################### setup pnfsim ##################################### -docker-compose -f $WORKSPACE/plans/cps/pnfsim/docker-compose.yml up -d - -###################### verify ncmp-cps health ########################## - -check_health $CPS_CORE_HOST:$CPS_CORE_PORT 'cps-ncmp' - -###################### verify dmi health ########################## - -check_health $DMI_HOST:$DMI_PORT 'dmi-plugin' +# start CPS/NCMP, DMI Plugin, and PostgreSQL containers with docker compose, waiting for all containers to be healthy +docker-compose --profile dmi-service --profile dmi-stub up -d --quiet-pull --wait || exit 1 ###################### ROBOT Configurations ########################## # Pass variables required for Robot test suites in ROBOT_VARIABLES -ROBOT_VARIABLES="-v CPS_CORE_HOST:$CPS_CORE_HOST -v CPS_CORE_PORT:$CPS_CORE_PORT -v DMI_HOST:$LOCAL_IP -v DMI_PORT:$DMI_PORT -v DMI_VERSION:$DMI_VERSION -v DMI_CSIT_STUB_HOST:$LOCAL_IP -v DMI_CSIT_STUB_PORT:$DMI_DEMO_STUB_PORT -v DMI_AUTH_ENABLED:$DMI_AUTH_ENABLED -v DATADIR_CPS_CORE:$WORKSPACE/data/cps-core -v 
DATADIR_NCMP:$WORKSPACE/data/ncmp -v DATADIR_SUBS_NOTIFICATION:$WORKSPACE/data/subscription-notification --exitonfailure" +ROBOT_VARIABLES="\ +-v CPS_CORE_HOST:$CPS_CORE_HOST \ +-v CPS_CORE_PORT:$CPS_CORE_PORT \ +-v DMI_HOST:$DMI_HOST \ +-v DMI_PORT:$DMI_PORT \ +-v DMI_CSIT_STUB_HOST:$DMI_DEMO_STUB_HOST \ +-v DMI_CSIT_STUB_PORT:$DMI_DEMO_STUB_PORT \ +-v DATADIR_CPS_CORE:$WORKSPACE/data/cps-core \ +-v DATADIR_NCMP:$WORKSPACE/data/ncmp \ +-v DATADIR_SUBS_NOTIFICATION:$WORKSPACE/data/subscription-notification" diff --git a/csit/plans/cps/test.properties b/csit/plans/cps/test.properties index 52e82bdb85..53e310544e 100644 --- a/csit/plans/cps/test.properties +++ b/csit/plans/cps/test.properties @@ -1,34 +1,33 @@ -DB_HOST=$LOCAL_IP +DB_HOST=dbpostgresql DB_USERNAME=cps DB_PASSWORD=cps -SDNC_HOST=$LOCAL_IP -SDNC_PORT=8282 +SDNC_HOST=sdnc +SDNC_PORT=8181 SDNC_USERNAME=admin SDNC_PASSWORD=Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U CPS_CORE_HOST=$LOCAL_IP CPS_CORE_PORT=8883 -CPS_CORE_USERNAME=cpsuser -CPS_CORE_PASSWORD=cpsr0cks! -DMI_HOST=$LOCAL_IP -DMI_PORT=8783 +DMI_HOST=ncmp-dmi-plugin +DMI_PORT=8080 DMI_USERNAME=cpsuser DMI_PASSWORD=cpsr0cks! -DMI_SERVICE_URL=http://$LOCAL_IP:$DMI_PORT +DMI_SERVICE_URL=http://$DMI_HOST:$DMI_PORT DOCKER_REPO=nexus3.onap.org:10003 CPS_VERSION=latest DMI_VERSION=latest +DMI_DEMO_STUB_VERSION=latest ADVISED_MODULES_SYNC_SLEEP_TIME_MS=2000 CMHANDLE_DATA_SYNC_SLEEP_TIME_MS=2000 CPS_HOME=$CPS_HOME -DMI_DEMO_STUB_PORT=8784 -DMI_DEMO_STUB_VERSION=latest +DMI_DEMO_STUB_HOST=ncmp-dmi-plugin-demo-and-csit-stub +DMI_DEMO_STUB_PORT=8092 DMI_AUTH_ENABLED=true diff --git a/csit/prepare-csit.sh b/csit/prepare-csit.sh index 1b8578e0ce..c4c96c6dcf 100755 --- a/csit/prepare-csit.sh +++ b/csit/prepare-csit.sh @@ -2,6 +2,7 @@ # # Copyright 2019-2021 © Samsung Electronics Co., Ltd. # Modifications Copyright (C) 2021 Pantheon.tech +# Modifications Copyright 2025 OpenInfra Foundation Europe. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -51,8 +52,10 @@ if [ -f ${WORKSPACE}/env.properties ]; then source ${WORKSPACE}/env.properties fi if [ -f ${ROBOT3_VENV}/bin/activate ]; then + echo "Activating existing Robot3 Env" source ${ROBOT3_VENV}/bin/activate else + echo "Installing Robot3 Env" rm -rf /tmp/ci-management rm -f ${WORKSPACE}/env.properties cd /tmp @@ -67,11 +70,11 @@ rm -rf ${ROBOT3_VENV}/src/onap/testsuite python3 -m pip install --upgrade --extra-index-url="https://nexus3.onap.org/repository/PyPi.staging/simple" 'robotframework-onap==11.0.0.dev17' --pre -echo "Versioning information:" +echo "[Prepare] Versioning information:" python3 --version echo "Installing confluent kafka library for robot framework:" pip install robotframework-confluentkafkalibrary==2.4.0-2 pip freeze -python3 -m robot.run --version || :
\ No newline at end of file +python3 -m robot.run --version || : diff --git a/csit/run-csit.sh b/csit/run-csit.sh index 93941e2163..f9c8f003a3 100755 --- a/csit/run-csit.sh +++ b/csit/run-csit.sh @@ -219,7 +219,7 @@ echo ROBOT_VARIABLES="${ROBOT_VARIABLES}" echo "Starting Robot test suites ${SUITES} ..." relax_set -echo "Versioning information:" +echo "[Run] Versioning information:" python3 --version pip freeze python3 -m robot.run --version || : diff --git a/csit/run-project-csit.sh b/csit/run-project-csit.sh index f362cc7130..958e925cea 100755 --- a/csit/run-project-csit.sh +++ b/csit/run-project-csit.sh @@ -2,7 +2,7 @@ # # Copyright 2020-2021 © Samsung Electronics Co., Ltd. # Modifications Copyright (C) 2021 Pantheon.tech -# Modifications Copyright (C) 2024 Nordix Foundation. +# Modifications Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/csit/tests/cm-handle-query/cm-handle-query.robot b/csit/tests/cm-handle-query/cm-handle-query.robot index a16446a743..00a27de18a 100644 --- a/csit/tests/cm-handle-query/cm-handle-query.robot +++ b/csit/tests/cm-handle-query/cm-handle-query.robot @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,7 +30,6 @@ Suite Setup Create Session CPS_URL http://${CPS_CORE_HOST}:${C *** Variables *** -${auth} Basic Y3BzdXNlcjpjcHNyMGNrcyE= ${ncmpBasePath} /ncmp/v1 ${jsonModuleAndPropertyQueryParameters} {"cmHandleQueryParameters": [{"conditionName": "hasAllModules", "conditionParameters": [ {"moduleName": "iana-crypt-hash"} ]}, {"conditionName": "hasAllProperties", "conditionParameters": [ {"Contact": "newemailforstore@bookstore.com"} ]}]} ${jsonEmptyQueryParameters} {} @@ -39,20 +38,20 @@ ${jsonMissingPropertyQueryParameters} {"cmHandleQueryParameters": [{"condi *** Test Cases *** Retrieve CM Handle ids where query parameters Match (module and property query) ${uri}= Set Variable ${ncmpBasePath}/ch/id-searches - ${headers}= Create Dictionary Content-Type=application/json Authorization=${auth} + ${headers}= Create Dictionary Content-Type=application/json ${response}= POST On Session CPS_URL ${uri} headers=${headers} data=${jsonModuleAndPropertyQueryParameters} Should Be Equal As Strings ${response.status_code} 200 Should Contain ${response.json()} ietfYang-PNFDemo Retrieve CM Handle ids where query parameters Match (empty query) ${uri}= Set Variable ${ncmpBasePath}/ch/id-searches - ${headers}= Create Dictionary Content-Type=application/json Authorization=${auth} + ${headers}= Create Dictionary Content-Type=application/json ${response}= POST On Session CPS_URL ${uri} headers=${headers} data=${jsonEmptyQueryParameters} Should Be Equal As Strings ${response.status_code} 200 Should Contain ${response.json()} ietfYang-PNFDemo Throw 400 when Structure of Request is Incorrect ${uri}= Set Variable ${ncmpBasePath}/ch/id-searches - ${headers}= Create Dictionary Content-Type=application/json Authorization=${auth} + ${headers}= Create Dictionary Content-Type=application/json ${response}= POST On Session CPS_URL 
${uri} headers=${headers} data=${jsonMissingPropertyQueryParameters} expected_status=400 Should Be Equal As Strings ${response} <Response [400]> diff --git a/csit/tests/cps-admin/cps-admin.robot b/csit/tests/cps-admin/cps-admin.robot index 0df67b6245..10b86bf8c1 100644 --- a/csit/tests/cps-admin/cps-admin.robot +++ b/csit/tests/cps-admin/cps-admin.robot @@ -1,5 +1,6 @@ # ============LICENSE_START======================================================= # Copyright (c) 2021 Pantheon.tech. +# Modifications Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -27,7 +28,6 @@ Suite Setup Create Session CPS_URL http://${CPS_CORE_HOST}:${C *** Variables *** -${auth} Basic Y3BzdXNlcjpjcHNyMGNrcyE= ${basePath} /cps/api ${dataspaceName} CSIT-Dataspace ${schemaSetName} CSIT-SchemaSet @@ -38,8 +38,7 @@ ${ranDataspaceName} NFP-Operational Create Dataspace ${uri}= Set Variable ${basePath}/v1/dataspaces ${params}= Create Dictionary dataspace-name=${dataspaceName} - ${headers}= Create Dictionary Authorization=${auth} - ${response}= POST On Session CPS_URL ${uri} params=${params} headers=${headers} + ${response}= POST On Session CPS_URL ${uri} params=${params} Should Be Equal As Strings ${response.status_code} 201 Create Schema Set from YANG file @@ -48,8 +47,7 @@ Create Schema Set from YANG file ${fileData}= Get Binary File ${DATADIR_CPS_CORE}${/}test-tree.yang ${fileTuple}= Create List test.yang ${fileData} application/zip &{files}= Create Dictionary file=${fileTuple} - ${headers}= Create Dictionary Authorization=${auth} - ${response}= POST On Session CPS_URL ${uri} files=${files} params=${params} headers=${headers} + ${response}= POST On Session CPS_URL ${uri} files=${files} params=${params} Should Be Equal As Strings ${response.status_code} 201 Create Schema Set from ZIP file @@ -58,14 +56,12 @@ Create Schema Set from ZIP file ${fileData}= Get Binary File ${DATADIR_CPS_CORE}${/}yang-resources.zip ${fileTuple}= Create List test.zip ${fileData} application/zip &{files}= Create Dictionary file=${fileTuple} - ${headers}= Create Dictionary Authorization=${auth} - ${response}= POST On Session CPS_URL ${uri} files=${files} params=${params} headers=${headers} + ${response}= POST On Session CPS_URL ${uri} files=${files} params=${params} Should Be Equal As Strings ${response.status_code} 201 Get Schema Set info ${uri}= Set Variable ${basePath}/v1/dataspaces/${dataspaceName}/schema-sets/${schemaSetName} - ${headers}= Create Dictionary Authorization=${auth} - ${response}= Get On Session CPS_URL ${uri} headers=${headers} expected_status=200 + ${response}= Get On Session CPS_URL ${uri} expected_status=200 ${responseJson}= Set Variable ${response.json()} Should Be Equal As Strings ${responseJson['name']} ${schemaSetName} Should Be Equal As Strings ${responseJson['dataspaceName']} ${dataspaceName} @@ -73,6 +69,5 @@ Get Schema Set info Create Anchor ${uri}= Set Variable ${basePath}/v1/dataspaces/${dataspaceName}/anchors ${params}= Create Dictionary schema-set-name=${schemaSetName} anchor-name=${anchorName} - ${headers}= Create Dictionary Authorization=${auth} - ${response}= POST On Session CPS_URL ${uri} params=${params} headers=${headers} + ${response}= POST On Session CPS_URL ${uri} params=${params} Should Be Equal As Strings ${response.status_code} 201 diff --git 
a/csit/tests/cps-data-operations/cps-data-operations.robot b/csit/tests/cps-data-operations/cps-data-operations.robot index 96212ff632..5f6567e146 100644 --- a/csit/tests/cps-data-operations/cps-data-operations.robot +++ b/csit/tests/cps-data-operations/cps-data-operations.robot @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -32,7 +32,6 @@ Suite Setup Create Session CPS_URL http://${CPS_CORE_HOST}:${C *** Variables *** -${auth} Basic Y3BzdXNlcjpjcHNyMGNrcyE= ${topic} data-operation-client-topic ${ncmpBasePath} /ncmp ${expectedRequestId} ${EMPTY} @@ -45,11 +44,10 @@ NCMP Data Operation, forwarded to DMI, response on Client Topic ${uri}= Set Variable ${ncmpBasePath}/v1/data ${dataOperationReqBody}= Get Binary File ${DATADIR_CPS_CORE}${/}dataOperationRequest.json ${params}= Create Dictionary topic=${topic} - ${headers}= Create Dictionary Content-Type=application/json Authorization=${auth} + ${headers}= Create Dictionary Content-Type=application/json POST On Session CPS_URL ncmpInventory/v1/ch headers=${headers} data=${newCmHandleRequestBody} ${getCmHandleUri}= Set Variable ${ncmpBasePath}/v1/ch/CMHandle1 - ${getCmHandleHeaders}= Create Dictionary Authorization=${auth} - Wait Until Keyword Succeeds 20sec 200ms Is CM Handle READY ${getCmHandleUri} ${getCmHandleHeaders} CMHandle1 + Wait Until Keyword Succeeds 20sec 200ms Is CM Handle READY ${getCmHandleUri} CMHandle1 ${response}= POST On Session CPS_URL ${uri} params=${params} headers=${headers} data=${dataOperationReqBody} Set Global Variable ${expectedRequestId} ${response.json()}[requestId] Should Be Equal As Strings ${response.status_code} 200 @@ -76,8 +74,8 @@ Compare Header Values END Is CM Handle READY - [Arguments] ${uri} ${headers} ${cmHandle} - ${response}= GET On Session CPS_URL ${uri} headers=${headers} + [Arguments] ${uri} ${cmHandle} + ${response}= GET On Session CPS_URL ${uri} Should Be Equal As Strings ${response.status_code} 200 ${number_of_items}= Count Items In JSON Response ${response} Should Be True ${number_of_items} > 0 diff --git a/csit/tests/cps-data-sync/cps-data-sync.robot b/csit/tests/cps-data-sync/cps-data-sync.robot index b8ba479e7c..94ee7cda5b 100644 --- a/csit/tests/cps-data-sync/cps-data-sync.robot +++ b/csit/tests/cps-data-sync/cps-data-sync.robot @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation + * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -30,7 +30,6 @@ Suite Setup Create Session CPS_URL http://${CPS_CORE_HOST}:${C *** Variables *** -${auth} Basic Y3BzdXNlcjpjcHNyMGNrcyE= ${ncmpBasePath} /ncmp *** Test Cases *** @@ -38,22 +37,19 @@ ${ncmpBasePath} /ncmp Operational state goes to UNSYNCHRONIZED when data sync (flag) is enabled ${uri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo/data-sync ${params}= Create Dictionary dataSyncEnabled=true - ${headers}= Create Dictionary Authorization=${auth} - ${response}= PUT On Session CPS_URL ${uri} params=${params} headers=${headers} + ${response}= PUT On Session CPS_URL ${uri} params=${params} Should Be Equal As Strings ${response.status_code} 200 ${verifyUri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo/state - ${verifyHeaders}= Create Dictionary Authorization=${auth} - ${verifyResponse}= GET On Session CPS_URL ${verifyUri} headers=${verifyHeaders} + ${verifyResponse}= GET On Session CPS_URL ${verifyUri} Should Be Equal As Strings ${verifyResponse.json()['state']['dataSyncState']['operational']['syncState']} UNSYNCHRONIZED Operational state goes to SYNCHRONIZED after sometime when data sync (flag) is enabled ${uri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo/state - ${headers}= Create Dictionary Authorization=${auth} - Wait Until Keyword Succeeds 40sec 100ms Is CM Handle State SYNCHRONIZED ${uri} ${headers} + Wait Until Keyword Succeeds 40sec 100ms Is CM Handle State SYNCHRONIZED ${uri} *** Keywords *** Is CM Handle State SYNCHRONIZED - [Arguments] ${uri} ${headers} - ${response}= GET On Session CPS_URL ${uri} headers=${headers} + [Arguments] ${uri} + ${response}= GET On Session CPS_URL ${uri} Should Be Equal As Strings ${response.json()['state']['dataSyncState']['operational']['syncState']} SYNCHRONIZED diff --git a/csit/tests/cps-data/cps-data.robot b/csit/tests/cps-data/cps-data.robot index e83857caea..b0cd9e7652 100644 --- a/csit/tests/cps-data/cps-data.robot +++ b/csit/tests/cps-data/cps-data.robot @@ -1,7 +1,7 @@ # ============LICENSE_START======================================================= # Copyright (c) 2021 Pantheon.tech. # Modifications Copyright (C) 2022 Bell Canada. -# Modifications Copyright (C) 2022-2023 Nordix Foundation. +# Modifications Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -29,7 +29,6 @@ Suite Setup Create Session CPS_URL http://${CPS_CORE_HOST}:${C *** Variables *** -${auth} Basic Y3BzdXNlcjpjcHNyMGNrcyE= ${basePath} /cps/api ${dataspaceName} CSIT-Dataspace ${anchorName} CSIT-Anchor @@ -37,7 +36,7 @@ ${anchorName} CSIT-Anchor *** Test Cases *** Create Data Node ${uri}= Set Variable ${basePath}/v1/dataspaces/${dataspaceName}/anchors/${anchorName}/nodes - ${headers} Create Dictionary Content-Type=application/json Authorization=${auth} + ${headers} Create Dictionary Content-Type=application/json ${jsonData}= Get Binary File ${DATADIR_CPS_CORE}${/}test-tree.json ${response}= POST On Session CPS_URL ${uri} headers=${headers} data=${jsonData} Should Be Equal As Strings ${response.status_code} 201 @@ -45,7 +44,7 @@ Create Data Node Patch Data Node ${uri}= Set Variable ${basePath}/v1/dataspaces/${dataspaceName}/anchors/${anchorName}/nodes ${params}= Create Dictionary xpath=/test-tree/branch[@name='Right'] - ${headers} Create Dictionary Content-Type=application/json Authorization=${auth} + ${headers} Create Dictionary Content-Type=application/json ${jsonData}= Get Binary File ${DATADIR_CPS_CORE}${/}testTreePatchExample.json ${response}= PATCH On Session CPS_URL ${uri} params=${params} headers=${headers} data=${jsonData} Should Be Equal As Strings ${response.status_code} 200 @@ -53,8 +52,7 @@ Patch Data Node Get Updated Data Node by XPath ${uri}= Set Variable ${basePath}/v1/dataspaces/${dataspaceName}/anchors/${anchorName}/node ${params}= Create Dictionary xpath=/test-tree/branch[@name='Right']/nest - ${headers}= Create Dictionary Authorization=${auth} - ${response}= Get On Session CPS_URL ${uri} params=${params} headers=${headers} expected_status=200 + ${response}= Get On Session CPS_URL ${uri} params=${params} expected_status=200 ${responseJson}= Set Variable ${response.json()['tree:nest']} Should Be Equal As Strings ${responseJson['name']} Bigger ${length_birds}= Get Length ${responseJson['birds']} @@ -64,8 +62,7 @@ Get Updated Data Node by XPath Get Data Node by XPath ${uri}= Set Variable ${basePath}/v1/dataspaces/${dataspaceName}/anchors/${anchorName}/node ${params}= Create Dictionary xpath=/test-tree/branch[@name='LEFT/left']/nest - ${headers}= Create Dictionary Authorization=${auth} - ${response}= Get On Session CPS_URL ${uri} params=${params} headers=${headers} expected_status=200 + ${response}= Get On Session CPS_URL ${uri} params=${params} expected_status=200 Should Be Equal As Strings ${response.json()['tree:nest']['name']} SMALL/small diff --git a/csit/tests/cps-model-sync/cps-model-sync.robot b/csit/tests/cps-model-sync/cps-model-sync.robot index b4e61b30d8..d8d17f5d6e 100644 --- a/csit/tests/cps-model-sync/cps-model-sync.robot +++ b/csit/tests/cps-model-sync/cps-model-sync.robot @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation + * Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -31,7 +31,6 @@ Suite Setup Create Session CPS_URL http://${CPS_CORE_HOST}:${C *** Variables *** -${auth} Basic Y3BzdXNlcjpjcHNyMGNrcyE= ${ncmpInventoryBasePath} /ncmpInventory ${ncmpBasePath} /ncmp ${dmiUrl} http://${DMI_HOST}:${DMI_PORT} @@ -42,14 +41,13 @@ ${deletePayload} {"dmiPlugin":"${dmiUrl}","dmiDataPlugin":"","dmiModelPl *** Test Cases *** Register data node and sync modules. ${uri}= Set Variable ${ncmpInventoryBasePath}/v1/ch - ${headers}= Create Dictionary Content-Type=application/json Authorization=${auth} + ${headers}= Create Dictionary Content-Type=application/json ${response}= POST On Session CPS_URL ${uri} headers=${headers} data=${createPayload} Should Be Equal As Strings ${response.status_code} 200 Get CM Handle details and confirm it has been registered. ${uri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo - ${headers}= Create Dictionary Authorization=${auth} - ${response}= GET On Session CPS_URL ${uri} headers=${headers} + ${response}= GET On Session CPS_URL ${uri} ${responseJson}= Set Variable ${response.json()} ${schemaCount}= Get length ${responseJson} Should Be Equal As Strings ${response.status_code} 200 @@ -61,14 +59,13 @@ Get CM Handle details and confirm it has been registered. Update data node and sync modules. ${uri}= Set Variable ${ncmpInventoryBasePath}/v1/ch - ${headers}= Create Dictionary Content-Type=application/json Authorization=${auth} + ${headers}= Create Dictionary Content-Type=application/json ${response}= POST On Session CPS_URL ${uri} headers=${headers} data=${updatePayload} Should Be Equal As Strings ${response.status_code} 200 Get CM Handle details and confirm it has been updated. ${uri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo - ${headers}= Create Dictionary Authorization=${auth} - ${response}= GET On Session CPS_URL ${uri} headers=${headers} + ${response}= GET On Session CPS_URL ${uri} ${responseJson}= Set Variable ${response.json()} ${schemaCount}= Get length ${responseJson} Should Be Equal As Strings ${response.status_code} 200 @@ -80,13 +77,11 @@ Get CM Handle details and confirm it has been updated. 
Check if ietfYang-PNFDemo is READY ${uri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo - ${headers}= Create Dictionary Authorization=${auth} - Wait Until Keyword Succeeds 20sec 200ms Is CM Handle READY ${uri} ${headers} ietfYang-PNFDemo + Wait Until Keyword Succeeds 20sec 200ms Is CM Handle READY ${uri} ietfYang-PNFDemo Get modules for registered data node ${uri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo/modules - ${headers}= Create Dictionary Authorization=${auth} - ${response}= GET On Session CPS_URL ${uri} headers=${headers} + ${response}= GET On Session CPS_URL ${uri} Should Be Equal As Strings ${response.status_code} 200 ${number_of_items}= Count Items In JSON Response ${response} Should Be True ${number_of_items} > 0 @@ -98,20 +93,19 @@ Get modules for registered data node Delete cm handle ${uri}= Set Variable ${ncmpInventoryBasePath}/v1/ch - ${headers}= Create Dictionary Content-Type=application/json Authorization=${auth} + ${headers}= Create Dictionary Content-Type=application/json ${response}= POST On Session CPS_URL ${uri} headers=${headers} data=${deletePayload} Should Be Equal As Strings ${response.status_code} 200 Get cm handle details and confirm it has been deleted ${uri}= Set Variable ${ncmpBasePath}/v1/ch/CmHandleForDelete - ${headers}= Create Dictionary Authorization=${auth} - ${response}= GET On Session CPS_URL ${uri} headers=${headers} expected_status=404 + ${response}= GET On Session CPS_URL ${uri} expected_status=404 *** Keywords *** Is CM Handle READY - [Arguments] ${uri} ${headers} ${cmHandle} - ${response}= GET On Session CPS_URL ${uri} headers=${headers} + [Arguments] ${uri} ${cmHandle} + ${response}= GET On Session CPS_URL ${uri} Should Be Equal As Strings ${response.status_code} 200 ${number_of_items}= Count Items In JSON Response ${response} Should Be True ${number_of_items} > 0 diff --git a/csit/tests/cps-trust-level/cps-trust-level.robot b/csit/tests/cps-trust-level/cps-trust-level.robot index 810bcf4d12..767ad44a2b 100644 --- a/csit/tests/cps-trust-level/cps-trust-level.robot +++ b/csit/tests/cps-trust-level/cps-trust-level.robot @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -30,7 +30,6 @@ Library ConfluentKafkaLibrary Suite Setup Create Session CPS_URL http://${CPS_CORE_HOST}:${CPS_CORE_PORT} *** Variables *** -${auth} Basic Y3BzdXNlcjpjcHNyMGNrcyE= ${ncmpInventoryBasePath} /ncmpInventory ${ncmpBasePath} /ncmp/v1 ${dmiUrl} http://${DMI_HOST}:${DMI_PORT} @@ -41,16 +40,16 @@ ${jsonTrustLevelEventPayload} {"data":{"attributeValueChange":[{"a *** Test Cases *** Register data node ${uri}= Set Variable ${ncmpInventoryBasePath}/v1/ch - ${headers}= Create Dictionary Content-Type=application/json Authorization=${auth} + ${headers}= Create Dictionary Content-Type=application/json ${response}= POST On Session CPS_URL ${uri} headers=${headers} data=${jsonCreateCmHandles} Should Be Equal As Strings ${response.status_code} 200 Verify notification - ${group_id}= Create Consumer auto_offset_reset=earliest - Subscribe Topic topics=cm-events group_id=${group_id} - ${result}= Poll group_id=${group_id} only_value=False poll_attempts=5 - ${headers} Set Variable ${result[0].headers()} - ${payload} Set Variable ${result[0].value()} + ${group_id}= Create Consumer auto_offset_reset=earliest + Subscribe Topic topics=ncmp-inventory-events group_id=${group_id} + ${result}= Poll group_id=${group_id} only_value=False poll_attempts=5 + ${headers} Set Variable ${result[0].headers()} + ${payload} Set Variable ${result[0].value()} FOR ${header_key_value_pair} IN @{headers} Compare Header Values ${header_key_value_pair[0]} ${header_key_value_pair[1]} "ce_specversion" "1.0" Compare Header Values ${header_key_value_pair[0]} ${header_key_value_pair[1]} "ce_source" "NCMP" @@ -62,7 +61,7 @@ Verify notification Retrieve CM Handle ids where query parameters Match (trust level query) ${uri}= Set Variable ${ncmpBasePath}/ch/id-searches - ${headers}= Create Dictionary Content-Type=application/json Authorization=${auth} + ${headers}= Create Dictionary Content-Type=application/json ${response}= POST On Session CPS_URL ${uri} headers=${headers} data=${jsonTrustLevelPropertyQueryParameters} ${responseJson}= Set Variable ${response.json()} Should Be Equal As Strings ${response.status_code} 200 @@ -82,4 +81,4 @@ Compare Header Values Basic Teardown [Arguments] ${group_id} Unsubscribe ${group_id} - Close Consumer ${group_id}
\ No newline at end of file + Close Consumer ${group_id} diff --git a/csit/tests/ncmp-passthrough/ncmp-passthrough.robot b/csit/tests/ncmp-passthrough/ncmp-passthrough.robot index 02689f3362..fbfbc4dbe4 100644 --- a/csit/tests/ncmp-passthrough/ncmp-passthrough.robot +++ b/csit/tests/ncmp-passthrough/ncmp-passthrough.robot @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation + * Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,7 +30,6 @@ Suite Setup Create Session CPS_URL http://${CPS_CORE_HOST}:${C *** Variables *** -${auth} Basic Y3BzdXNlcjpjcHNyMGNrcyE= ${ncmpBasePath} /ncmp ${netconf} NETCONF @@ -38,14 +37,12 @@ ${netconf} NETCONF Get for Passthrough Operational (CF, RO) with fields & topic ${uri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo/data/ds/ncmp-datastore:passthrough-operational?resourceIdentifier=ietf-netconf-monitoring:netconf-state&options=(fields=schemas/schema)&topic=test-topic - ${headers}= Create Dictionary Authorization=${auth} - ${response}= Get On Session CPS_URL ${uri} headers=${headers} expected_status=200 + ${response}= Get On Session CPS_URL ${uri} expected_status=200 Should Be Equal As Strings ${response.status_code} 200 Get for Passthrough Operational (CF, RO) with fields ${uri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo/data/ds/ncmp-datastore:passthrough-operational?resourceIdentifier=ietf-netconf-monitoring:netconf-state&options=(fields=schemas/schema) - ${headers}= Create Dictionary Authorization=${auth} - ${response}= Get On Session CPS_URL ${uri} headers=${headers} expected_status=200 + ${response}= Get On Session CPS_URL ${uri} expected_status=200 ${responseJson}= Set Variable ${response.json()} ${schemaCount}= Get length ${responseJson['ietf-netconf-monitoring:netconf-state']['schemas']} Should Be True ${schemaCount} >0 @@ -53,15 +50,14 @@ Get for Passthrough Operational (CF, RO) with fields Create to bookstore using passthrough-running ${uri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo/data/ds/ncmp-datastore:passthrough-running?resourceIdentifier=stores:bookstore - ${headers}= Create Dictionary Content-Type=application/json Authorization=${auth} + ${headers}= Create Dictionary Content-Type=application/json ${jsonData}= Get Binary File ${DATADIR_NCMP}${/}bookstoreCreateExample.json ${response}= POST On Session CPS_URL ${uri} headers=${headers} data=${jsonData} Should Be Equal As Strings ${response.status_code} 201 Verify create to bookstore using passthrough-running ${uri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo/data/ds/ncmp-datastore:passthrough-running?resourceIdentifier=stores:bookstore - ${headers}= Create Dictionary Authorization=${auth} - ${response}= Get On Session CPS_URL ${uri} headers=${headers} + ${response}= Get On Session CPS_URL ${uri} Should Be Equal As Strings ${response.status_code} 200 FOR ${item} IN @{response.json()['stores:bookstore']['categories']} IF "${item['code']}" == "01" @@ -76,15 +72,14 @@ Verify create to bookstore using passthrough-running Update Bookstore using passthrough-running update Category 01 (replace category) ${uri}= Set Variable 
${ncmpBasePath}/v1/ch/ietfYang-PNFDemo/data/ds/ncmp-datastore:passthrough-running?resourceIdentifier=stores:bookstore/categories=01 - ${headers}= Create Dictionary Content-Type=application/json Authorization=${auth} + ${headers}= Create Dictionary Content-Type=application/json ${jsonData}= Get Binary File ${DATADIR_NCMP}${/}bookstoreUpdateExample.json ${response}= PUT On Session CPS_URL ${uri} headers=${headers} data=${jsonData} Should Be Equal As Strings ${response.status_code} 200 Verify update to bookstore using passthrough-running updated category 01 ${uri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo/data/ds/ncmp-datastore:passthrough-running?resourceIdentifier=stores:bookstore/categories=01 - ${headers}= Create Dictionary Authorization=${auth} - ${response}= Get On Session CPS_URL ${uri} headers=${headers} + ${response}= Get On Session CPS_URL ${uri} Should Be Equal As Strings ${response.status_code} 200 FOR ${item} IN @{response.json()['stores:categories']} IF "${item['code']}" == "01" @@ -94,22 +89,19 @@ Verify update to bookstore using passthrough-running updated category 01 Verify update to bookstore using passthrough-running did not remove category 02 ${uri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo/data/ds/ncmp-datastore:passthrough-running?resourceIdentifier=stores:bookstore - ${headers}= Create Dictionary Authorization=${auth} - ${response}= Get On Session CPS_URL ${uri} headers=${headers} + ${response}= Get On Session CPS_URL ${uri} Should Be Equal As Strings ${response.status_code} 200 ${schemaCount}= Get length ${response.json()['stores:bookstore']['categories']} Should Be Equal As Numbers ${schemaCount} 2 Delete Bookstore using passthrough-running for Category 01 ${uri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo/data/ds/ncmp-datastore:passthrough-running?resourceIdentifier=stores:bookstore/categories=01 - ${headers}= Create Dictionary Content-Type=application/json Authorization=${auth} - ${response}= DELETE On Session CPS_URL ${uri} headers=${headers} + ${response}= DELETE On Session CPS_URL ${uri} Should Be Equal As Strings ${response.status_code} 204 Verify delete to bookstore using passthrough-running removed only category 01 ${uri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo/data/ds/ncmp-datastore:passthrough-running?resourceIdentifier=stores:bookstore - ${headers}= Create Dictionary Authorization=${auth} - ${response}= Get On Session CPS_URL ${uri} headers=${headers} + ${response}= Get On Session CPS_URL ${uri} ${responseJson}= Set Variable ${response.json()['stores:bookstore']['categories']} Should Be Equal As Strings ${response.status_code} 200 ${schemaCount}= Get length ${responseJson} @@ -122,13 +114,12 @@ Verify delete to bookstore using passthrough-running removed only category 01 Patch will add new category with new book and add a new book to an existing category ${uri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo/data/ds/ncmp-datastore:passthrough-running?resourceIdentifier=stores:bookstore - ${headers}= Create Dictionary Content-Type=application/yang.patch+json Authorization=${auth} + ${headers}= Create Dictionary Content-Type=application/yang.patch+json ${jsonData}= Get Binary File ${DATADIR_NCMP}${/}bookstorePatchExample.json ${response}= PATCH On Session CPS_URL ${uri} headers=${headers} data=${jsonData} Should Be Equal As Strings ${response.status_code} 200 ${verifyUri}= Set Variable 
${ncmpBasePath}/v1/ch/ietfYang-PNFDemo/data/ds/ncmp-datastore:passthrough-running?resourceIdentifier=stores:bookstore/categories=100 - ${verifyHeaders}= Create Dictionary Authorization=${auth} - ${verifyResponse}= Get On Session CPS_URL ${verifyUri} headers=${verifyHeaders} + ${verifyResponse}= Get On Session CPS_URL ${verifyUri} Should Be Equal As Strings ${verifyResponse.status_code} 200 FOR ${item} IN @{verifyResponse.json()['stores:categories']} IF "${item['code']}" == "100" @@ -136,5 +127,5 @@ Patch will add new category with new book and add a new book to an existing cate END END ${verifyUri}= Set Variable ${ncmpBasePath}/v1/ch/ietfYang-PNFDemo/data/ds/ncmp-datastore:passthrough-running?resourceIdentifier=stores:bookstore/categories=02/books=A%20New%20book%20in%20existing%20category - ${verifyResponse}= Get On Session CPS_URL ${verifyUri} headers=${verifyHeaders} + ${verifyResponse}= Get On Session CPS_URL ${verifyUri} Should Be Equal As Strings ${verifyResponse.status_code} 200 diff --git a/docker-compose/README.md b/docker-compose/README.md index ea5a2a7610..b321c80edf 100644 --- a/docker-compose/README.md +++ b/docker-compose/README.md @@ -1,7 +1,7 @@ <!-- ============LICENSE_START======================================================= Copyright (C) 2020 Pantheon.tech - Modifications Copyright (C) 2020-2024 Nordix Foundation. + Modifications Copyright (C) 2020-2025 OpenInfra Foundation Europe. All rights reserved. Modifications Copyright (C) 2021 Bell Canada. Modifications Copyright (C) 2022 TechMahindra Ltd. ================================================================================ @@ -113,7 +113,7 @@ Then CPS can be started either using a Java Archive previously built or directly Following command starts the application using JAR file: ```bash -DB_HOST=localhost DB_USERNAME=cps DB_PASSWORD=cps CPS_USERNAME=cpsuser CPS_PASSWORD=cpsr0cks! \ +DB_HOST=localhost DB_USERNAME=cps DB_PASSWORD=cps \ DMI_USERNAME=cpsuser DMI_PASSWORD=cpsr0cks! \ java -jar cps-application/target/cps-application-x.y.z-SNAPSHOT.jar ``` @@ -125,7 +125,6 @@ Here are the steps to run or debug the application from Intellij: 1. Enable the desired maven profile form Maven Tool Window 2. Run a configuration from `Run -> Edit configurations` with following settings: * `Environment variables`: `DB_HOST=localhost;DB_USERNAME=cps;DB_PASSWORD=cps - CPS_USERNAME=cpsuser CPS_PASSWORD=cpsr0cks! 
DMI_USERNAME=cpsuser DMI_PASSWORD=cpsr0cks!` ## Accessing services diff --git a/docker-compose/config/grafana/cps-database-pool.json b/docker-compose/config/grafana/cps-database-pool.json new file mode 100644 index 0000000000..37782af71a --- /dev/null +++ b/docker-compose/config/grafana/cps-database-pool.json @@ -0,0 +1,886 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "enable": true, + "expr": "resets(process_uptime_seconds{application=\"$application\", region=\"$region\", instance=\"$instance\"}[1m]) > 0", + "hide": false, + "iconColor": "rgba(255, 96, 96, 1)", + "limit": 100, + "name": "Restart Detection", + "showIn": 0, + "step": "1m", + "tagKeys": "restart-tag", + "tags": [], + "textFormat": "uptime reset", + "titleFormat": "Restart", + "type": "tags" + } + ] + }, + "description": "HikariCP & JDBC Dashboard (Micrometer.io)", + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 1, + "id": 16, + "links": [], + "panels": [ + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 4, + "panels": [], + "title": "JDBC Connections", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "fieldConfig": { + "defaults": { + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 8, + "x": 0, + "y": 1 + }, + "id": 6, + "maxDataPoints": 100, + "options": { + "colorMode": "none", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "expr": "jdbc_connections_min{application=\"$application\", region=~\"$region\", instance=~\"$instance\", name=~\"$jdbc_connection_name\"}", + "format": "time_series", + "intervalFactor": 1, + "refId": "A" + } + ], + "title": "Min", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "fieldConfig": { + "defaults": { + "color": { + "fixedColor": "rgb(31, 120, 193)", + "mode": "fixed" + }, + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 8, + "x": 8, + "y": 1 + }, + "id": 7, + "maxDataPoints": 100, + "options": { + "colorMode": "none", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + 
"wideLayout": true + }, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "expr": "jdbc_connections_active{application=\"$application\", region=~\"$region\", instance=~\"$instance\", name=~\"$jdbc_connection_name\"}", + "format": "time_series", + "intervalFactor": 1, + "refId": "A" + } + ], + "title": "Active", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "fieldConfig": { + "defaults": { + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 8, + "x": 16, + "y": 1 + }, + "id": 8, + "maxDataPoints": 100, + "options": { + "colorMode": "none", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "expr": "jdbc_connections_max{application=\"$application\", region=~\"$region\", instance=~\"$instance\", name=~\"$jdbc_connection_name\"}", + "format": "time_series", + "intervalFactor": 1, + "refId": "A" + } + ], + "title": "Max", + "type": "stat" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 4 + }, + "id": 2, + "panels": [], + "title": "Hikari Connections", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 12, + "w": 21, + "x": 0, + "y": 5 + }, + "id": 10, + "options": { + "legend": { + "calcs": [ + "max", + "min" + ], + "displayMode": "table", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "expr": "hikaricp_connections_active{application=\"$application\", region=~\"$region\", instance=~\"$instance\", pool=~\"$hikaricp_pool_name\"}", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "Active connections", + "refId": "C" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + 
}, + "expr": "hikaricp_connections_idle{application=\"$application\", region=~\"$region\", instance=~\"$instance\", pool=~\"$hikaricp_pool_name\"}", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "Idle connections", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "expr": "hikaricp_connections_pending{application=\"$application\", region=~\"$region\", instance=~\"$instance\", pool=~\"$hikaricp_pool_name\"}", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "Pending threads", + "refId": "B" + } + ], + "title": "Connections", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "fieldConfig": { + "defaults": { + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 3, + "x": 21, + "y": 5 + }, + "id": 12, + "maxDataPoints": 100, + "options": { + "colorMode": "none", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "expr": "hikaricp_connections_max{application=\"$application\", region=~\"$region\", instance=~\"$instance\", pool=~\"$hikaricp_pool_name\"}", + "format": "time_series", + "intervalFactor": 1, + "refId": "A" + } + ], + "title": "Max", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "fieldConfig": { + "defaults": { + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 3, + "x": 21, + "y": 9 + }, + "id": 13, + "maxDataPoints": 100, + "options": { + "colorMode": "none", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "expr": "hikaricp_connections_min{application=\"$application\", region=~\"$region\", instance=~\"$instance\", pool=~\"$hikaricp_pool_name\"}", + "format": "time_series", + "intervalFactor": 1, + "refId": "A" + } + ], + "title": "Min", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "fieldConfig": { + "defaults": { + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, 
+ "gridPos": { + "h": 4, + "w": 3, + "x": 21, + "y": 13 + }, + "id": 17, + "maxDataPoints": 100, + "options": { + "colorMode": "none", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "expr": "hikaricp_connections_timeout_total{application=\"$application\", region=~\"$region\", instance=~\"$instance\", pool=~\"$hikaricp_pool_name\"}", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "", + "refId": "A" + } + ], + "title": "Total Timeout", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "dtdurations" + }, + "overrides": [] + }, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 17 + }, + "id": 16, + "options": { + "legend": { + "calcs": [ + "max", + "min" + ], + "displayMode": "table", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "expr": "irate(hikaricp_connections_usage_seconds_sum{application=\"$application\", region=~\"$region\", instance=~\"$instance\", pool=~\"$hikaricp_pool_name\"}[5m]) / irate(hikaricp_connections_usage_seconds_count{application=\"$application\", region=~\"$region\", instance=~\"$instance\", pool=~\"$hikaricp_pool_name\"}[5m])", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "Usage time", + "refId": "C" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "expr": "irate(hikaricp_connections_creation_seconds_sum{application=\"$application\", region=~\"$region\", instance=~\"$instance\", pool=~\"$hikaricp_pool_name\"}[5m]) / irate(hikaricp_connections_creation_seconds_count{application=\"$application\", region=~\"$region\", instance=~\"$instance\", pool=~\"$hikaricp_pool_name\"}[5m])", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "Creation time", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "expr": "irate(hikaricp_connections_acquire_seconds_sum{application=\"$application\", region=~\"$region\", instance=~\"$instance\", pool=~\"$hikaricp_pool_name\"}[5m]) / irate(hikaricp_connections_acquire_seconds_count{application=\"$application\", 
region=~\"$region\", instance=~\"$instance\", pool=~\"$hikaricp_pool_name\"}[5m])", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "Acquire time", + "refId": "B" + } + ], + "title": "Connections Time", + "type": "timeseries" + } + ], + "preload": false, + "refresh": "5s", + "schemaVersion": 40, + "tags": [ + "prometheus", + "hikaricp", + "micrometer", + "spring boot", + "jdbc" + ], + "templating": { + "list": [ + { + "current": { + "text": "", + "value": "" + }, + "datasource": "PBFA97CFB590B2093", + "definition": "", + "includeAll": false, + "label": "Application", + "name": "application", + "options": [], + "query": "label_values(application)", + "refresh": 2, + "regex": "", + "type": "query" + }, + { + "current": { + "text": "All", + "value": "$__all" + }, + "datasource": "PBFA97CFB590B2093", + "definition": "", + "includeAll": true, + "label": "Region", + "name": "region", + "options": [], + "query": "label_values(jdbc_connections_min{application=\"$application\"}, region)", + "refresh": 1, + "regex": "", + "sort": 1, + "type": "query" + }, + { + "current": { + "text": "129.192.80.24:9998", + "value": "129.192.80.24:9998" + }, + "datasource": "PBFA97CFB590B2093", + "definition": "label_values(jdbc_connections_min{application=\"$application\", region=~\"$region\"}, instance)", + "includeAll": true, + "label": "Instance", + "name": "instance", + "options": [], + "query": "label_values(jdbc_connections_min{application=\"$application\", region=~\"$region\"}, instance)", + "refresh": 2, + "regex": "", + "sort": 1, + "type": "query" + }, + { + "current": { + "text": "dataSource", + "value": "dataSource" + }, + "datasource": "PBFA97CFB590B2093", + "definition": "label_values(jdbc_connections_min{application=\"$application\", region=~\"$region\", instance=~\"$instance\"}, name)", + "includeAll": false, + "label": "JDBC Connection Name", + "name": "jdbc_connection_name", + "options": [], + "query": "label_values(jdbc_connections_min{application=\"$application\", region=~\"$region\", instance=~\"$instance\"}, name)", + "refresh": 2, + "regex": "", + "sort": 1, + "type": "query" + }, + { + "current": { + "text": "CpsDatabasePool", + "value": "CpsDatabasePool" + }, + "datasource": "PBFA97CFB590B2093", + "definition": "label_values(hikaricp_connections_min{application=\"$application\", region=~\"$region\", instance=~\"$instance\"}, pool)", + "includeAll": false, + "label": "Hikari Pool Name", + "name": "hikaricp_pool_name", + "options": [], + "query": "label_values(hikaricp_connections_min{application=\"$application\", region=~\"$region\", instance=~\"$instance\"}, pool)", + "refresh": 2, + "regex": "", + "sort": 1, + "type": "query" + } + ] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "Cps Database Pool", + "uid": "wdV6wx7iz", + "version": 1, + "weekStart": "" +}
\ No newline at end of file diff --git a/docker-compose/config/grafana/data-dashboard.json b/docker-compose/config/grafana/data-dashboard.json new file mode 100644 index 0000000000..4fbb65ee5b --- /dev/null +++ b/docker-compose/config/grafana/data-dashboard.json @@ -0,0 +1,276 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "description": "NCMP Rest Interfaces", + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": 2, + "links": [], + "panels": [ + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 14, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 6, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "disableTextWrap": false, + "editorMode": "builder", + "expr": "http_server_requests_seconds_count{instance=\"$Instance\", job=\"$Job\", status=~\"201|200|203|204\", uri=~\"/ncmp/v1/ch/id-searches|/ncmp/v1/ch/searches|/ncmp/v1/ch/{cm-handle}/data/ds/{datastore-name}|/ncmp/v1/data\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Inventory API calls by URI and STATUS", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PBFA97CFB590B2093" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 14 + }, + "id": 7, + "options": { 
+ "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "disableTextWrap": false, + "editorMode": "builder", + "expr": "sum({error=\"OutOfMemoryError\", instance=\"$Instance\", job=\"$Job\"})", + "fullMetaSearch": false, + "includeNullMetadata": true, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Out of Memory Errors", + "type": "timeseries" + } + ], + "preload": false, + "refresh": "5s", + "schemaVersion": 40, + "tags": [ + "ncmp", + "data", + "api" + ], + "templating": { + "list": [ + { + "current": { + "text": "129.192.80.24:9998", + "value": "129.192.80.24:9998" + }, + "definition": "label_values(instance)", + "label": "Instance", + "name": "Instance", + "options": [], + "query": { + "qryType": 1, + "query": "label_values(instance)", + "refId": "PrometheusVariableQueryEditor-VariableQuery" + }, + "refresh": 1, + "regex": "", + "type": "query" + }, + { + "current": { + "text": "cps-and-ncmp-kpi", + "value": "cps-and-ncmp-kpi" + }, + "definition": "label_values(job)", + "label": "job", + "name": "Job", + "options": [], + "query": { + "qryType": 1, + "query": "label_values(job)", + "refId": "PrometheusVariableQueryEditor-VariableQuery" + }, + "refresh": 1, + "regex": "", + "type": "query" + } + ] + }, + "time": { + "from": "2025-01-24T10:09:19.223Z", + "to": "2025-01-24T10:32:39.091Z" + }, + "timepicker": {}, + "timezone": "browser", + "title": "Data REST Interfaces", + "uid": "aeavdgvjyt2iob", + "version": 1, + "weekStart": "" +}
\ No newline at end of file diff --git a/docker-compose/config/grafana/lcm-state-dashboard.json b/docker-compose/config/grafana/inventory-dashboard.json index 5339b038d7..d4046b8b08 100644 --- a/docker-compose/config/grafana/lcm-state-dashboard.json +++ b/docker-compose/config/grafana/inventory-dashboard.json @@ -18,7 +18,7 @@ "editable": true, "fiscalYearStartMonth": 0, "graphTooltip": 0, - "id": 2, + "id": 3, "links": [], "panels": [ { @@ -105,7 +105,7 @@ { "disableTextWrap": false, "editorMode": "builder", - "expr": "cmHandlesByState{instance=\"$Instance\", job=\"$Job\"}", + "expr": "cps_ncmp_inventory_cm_handles_by_state{instance=\"$Instance\", job=\"$Job\"}", "fullMetaSearch": false, "includeNullMetadata": true, "legendFormat": "__auto", @@ -179,111 +179,11 @@ }, "gridPos": { "h": 8, - "w": 12, + "w": 24, "x": 0, "y": 9 }, - "id": 1, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "pluginVersion": "11.4.0", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, - "disableTextWrap": false, - "editorMode": "builder", - "expr": "cmHandlesByState{state=\"ADVISED\", instance=\"$Instance\", job=\"$Job\"}", - "fullMetaSearch": false, - "includeNullMetadata": true, - "legendFormat": "__auto", - "range": true, - "refId": "A", - "useBackend": false - } - ], - "title": "ADVISED", - "type": "timeseries" - }, - { - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisBorderShow": false, - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "barWidthFactor": 0.6, - "drawStyle": "line", - "fillOpacity": 0, - "gradientMode": "none", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "insertNulls": false, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" - }, - "showPoints": "auto", - "spanNulls": false, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green", - "value": null - }, - { - "color": "red", - "value": 80 - } - ] - } - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 12, - "y": 9 - }, - "id": 3, + "id": 6, "options": { "legend": { "calcs": [], @@ -301,7 +201,7 @@ { "disableTextWrap": false, "editorMode": "builder", - "expr": "cmHandlesByState{state=\"READY\", instance=\"$Instance\", job=\"$Job\"}", + "expr": "http_server_requests_seconds_count{instance=\"$Instance\", job=\"$Job\", status=~\"201|200\", uri=\"/ncmpInventory/v1/ch\"}", "fullMetaSearch": false, "includeNullMetadata": true, "legendFormat": "__auto", @@ -310,7 +210,7 @@ "useBackend": false } ], - "title": "READY", + "title": "Inventory API calls by URI and STATUS", "type": "timeseries" }, { @@ -375,11 +275,11 @@ }, "gridPos": { "h": 8, - "w": 12, + "w": 24, "x": 0, "y": 17 }, - "id": 2, + "id": 7, "options": { "legend": { "calcs": [], @@ -397,7 +297,7 @@ { "disableTextWrap": false, "editorMode": "builder", - "expr": "cmHandlesByState{state=\"LOCKED\", instance=\"$Instance\", job=\"$Job\"}", + "expr": "sum({error=\"OutOfMemoryError\", instance=\"$Instance\", job=\"$Job\"})", "fullMetaSearch": false, "includeNullMetadata": 
true, "legendFormat": "__auto", @@ -406,115 +306,23 @@ "useBackend": false } ], - "title": "LOCKED", - "type": "timeseries" - }, - { - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisBorderShow": false, - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "barWidthFactor": 0.6, - "drawStyle": "line", - "fillOpacity": 0, - "gradientMode": "none", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "insertNulls": false, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" - }, - "showPoints": "auto", - "spanNulls": false, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green", - "value": null - }, - { - "color": "red", - "value": 80 - } - ] - } - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 12, - "y": 17 - }, - "id": 4, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "pluginVersion": "11.4.0", - "targets": [ - { - "disableTextWrap": false, - "editorMode": "builder", - "expr": "cmHandlesByState{state=\"DELETING\", instance=\"$Instance\", job=\"$Job\"}", - "fullMetaSearch": false, - "includeNullMetadata": true, - "legendFormat": "__auto", - "range": true, - "refId": "A", - "useBackend": false - } - ], - "title": "DELETING", + "title": "Out of Memory Errors", "type": "timeseries" } ], "preload": false, "schemaVersion": 40, - "tags": [], + "tags": [ + "lcm-state", + "inventory", + "ncmp" + ], "templating": { "list": [ { "current": { - "text": "172.17.0.1:8799", - "value": "172.17.0.1:8799" + "text": "129.192.80.24:9998", + "value": "129.192.80.24:9998" }, "definition": "label_values(instance)", "label": "Instance", @@ -531,8 +339,8 @@ }, { "current": { - "text": "cps-and-ncmp-endurance", - "value": "cps-and-ncmp-endurance" + "text": "cps-and-ncmp-kpi", + "value": "cps-and-ncmp-kpi" }, "definition": "label_values(job)", "label": "job", @@ -555,8 +363,8 @@ }, "timepicker": {}, "timezone": "browser", - "title": "LCM State", - "uid": "ae9zcowku03k0d", + "title": "Inventory REST Interfaces", + "uid": "beao8xrt6qjnkc", "version": 1, "weekStart": "" }
\ No newline at end of file diff --git a/docker-compose/config/grafana/jvm-micrometer-dashboard.json b/docker-compose/config/grafana/jvm-micrometer-dashboard.json index 8f7747c596..9f09d2e2cb 100644 --- a/docker-compose/config/grafana/jvm-micrometer-dashboard.json +++ b/docker-compose/config/grafana/jvm-micrometer-dashboard.json @@ -22,6 +22,7 @@ }, "enable": true, "expr": "resets(process_uptime_seconds{application=\"$application\", instance=\"$instance\"}[1m]) > 0", + "hide": false, "iconColor": "rgba(255, 96, 96, 1)", "name": "Restart Detection", "showIn": 0, @@ -35,17 +36,12 @@ "description": "Dashboard for Micrometer instrumented applications (Java, Spring Boot, Micronaut)", "editable": true, "fiscalYearStartMonth": 0, - "gnetId": 4701, "graphTooltip": 1, - "id": 1, + "id": 3, "links": [], "panels": [ { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, "gridPos": { "h": 1, "w": 24, @@ -54,15 +50,6 @@ }, "id": 139, "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, - "refId": "A" - } - ], "title": "Quick Facts", "type": "row" }, @@ -73,6 +60,10 @@ }, "fieldConfig": { "defaults": { + "color": { + "fixedColor": "green", + "mode": "fixed" + }, "decimals": 1, "mappings": [ { @@ -127,7 +118,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.4", + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -153,6 +144,10 @@ }, "fieldConfig": { "defaults": { + "color": { + "fixedColor": "green", + "mode": "fixed" + }, "mappings": [ { "options": { @@ -206,7 +201,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.4", + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -292,7 +287,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.4", + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -387,7 +382,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.1.4", + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -407,10 +402,6 @@ }, { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, "gridPos": { "h": 1, "w": 24, @@ -419,15 +410,6 @@ }, "id": 140, "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, - "refId": "A" - } - ], "title": "I/O Overview", "type": "row" }, @@ -448,6 +430,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -513,6 +496,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -546,6 +530,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -642,6 +627,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -675,6 +661,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -740,6 +727,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -787,6 +775,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -852,6 +841,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -931,10 +921,6 @@ }, { "collapsed": false, - "datasource": { - 
"type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, "gridPos": { "h": 1, "w": 24, @@ -943,15 +929,6 @@ }, "id": 141, "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, - "refId": "A" - } - ], "title": "JVM Memory", "type": "row" }, @@ -972,6 +949,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -1038,6 +1016,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -1097,6 +1076,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -1163,6 +1143,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -1223,6 +1204,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -1289,6 +1271,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -1348,6 +1331,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -1414,6 +1398,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -1468,10 +1453,6 @@ }, { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, "gridPos": { "h": 1, "w": 24, @@ -1480,15 +1461,6 @@ }, "id": 142, "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, - "refId": "A" - } - ], "title": "JVM Misc", "type": "row" }, @@ -1509,6 +1481,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -1577,6 +1550,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -1637,6 +1611,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -1704,6 +1679,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -1750,6 +1726,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -1817,6 +1794,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -1890,6 +1868,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -2046,6 +2025,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -2080,6 +2060,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -2148,6 +2129,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -2181,6 +2163,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -2324,6 +2307,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -2360,6 +2344,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, 
"gradientMode": "none", @@ -2428,6 +2413,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -2463,10 +2449,6 @@ }, { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, "gridPos": { "h": 1, "w": 24, @@ -2476,15 +2458,6 @@ "id": 143, "panels": [], "repeat": "persistence_counts", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, - "refId": "A" - } - ], "title": "JVM Memory Pools (Heap)", "type": "row" }, @@ -2536,8 +2509,7 @@ "mode": "absolute", "steps": [ { - "color": "green", - "value": null + "color": "green" }, { "color": "red", @@ -2551,7 +2523,7 @@ }, "gridPos": { "h": 7, - "w": 8, + "w": 24, "x": 0, "y": 36 }, @@ -2572,6 +2544,7 @@ } }, "repeat": "jvm_memory_pool_heap", + "repeatDirection": "h", "targets": [ { "datasource": { @@ -2624,10 +2597,6 @@ }, { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, "gridPos": { "h": 1, "w": 24, @@ -2636,66 +2605,29 @@ }, "id": 144, "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, - "refId": "A" - } - ], "title": "JVM Memory Pools (Non-Heap)", "type": "row" }, { - "aliasColors": {}, - "autoMigrateFrom": "graph", - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" }, - "editable": true, - "error": false, - "fill": 1, - "grid": { - "leftLogBase": 1, - "rightLogBase": 1 + "fieldConfig": { + "defaults": {}, + "overrides": [] }, "gridPos": { "h": 7, - "w": 8, + "w": 24, "x": 0, "y": 44 }, "id": 78, - "legend": { - "alignAsTable": false, - "avg": false, - "current": true, - "max": true, - "min": false, - "rightSide": false, - "show": true, - "total": false, - "values": true - }, - "lines": true, - "linewidth": 1, "maxPerRow": 3, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", + "options": {}, "repeat": "jvm_memory_pool_nonheap", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "repeatDirection": "h", "targets": [ { "datasource": { @@ -2743,104 +2675,39 @@ "step": 1800 } ], - "thresholds": [], "title": "$jvm_memory_pool_nonheap", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "timeseries", - "x-axis": true, - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "y-axis": true, - "y_formats": [ - "mbytes", - "short" - ], - "yaxes": [ - { - "format": "bytes", - "logBase": 1, - "min": 0, - "show": true - }, - { - "format": "short", - "logBase": 1, - "show": true - } - ] + "type": "timeseries" }, { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 58 + "y": 51 }, "id": 145, "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, - "refId": "A" - } - ], "title": "Garbage Collection", "type": "row" }, { - "aliasColors": {}, - "autoMigrateFrom": "graph", - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" }, - "fill": 1, + "fieldConfig": { + "defaults": {}, + "overrides": [] + }, "gridPos": { "h": 7, "w": 8, "x": 0, - "y": 59 + "y": 52 }, "id": 98, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": 
false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "options": {}, "targets": [ { "datasource": { @@ -2855,72 +2722,26 @@ "refId": "A" } ], - "thresholds": [], "title": "Collections", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "timeseries", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "ops", - "logBase": 1, - "min": "0", - "show": true - }, - { - "format": "short", - "label": "", - "logBase": 1, - "show": true - } - ] + "type": "timeseries" }, { - "aliasColors": {}, - "autoMigrateFrom": "graph", - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" }, - "fill": 1, + "fieldConfig": { + "defaults": {}, + "overrides": [] + }, "gridPos": { "h": 7, "w": 8, "x": 8, - "y": 59 + "y": 52 }, "id": 101, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "options": {}, "targets": [ { "datasource": { @@ -2949,72 +2770,26 @@ "refId": "B" } ], - "thresholds": [], "title": "Pause Durations", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "timeseries", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "s", - "logBase": 1, - "min": "0", - "show": true - }, - { - "format": "short", - "label": "", - "logBase": 1, - "show": true - } - ] + "type": "timeseries" }, { - "aliasColors": {}, - "autoMigrateFrom": "graph", - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" }, - "fill": 1, + "fieldConfig": { + "defaults": {}, + "overrides": [] + }, "gridPos": { "h": 7, "w": 8, "x": 16, - "y": 59 + "y": 52 }, "id": 99, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "options": {}, "targets": [ { "datasource": { @@ -3041,103 +2816,39 @@ "refId": "B" } ], - "thresholds": [], "title": "Allocated/Promoted", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "timeseries", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "Bps", - "logBase": 1, - "min": "0", - "show": true - }, - { - "format": "short", - "logBase": 1, - "show": true - } - ] + "type": "timeseries" }, { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 66 + "y": 59 }, "id": 146, "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, - "refId": "A" - } - ], "title": "Classloading", "type": "row" }, { - "aliasColors": {}, - "autoMigrateFrom": "graph", - "bars": false, - "dashLength": 10, - 
"dashes": false, "datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" }, - "editable": true, - "error": false, - "fill": 1, - "grid": { - "leftLogBase": 1, - "rightLogBase": 1 + "fieldConfig": { + "defaults": {}, + "overrides": [] }, "gridPos": { "h": 7, "w": 12, "x": 0, - "y": 67 + "y": 60 }, "id": 37, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "options": {}, "targets": [ { "datasource": { @@ -3153,84 +2864,26 @@ "step": 1200 } ], - "thresholds": [], "title": "Classes loaded", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "timeseries", - "x-axis": true, - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "y-axis": true, - "y_formats": [ - "short", - "short" - ], - "yaxes": [ - { - "format": "short", - "logBase": 1, - "min": 0, - "show": true - }, - { - "format": "short", - "logBase": 1, - "show": true - } - ] + "type": "timeseries" }, { - "aliasColors": {}, - "autoMigrateFrom": "graph", - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" }, - "editable": true, - "error": false, - "fill": 1, - "grid": { - "leftLogBase": 1, - "rightLogBase": 1 + "fieldConfig": { + "defaults": {}, + "overrides": [] }, "gridPos": { "h": 7, "w": 12, "x": 12, - "y": 67 + "y": 60 }, "id": 38, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "options": {}, "targets": [ { "datasource": { @@ -3248,115 +2901,42 @@ "step": 1200 } ], - "thresholds": [], "title": "Class delta", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "timeseries", - "x-axis": true, - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "y-axis": true, - "y_formats": [ - "ops", - "short" - ], - "yaxes": [ - { - "format": "short", - "label": "", - "logBase": 1, - "show": true - }, - { - "format": "short", - "logBase": 1, - "show": true - } - ] + "type": "timeseries" }, { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 74 + "y": 67 }, "id": 147, "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "PBFA97CFB590B2093" - }, - "refId": "A" - } - ], "title": "Buffer Pools", "type": "row" }, { - "aliasColors": {}, - "autoMigrateFrom": "graph", - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" }, - "fill": 1, + "fieldConfig": { + "defaults": {}, + "overrides": [] + }, "gridPos": { "h": 7, - "w": 8, + "w": 24, "x": 0, - "y": 75 + "y": 68 }, "id": 131, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, "maxPerRow": 3, - "nullPointMode": "null", - 
"percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", + "options": {}, "repeat": "jvm_buffer_pool", - "seriesOverrides": [ - { - "alias": "count", - "yaxis": 2 - }, - { - "alias": "buffers", - "yaxis": 2 - } - ], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "repeatDirection": "h", "targets": [ { "datasource": { @@ -3393,46 +2973,23 @@ "refId": "C" } ], - "thresholds": [], "title": "$jvm_buffer_pool", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "timeseries", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "decbytes", - "logBase": 1, - "min": "0", - "show": true - }, - { - "decimals": 0, - "format": "short", - "label": "", - "logBase": 1, - "min": "0", - "show": true - } - ] + "type": "timeseries" } ], + "preload": false, "refresh": "auto", - "schemaVersion": 39, - "tags": [], + "schemaVersion": 40, + "tags": [ + "jvm", + "prometheus", + "micrometer" + ], "templating": { "list": [ { "current": { "isNone": true, - "selected": false, "text": "None", "value": "" }, @@ -3441,26 +2998,17 @@ "uid": "PBFA97CFB590B2093" }, "definition": "", - "hide": 0, "includeAll": false, "label": "Application", - "multi": false, "name": "application", "options": [], "query": "label_values(application)", "refresh": 2, "regex": "", - "skipUrlSync": false, - "sort": 0, - "tagValuesQuery": "", - "tagsQuery": "", - "type": "query", - "useTags": false + "type": "query" }, { - "allFormat": "glob", "current": { - "selected": false, "text": "docker-compose-cps-and-ncmp-1:8080", "value": "docker-compose-cps-and-ncmp-1:8080" }, @@ -3469,27 +3017,17 @@ "uid": "PBFA97CFB590B2093" }, "definition": "", - "hide": 0, "includeAll": false, "label": "Instance", - "multi": false, - "multiFormat": "glob", "name": "instance", "options": [], "query": "label_values(jvm_memory_used_bytes{application=\"$application\"}, instance)", "refresh": 2, "regex": "", - "skipUrlSync": false, - "sort": 0, - "tagValuesQuery": "", - "tagsQuery": "", - "type": "query", - "useTags": false + "type": "query" }, { - "allFormat": "glob", "current": { - "selected": false, "text": "All", "value": "$__all" }, @@ -3501,24 +3039,16 @@ "hide": 2, "includeAll": true, "label": "JVM Memory Pools Heap", - "multi": false, - "multiFormat": "glob", "name": "jvm_memory_pool_heap", "options": [], "query": "label_values(jvm_memory_used_bytes{application=\"$application\", instance=\"$instance\", area=\"heap\"},id)", "refresh": 1, "regex": "", - "skipUrlSync": false, "sort": 1, - "tagValuesQuery": "", - "tagsQuery": "", - "type": "query", - "useTags": false + "type": "query" }, { - "allFormat": "glob", "current": { - "selected": false, "text": "All", "value": "$__all" }, @@ -3530,24 +3060,16 @@ "hide": 2, "includeAll": true, "label": "JVM Memory Pools Non-Heap", - "multi": false, - "multiFormat": "glob", "name": "jvm_memory_pool_nonheap", "options": [], "query": "label_values(jvm_memory_used_bytes{application=\"$application\", instance=\"$instance\", area=\"nonheap\"},id)", "refresh": 1, "regex": "", - "skipUrlSync": false, "sort": 2, - "tagValuesQuery": "", - "tagsQuery": "", - "type": "query", - "useTags": false + "type": "query" }, { - "allFormat": "glob", "current": { - "selected": false, "text": "All", "value": "$__all" }, @@ -3559,19 +3081,13 @@ "hide": 2, "includeAll": true, "label": "JVM Buffer Pools", - "multi": false, - "multiFormat": "glob", "name": "jvm_buffer_pool", "options": [], "query": 
"label_values(jvm_buffer_memory_used_bytes{application=\"$application\", instance=\"$instance\"},id)", "refresh": 1, "regex": "", - "skipUrlSync": false, "sort": 1, - "tagValuesQuery": "", - "tagsQuery": "", - "type": "query", - "useTags": false + "type": "query" } ] }, @@ -3579,35 +3095,10 @@ "from": "now-30m", "to": "now" }, - "timepicker": { - "now": true, - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, + "timepicker": {}, "timezone": "browser", "title": "JVM (Micrometer)", "uid": "bdvp1kgecrda8f", "version": 1, "weekStart": "" -} +}
\ No newline at end of file diff --git a/docker-compose/config/grafana/postgresql-statistics-dashboard.json b/docker-compose/config/grafana/postgresql-statistics-dashboard.json new file mode 100644 index 0000000000..afc428f8cf --- /dev/null +++ b/docker-compose/config/grafana/postgresql-statistics-dashboard.json @@ -0,0 +1,2239 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "target": { + "limit": 100, + "matchAny": false, + "tags": [], + "type": "dashboard" + }, + "type": "dashboard" + } + ] + }, + "description": "Dashboard for PostgreSQL Statistics.", + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": 5, + "links": [], + "panels": [ + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 34, + "panels": [], + "title": "Settings", + "type": "row" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 3, + "x": 0, + "y": 1 + }, + "id": 2, + "maxDataPoints": 100, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "mean" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "name", + "wideLayout": true + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "pg_static{instance=\"$instance\"}", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "{{short_version}}", + "refId": "A" + } + ], + "title": "Version", + "type": "stat" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 3, + "x": 3, + "y": 1 + }, + "id": 54, + "maxDataPoints": 100, + "options": { + "colorMode": "none", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "mean" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "pg_settings_max_connections{instance=\"$instance\"}", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "", + "refId": "A" + } + ], + "title": "Max Connections", + "type": "stat" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [ + { + "options": { + 
"match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 4, + "x": 6, + "y": 1 + }, + "id": 56, + "maxDataPoints": 100, + "options": { + "colorMode": "none", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "pg_settings_shared_buffers_bytes{instance=\"$instance\"}", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "", + "refId": "A" + } + ], + "title": "Shared Buffers", + "type": "stat" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 4, + "x": 10, + "y": 1 + }, + "id": 58, + "maxDataPoints": 100, + "options": { + "colorMode": "none", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "pg_settings_effective_cache_size_bytes{instance=\"$instance\"}", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "", + "refId": "A" + } + ], + "title": "Effective Cache", + "type": "stat" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 4, + "x": 14, + "y": 1 + }, + "id": 60, + "maxDataPoints": 100, + "options": { + "colorMode": "none", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "pg_settings_maintenance_work_mem_bytes{instance=\"$instance\"}", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "", + "refId": "A" + } + ], + "title": "Maintenance Work Mem", + "type": "stat" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": 
{ + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 3, + "x": 18, + "y": 1 + }, + "id": 66, + "maxDataPoints": 100, + "options": { + "colorMode": "none", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "pg_settings_work_mem_bytes{instance=\"$instance\"}", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "", + "refId": "A" + } + ], + "title": "Work Mem", + "type": "stat" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "decimals": 1, + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 3, + "x": 21, + "y": 1 + }, + "id": 32, + "maxDataPoints": 100, + "options": { + "colorMode": "none", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "pg_settings_max_wal_size_bytes{instance=\"$instance\"}", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "", + "refId": "A" + } + ], + "title": "Max WAL Size", + "type": "stat" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 3, + "x": 0, + "y": 4 + }, + "id": 62, + "maxDataPoints": 100, + "options": { + "colorMode": "none", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "pg_settings_random_page_cost{instance=\"$instance\"}", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "", + "refId": "A" + } + ], + "title": "Random Page Cost", + 
"type": "stat" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 3, + "x": 3, + "y": 4 + }, + "id": 70, + "maxDataPoints": 100, + "options": { + "colorMode": "none", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "pg_settings_seq_page_cost{instance=\"$instance\"}", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "", + "refId": "A" + } + ], + "title": "Seq Page Cost", + "type": "stat" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 4, + "x": 6, + "y": 4 + }, + "id": 64, + "maxDataPoints": 100, + "options": { + "colorMode": "none", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "mean" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "pg_settings_max_worker_processes{instance=\"$instance\"}", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "", + "refId": "A" + } + ], + "title": "Max Worker Processes", + "type": "stat" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 4, + "x": 10, + "y": 4 + }, + "id": 68, + "maxDataPoints": 100, + "options": { + "colorMode": "none", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "pg_settings_max_parallel_workers{instance=\"$instance\"}", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + 
"legendFormat": "", + "refId": "A" + } + ], + "title": "Max Parallel Workers", + "type": "stat" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 7 + }, + "id": 72, + "panels": [], + "title": "Database", + "type": "row" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 72, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 8 + }, + "id": 74, + "options": { + "legend": { + "calcs": [ + "min", + "max", + "last" + ], + "displayMode": "table", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "editorMode": "code", + "expr": "pg_database_size_bytes{instance=\"$instance\", datname=\"$db\"}", + "legendFormat": "size", + "range": true, + "refId": "A" + } + ], + "title": "Size", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 16 + }, + "id": 36, + "panels": [], + "title": "Connection / Transaction Statistics", + "type": "row" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "normal" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 12, + "x": 0, + "y": 17 + }, + "id": 6, + "options": { + "legend": { + "calcs": [ + "min", + "max", + "mean", + "lastNotNull" + ], + "displayMode": "table", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "desc" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "pg_stat_activity_count{instance=\"$instance\", datname=\"$db\"}", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "{{state}}", + "refId": 
"A" + } + ], + "title": "Connections", + "type": "timeseries" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 12, + "x": 12, + "y": 17 + }, + "id": 8, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "irate(pg_stat_database_xact_commit{instance=\"$instance\", datname=\"$db\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "commits", + "refId": "A" + }, + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "irate(pg_stat_database_xact_rollback{instance=\"$instance\", datname=\"$db\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "rollbacks", + "refId": "B" + } + ], + "title": "Transactions", + "type": "timeseries" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Rows", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 27 + }, + "id": 18, + "options": { + "legend": { + "calcs": [ + "min", + "max", + "mean", + "lastNotNull" + ], + "displayMode": "table", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "irate(pg_stat_database_tup_fetched{instance=\"$instance\", datname=\"$db\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "SELECT (index scan)", + "refId": "A" + }, + { + "datasource": "PBFA97CFB590B2093", + 
"exemplar": true, + "expr": "irate(pg_stat_database_tup_returned{instance=\"$instance\", datname=\"$db\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "SELECT (table scan)", + "refId": "B" + } + ], + "title": "Read Stats", + "type": "timeseries" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Rows", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 27 + }, + "id": 20, + "options": { + "legend": { + "calcs": [ + "min", + "max", + "mean", + "lastNotNull" + ], + "displayMode": "table", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "irate(pg_stat_database_tup_inserted{instance=\"$instance\", datname=\"$db\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "INSERT", + "refId": "A" + }, + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "irate(pg_stat_database_tup_updated{instance=\"$instance\", datname=\"$db\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "UPDATE", + "refId": "B" + }, + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "irate(pg_stat_database_tup_deleted{instance=\"$instance\", datname=\"$db\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "DELETE", + "refId": "C" + } + ], + "title": "Change Stats", + "type": "timeseries" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 35 + }, + "id": 42, + "options": { + "legend": { + "calcs": [ + "min", + "max", + "mean", + 
"lastNotNull" + ], + "displayMode": "table", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "pg_stat_activity_max_tx_duration{instance=\"$instance\", datname=\"$db\"}", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "max_tx_duration [{{state}}]", + "refId": "A" + } + ], + "title": "Longest Transaction", + "type": "timeseries" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "decimals": 4, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "percentunit" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 35 + }, + "id": 44, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "pg_stat_database_blks_hit{instance=\"$instance\", datname=\"$db\"} / (pg_stat_database_blks_read{instance=\"$instance\", datname=\"$db\"} + pg_stat_database_blks_hit{instance=\"$instance\", datname=\"$db\"})", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "Cache Hit Rate", + "refId": "A" + } + ], + "title": "Cache Hit Rate", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 43 + }, + "id": 50, + "panels": [], + "title": "misc", + "type": "row" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 17, + "x": 0, + "y": 44 + }, + "id": 46, + "options": { + "legend": { + "calcs": [ + "min", + "max", + "mean", + "lastNotNull" + ], + 
"displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "irate(pg_stat_bgwriter_buffers_backend_total{instance=\"$instance\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "buffers_backend", + "refId": "A" + }, + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "irate(pg_stat_bgwriter_buffers_alloc_total{instance=\"$instance\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "buffers_alloc", + "refId": "B" + }, + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "irate(pg_stat_bgwriter_buffers_backend_fsync_total{instance=\"$instance\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "backend_fsync", + "refId": "C" + }, + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "irate(pg_stat_bgwriter_buffers_checkpoint_total{instance=\"$instance\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "buffers_checkpoint", + "refId": "D" + }, + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "irate(pg_stat_bgwriter_buffers_clean_total{instance=\"$instance\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "buffers_clean", + "refId": "E" + } + ], + "title": "Buffers (bgwriter)", + "type": "timeseries" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 7, + "x": 17, + "y": 44 + }, + "id": 28, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "irate(pg_stat_database_conflicts{instance=\"$instance\", datname=\"$db\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "conflicts", + "refId": "B" + }, + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "irate(pg_stat_database_deadlocks{instance=\"$instance\", datname=\"$db\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "deadlocks", + "refId": "A" + } + ], + "title": "Conflicts/Deadlocks", + "type": "timeseries" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + 
"mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "decimals": 0, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 17, + "x": 0, + "y": 50 + }, + "id": 30, + "options": { + "legend": { + "calcs": [ + "min", + "max", + "mean", + "lastNotNull" + ], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "pg_locks_count{instance=\"$instance\", datname=\"$db\"}", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "{{mode}}", + "refId": "A" + } + ], + "title": "Lock Tables", + "type": "timeseries" + }, + { + "datasource": "PBFA97CFB590B2093", + "description": "Total amount of data written to temporary files by queries in this database. All temporary files are counted, regardless of why the temporary file was created, and regardless of the log_temp_files setting.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 7, + "x": 17, + "y": 50 + }, + "id": 40, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "irate(pg_stat_database_temp_bytes{instance=\"$instance\", datname=\"$db\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "Temp Bytes", + "refId": "A" + } + ], + "title": "Temp File", + "type": "timeseries" + }, + { + "datasource": "PBFA97CFB590B2093", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + 
"axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "ms" + }, + "overrides": [] + }, + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 57 + }, + "id": 38, + "options": { + "legend": { + "calcs": [ + "mean", + "lastNotNull", + "max", + "min" + ], + "displayMode": "table", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "irate(pg_stat_bgwriter_checkpoint_write_time_total{instance=\"$instance\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "write_time - Total amount of time that has been spent in the portion of checkpoint processing where files are written to disk.", + "refId": "B" + }, + { + "datasource": "PBFA97CFB590B2093", + "exemplar": true, + "expr": "irate(pg_stat_bgwriter_checkpoint_sync_time_total{instance=\"$instance\"}[5m])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "sync_time - Total amount of time that has been spent in the portion of checkpoint processing where files are synchronized to disk.", + "refId": "A" + } + ], + "title": "Checkpoint Stats", + "type": "timeseries" + } + ], + "preload": false, + "refresh": "5s", + "schemaVersion": 40, + "tags": [ + "postgresql", + "postgres-exporter" + ], + "templating": { + "list": [ + { + "current": { + "text": "", + "value": "" + }, + "datasource": "PBFA97CFB590B2093", + "definition": "label_values(pg_up, instance)", + "includeAll": false, + "label": "instance", + "name": "instance", + "options": [], + "query": { + "query": "label_values(pg_up, instance)", + "refId": "StandardVariableQuery" + }, + "refresh": 1, + "regex": "", + "type": "query" + }, + { + "current": { + "text": "cpsdb", + "value": "cpsdb" + }, + "datasource": "PBFA97CFB590B2093", + "definition": "label_values(pg_stat_database_tup_fetched{datname!~\"template.*|postgres\",instance=\"$instance\"},datname)", + "includeAll": false, + "label": "Database", + "name": "db", + "options": [], + "query": { + "query": "label_values(pg_stat_database_tup_fetched{datname!~\"template.*|postgres\",instance=\"$instance\"},datname)", + "refId": "StandardVariableQuery" + }, + "refresh": 1, + "regex": "", + "type": "query" + } + ] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "PostgreSQL Statistics", + "uid": "OpKZVIAMz", + "version": 1, + "weekStart": "" +}
\ No newline at end of file diff --git a/docker-compose/config/nginx/nginx.conf b/docker-compose/config/nginx/nginx.conf index 6e9d102fed..435b860821 100644 --- a/docker-compose/config/nginx/nginx.conf +++ b/docker-compose/config/nginx/nginx.conf @@ -24,6 +24,9 @@ http { server cps-and-ncmp:8080; } + # Set the max allowed size of the incoming request + client_max_body_size 2m; + server { listen 80; diff --git a/csit/plans/cps/pnfsim/netconf-config/LICENSE b/docker-compose/config/pnfsim/netconf-config/LICENSE index 3bc5b026c6..3bc5b026c6 100755 --- a/csit/plans/cps/pnfsim/netconf-config/LICENSE +++ b/docker-compose/config/pnfsim/netconf-config/LICENSE diff --git a/csit/plans/cps/pnfsim/netconf-config/stores.yang b/docker-compose/config/pnfsim/netconf-config/stores.yang index 56ad95c8d5..56ad95c8d5 100644 --- a/csit/plans/cps/pnfsim/netconf-config/stores.yang +++ b/docker-compose/config/pnfsim/netconf-config/stores.yang diff --git a/csit/plans/cps/pnfsim/netconf-config/subscriber.py b/docker-compose/config/pnfsim/netconf-config/subscriber.py index 5147c93458..5147c93458 100755 --- a/csit/plans/cps/pnfsim/netconf-config/subscriber.py +++ b/docker-compose/config/pnfsim/netconf-config/subscriber.py diff --git a/csit/plans/cps/pnfsim/tls/ca.pem b/docker-compose/config/pnfsim/tls/ca.pem index 4c4473815c..4c4473815c 100644 --- a/csit/plans/cps/pnfsim/tls/ca.pem +++ b/docker-compose/config/pnfsim/tls/ca.pem diff --git a/csit/plans/cps/pnfsim/tls/server_cert.pem b/docker-compose/config/pnfsim/tls/server_cert.pem index a022dc56ca..a022dc56ca 100644 --- a/csit/plans/cps/pnfsim/tls/server_cert.pem +++ b/docker-compose/config/pnfsim/tls/server_cert.pem diff --git a/csit/plans/cps/pnfsim/tls/server_key.pem b/docker-compose/config/pnfsim/tls/server_key.pem index 02fd68846d..02fd68846d 100644 --- a/csit/plans/cps/pnfsim/tls/server_key.pem +++ b/docker-compose/config/pnfsim/tls/server_key.pem diff --git a/docker-compose/config/prometheus.yml b/docker-compose/config/prometheus.yml index 8db07c8d05..1beea6ebce 100644 --- a/docker-compose/config/prometheus.yml +++ b/docker-compose/config/prometheus.yml @@ -20,4 +20,10 @@ scrape_configs: static_configs: - targets: - '172.17.0.1:8798' - - '172.17.0.1:8799'
\ No newline at end of file + - '172.17.0.1:8799' + +- job_name: 'postgres' + static_configs: + - targets: + - '172.17.0.1:9187' + - '172.17.0.1:9188'
\ No newline at end of file diff --git a/csit/plans/cps/sdnc/certs/certs.properties b/docker-compose/config/sdnc/certs/certs.properties index f8f3fa72b6..f8f3fa72b6 100644 --- a/csit/plans/cps/sdnc/certs/certs.properties +++ b/docker-compose/config/sdnc/certs/certs.properties diff --git a/csit/plans/cps/sdnc/certs/keys0.zip b/docker-compose/config/sdnc/certs/keys0.zip Binary files differindex b2dec5c7b2..b2dec5c7b2 100644 --- a/csit/plans/cps/sdnc/certs/keys0.zip +++ b/docker-compose/config/sdnc/certs/keys0.zip diff --git a/docker-compose/config/sdnc/check_sdnc_mount_node.sh b/docker-compose/config/sdnc/check_sdnc_mount_node.sh new file mode 100644 index 0000000000..8fa4bee8cf --- /dev/null +++ b/docker-compose/config/sdnc/check_sdnc_mount_node.sh @@ -0,0 +1,61 @@ +#!/bin/sh +# ============LICENSE_START======================================================= +# Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= + +set -x # Enable command echoing +apk --no-cache add curl + +SDNC_HOST=${SDNC_HOST:-'sdnc'} +SDNC_PORT=${SDNC_PORT:-8181} +SDNC_AUTH_HEADER=${SDNC_AUTH_HEADER:-'Authorization: Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ=='} +PNF_SIM_HOST=${PNF_SIM_HOST:-'pnf-simulator'} +PNF_SIM_PORT=${PNF_SIM_PORT:-6513} +NODE_ID=${NODE_ID:-'ietfYang-PNFDemo'} + +echo "Attempting to mount node with id '$NODE_ID' to SDNC using RestConf" +curl --request PUT "http://$SDNC_HOST:$SDNC_PORT/restconf/config/network-topology:network-topology/topology/topology-netconf/node/$NODE_ID" \ +--silent --location \ +--header "$SDNC_AUTH_HEADER" \ +--header 'Content-Type: application/json' \ +--data-raw '{ + "node": [ + { + "node-id": "'$NODE_ID'", + "netconf-node-topology:protocol": { + "name": "TLS" + }, + "netconf-node-topology:host": "'$PNF_SIM_HOST'", + "netconf-node-topology:key-based": { + "username": "netconf", + "key-id": "ODL_private_key_0" + }, + "netconf-node-topology:port": '$PNF_SIM_PORT', + "netconf-node-topology:tcp-only": false, + "netconf-node-topology:max-connection-attempts": 5 + } + ] +}' + +# Verify node has been mounted +RESPONSE=$(curl --silent --location --request GET "http://$SDNC_HOST:$SDNC_PORT/restconf/config/network-topology:network-topology/topology/topology-netconf" --header "$SDNC_AUTH_HEADER") + +if echo "$RESPONSE" | grep -q "$NODE_ID"; then + echo "Node mounted successfully" + exit 0 +else + echo "Could not mount node to SDNC" + exit 1 +fi diff --git a/docker-compose/docker-compose.yml b/docker-compose/docker-compose.yml index c9df8b9889..126b9590fc 100644 --- a/docker-compose/docker-compose.yml +++ b/docker-compose/docker-compose.yml @@ -1,7 +1,7 @@ # ============LICENSE_START======================================================= # Copyright (c) 2020 Pantheon.tech. # Modifications Copyright (C) 2021 Bell Canada.
-# Modifications Copyright (C) 2022-2025 Nordix Foundation. +# Modifications Copyright (C) 2022-2025 OpenInfra Foundation Europe. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,13 +18,13 @@ services: - ### docker-compose --profile dmi-service up -d -> run CPS services incl. dmi-plugin ### + ### docker-compose --profile dmi-service up -d --wait -> run CPS services incl. dmi-plugin ### docker-compose --profile dmi-stub --profile monitoring up -d -> run CPS with stubbed dmi-plugin (for registration performance testing) ### docker-compose --profile dmi-stub --profile tracing up -d -> run CPS with stubbed dmi-plugin (for open telemetry tracing testing make ONAP_TRACING_ENABLED "true" later "http://localhost:16686" can be accessed from browser) ### docker-compose --profile dmi-stub --profile policy-executor-stub up -d -> run CPS with stubbed dmi-plugin and policy executor stub (for policy executor service testing make POLICY_SERVICE_ENABLED "true") - ### to disable notifications make notification.enabled to false & comment out kafka/zookeeper services ### + ### to disable notifications make notification.enabled to false & comment out kafka/zookeeper services ### DEBUG: Look for '### DEBUG' comments to enable CPS-NCMP debugging - ### docker-compose --profile dmi-stub --project-name endurance --env-file config/endurance.env up -d -> run CPS with stubbed dmi-plugin for endurance testing + ### docker-compose --profile dmi-stub --project-name endurance --env-file env/endurance.env up -d -> run CPS with stubbed dmi-plugin for endurance testing ### docker-compose --profile dmi-stub --project-name endurance down --volumes dbpostgresql: @@ -46,6 +46,13 @@ services: limits: cpus: '6' memory: 3G + healthcheck: + test: pg_isready || exit 1 # This command runs inside the container, returning 0 for success, non-zero for failure. + timeout: 10s # Time-out of the above test command. + interval: 10s # How often the health is run. + retries: 3 # If 3 health checks fail, the container is unhealthy. 
+ start_period: 30s # Ignore failed health checks for first 30 seconds, to give system time to start + # Full start up time allowed = 30 seconds start period + 3 tries * 10 seconds interval = 60 seconds cps-and-ncmp: image: ${DOCKER_REPO:-nexus3.onap.org:10003}/onap/cps-and-ncmp:${CPS_VERSION:-latest} @@ -54,8 +61,6 @@ services: ### DEBUG: Uncomment next line to enable java debugging (ensure 'ports' aligns with 'deploy') ### - ${CPS_CORE_DEBUG_PORT:-5005}:5005- environment: - CPS_USERNAME: ${CPS_CORE_USERNAME:-cpsuser} - CPS_PASSWORD: ${CPS_CORE_PASSWORD:-cpsr0cks!} DB_HOST: ${DB_HOST:-dbpostgresql} DB_USERNAME: ${DB_USERNAME:-cps} DB_PASSWORD: ${DB_PASSWORD:-cps} @@ -68,10 +73,11 @@ services: ONAP_OTEL_EXPORTER_ENDPOINT: http://jaeger-service:4317 POLICY_SERVICE_ENABLED: 'false' POLICY_SERVICE_DEFAULT_DECISION: 'deny from env' - JAVA_TOOL_OPTIONS: "-XX:InitialRAMPercentage=75.0 -XX:MaxRAMPercentage=75.0" + CPS_MONITORING_MICROMETER_JVM_EXTRAS: 'true' + JAVA_TOOL_OPTIONS: "-XX:InitialRAMPercentage=70.0 -XX:MaxRAMPercentage=70.0" ### DEBUG: Uncomment next line to enable java debugging - ### JAVA_TOOL_OPTIONS: -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005 - restart: unless-stopped + # JAVA_TOOL_OPTIONS: -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005 + restart: on-failure:3 depends_on: - dbpostgresql deploy: @@ -80,7 +86,14 @@ services: resources: limits: cpus: '3' - memory: 2G + memory: 3G + memswap_limit: 3G + healthcheck: + test: wget -q -O - http://localhost:8080/actuator/health/readiness | grep -q '{"status":"UP"}' || exit 1 + interval: 10s + timeout: 10s + retries: 10 + start_period: 60s nginx: container_name: ${NGINX_CONTAINER_NAME:-nginx-loadbalancer} @@ -92,18 +105,30 @@ services: volumes: - ./config/nginx/nginx.conf:/etc/nginx/nginx.conf - ./config/nginx/proxy_params:/etc/nginx/proxy_params + healthcheck: + test: curl -fs http://localhost/actuator/health/readiness || exit 1 + interval: 10s + timeout: 10s + retries: 3 + start_period: 60s ### if kafka is not required comment out zookeeper and kafka ### zookeeper: - image: confluentinc/cp-zookeeper:6.2.1 + image: confluentinc/cp-zookeeper:7.8.0 container_name: ${ZOOKEEPER_CONTAINER_NAME:-zookeeper} ports: - ${ZOOKEEPER_PORT:-2181}:2181 environment: ZOOKEEPER_CLIENT_PORT: 2181 + healthcheck: + test: nc -z localhost 2181 || exit 1 + interval: 10s + timeout: 10s + retries: 3 + start_period: 30s kafka: - image: confluentinc/cp-kafka:6.2.1 + image: confluentinc/cp-kafka:7.8.0 container_name: ${KAFKA_CONTAINER_NAME:-kafka} ports: - ${KAFKA_PORT:-9092}:9092 @@ -115,17 +140,26 @@ services: KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONNECTIONS_FROM_HOST:PLAINTEXT KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,CONNECTIONS_FROM_HOST://localhost:9092 KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + healthcheck: + test: kafka-topics --bootstrap-server kafka:29092 --list || exit 1 + interval: 10s + timeout: 10s + retries: 3 + start_period: 30s ncmp-dmi-plugin: container_name: ${NCMP_DMI_PLUGIN_CONTAINER_NAME:-ncmp-dmi-plugin} - image: ${DOCKER_REPO:-nexus3.onap.org:10003}/onap/ncmp-dmi-plugin:${DMI_VERSION:-1.6.0-SNAPSHOT-latest} + image: ${DOCKER_REPO:-nexus3.onap.org:10003}/onap/ncmp-dmi-plugin:${DMI_VERSION:-1.7.0-SNAPSHOT-latest} ports: - ${DMI_PORT:-8783}:8080 + depends_on: + - sdnc + - pnf-simulator environment: CPS_USERNAME: ${CPS_CORE_USERNAME:-cpsuser} CPS_PASSWORD: ${CPS_CORE_PASSWORD:-cpsr0cks!} - CPS_CORE_HOST: ${CPS_CORE_HOST:-cps-and-ncmp} - CPS_CORE_PORT: 
${CPS_CORE_PORT:-8080} + CPS_CORE_HOST: ${CPS_CORE_HOST:-nginx} + CPS_CORE_PORT: ${CPS_CORE_PORT:-80} CPS_CORE_USERNAME: ${CPS_CORE_USERNAME:-cpsuser} CPS_CORE_PASSWORD: ${CPS_CORE_PASSWORD:-cpsr0cks!} SDNC_HOST: ${SDNC_HOST:-sdnc} @@ -140,6 +174,12 @@ services: restart: unless-stopped profiles: - dmi-service + healthcheck: + test: wget -q -O - http://localhost:8080/actuator/health/readiness | grep -q '{"status":"UP"}' || exit 1 + interval: 10s + timeout: 10s + retries: 3 + start_period: 30s ncmp-dmi-plugin-demo-and-csit-stub: container_name: ${NCMP_DMI_PLUGIN_DEMO_AND_CSIT_STUB_CONTAINER_NAME:-ncmp-dmi-plugin-demo-and-csit-stub} @@ -150,7 +190,7 @@ services: KAFKA_BOOTSTRAP_SERVER: kafka:29092 NCMP_CONSUMER_GROUP_ID: ncmp-group NCMP_ASYNC_M2M_TOPIC: ncmp-async-m2m - MODULE_INITIAL_PROCESSING_DELAY_MS: 120000 + MODULE_INITIAL_PROCESSING_DELAY_MS: 180000 MODULE_REFERENCES_DELAY_MS: 100 MODULE_RESOURCES_DELAY_MS: 1000 READ_DATA_FOR_CM_HANDLE_DELAY_MS: 300 @@ -158,7 +198,104 @@ services: restart: unless-stopped profiles: - dmi-stub + healthcheck: + test: wget -q -O - http://localhost:8092/actuator/health/readiness | grep -q '{"status":"UP"}' || exit 1 + interval: 10s + timeout: 10s + retries: 3 + start_period: 30s + + sdnc: + container_name: sdnc + image: onap/sdnc-image:${SDNC_VERSION:-2.2.3} + entrypoint: /opt/onap/sdnc/bin/startODL.sh + ports: + - 8181:8181 + depends_on: + sdnc-db: + condition: service_healthy + hostname: sdnc + links: + - sdnc-db:dbhost + - sdnc-db:sdnctldb01 + - sdnc-db:sdnctldb02 + environment: + - MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD:-password} + - MYSQL_USER=${MYSQL_USER:-sdnc} + - MYSQL_PASSWORD=${MYSQL_PASSWORD:-password} + - MYSQL_DATABASE=${MYSQL_DATABASE:-sdncdb} + - SDNC_CONFIG_DIR=/opt/onap/sdnc/data/properties + - SDNC_BIN=/opt/onap/sdnc/bin + - ODL_CERT_DIR=/opt/opendaylight/certs + - ODL_ADMIN_USERNAME=${SDNC_USERNAME:-admin} + - ODL_ADMIN_PASSWORD=${SDNC_PASSWORD:-Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U} + - SDNC_DB_INIT=true + - SQL_CRYPTKEY=${SQL_CRYPTKEY:-fakECryptKey} + volumes: + - ./config/sdnc/certs/certs.properties:/opt/opendaylight/certs/certs.properties + - ./config/sdnc/certs/keys0.zip:/opt/opendaylight/certs/keys0.zip + profiles: - dmi-service + healthcheck: + test: "wget -q -O - --header 'Authorization: Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ==' http://localhost:8181/restconf/operational/network-topology:network-topology || exit 1" + interval: 10s + timeout: 10s + retries: 6 + start_period: 60s + + sdnc-sidecar: # This container runs a script to mount the PNFDemo node to SDNC, needed for CSITs. + container_name: sdnc-sidecar + image: alpine:latest + volumes: + - ./config/sdnc/check_sdnc_mount_node.sh:/root/check_sdnc_mount_node.sh + command: sh /root/check_sdnc_mount_node.sh + depends_on: + sdnc: + condition: service_healthy + pnf-simulator: + condition: service_healthy + profiles: + - dmi-service + # Note: This container does not need a health-check as it immediately exits with status 0 or 1. 
+ + sdnc-db: + container_name: sdnc-db + image: mariadb:10.5 + ports: + - 3306:3306 + environment: + - MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD:-password} + - MYSQL_ROOT_HOST=% + - MYSQL_USER=${MYSQL_USER:-sdnc} + - MYSQL_PASSWORD=${MYSQL_PASSWORD:-password} + - MYSQL_DATABASE=${MYSQL_DATABASE:-sdncdb} + profiles: + - dmi-service + healthcheck: + test: healthcheck.sh --connect --innodb_initialized || exit 1 + interval: 10s + timeout: 10s + retries: 3 + start_period: 30s + + pnf-simulator: + container_name: pnf-simulator + image: blueonap/netconf-pnp-simulator:v2.8.6 + restart: always + ports: + - 830:830 + - 6513:6513 + volumes: + - ./config/pnfsim/netconf-config:/config/modules/stores + - ./config/pnfsim/tls:/config/tls + profiles: + - dmi-service + healthcheck: + test: nc -z 127.0.0.1 6513 || exit 1 + interval: 10s + timeout: 10s + retries: 3 + start_period: 30s policy-executor-stub: container_name: ${POLICY_EXECUTOR_STUB_CONTAINER_NAME:-policy-executor-stub} @@ -168,6 +305,7 @@ services: restart: unless-stopped profiles: - policy-executor-stub + # Note policy-executor-stub does not have a healthcheck as it does not expose /actuator/health endpoint prometheus: container_name: ${PROMETHEUS_CONTAINER_NAME:-prometheus} @@ -180,11 +318,6 @@ services: - prometheus_data:/prometheus environment: - PROMETHEUS_RETENTION_TIME=${PROMETHEUS_RETENTION_TIME:-30d} - healthcheck: - test: [ "CMD-SHELL", "wget --spider --quiet --tries=1 --timeout=10 http://localhost:9090/-/healthy || exit 1" ] - interval: 30s - timeout: 10s - retries: 3 profiles: - monitoring @@ -192,14 +325,16 @@ services: image: grafana/grafana:latest container_name: ${GRAFANA_CONTAINER_NAME:-grafana} depends_on: - prometheus: - condition: service_started + - prometheus ports: - ${GRAFANA_PORT:-3000}:3000 volumes: - ./config/grafana/provisioning/:/etc/grafana/provisioning/ - ./config/grafana/jvm-micrometer-dashboard.json:/var/lib/grafana/dashboards/jvm-micrometer-dashboard.json - - ./config/grafana/lcm-state-dashboard.json:/var/lib/grafana/dashboards/lcm-state-dashboard.json + - ./config/grafana/inventory-dashboard.json:/var/lib/grafana/dashboards/inventory-dashboard.json + - ./config/grafana/data-dashboard.json:/var/lib/grafana/dashboards/data-dashboard.json + - ./config/grafana/postgresql-statistics-dashboard.json:/var/lib/grafana/dashboards/postgresql-statistics-dashboard.json + - ./config/grafana/cps-database-pool.json:/var/lib/grafana/dashboards/cps-database-pool.json - grafana:/var/lib/grafana environment: - GF_SECURITY_ADMIN_PASSWORD=admin @@ -228,6 +363,16 @@ services: profiles: - tracing + postgres-exporter: + container_name: ${POSTGRES_EXPORTER_CONTAINER_NAME:-postgres-exporter} + image: quay.io/prometheuscommunity/postgres-exporter + environment: + - DATA_SOURCE_NAME=postgresql://${DB_USERNAME:-cps}:${DB_PASSWORD:-cps}@${DB_CONTAINER_NAME:-dbpostgresql}:5432/postgres?sslmode=disable + ports: + - ${POSTGRES_EXPORTER_PORT:-9187}:9187 + depends_on: + - dbpostgresql + volumes: grafana: driver: local diff --git a/docker-compose/config/endurance.env b/docker-compose/env/endurance.env index be337219cf..907c63a6ae 100644 --- a/docker-compose/config/endurance.env +++ b/docker-compose/env/endurance.env @@ -1,6 +1,9 @@ DB_CONTAINER_NAME=endurance-dbpostgresql DB_PORT=5433 +POSTGRES_EXPORTER_CONTAINER_NAME=endurance-postgres-exporter +POSTGRES_EXPORTER_PORT=9188 + NGINX_CONTAINER_NAME=endurance-nginx-loadbalancer CPS_CORE_PORT=8884 CPS_PORT_RANGE=8798-8799 @@ -33,4 +36,4 @@ JAEGER_SERVICE_CONTAINER_NAME=endurance-jaeger-service 
JAEGER_SERVICE_PORT=16687 CPS_NCMP_CACHES_CLUSTER_NAME=endurance-cps-and-ncmp-common-cache-cluster -CPS_NCMP_INSTANCE_CONFIG_NAME=endurance-cps-and-ncmp-hazelcast-instance-config
\ No newline at end of file +CPS_NCMP_INSTANCE_CONFIG_NAME=endurance-cps-and-ncmp-hazelcast-instance-config diff --git a/docker-compose/env/kpi.env b/docker-compose/env/kpi.env new file mode 100644 index 0000000000..0fd8ef2ef8 --- /dev/null +++ b/docker-compose/env/kpi.env @@ -0,0 +1,39 @@ +DB_CONTAINER_NAME=kpi-dbpostgresql +DB_PORT=5432 + +POSTGRES_EXPORTER_CONTAINER_NAME=kpi-postgres-exporter +POSTGRES_EXPORTER_PORT=9187 + +NGINX_CONTAINER_NAME=kpi-nginx-loadbalancer +CPS_CORE_PORT=8883 +CPS_PORT_RANGE=8698-8699 + +ZOOKEEPER_CONTAINER_NAME=kpi-zookeeper +ZOOKEEPER_PORT=2181 + +KAFKA_CONTAINER_NAME=kpi-kafka +KAFKA_PORT=9092 + +NCMP_DMI_PLUGIN_CONTAINER_NAME=kpi-ncmp-dmi-plugin +DMI_PORT=8783 + +NCMP_DMI_PLUGIN_DEMO_AND_CSIT_STUB_CONTAINER_NAME=kpi-ncmp-dmi-plugin-demo-and-csit-stub +DMI_DEMO_STUB_PORT=8784 + +POLICY_EXECUTOR_STUB_CONTAINER_NAME=kpi-policy-executor-stub +POLICY_EXECUTOR_STUB_PORT=8785 + +PROMETHEUS_CONTAINER_NAME=kpi-prometheus +PROMETHEUS_PORT=9090 + +GRAFANA_CONTAINER_NAME=kpi-grafana +GRAFANA_PORT=3000 + +KAFKA_UI_CONTAINER_NAME=kpi-kafka-ui +KAFKA_UI_PORT=8089 + +JAEGER_SERVICE_CONTAINER_NAME=kpi-jaeger-service +JAEGER_SERVICE_PORT=16686 + +CPS_NCMP_CACHES_CLUSTER_NAME=kpi-cps-and-ncmp-common-cache-cluster +CPS_NCMP_INSTANCE_CONFIG_NAME=kpi-cps-and-ncmp-hazelcast-instance-config diff --git a/docs/ScrapeMetrics.py b/docs/ScrapeMetrics.py new file mode 100644 index 0000000000..9995178746 --- /dev/null +++ b/docs/ScrapeMetrics.py @@ -0,0 +1,123 @@ +# ============LICENSE_START======================================================= +# Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 +# ============LICENSE_END========================================================= + +import os +import re + +def find_java_files(root_dir): + """ + Recursively finds all .java files within the given root directory. + + Args: + root_dir (str): The root directory to search. + + Returns: + list: A list of absolute paths to .java files. + """ + java_files = [] + for root, _, files in os.walk(root_dir): + for file in files: + if file.endswith(".java"): + java_files.append(os.path.join(root, file)) + return java_files + +def scrape_metrics(file_content): + """ + Matches @CountCmHandleSearchExecution, @Timed, and @TimedCustom and + Extracts name (confusingly labeled as 'value' in @Timed) and description from the given Java file content. + The regex will also handle the new line if the annotation would not fit in a single line. + + Args: + file_content (str): The content of a Java file. + + Returns: + list: A list of formatted metric strings. 
+ """ + pattern_regex = re.compile(r'@(CountCmHandleSearchExecution|Timed|TimedCustom)\((?:name\s*=\s*"(.*?)",?|value\s*=\s*"(.*?)",?)?.*?description\s*=\s*"(.*?)"', re.DOTALL) + all_metrics = [] + matches = pattern_regex.findall(file_content) + for match in matches: + count_metric = match[0] + if count_metric == "CountCmHandleSearchExecution": + name = "cm_handle_search_invocation_total" + else: + name = match[1] + value = match[2] + description = match[3] + all_metrics.append(f'"{name or value}","{description}"') + return all_metrics + +def scrape_all_metrics_from_file(file_path): + """ + Scrapes all defined metrics from a single Java file. + + Args: + file_path (str): The path to the Java file. + + Returns: + list: A list of all extracted metric strings from the file. + """ + all_metrics = [] + with open(file_path, 'r') as f: + java_class_content = f.read() + all_metrics.extend(scrape_metrics(java_class_content)) + return all_metrics + +def write_metrics_to_file(metrics_data, output_file): + """ + Writes the extracted metrics data to the specified output file. + + Args: + metrics_data (list): A list of metric strings to write. + output_file (str): The path to the output file. + """ + if metrics_data: + os.makedirs(os.path.dirname(output_file), exist_ok=True) + with open(output_file, 'w') as outfile: + outfile.write('"Metric Name","Description"\n') + for metric in metrics_data: + outfile.write(metric + '\n') + print(f"{len(metrics_data)} scraped metrics written to: {output_file}") + +def search_metrics_and_scrape(root_dir, output_file): + """ + Orchestrates the search and scraping of metrics from Java files. + + Args: + root_dir (str): The root directory to search for .java files. + output_file (str): The text file to store the metrics. + """ + java_files = find_java_files(root_dir) + all_scraped_metrics = [] + for java_file in java_files: + metrics = scrape_all_metrics_from_file(java_file) + all_scraped_metrics.extend(metrics) + write_metrics_to_file(all_scraped_metrics, output_file) + +if __name__ == "__main__": + # Get the absolute path of the current directory. + current_directory = os.path.dirname(os.path.abspath(__file__)) + + # Get the absolute path of the cps root directory. + cps_root_directory = os.path.abspath(os.path.join(current_directory, "..")) + + # Define the location for the output file, and ensure its directory exists. + output_file = os.path.join(current_directory, "csv", "metrics.csv") + + # Search and scrape the metrics. + search_metrics_and_scrape(cps_root_directory, output_file)
\ No newline at end of file diff --git a/docs/admin-guide.rst index 4a40f9b29c..8f0634a525 100644 --- a/docs/admin-guide.rst +++ b/docs/admin-guide.rst @@ -1,6 +1,6 @@ .. This work is licensed under a Creative Commons Attribution 4.0 International License. .. http://creativecommons.org/licenses/by/4.0 -.. Copyright (C) 2021-2022 Nordix Foundation +.. Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. .. DO NOT CHANGE THIS LABEL FOR RELEASE NOTES - EVEN THOUGH IT GIVES A WARNING .. _adminGuide: @@ -12,6 +12,40 @@ CPS Admin Guide .. toctree:: :maxdepth: 1 +Regular Maintenance +=================== +This section details tasks that an administrator of the CPS application should execute on a regular basis +to ensure optimal operation of CPS. + +Dataspace Clean Up +------------------ +Certain data in the CPS database might not be explicitly removed after it is no longer required ('orphaned data'). +For example, schema sets and their associated unique module resources that are no longer used by any anchor because of model upgrades. +This data would unnecessarily take up space and could eventually affect the performance of the DB if it is not deleted. +How often this needs to be done depends on how often schema sets are being deprecated. +Typically, once per month should suffice. + +To remove orphaned data in a given dataspace use the following POST request: + +.. code:: + + http://<cps-component-service-name>:<cps-port>/v2/admin/dataspaces/<dataspace-name>/actions/clean + +For example: + +.. code-block:: bash + + curl --location --request POST 'http://cps:8080/v2/admin/dataspaces/bookstore/actions/clean' \ + --header 'Content-Type: application/json; charset=utf-8' + + Response : HTTP Status 204 + +For more details refer to the CPS-Core API: :doc:`design`. + +.. note:: + NCMP has no specific maintenance tasks, but it will also build up orphaned data when CM Handles get updated and/or deleted. + To delete this data, execute the above procedure for the dataspace named 'NFP-Operational'. + Logging Configuration ===================== @@ -186,23 +220,28 @@ This also includes both the liveliness state and readiness state. Metrics ------- +The table below lists all CPS-NCMP custom metrics: + +.. csv-table:: + :file: csv/metrics.csv + :widths: 50, 50 + :header-rows: 1 + Prometheus Metrics can be checked at the following endpoint .. code:: http://<cps-component-service-name>:8080/actuator/prometheus -Hazelcast ---------- +Heap Dump and Thread Dump +------------------------- -Hazelcast cluster state and health check can be seen using the below endpoints +Heap dumps and thread dumps can be generated on demand using the endpoints below. .. code:: - http://<cps-component-service-name>:<member-port>/hazelcast/health - http://<cps-component-service-name>:<member-port>/hazelcast/rest/management/cluster/state - -See also : :ref:`cps_common_distributed_datastructures` + http://<cps-component-service-name>:8080/actuator/heapdump + http://<cps-component-service-name>:8080/actuator/threaddump Naming Validation ----------------- @@ -210,9 +249,8 @@ As part of the Kohn 3.1.0 release, CPS has added validation to the names of the following components: - Dataspace names - - Schema Set names - Anchor names - - Cm-Handle identifiers + - CM Handle identifiers The following characters along with spaces are no longer valid for naming of these components.
diff --git a/docs/api/swagger/cps/openapi.yaml b/docs/api/swagger/cps/openapi.yaml index c84609b638..ccfddaced4 100644 --- a/docs/api/swagger/cps/openapi.yaml +++ b/docs/api/swagger/cps/openapi.yaml @@ -9,7 +9,7 @@ info: name: Apache 2.0 url: http://www.apache.org/licenses/LICENSE-2.0 title: ONAP Open API v3 Configuration Persistence Service - version: 3.6.0 + version: 3.6.2 servers: - url: /cps/api security: @@ -19,6 +19,8 @@ tags: name: cps-admin - description: cps Data name: cps-data +- description: CPS Delta + name: cps-delta paths: /v1/dataspaces: post: @@ -330,6 +332,65 @@ paths: summary: Get a dataspace tags: - cps-admin + /{apiVersion}/admin/dataspaces/{dataspace-name}/actions/clean: + post: + description: Clean the dataspace (remove orphaned schema sets and modules) + operationId: cleanDataspace + parameters: + - description: apiVersion + in: path + name: apiVersion + required: true + schema: + default: v2 + enum: + - v1 + - v2 + type: string + - description: dataspace-name + in: path + name: dataspace-name + required: true + schema: + example: my-dataspace + type: string + responses: + "204": + content: {} + description: No Content + "400": + content: + application/json: + example: + status: 400 + message: Bad Request + details: The provided request is not valid + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Bad Request + "403": + content: + application/json: + example: + status: 403 + message: Request Forbidden + details: This request is forbidden + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Forbidden + "500": + content: + application/json: + example: + status: 500 + message: Internal Server Error + details: Internal Server Error occurred + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Internal Server Error + summary: Clean the dataspace + tags: + - cps-admin /v1/dataspaces/{dataspace-name}/anchors: post: deprecated: true @@ -2126,7 +2187,7 @@ paths: description: Internal Server Error summary: Get delta between anchors in the same dataspace tags: - - cps-data + - cps-delta x-codegen-request-body-name: xpath post: description: Get delta between an anchor in a dataspace and JSON payload @@ -2216,7 +2277,7 @@ paths: description: Internal Server Error summary: Get delta between an anchor and JSON payload tags: - - cps-data + - cps-delta /v1/dataspaces/{dataspace-name}/anchors/{anchor-name}/nodes/query: get: deprecated: true @@ -2503,6 +2564,208 @@ paths: tags: - cps-query x-codegen-request-body-name: xpath + /v2/notification-subscription: + delete: + description: Delete cps notification subscription + operationId: deleteNotificationSubscription + parameters: + - description: "For more details on xpath, please refer https://docs.onap.org/projects/onap-cps/en/latest/xpath.html" + examples: + subscription by dataspace xpath: + value: "/dataspaces/dataspace[@name='dataspace01']" + subscription by anchor xpath: + value: "/dataspaces/dataspace[@name='dataspace01']/anchors/anchor[@name='anchor01']" + in: query + name: xpath + required: true + schema: + default: /dataspaces + type: string + responses: + "204": + content: {} + description: No Content + "400": + content: + application/json: + example: + status: 400 + message: Bad Request + details: The provided request is not valid + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Bad Request + "403": + content: + application/json: + example: + status: 403 + message: Request Forbidden + details: This request is forbidden + schema: + $ref: 
'#/components/schemas/ErrorMessage' + description: Forbidden + "409": + content: + application/json: + example: + status: 409 + message: Conflicting request + details: The request cannot be processed as the resource is in use. + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Conflict + "500": + content: + application/json: + example: + status: 500 + message: Internal Server Error + details: Internal Server Error occurred + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Internal Server Error + summary: Delete cps notification subscription + tags: + - cps-admin + get: + description: Get cps notification subscription + operationId: getNotificationSubscription + parameters: + - description: "For more details on xpath, please refer https://docs.onap.org/projects/onap-cps/en/latest/xpath.html" + examples: + subscription by dataspace xpath: + value: "/dataspaces/dataspace[@name='dataspace01']" + subscription by anchor xpath: + value: "/dataspaces/dataspace[@name='dataspace01']/anchors/anchor[@name='anchor01']" + in: query + name: xpath + required: true + schema: + default: /dataspaces + type: string + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationSubscriptionsDataSample' + description: OK + "400": + content: + application/json: + example: + status: 400 + message: Bad Request + details: The provided request is not valid + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Bad Request + "403": + content: + application/json: + example: + status: 403 + message: Request Forbidden + details: This request is forbidden + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Forbidden + "409": + content: + application/json: + example: + status: 409 + message: Conflicting request + details: The request cannot be processed as the resource is in use. 
+ schema: + $ref: '#/components/schemas/ErrorMessage' + description: Conflict + "500": + content: + application/json: + example: + status: 500 + message: Internal Server Error + details: Internal Server Error occurred + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Internal Server Error + summary: Get cps notification subscription + tags: + - cps-admin + post: + description: Create cps notification subscription + operationId: createNotificationSubscription + parameters: + - description: "For more details on xpath, please refer https://docs.onap.org/projects/onap-cps/en/latest/xpath.html" + examples: + subscription by dataspace xpath: + value: "/dataspaces/dataspace[@name='dataspace01']" + subscription by anchor xpath: + value: "/dataspaces/dataspace[@name='dataspace01']/anchors/anchor[@name='anchor01']" + in: query + name: xpath + required: true + schema: + default: /dataspaces + type: string + requestBody: + content: + application/json: + examples: + dataSample: + $ref: '#/components/examples/NotificationSubscriptionsDataSample' + value: null + schema: + type: object + required: true + responses: + "201": + description: Created without response body + "400": + content: + application/json: + example: + status: 400 + message: Bad Request + details: The provided request is not valid + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Bad Request + "403": + content: + application/json: + example: + status: 403 + message: Request Forbidden + details: This request is forbidden + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Forbidden + "409": + content: + application/json: + example: + status: 409 + message: Conflicting request + details: The request cannot be processed as the resource is in use. 
+ schema: + $ref: '#/components/schemas/ErrorMessage' + description: Conflict + "500": + content: + application/json: + example: + status: 500 + message: Internal Server Error + details: Internal Server Error occurred + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Internal Server Error + summary: Create cps notification subscription + tags: + - cps-admin components: examples: dataSample: @@ -2556,6 +2819,12 @@ components: name: SciFi - code: 2 name: kids + NotificationSubscriptionsDataSample: + value: + cps-notification-subscriptions:dataspaces: + dataspace: + - name: dataspace01 + - name: dataspace02 parameters: dataspaceNameInQuery: description: dataspace-name @@ -2736,6 +3005,19 @@ components: schema: example: 10 type: integer + notificationSubscriptionXpathInQuery: + description: "For more details on xpath, please refer https://docs.onap.org/projects/onap-cps/en/latest/xpath.html" + examples: + subscription by dataspace xpath: + value: "/dataspaces/dataspace[@name='dataspace01']" + subscription by anchor xpath: + value: "/dataspaces/dataspace[@name='dataspace01']/anchors/anchor[@name='anchor01']" + in: query + name: xpath + required: true + schema: + default: /dataspaces + type: string responses: Created: content: @@ -2897,6 +3179,7 @@ components: type: string title: Module reference object type: object + NotificationSubscriptionsDataSample: {} getDeltaByDataspaceAnchorAndPayload_request: properties: json: diff --git a/docs/api/swagger/ncmp/openapi-inventory.yaml b/docs/api/swagger/ncmp/openapi-inventory.yaml index d565f9c558..069239b4ff 100644 --- a/docs/api/swagger/ncmp/openapi-inventory.yaml +++ b/docs/api/swagger/ncmp/openapi-inventory.yaml @@ -2,7 +2,7 @@ openapi: 3.0.3 info: description: NCMP Inventory API title: NCMP Inventory API - version: 3.6.0 + version: 3.6.2 servers: - url: /ncmpInventory security: diff --git a/docs/api/swagger/ncmp/openapi.yaml b/docs/api/swagger/ncmp/openapi.yaml index 692266183b..ee9c46b5c8 100644 --- a/docs/api/swagger/ncmp/openapi.yaml +++ b/docs/api/swagger/ncmp/openapi.yaml @@ -2,7 +2,7 @@ openapi: 3.0.3 info: description: NCMP to CPS Proxy API title: NCMP to CPS Proxy API - version: 3.6.0 + version: 3.6.2 servers: - url: /ncmp security: @@ -32,22 +32,11 @@ paths: - description: | The `resourceIdentifier` parameter specifies the target resource in the GNBDUFunctionConfig model. For ONAP DMI Plugin, the format will follow RESTConf paths. 
Examples: - - All GNBDUFunctions: `/GNBDUFunction` - - Specific GNBDUFunction by gNBId: `/GNBDUFunction[@gNBId='1001']` - - RIM-RS Reporting Configuration: `/GNBDUFunction[@gNBId='1001']/rimRSReportConf` + - All GNBDUFunctions: `/ManagedElement=node1/GNBDUFunction=1` examples: sample 1: value: - resourceIdentifier: \GNBDUFunction - sample 2: - value: - resourceIdentifier: "\\GNBDUFunction[@gNBId='1001']" - sample 3: - value: - resourceIdentifier: "\\GNBDUFunction[@gNBId='1001']\\rimRSReportConf" - sample 4: - value: - resourceIdentifier: "parent=GNBDUFunction,child=gNBId:1001" + resourceIdentifier: /ManagedElement=node1/GNBDUFunction=1 in: query name: resourceIdentifier required: true @@ -102,6 +91,16 @@ paths: schema: $ref: '#/components/schemas/ErrorMessage' description: The specified resource was not found + "409": + content: + application/json: + example: + status: 409 + message: Conflict error message + details: Conflict error details + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Conflict "500": content: application/json: @@ -149,22 +148,11 @@ paths: - description: | The `resourceIdentifier` parameter specifies the target resource in the GNBDUFunctionConfig model. For ONAP DMI Plugin, the format will follow RESTConf paths. Examples: - - All GNBDUFunctions: `/GNBDUFunction` - - Specific GNBDUFunction by gNBId: `/GNBDUFunction[@gNBId='1001']` - - RIM-RS Reporting Configuration: `/GNBDUFunction[@gNBId='1001']/rimRSReportConf` + - All GNBDUFunctions: `/ManagedElement=node1/GNBDUFunction=1` examples: sample 1: value: - resourceIdentifier: \GNBDUFunction - sample 2: - value: - resourceIdentifier: "\\GNBDUFunction[@gNBId='1001']" - sample 3: - value: - resourceIdentifier: "\\GNBDUFunction[@gNBId='1001']\\rimRSReportConf" - sample 4: - value: - resourceIdentifier: "parent=GNBDUFunction,child=gNBId:1001" + resourceIdentifier: /ManagedElement=node1/GNBDUFunction=1 in: query name: resourceIdentifier required: true @@ -174,18 +162,18 @@ paths: The `options` parameter specifies additional query options. It is mandatory to wrap key(s)=value(s) in parentheses `()`. Examples for GNBDUFunctionConfig queries: - Limit depth of returned sub-tree: `(depth=2)` - - Select specific fields: `(fields=gNBId,gNBDUName)` - - Combine options: `(depth=3,fields=gNBId,gNBDUName)` + - Select specific fields: `(fields=attributes(gNBId;gNBDUName))` + - Combine options: `(depth=3,fields=attributes(gNBId;gNBDUName))` examples: Limit Depth: value: options: (depth=2) Select Specific Fields: value: - options: "(fields=gNBId,gNBDUName)" + options: (fields=attributes(gNBId;gNBDUName)) Combine Depth and Fields: value: - options: "(depth=3,fields=gNBId,gNBDUName)" + options: "(depth=3,fields=attributes(gNBId;gNBDUName))" in: query name: options required: false @@ -293,22 +281,11 @@ paths: - description: | The `resourceIdentifier` parameter specifies the target resource in the GNBDUFunctionConfig model. For ONAP DMI Plugin, the format will follow RESTConf paths. 
Examples: - - All GNBDUFunctions: `/GNBDUFunction` - - Specific GNBDUFunction by gNBId: `/GNBDUFunction[@gNBId='1001']` - - RIM-RS Reporting Configuration: `/GNBDUFunction[@gNBId='1001']/rimRSReportConf` + - All GNBDUFunctions: `/ManagedElement=node1/GNBDUFunction=1` examples: sample 1: value: - resourceIdentifier: \GNBDUFunction - sample 2: - value: - resourceIdentifier: "\\GNBDUFunction[@gNBId='1001']" - sample 3: - value: - resourceIdentifier: "\\GNBDUFunction[@gNBId='1001']\\rimRSReportConf" - sample 4: - value: - resourceIdentifier: "parent=GNBDUFunction,child=gNBId:1001" + resourceIdentifier: /ManagedElement=node1/GNBDUFunction=1 in: query name: resourceIdentifier required: true @@ -366,6 +343,16 @@ paths: schema: $ref: '#/components/schemas/ErrorMessage' description: Forbidden + "409": + content: + application/json: + example: + status: 409 + message: Conflict error message + details: Conflict error details + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Conflict "500": content: application/json: @@ -413,22 +400,11 @@ paths: - description: | The `resourceIdentifier` parameter specifies the target resource in the GNBDUFunctionConfig model. For ONAP DMI Plugin, the format will follow RESTConf paths. Examples: - - All GNBDUFunctions: `/GNBDUFunction` - - Specific GNBDUFunction by gNBId: `/GNBDUFunction[@gNBId='1001']` - - RIM-RS Reporting Configuration: `/GNBDUFunction[@gNBId='1001']/rimRSReportConf` + - All GNBDUFunctions: `/ManagedElement=node1/GNBDUFunction=1` examples: sample 1: value: - resourceIdentifier: \GNBDUFunction - sample 2: - value: - resourceIdentifier: "\\GNBDUFunction[@gNBId='1001']" - sample 3: - value: - resourceIdentifier: "\\GNBDUFunction[@gNBId='1001']\\rimRSReportConf" - sample 4: - value: - resourceIdentifier: "parent=GNBDUFunction,child=gNBId:1001" + resourceIdentifier: /ManagedElement=node1/GNBDUFunction=1 in: query name: resourceIdentifier required: true @@ -490,6 +466,16 @@ paths: schema: $ref: '#/components/schemas/ErrorMessage' description: Forbidden + "409": + content: + application/json: + example: + status: 409 + message: Conflict error message + details: Conflict error details + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Conflict "500": content: application/json: @@ -538,22 +524,11 @@ paths: - description: | The `resourceIdentifier` parameter specifies the target resource in the GNBDUFunctionConfig model. For ONAP DMI Plugin, the format will follow RESTConf paths. 
Examples: - - All GNBDUFunctions: `/GNBDUFunction` - - Specific GNBDUFunction by gNBId: `/GNBDUFunction[@gNBId='1001']` - - RIM-RS Reporting Configuration: `/GNBDUFunction[@gNBId='1001']/rimRSReportConf` + - All GNBDUFunctions: `/ManagedElement=node1/GNBDUFunction=1` examples: sample 1: value: - resourceIdentifier: \GNBDUFunction - sample 2: - value: - resourceIdentifier: "\\GNBDUFunction[@gNBId='1001']" - sample 3: - value: - resourceIdentifier: "\\GNBDUFunction[@gNBId='1001']\\rimRSReportConf" - sample 4: - value: - resourceIdentifier: "parent=GNBDUFunction,child=gNBId:1001" + resourceIdentifier: /ManagedElement=node1/GNBDUFunction=1 in: query name: resourceIdentifier required: true @@ -618,6 +593,16 @@ paths: schema: $ref: '#/components/schemas/ErrorMessage' description: Forbidden + "409": + content: + application/json: + example: + status: 409 + message: Conflict error message + details: Conflict error details + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Conflict "500": content: application/json: @@ -754,43 +739,34 @@ paths: schema: example: my-cm-handle type: string - - description: | - The `cps-path` parameter allows referencing elements in the GNBDUFunctionConfig data model. - For more details on cps path, please refer to: - [CPS Path Documentation](https://docs.onap.org/projects/onap-cps/en/latest/cps-path.html). - Example paths: - - Root GNBDUFunction: `/GNBDUFunction` - - Specific gNB ID: `/GNBDUFunction[@gNBId='1001']` - - RIM-RS Reporting Config: `/GNBDUFunction[@gNBId='1001']/rimRSReportConf` + - description: "For more details on cps path, please refer https://docs.onap.org/projects/onap-cps/en/latest/cps-path.html" examples: - GNBDUFunction Root: + container cps path: value: //GNBDUFunction - Specific gNB ID: - value: "//GNBDUFunction[@gNBId='1001']" - RIM-RS Reporting Config: - value: "//GNBDUFunction[@gNBId='1001']/rimRSReportConf" + list attributes cps path: + value: "//GNBDUFunction[@id='1001']" in: query name: cps-path required: false schema: - default: /GNBDUFunction + default: / type: string - description: | The `options` parameter specifies additional query options. It is mandatory to wrap key(s)=value(s) in parentheses `()`. 
Examples for GNBDUFunctionConfig queries: - Limit depth of returned sub-tree: `(depth=2)` - - Select specific fields: `(fields=gNBId,gNBDUName)` - - Combine options: `(depth=3,fields=gNBId,gNBDUName)` + - Select specific fields: `(fields=attributes(gNBId;gNBDUName))` + - Combine options: `(depth=3,fields=attributes(gNBId;gNBDUName))` examples: Limit Depth: value: options: (depth=2) Select Specific Fields: value: - options: "(fields=gNBId,gNBDUName)" + options: (fields=attributes(gNBId;gNBDUName)) Combine Depth and Fields: value: - options: "(depth=3,fields=gNBId,gNBDUName)" + options: "(depth=3,fields=attributes(gNBId;gNBDUName))" in: query name: options required: false @@ -1402,10 +1378,12 @@ components: summary: Sample response value: gnbdu3gpp:GNBDUFunction: - gNBId: 12345 - gNBIdLength: 32 - gNBDUId: 67890 - gNBDUName: DU-1 + id: 12345 + attributes: + gNBId: 5 + gNBIdLength: 32 + gNBDUId: 67890 + gNBDUName: DU-1 rimRSReportConf: reportIndicator: enabled reportInterval: 1000 @@ -1425,10 +1403,12 @@ components: summary: Sample request value: gnbdu3gpp:GNBDUFunction: - gNBId: 12345 - gNBIdLength: 32 - gNBDUId: 67890 - gNBDUName: DU-1 + id: 12345 + attributes: + gNBId: 5 + gNBIdLength: 32 + gNBDUId: 67890 + gNBDUName: DU-1 rimRSReportConf: reportIndicator: enabled reportInterval: 1000 @@ -1453,10 +1433,12 @@ components: target: /gnbdu3gpp:GNBDUFunction value: gnbdu3gpp:GNBDUFunction: - gNBId: 54321 - gNBIdLength: 32 - gNBDUId: 98765 - gNBDUName: DU-2 + id: 54321 + attributes: + gNBId: 5 + gNBIdLength: 32 + gNBDUId: 98765 + gNBDUName: DU-2 rimRSReportConf: reportIndicator: disabled reportInterval: 2000 @@ -1536,22 +1518,11 @@ components: description: | The `resourceIdentifier` parameter specifies the target resource in the GNBDUFunctionConfig model. For ONAP DMI Plugin, the format will follow RESTConf paths. Examples: - - All GNBDUFunctions: `/GNBDUFunction` - - Specific GNBDUFunction by gNBId: `/GNBDUFunction[@gNBId='1001']` - - RIM-RS Reporting Configuration: `/GNBDUFunction[@gNBId='1001']/rimRSReportConf` + - All GNBDUFunctions: `/ManagedElement=node1/GNBDUFunction=1` examples: sample 1: value: - resourceIdentifier: \GNBDUFunction - sample 2: - value: - resourceIdentifier: "\\GNBDUFunction[@gNBId='1001']" - sample 3: - value: - resourceIdentifier: "\\GNBDUFunction[@gNBId='1001']\\rimRSReportConf" - sample 4: - value: - resourceIdentifier: "parent=GNBDUFunction,child=gNBId:1001" + resourceIdentifier: /ManagedElement=node1/GNBDUFunction=1 in: query name: resourceIdentifier required: true @@ -1562,18 +1533,18 @@ components: The `options` parameter specifies additional query options. It is mandatory to wrap key(s)=value(s) in parentheses `()`. 
Examples for GNBDUFunctionConfig queries: - Limit depth of returned sub-tree: `(depth=2)` - - Select specific fields: `(fields=gNBId,gNBDUName)` - - Combine options: `(depth=3,fields=gNBId,gNBDUName)` + - Select specific fields: `(fields=attributes(gNBId;gNBDUName))` + - Combine options: `(depth=3,fields=attributes(gNBId;gNBDUName))` examples: Limit Depth: value: options: (depth=2) Select Specific Fields: value: - options: "(fields=gNBId,gNBDUName)" + options: (fields=attributes(gNBId;gNBDUName)) Combine Depth and Fields: value: - options: "(depth=3,fields=gNBId,gNBDUName)" + options: "(depth=3,fields=attributes(gNBId;gNBDUName))" in: query name: options required: false @@ -1636,26 +1607,17 @@ components: example: my-cm-handle type: string cpsPathInQuery: - description: | - The `cps-path` parameter allows referencing elements in the GNBDUFunctionConfig data model. - For more details on cps path, please refer to: - [CPS Path Documentation](https://docs.onap.org/projects/onap-cps/en/latest/cps-path.html). - Example paths: - - Root GNBDUFunction: `/GNBDUFunction` - - Specific gNB ID: `/GNBDUFunction[@gNBId='1001']` - - RIM-RS Reporting Config: `/GNBDUFunction[@gNBId='1001']/rimRSReportConf` + description: "For more details on cps path, please refer https://docs.onap.org/projects/onap-cps/en/latest/cps-path.html" examples: - GNBDUFunction Root: + container cps path: value: //GNBDUFunction - Specific gNB ID: - value: "//GNBDUFunction[@gNBId='1001']" - RIM-RS Reporting Config: - value: "//GNBDUFunction[@gNBId='1001']/rimRSReportConf" + list attributes cps path: + value: "//GNBDUFunction[@id='1001']" in: query name: cps-path required: false schema: - default: /GNBDUFunction + default: / type: string moduleNameInQuery: description: Filter for a module name.This is an optional parameter @@ -1739,6 +1701,16 @@ components: schema: type: object description: OK + Conflict: + content: + application/json: + example: + status: 409 + message: Conflict error message + details: Conflict error details + schema: + $ref: '#/components/schemas/ErrorMessage' + description: Conflict Created: content: {} description: Created @@ -1788,7 +1760,7 @@ components: DataOperationRequest: example: operations: - - resourceIdentifier: NRCellDU/attributes/cellLocalId + - resourceIdentifier: /ManagedElement=NRNode1/GNBDUFunction=1 targetIds: - "[\"da310eecdb8d44c2acc0ddaae01174b1\",\"c748c58f8e0b438f9fd1f28370b17d47\"\ ]" @@ -1798,7 +1770,7 @@ components: options: (fields=NRCellDU/attributes/cellLocalId) operationId: "12" operation: read - - resourceIdentifier: NRCellDU/attributes/cellLocalId + - resourceIdentifier: /ManagedElement=NRNode1/GNBDUFunction=1 targetIds: - "[\"da310eecdb8d44c2acc0ddaae01174b1\",\"c748c58f8e0b438f9fd1f28370b17d47\"\ ]" @@ -1818,7 +1790,7 @@ components: type: object DataOperationDefinition: example: - resourceIdentifier: NRCellDU/attributes/cellLocalId + resourceIdentifier: /ManagedElement=NRNode1/GNBDUFunction=1 targetIds: - "[\"da310eecdb8d44c2acc0ddaae01174b1\",\"c748c58f8e0b438f9fd1f28370b17d47\"\ ]" @@ -1842,7 +1814,7 @@ components: example: (fields=NRCellDU/attributes/cellLocalId) type: string resourceIdentifier: - example: NRCellDU/attributes/cellLocalId + example: /ManagedElement=NRNode1/GNBDUFunction=1 type: string targetIds: items: diff --git a/docs/cm-handle-lcm-events.rst b/docs/cm-handle-lcm-events.rst index 8446834c31..a07354d84d 100644 --- a/docs/cm-handle-lcm-events.rst +++ b/docs/cm-handle-lcm-events.rst @@ -1,6 +1,6 @@ .. 
This work is licensed under a Creative Commons Attribution 4.0 International License. .. http://creativecommons.org/licenses/by/4.0 -.. Copyright (C) 2023 Nordix Foundation +.. Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. .. DO NOT CHANGE THIS LABEL FOR RELEASE NOTES - EVEN THOUGH IT GIVES A WARNING .. _cmHandleLcmEvents: @@ -15,7 +15,7 @@ CM Handle Lifecycle Management (LCM) Events Introduction ============ -LCM events for CM Handles are published when a CM Handle is created, deleted or another change in the cm handle state occurs. +LCM events for CM Handles are sent when a CM Handle is created, deleted or another change in the CM Handle state occurs. **3 possible event types:** @@ -25,9 +25,10 @@ LCM events for CM Handles are published when a CM Handle is created, deleted or CM Handle LCM Event Schema --------------------------- -The current published LCM event is based on the following schema: +The current sent LCM events are based on the following schema: -:download:`Life cycle management event schema <schemas/lcm-event-schema-v1.json>` +:download:`Life cycle management event header <schemas/lcm/lcm-event-header-v1.json>` +:download:`Life cycle management event schema <schemas/lcm/lcm-event-schema-v1.json>` CM Handle LCM Event structure ----------------------------- @@ -55,7 +56,7 @@ Event payload varies based on the type of event. **CREATE** -Event payload for this event contains the properties of the new cm handle created. +Event payload for this event contains the properties of the new CM Handle created. *Create event payload prototype* @@ -77,7 +78,7 @@ Event payload for this event contains the properties of the new cm handle create **UPDATE** -Event payload for this event contains the difference in state and properties of the cm handle. +Event payload for this event contains the difference in state and properties of the CM Handle. *Update event payload prototype* @@ -106,7 +107,7 @@ Event payload for this event contains the difference in state and properties of **DELETE** -Event payload for this event contains the identifier of the deleted cm handle. +Event payload for this event contains the identifier of the deleted CM Handle. *Delete event payload prototype* @@ -114,4 +115,4 @@ Event payload for this event contains the identifier of the deleted cm handle. "event": { "cmHandleId" : "cmhandle-001", - }
\ No newline at end of file + } diff --git a/docs/cm-notification-subscriptions.rst b/docs/cm-notification-subscriptions.rst index e1d1c2f800..0643b8e627 100644 --- a/docs/cm-notification-subscriptions.rst +++ b/docs/cm-notification-subscriptions.rst @@ -1,6 +1,6 @@ .. This work is licensed under a Creative Commons Attribution 4.0 International License. .. http://creativecommons.org/licenses/by/4.0 -.. Copyright (C) 2024 Nordix Foundation +.. Copyright (C) 2024-2025 Nordix Foundation .. DO NOT CHANGE THIS LABEL FOR RELEASE NOTES - EVEN THOUGH IT GIVES A WARNING .. _cmNotificationSubscriptions: @@ -21,11 +21,11 @@ The CM Subscription flow is event driven and adheres to the CNCF Cloud Events Sp Event to create and delete a subscription. -:download:`CM Subscription Event Schema <schemas/ncmp-in-event-schema-1.0.0.json>` +:download:`CM Subscription Event Schema <schemas/ncmp/avc/ncmp-in-event-schema-1.0.0.json>` Event to receive status of participants in a subscription. -:download:`CM Subscription Response Event Schema <schemas/ncmp-out-event-schema-1.0.0.json>` +:download:`CM Subscription Response Event Schema <schemas/ncmp/avc/ncmp-out-event-schema-1.0.0.json>` CM Subscriptions Creation ------------------------- @@ -52,7 +52,7 @@ CM Notifications are triggered by any change in the network, provided the client The CM Notification Event follows the structure outlined in the schema below: -:download:`CM Data Notification Event Schema <schemas/dmidataavc/avc-event-schema-1.0.0.json>` +:download:`CM Data Notification Event Schema <schemas/dmi/cm-events/avc-event-schema-1.0.0.json>` **Note.** NCMP uses the CM Notification event key from the source topic to forward notifications to the client, ensuring that the order of notifications within a topic partition is maintained during forwarding. **Note.** If the notification key from the source topic is null, NCMP cannot guarantee the order of events within a topic partition when forwarding. diff --git a/docs/conf.py b/docs/conf.py index 364efe8f1d..d3ec5df1ec 100755 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,5 +1,5 @@ # ============LICENSE_START======================================================= -# Copyright (C) 2021 Nordix Foundation +# Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,6 +18,8 @@ from docutils.parsers.rst import directives +import subprocess + project = "onap" release = "master" version = "master" @@ -69,6 +71,12 @@ linkcheck_ignore = [ html_last_updated_fmt = '%d-%b-%y %H:%M' +# Run ScrapeMetrics.py +subprocess.run(["python3", "ScrapeMetrics.py"], check=True) + +# Run test_ScrapeMetrics.py +subprocess.run(["python3", "test_ScrapeMetrics.py"], check=True) + def setup(app): app.add_css_file("css/ribbon.css") diff --git a/docs/cps-events.rst b/docs/cps-events.rst index 47aa73f12e..1097af9a12 100644 --- a/docs/cps-events.rst +++ b/docs/cps-events.rst @@ -1,6 +1,6 @@ .. This work is licensed under a Creative Commons Attribution 4.0 International License. .. http://creativecommons.org/licenses/by/4.0 -.. Copyright (C) 2022-2023 Nordix Foundation +.. Copyright (C) 2022-2025 Nordix Foundation .. DO NOT CHANGE THIS LABEL FOR RELEASE NOTES - EVEN THOUGH IT GIVES A WARNING .. _cpsEvents: @@ -16,9 +16,9 @@ CPS Events cm-notification-subscriptions.rst .. 
note:: - Legacy async response on a client supplied topic for single cm handle data request are no longer supported. Click link below for the legacy specification. + Legacy async response on a client supplied topic for single CM Handle data request are no longer supported. Click link below for the legacy specification. .. toctree:: :maxdepth: 0 - ncmp-async-events.rst
\ No newline at end of file + ncmp-async-events.rst diff --git a/docs/cps-ncmp-message-status-codes.rst b/docs/cps-ncmp-message-status-codes.rst index e0a3f0308b..799838ae44 100644 --- a/docs/cps-ncmp-message-status-codes.rst +++ b/docs/cps-ncmp-message-status-codes.rst @@ -1,6 +1,6 @@ .. This work is licensed under a Creative Commons Attribution 4.0 International License. .. http://creativecommons.org/licenses/by/4.0 -.. Copyright (C) 2023-2024 Nordix Foundation +.. Copyright (C) 2023-2025 Nordix Foundation .. DO NOT CHANGE THIS LABEL FOR RELEASE NOTES - EVEN THOUGH IT GIVES A WARNING .. _dataOperationMessageStatusCodes: @@ -16,9 +16,9 @@ CPS-NCMP Message Status Codes +-----------------+------------------------------------------------------+-----------------------------------+ | 1 | ACCEPTED | CM Data Notification Subscription | +-----------------+------------------------------------------------------+-----------------------------------+ - | 100 | cm handle id(s) is(are) not found | All features | + | 100 | CM Handle id(s) is(are) not found | All features | +-----------------+------------------------------------------------------+-----------------------------------+ - | 101 | cm handle(s) not ready | Data Operation | + | 101 | CM Handle(s) not ready | Data Operation | +-----------------+------------------------------------------------------+-----------------------------------+ | 102 | dmi plugin service is not responding | Data Operation | +-----------------+------------------------------------------------------+-----------------------------------+ @@ -30,9 +30,9 @@ CPS-NCMP Message Status Codes +-----------------+------------------------------------------------------+-----------------------------------+ | 108 | Unknown error | All features | +-----------------+------------------------------------------------------+-----------------------------------+ - | 109 | cm-handle already exists | Inventory | + | 109 | CM Handle already exists | Inventory | +-----------------+------------------------------------------------------+-----------------------------------+ - | 110 | cm-handle has an invalid character(s) in id | Inventory | + | 110 | CM Handle has an invalid character(s) in id | Inventory | +-----------------+------------------------------------------------------+-----------------------------------+ | 111 | alternate id already associated | Inventory | +-----------------+------------------------------------------------------+-----------------------------------+ diff --git a/docs/cps-path.rst b/docs/cps-path.rst index eb203d8918..cfaad3ca57 100644 --- a/docs/cps-path.rst +++ b/docs/cps-path.rst @@ -1,6 +1,6 @@ .. This work is licensed under a Creative Commons Attribution 4.0 International License. .. http://creativecommons.org/licenses/by/4.0 -.. Copyright (C) 2021-2023 Nordix Foundation +.. Copyright (C) 2021-2025 Nordix Foundation .. Modifications Copyright (C) 2023 TechMahindra Ltd .. DO NOT CHANGE THIS LABEL FOR RELEASE NOTES - EVEN THOUGH IT GIVES A WARNING @@ -183,7 +183,7 @@ General Notes Query Syntax ============ -``( <absolute-path> | <descendant-path> ) [ <leaf-conditions> ] [ <text()-condition> ] [ <contains()-condition> ] [ <ancestor-axis> ]`` +``( <absolute-path> | <descendant-path> ) [ <leaf-conditions> ] [ <text()-condition> ] [ <contains()-condition> ] [ <ancestor-axis> ] [ <attribute-axis> ]`` Each CPS path expression need to start with an 'absolute' or 'descendant' xpath. 
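As a minimal illustration of the query syntax above, the sketch below shows how such an expression might be sent to the CPS node query endpoint; the dataspace, anchor, bookstore model names and credentials are assumptions used only for this example and are not part of this change.

.. code-block:: bash

    # Hypothetical query combining a descendant path, a leaf condition and the
    # ancestor axis; endpoint version, names and credentials are illustrative assumptions.
    curl -G -u cpsuser:cps-password \
      "http://localhost:8080/cps/api/v2/dataspaces/my-dataspace/anchors/my-anchor/nodes/query" \
      --data-urlencode "cps-path=//books[@title='Matilda']/ancestor::categories"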
@@ -310,3 +310,21 @@ The ancestor axis can be added to any CPS path query but has to be the last part **Limitations** - Ancestor list elements can only be addressed using the list key leaf. - List elements with compound keys are not supported. + +attribute-axis +-------------- + +The attribute axis can be added to a CPS path query at the end. It will return only distinct values of a specified leaf. + +**Syntax**: ``<cps-path> ( '/@' <leaf-name> )?`` + - ``cps-path``: Any CPS path query. + - ``leaf-name``: The name of the leaf (attribute) for which values should be returned. + +**Examples** + - ``//categories/@name`` + - ``//categories[@code='1']/books/@price`` + - ``//books/ancestor::bookstore/@bookstore-name`` + +**Notes** + - The output is a list of attribute-value pairs. For example, ``[{"name":"Kids"},{"name":"SciFi"}]`` + - Only unique values will be returned. For example, if 3 books have a price of 5, then 5 will be returned only once. diff --git a/docs/cps-scheduled-processes.rst b/docs/cps-scheduled-processes.rst index c204e6ca0a..83cba9f8bd 100644 --- a/docs/cps-scheduled-processes.rst +++ b/docs/cps-scheduled-processes.rst @@ -1,6 +1,6 @@ .. This work is licensed under a Creative Commons Attribution 4.0 International License. .. http://creativecommons.org/licenses/by/4.0 -.. Copyright (C) 2022 Nordix Foundation +.. Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved. .. DO NOT CHANGE THIS LABEL FOR RELEASE NOTES - EVEN THOUGH IT GIVES A WARNING @@ -20,18 +20,18 @@ The following section is a list of the current scheduled processes running withi Module Sync ----------- -The module sync is a user :ref:`configurable timed process<additional-cps-ncmp-customizations>`, -which is set to search for CM-Handles within CPS with an *'ADVISED'* state. -Once the CM-Handle is processed by the module sync, the CM-Handle state is then set to *'READY'*, if the process completes successfully. -If for any reason the module sync fails, the CM-Handle state will then be set to *'LOCKED'*, +The module sync is a user :ref:`configurable timed process<configuration-properties>`, +which is set to search for CM Handles within CPS with an *'ADVISED'* state. +Once the CM Handle is processed by the module sync, the CM Handle state is then set to *'READY'*, if the process completes successfully. +If for any reason the module sync fails, the CM Handle state will then be set to *'LOCKED'*, and the reason for the lock will also be stored within CPS. -CM-Handles in the *'LOCKED'* state will be retried when the system has availability. CM-Handles in a *'LOCKED'* -state are processed by the retry mechanism, by setting CM-Handle state back to *'ADVISED'* so the next sync cycle will process those again. +CM Handles in the *'LOCKED'* state will be retried when the system has availability. CM Handles in a *'LOCKED'* +state are processed by the retry mechanism, by setting CM Handle state back to *'ADVISED'* so the next sync cycle will process those again. Data Sync --------- -The data sync is a user :ref:`configurable timed process<additional-cps-ncmp-customizations>`, -which is set to search for CM-Handles with a sync state of *'UNSYNCHRONIZED'*. -Once the CM-Handle(s) with a sync state of *'UNSYNCHRONIZED'* is processed by the data sync, -the CM-Handle sync state is then set to *'SYNCHRONIZED'*, if the process completes successfully. -If the data sync fails, the CM-Handle sync state will remain as *'UNSYNCHRONIZED'*, and will be re-attempted. 
+The data sync is a user :ref:`configurable timed process<configuration-properties>`, +which is set to search for CM Handles with a sync state of *'UNSYNCHRONIZED'*. +Once the CM Handle(s) with a sync state of *'UNSYNCHRONIZED'* is processed by the data sync, +the CM Handle sync state is then set to *'SYNCHRONIZED'*, if the process completes successfully. +If the data sync fails, the CM Handle sync state will remain as *'UNSYNCHRONIZED'*, and will be re-attempted. diff --git a/docs/csv/3pp_properties.csv b/docs/csv/3pp_properties.csv new file mode 100644 index 0000000000..822cb4681a --- /dev/null +++ b/docs/csv/3pp_properties.csv @@ -0,0 +1,9 @@ +Property,Description,Default Value +logging.level.org.onap.cps,Logging level set in cps & ncmp,INFO +spring.datasource.username,"Internal user name used by cps-core to connect to its own database.",cps +spring.datasource.password,"Internal password used by cps-core to connect to its own database. If not defined, the password is generated when deploying the application. See also :ref:`cps_common_credentials_retrieval`.",Not defined +spring.datasource.url,"URL to database name used by cps-core.", See note below +spring.datasource.hikari.maximumPoolSize,"Specifies number of database connections between database and application. This property controls the maximum size that the pool is allowed to reach, including both idle and in-use connections.",80 +spring.kafka.bootstrap-servers,"Kafka hostname and port",``localhost:9092`` +spring.kafka.consumer.client-id,"Kafka consumer client id",cps-core +spring.kafka.security.protocol,"Kafka security protocol. Some possible values are: PLAINTEXT, SASL_PLAINTEXT (for authentication), SASL_SSL (for authentication and encryption)",PLAINTEXT diff --git a/docs/csv/common_custom_properties.csv b/docs/csv/common_custom_properties.csv new file mode 100644 index 0000000000..3e6d3fe88c --- /dev/null +++ b/docs/csv/common_custom_properties.csv @@ -0,0 +1,7 @@ +Property,Description,Default Value +notification.async.executor.core-pool-size,"Core pool size in asynchronous execution of notification.",2 +notification.async.executor.max-pool-size,"Max pool size in asynchronous execution of notification.",10 +notification.async.executor.queue-capacity,"Queue Capacity in asynchronous execution of notification.",500 +notification.async.executor.wait-for-tasks-to-complete-on-shutdown,"If the executor should wait for the tasks to be completed on shutdown",true +notification.async.executor.thread-name-prefix,Prefix to be added to the thread name in asynchronous execution of notifications.",Async- +notification.async.executor.time-out-value-in-ms,"Maximum time allowed by the thread pool executor for execution of one of the threads in milliseconds.",60000 diff --git a/docs/csv/metrics.csv b/docs/csv/metrics.csv new file mode 100644 index 0000000000..25925dc669 --- /dev/null +++ b/docs/csv/metrics.csv @@ -0,0 +1,57 @@ +"Metric Name","Description" +"cps.ncmp.controller.get","Time taken to get resource data from datastore" +"cm_handle_search_invocation_total","Search for cm handles within CPS-E-05 interface" +"cm_handle_search_invocation_total","Search for cm handles within CPS-E-05 interface" +"cm_handle_search_invocation_total","Search for cm handle ids within CPS-NCMP-I-01 interface" +"cps.ncmp.inventory.controller.update","Time taken to handle registration request" +"cps_ncmp_inventory_cm_handles_by_state{state=ADVISED}","Current number of cm handles in advised state" +"cps_ncmp_inventory_cm_handles_by_state{state=READY}","Current 
number of cm handles in ready state" +"cps_ncmp_inventory_cm_handles_by_state{state=LOCKED}","Current number of cm handles in locked state" +"cps_ncmp_inventory_cm_handles_by_state{state=DELETING}","Current number of cm handles in deleting state" +"cps_ncmp_inventory_cm_handles_by_state{state=DELETED}","Number of cm handles that have been deleted since the application started" +"cps.ncmp.dmi.get","Time taken to fetch the resource data from operational data store for given cm handle " +"cps.ncmp.inventory.persistence.datanode.get","Time taken to get a data node (from ncmp dmi registry)" +"cps.ncmp.inventory.persistence.datanode.get","Time taken to get a data node (from ncmp dmi registry)" +"cps.ncmp.inventory.module.references.from.dmi","Time taken to get all module references for a cm handle from dmi" +"cps.ncmp.inventory.yang.resources.from.dmi","Time taken to get list of yang resources from dmi" +"cps.ncmp.cmhandle.state.update.batch","Time taken to update a batch of cm handle states" +"cps.rest.admin.controller.schemaset.create","Time taken to create schemaset from controller" +"cps.data.controller.datanode.query.v1","Time taken to query data nodes" +"cps.data.controller.datanode.query.v2","Time taken to query data nodes" +"cps.data.controller.datanode.query.across.anchors","Time taken to query data nodes across anchors" +"cps.data.controller.datanode.get.v1","Time taken to get data node" +"cps.data.controller.datanode.get.v2","Time taken to get data node" +"cps.delta.controller.get.delta","Time taken to get delta between anchors" +"cps.delta.controller.get.delta","Time taken to get delta between anchors" +"cps.module.persistence.schemaset.create","Time taken to store a schemaset (list of module references)" +"cps.module.persistence.schemaset.createFromNewAndExistingModules","Time taken to store a schemaset (from new and existing)" +"cps.data.persistence.service.datanode.query","Time taken to query data nodes" +"cps.data.persistence.service.datanode.query.anchors","Time taken to query data nodes across all anchors or list of anchors" +"cps.data.persistence.service.datanode.get","Time taken to get a data node" +"cps.data.persistence.service.datanode.batch.get","Time taken to get data nodes" +"cps.dataupdate.events.send","Time taken to send Data Update event" +"cps.module.service.schemaset.create","Time taken to create (and store) a schemaset" +"cps.data.service.datanode.query","Time taken to query data nodes" +"cps.data.service.datanode.query","Time taken to query data nodes with a limit on results" +"cps.data.service.datanode.root.save","Time taken to save a root data node" +"cps.data.service.datanode.child.save","Time taken to save a child data node" +"cps.data.service.list.element.save","Time taken to save list elements" +"cps.data.service.datanode.get","Time taken to get data nodes for an xpath" +"cps.data.service.datanode.batch.get","Time taken to get a batch of data nodes" +"cps.data.service.datanode.leaves.update","Time taken to update a batch of leaf data nodes" +"cps.data.service.datanode.leaves.descendants.leaves.update","Time taken to update data node leaves and existing descendants leaves" +"cps.data.service.datanode.descendants.update","Time taken to update a data node and descendants" +"cps.data.service.datanode.descendants.batch.update","Time taken to update a batch of data nodes and descendants" +"cps.data.service.list.update","Time taken to update a list" +"cps.data.service.list.batch.update","Time taken to update a batch of lists" 
+"cps.data.service.datanode.delete","Time taken to delete a datanode" +"cps.data.service.datanode.batch.delete","Time taken to delete a batch of datanodes" +"cps.data.service.datanode.delete.anchor","Time taken to delete all datanodes for an anchor" +"cps.data.service.datanode.delete.anchor.batch","Time taken to delete all datanodes for multiple anchors" +"cps.data.service.list.delete","Time taken to delete a list or list element" +"cps.delta.service.get.delta","Time taken to get delta between anchors" +"cps.delta.service.get.delta","Time taken to get delta between anchor and a payload" +"cps.utils.yangparser.nodedata.with.parent.parse","Time taken to parse node data with a parent" +"cps.utils.yangparser.nodedata.with.parent.with.yangResourceMap.parse","Time taken to parse node data with a parent" +"cps.yangtextschemasourceset.build","Time taken to build a yang text schema source set" +"cps.yang.schemasourceset.build","Time taken to build a ODL yang Model" diff --git a/docs/csv/ncmp_custom_properties.csv b/docs/csv/ncmp_custom_properties.csv new file mode 100644 index 0000000000..341017a286 --- /dev/null +++ b/docs/csv/ncmp_custom_properties.csv @@ -0,0 +1,16 @@ +Property,"Description",Default Value +ncmp.timers.advised-modules-sync.sleep-time-ms,"Specifies the delay in milliseconds in which the module sync watch dog will wake again after finishing.",5000 +ncmp.timers.advised-modules-sync.initial-delay-ms,"Specifies the delay in milliseconds in which the module sync watch dog will wake up for the first time.",40000 +ncmp.timers.cm-handle-data-sync.sleep-time-ms,"Specifies the delay in milliseconds in which the data sync watch dog will wake again after finishing.",30000 +ncmp.timers.cm-handle-data-sync.initial-delay-ms,"Specifies the delay in milliseconds in which the data sync watch dog will wake up for the first time.",40000 +ncmp.[app].httpclient.[services].maximumInMemorySizeInMegabytes,"Maximum size (in MB) of the in-memory buffer for HTTP response data.",16 +ncmp.[app].httpclient.[services].maximumConnectionsTotal,"Maximum number of simultaneous connections allowed in the connection pool.",100 +ncmp.[app].httpclient.[services].pendingAcquireMaxCount,"Maximum number of pending requests when the connection pool is full.",50 +ncmp.[app].httpclient.[services].connectionTimeoutInSeconds,"Specifies the maximum time in seconds, to wait for establishing a connection for the HTTP client",30 +ncmp.[app].httpclient.[services].readTimeoutInSeconds,"Timeout (in seconds) for reading data from the server after the connection is established.",30 +ncmp.[app].httpclient.[services].writeTimeoutInSeconds,"Timeout (in seconds) for writing data to the server.",30 +ncmp.[app].httpclient.[services].responseTimeoutInSeconds,"Total timeout (in seconds) for receiving a complete response, including all processing stages.",60 +ncmp.policy-executor.enabled,"Enables or disables the policy-executor feature.",false +ncmp.policy-executor.defaultDecision,"The default (fallback) decision in case a problem with the external service occurs.",allow +ncmp.policy-executor.server.address,"The server address for the external policy executor service.",``http://policy-executor-stub`` +ncmp.policy-executor.server.port,"The port used for the external policy executor service.",8093 diff --git a/docs/data-operation-events.rst b/docs/data-operation-events.rst index 51ec1254af..4e2be4419e 100644 --- a/docs/data-operation-events.rst +++ b/docs/data-operation-events.rst @@ -1,6 +1,6 @@ .. 
This work is licensed under a Creative Commons Attribution 4.0 International License. .. http://creativecommons.org/licenses/by/4.0 -.. Copyright (C) 2023 Nordix Foundation +.. Copyright (C) 2023-2025 Nordix Foundation .. DO NOT CHANGE THIS LABEL FOR RELEASE NOTES - EVEN THOUGH IT GIVES A WARNING .. _dataOperationEvents: @@ -15,7 +15,7 @@ Please refer to the `cloud events <https://cloudevents.io/>`_ for more details. Data operation response events ****************************** -:download:`Data operation event schema <schemas/data-operation-event-schema-1.0.0.json>` +:download:`Data operation event schema <schemas/ncmp/async-m2m/data-operation-event-schema-1.0.0.json>` Event headers example ^^^^^^^^^^^^^^^^^^^^^ diff --git a/docs/deployment.rst b/docs/deployment.rst index 940bc50923..31673d6560 100644 --- a/docs/deployment.rst +++ b/docs/deployment.rst @@ -1,6 +1,6 @@ .. This work is licensed under a Creative Commons Attribution 4.0 International License. .. http://creativecommons.org/licenses/by/4.0 -.. Copyright (C) 2021-2025 Nordix Foundation +.. Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. .. Modifications Copyright (C) 2021 Bell Canada. .. DO NOT CHANGE THIS LABEL FOR RELEASE NOTES - EVEN THOUGH IT GIVES A WARNING @@ -22,14 +22,24 @@ set appropriately. For example, given a database with 2GB of memory, 512MB is a CPS and NCMP Configuration ========================== +CPU and Memory Requirements +--------------------------- + +The following are minimum requirements for NCMP: + +* For 20,000 CM-handles: 2 CPUs and 2 GB RAM per instance, with 70% heap allocation. +* For 50,000 CM-handles: 3 CPUs and 3 GB RAM per instance, with 70% heap allocation. + JVM Memory Allocation +^^^^^^^^^^^^^^^^^^^^^ -Allocating 75% of the container's memory to the JVM heap ensures efficient memory management. -This helps the JVM make the best use of the allocated resources while leaving enough memory for other processes. +When running with 2 GB or more memory per instance, allocating 70% of the JVM memory to the heap ensures efficient +memory management. It is not recommended to go above 70%. .. code-block:: yaml - JAVA_TOOL_OPTIONS: "-XX:InitialRAMPercentage=75.0 -XX:MaxRAMPercentage=75.0" + JAVA_TOOL_OPTIONS: "-XX:InitialRAMPercentage=70.0 -XX:MaxRAMPercentage=70.0" + Load balancer configuration =========================== @@ -129,7 +139,7 @@ Each cps component can be restarted independently by issuing the following comma Credentials Retrieval ===================== -Application and database credentials are kept in Kubernetes secrets. They are defined as external secrets in the +When using OOM application and database credentials are kept in Kubernetes secrets. They are defined as external secrets in the values.yaml file to be used across different components as : .. container:: ulist @@ -179,149 +189,36 @@ To get a listing of the cps-core Pods, run the following command: .. note:: The CPS Service will have to be restarted each time a change is made to a configurable property. -Additional CPS-Core Customizations -================================== - -The following table lists some properties that can be specified as Helm chart -values to configure the application to be deployed. This list is not exhaustive. 
- -Any spring supported property can be configured by providing in ``config.additional.<spring-supported-property-name>: value`` Example: config.additional.spring.datasource.hikari.maximumPoolSize: 30 - -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| Property | Description | Default Value | -+===========================================+=========================================================================================================+===============================+ -| config.appUserName | User name used by cps-core service to configure the authentication for REST API it exposes. | ``cpsuser`` | -| | | | -| | This is the user name to be used by cps-core REST clients to authenticate themselves. | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.appUserPassword | Password used by cps-core service to configure the authentication for REST API it exposes. | Not defined | -| | | | -| | If not defined, the password is generated when deploying the application. | | -| | | | -| | See also :ref:`cps_common_credentials_retrieval`. | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| postgres.config.pgUserName | Internal user name used by cps-core to connect to its own database. | ``cps`` | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| postgres.config.pgUserPassword | Internal password used by cps-core to connect to its own database. | Not defined | -| | | | -| | If not defined, the password is generated when deploying the application. | | -| | | | -| | See also :ref:`cps_common_credentials_retrieval`. | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| postgres.config.pgDatabase | Database name used by cps-core | ``cpsdb`` | -| | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| logging.level | Logging level set in cps-core | info | -| | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.useStrimziKafka | If targeting a custom kafka cluster, i.e. useStrimziKafka: false, the | true | -| | config.eventPublisher.spring.kafka values below must be set. | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.eventPublisher. | Kafka hostname and port | ``<kafka-bootstrap>:9092`` | -| spring.kafka.bootstrap-servers | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.eventPublisher. 
| Kafka consumer client id | ``cps-core`` | -| spring.kafka.consumer.client-id | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.eventPublisher. | Kafka security protocol. | ``SASL_PLAINTEXT`` | -| spring.kafka.security.protocol | Some possible values are: | | -| | | | -| | * ``PLAINTEXT`` | | -| | * ``SASL_PLAINTEXT``, for authentication | | -| | * ``SASL_SSL``, for authentication and encryption | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.eventPublisher. | Kafka security SASL mechanism. Required for SASL_PLAINTEXT and SASL_SSL protocols. | Not defined | -| spring.kafka.properties. | Some possible values are: | | -| sasl.mechanism | | | -| | * ``PLAIN``, for PLAINTEXT | | -| | * ``SCRAM-SHA-512``, for SSL | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.eventPublisher. | Kafka security SASL JAAS configuration. Required for SASL_PLAINTEXT and SASL_SSL protocols. | Not defined | -| spring.kafka.properties. | Some possible values are: | | -| sasl.jaas.config | | | -| | * ``org.apache.kafka.common.security.plain.PlainLoginModule required username="..." password="...";``, | | -| | for PLAINTEXT | | -| | * ``org.apache.kafka.common.security.scram.ScramLoginModule required username="..." password="...";``, | | -| | for SSL | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.eventPublisher. | Kafka security SASL SSL store type. Required for SASL_SSL protocol. | Not defined | -| spring.kafka.ssl.trust-store-type | Some possible values are: | | -| | | | -| | * ``JKS`` | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.eventPublisher. | Kafka security SASL SSL store file location. Required for SASL_SSL protocol. | Not defined | -| spring.kafka.ssl.trust-store-location | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.eventPublisher. | Kafka security SASL SSL store password. Required for SASL_SSL protocol. | Not defined | -| spring.kafka.ssl.trust-store-password | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.eventPublisher. | Kafka security SASL SSL broker hostname identification verification. Required for SASL_SSL protocol. | Not defined | -| spring.kafka.properties. | Possible value is: | | -| ssl.endpoint.identification.algorithm | | | -| | * ``""``, empty string to disable | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.additional. | Core pool size in asynchronous execution of notification. 
| ``2`` | -| notification.async.executor. | | | -| core-pool-size | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.additional. | Max pool size in asynchronous execution of notification. | ``1`` | -| notification.async.executor. | | | -| max-pool-size | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.additional. | Queue Capacity in asynchronous execution of notification. | ``500`` | -| notification.async.executor. | | | -| queue-capacity | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.additional. | If the executor should wait for the tasks to be completed on shutdown | ``true`` | -| notification.async.executor. | | | -| wait-for-tasks-to-complete-on-shutdown | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.additional. | Prefix to be added to the thread name in asynchronous execution of notifications. | ``Async-`` | -| notification.async.executor. | | | -| thread-name-prefix | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.additional. | Maximum time allowed by the thread pool executor for execution of one of the threads in milliseconds. | ``60000`` | -| notification.async.executor. | | | -| time-out-value-in-ms | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.additional. | Specifies number of database connections between database and application. | ``10`` | -| spring.datasource.hikari. | This property controls the maximum size that the pool is allowed to reach, | | -| maximumPoolSize | including both idle and in-use connections. | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ - -.. _additional-cps-ncmp-customizations: - -Additional CPS-NCMP Customizations -================================== -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.dmiPluginUserName | User name used by cps-core to authenticate themselves for using ncmp-dmi-plugin service. | ``dmiuser`` | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.dmiPluginUserPassword | Internal password used by cps-core to connect to ncmp-dmi-plugin service. | Not defined | -| | | | -| | If not defined, the password is generated when deploying the application. | | -| | | | -| | See also :ref:`cps_common_credentials_retrieval`. 
| | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.ncmp.timers | Specifies the delay in milliseconds in which the module sync watch dog will wake again after finishing. | ``5000`` | -| .advised-modules-sync.sleep-time-ms | | | -| | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.ncmp.timers | Specifies the delay in milliseconds in which the data sync watch dog will wake again after finishing. | ``30000`` | -| .cm-handle-data-sync.sleep-time-ms | | | -| | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.additional.ncmp.dmi.httpclient | Specifies the maximum time in seconds, to wait for establishing a connection for the HTTP Client. | ``30`` | -| .connectionTimeoutInSeconds | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.additional.ncmp.dmi.httpclient | Specifies the maximum number of connections allowed per route in the HTTP client. | ``50`` | -| .maximumConnectionsPerRoute | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.additional.ncmp.dmi.httpclient | Specifies the maximum total number of connections that can be held by the HTTP client. | ``100`` | -| .maximumConnectionsTotal | | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| config.additional.ncmp.dmi.httpclient | Specifies the duration in seconds for the threshold, after which idle connections will be evicted | ``5`` | -| .idleConnectionEvictionThresholdInSeconds | from the connection pool by the HTTP client. | | -+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ +.. _configuration-properties: + +Configuration Properties +======================== +The following tables list properties that can be configured in the deployment. This list is not exhaustive. + +.. csv-table:: 3PP Properties + :file: csv/3pp_properties.csv + :widths: 20, 50, 30 + :header-rows: 1 + +.. note:: + - The default datasource is defined as ``jdbc:postgresql://${DB_HOST:localhost}:${DB_PORT:5432}/cpsdb``. So it can also be configured using environment variables to just set the hostname ``DB_HOST`` and port ``DB_PORT``. + - The kafka bootstrap-servers can also be overridden with the environment variable ``KAFKA_BOOTSTRAP_SERVER``. + +.. csv-table:: Common CPS-NCMP Custom Properties + :file: csv/common_custom_properties.csv + :widths: 20, 50, 30 + :header-rows: 1 + +.. csv-table:: NCMP Custom Properties + :file: csv/ncmp_custom_properties.csv + :widths: 20, 50, 30 + :header-rows: 1 + +.. note:: + - [app]: can be ``policy-executor`` or ``dmi``. + - [services]: ``all-services`` for 'policy-executor'. 
+ - [services]: ``data-services`` and ``model-services`` for 'dmi'. + - All ncmp.policy-executor properties can also be overridden using environment variables: ``POLICY_SERVICE_ENABLED``, ``POLICY_SERVICE_DEFAULT_DECISION``, ``POLICY_SERVICE_URL``, ``POLICY_SERVICE_PORT``. CPS-Core Docker Installation ============================ @@ -342,13 +239,13 @@ Below are the list of distributed datastructures that we have. +--------------+------------------------------------+-----------------------------------------------------------+ | Component | Data Structure Name | Use | +==============+====================================+===========================================================+ -| cps-ncmp | moduleSyncStartedOnCmHandles | Watchdog process to register cm handles. | +| cps-ncmp | moduleSyncStartedOnCmHandles | Watchdog process to register CM Handles. | +--------------+------------------------------------+-----------------------------------------------------------+ | cps-ncmp | dataSyncSemaphores | Watchdog process to sync data from the nodes. | +--------------+------------------------------------+-----------------------------------------------------------+ | cps-ncmp | moduleSyncWorkQueue | Queue used internally for workers to pick the task. | +--------------+------------------------------------+-----------------------------------------------------------+ -| cps-ncmp | trustLevelPerCmHandle | Stores the trust level per cm handle id | +| cps-ncmp | trustLevelPerCmHandle | Stores the trust level per CM Handle id | +--------------+------------------------------------+-----------------------------------------------------------+ | cps-ncmp | trustLevelPerDmiPlugin | Stores the trust level for the dmi-plugins. | +--------------+------------------------------------+-----------------------------------------------------------+ @@ -356,5 +253,7 @@ Below are the list of distributed datastructures that we have. +--------------+------------------------------------+-----------------------------------------------------------+ | cps-ncmp | cpsAndNcmpLock | Cps and NCMP distributed lock for various use cases. | +--------------+------------------------------------+-----------------------------------------------------------+ +| cps-ncmp | cmHandleIdPerAlternateId | Stores CM Handle ids per alternate ids. | ++--------------+------------------------------------+-----------------------------------------------------------+ -Total number of caches : 7 +Total number of caches : 8 diff --git a/docs/design.rst b/docs/design.rst index 52f977a99a..7ba3999aa1 100644 --- a/docs/design.rst +++ b/docs/design.rst @@ -57,6 +57,20 @@ and CPS-NCMP-Inventory using the drop down table in the top right: http://<hostname>:<port>/swagger-ui/index.html?configUrl=/v3/api-docs/swagger-config#/ +Notes +----- + +Input Size Limitations +++++++++++++++++++++++ + +Depending on the HTTP server used, the request size is typically limited. For example, Nginx has a default limit of 1 MB. +In that case a registration request is limited to approximately 3,000 CM Handles (depending on the length of CM Handle ids and other properties involved). +When needed, update your configuration to increase the maximum request size for REST requests, e.g.: + +.. code-block:: bash + + client_max_body_size 2m; + Consumed APIs ============= diff --git a/docs/modeling.rst b/docs/modeling.rst index 7ebf6fecd5..65e4aa97ca 100644 --- a/docs/modeling.rst +++ b/docs/modeling.rst @@ -1,7 +1,7 @@ .. 
This work is licensed under a Creative Commons Attribution 4.0 International License. .. http://creativecommons.org/licenses/by/4.0 .. Copyright (C) 2021 Pantheon.tech -.. Modifications Copyright (C) 2021-2023 Nordix Foundation +.. Modifications Copyright (C) 2021-2025 Nordix Foundation .. _modeling: .. toctree:: @@ -89,31 +89,31 @@ Note: Although additional-properties are present in the model of the dmi-registr Basic Concepts -------------- -- **CM-Handle** represents an instance a modeled Network Function(node) in ONAP. +- **CM Handle** represents an instance of a modeled Network Function (node) in ONAP. These are stored as Anchors within CPS-Core. - - **CM-Handle States** are used to represent the potential states in which a CM-Handle can transition between. + - **CM Handle States** are used to represent the potential states a CM Handle can transition between. - The 5 possible CM-Handle states are: ADVISED, READY, LOCKED, DELETING, DELETED + The 5 possible CM Handle states are: ADVISED, READY, LOCKED, DELETING, DELETED - **ADVISED** indicates that a CM-Handle has been registered successfully, and is waiting for the module synchronization process to sync the CM-Handle. + **ADVISED** indicates that a CM Handle has been registered successfully, and is waiting for the module synchronization process to sync the CM Handle. - **READY** indicates that the CM-Handle has been synced successfully. + **READY** indicates that the CM Handle has been synced successfully. - **LOCKED** indicates that the CM-Handle has not synced successfully. A retry mechanism within CPS will set the state back to ADVISED after a set time. + **LOCKED** indicates that the CM Handle has not synced successfully. A retry mechanism within CPS will set the state back to ADVISED after a set time. - **DELETING** indicates that the CM-Handle is currently being deleted. + **DELETING** indicates that the CM Handle is currently being deleted. - **DELETED** indicates that the CM-Handle has been deleted successfully. + **DELETED** indicates that the CM Handle has been deleted successfully. - - **Data-sync state** is the state of the data synchronization process of the CM-Handle + - **Data-sync state** is the state of the data synchronization process of the CM Handle There are 3 possibles states: NONE_REQUESTED, UNSYNCHRONIZED, SYNCHRONIZED **NONE_REQUESTED** indicates that the data sync is not requested by the user - **UNSYNCHRONIZED** indicates the cm-handle is waiting for the data sync watchdog operation to carry out the sync process + **UNSYNCHRONIZED** indicates the CM Handle is waiting for the data sync watchdog operation to carry out the sync process **SYNCHRONIZED** indicates the watchdog process has finished the data synchronization successfully diff --git a/docs/ncmp-async-events.rst b/docs/ncmp-async-events.rst index 49bf57085b..4b1284591f 100644 --- a/docs/ncmp-async-events.rst +++ b/docs/ncmp-async-events.rst @@ -1,6 +1,6 @@ .. This work is licensed under a Creative Commons Attribution 4.0 International License. .. http://creativecommons.org/licenses/by/4.0 -.. Copyright (C) 2023 Nordix Foundation +.. Copyright (C) 2023-2025 Nordix Foundation .. DO NOT CHANGE THIS LABEL FOR RELEASE NOTES - EVEN THOUGH IT GIVES A WARNING .. _asyncEvents: @@ -17,7 +17,7 @@ Introduction Async events are triggered when a valid topic has been detected in a passthrough operation. 
-:download:`NCMP request response event schema <schemas/ncmp-async-request-response-event-schema-v1.json>` +:download:`NCMP request response event schema <schemas/ncmp/async-m2m/ncmp-async-request-response-event-schema-v1.json>` Event header ^^^^^^^^^^^^ diff --git a/docs/ncmp-data-operation.rst b/docs/ncmp-data-operation.rst index 10c3bfaca5..6999a58f79 100644 --- a/docs/ncmp-data-operation.rst +++ b/docs/ncmp-data-operation.rst @@ -1,6 +1,6 @@ .. This work is licensed under a Creative Commons Attribution 4.0 International License. .. http://creativecommons.org/licenses/by/4.0 -.. Copyright (C) 2023-2024 Nordix Foundation +.. Copyright (C) 2023-2025 Nordix Foundation .. DO NOT CHANGE THIS LABEL FOR RELEASE NOTES - EVEN THOUGH IT GIVES A WARNING .. _cmHandleDataOperation: @@ -15,7 +15,7 @@ Data Operations Endpoint Introduction ============ -For all data operations on cm handle(s), we have a post endpoint: +For all data operations on CM Handle(s), we have a post endpoint: - /ncmp/v1/data?topic={client-topic-name} forward request to it's dmi plugin service. @@ -50,7 +50,7 @@ This endpoint executes data operation for given array of operations: | | | implementation. For ONAP DMI Plugin it will be RESTConf paths but it can| | | | really be anything. | +--------------------------+-------------+-------------------------------------------------------------------------+ - | targetIds | Yes | List of cm handle references | + | targetIds | Yes | List of CM Handle references | +--------------------------+-------------+-------------------------------------------------------------------------+ The status codes used in the events resulting from these operations are defined here: @@ -109,15 +109,13 @@ DMI Service 1 (POST): `http://{dmi-host-name}:{dmi-port}/dmi/v1/data?topic=my-to { "id": "ec2e9495679a43c58659c07d87025e72", "cmHandleProperties": { - "id": "123", - "attributes":{"userLabel":"test"} + "neType": "RadioNode" } }, { "id": "0df4d39af6514d99b816758148389cfd", "cmHandleProperties": { - "id": "123", - "attributes":{"userLabel":"test"} + "neType": "RadioNode" } } ] @@ -139,15 +137,13 @@ DMI Service 2 (POST) : `http://{dmi-host-name}:{dmi-port}/dmi/v1/data?topic=my-t { "id": "836bb62201f34a7aa056a47bd95a81ed", "cmHandleProperties": { - "id": "123", - "attributes":{"userLabel":"test"} + "neType": "RadioNode" } }, { "id": "202acb75b4a54e43bb1ff8c0c17a8e08", "cmHandleProperties": { - "id": "123", - "attributes":{"userLabel":"test"} + "neType": "RadioNode" } } ] @@ -156,4 +152,4 @@ DMI Service 2 (POST) : `http://{dmi-host-name}:{dmi-port}/dmi/v1/data?topic=my-t Above examples are for illustration purposes only. Please refer to link below for latest schema. -:download:`Data operation event schema <schemas/data-operation-event-schema-1.0.0.json>`
\ No newline at end of file +:download:`Data operation event schema <schemas/ncmp/async-m2m/data-operation-event-schema-1.0.0.json>` diff --git a/docs/overview.rst b/docs/overview.rst index 19ab8b4847..bc99214e2e 100644 --- a/docs/overview.rst +++ b/docs/overview.rst @@ -1,6 +1,8 @@ .. This work is licensed under a Creative Commons Attribution 4.0 International License. .. http://creativecommons.org/licenses/by/4.0 -.. Copyright (C) 2021 Pantheon.tech, Nordix Foundation +.. Copyright (C) 2021 Pantheon.tech +.. Modifications Copyright (C) 2021-2025 Nordix Foundation + .. _overview: CPS Overview @@ -45,7 +47,7 @@ even though CPS-Core could be deployed without the NCMP extension. NCMP-DMI-Plugin --------------- -The Data-Model-Inventory (DMI) Plugin is a rest interface used to synchronize CM-Handles data between CPS and DMI through the DMI-Plugin. +The Data-Model-Inventory (DMI) Plugin is a REST interface used to synchronize CM Handle data between CPS and DMI through the DMI-Plugin. This is built previously from the CPS-NF-Proxy component. CPS Project diff --git a/docs/policy-executor.rst b/docs/policy-executor.rst index b934a579b1..d7a0c7bf53 100644 --- a/docs/policy-executor.rst +++ b/docs/policy-executor.rst @@ -1,11 +1,10 @@ .. This work is licensed under a Creative Commons Attribution 4.0 International License. .. http://creativecommons.org/licenses/by/4.0 -.. Copyright (C) 2024 Nordix Foundation +.. Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. -.. DO NOT CHANGE THIS LABEL FOR RELEASE NOTES - EVEN THOUGH IT GIVES A WARNING +.. DO NOT CHANGE THIS LABEL - EVEN THOUGH IT GIVES A WARNING .. _policy_executor: - Policy Executor ############### @@ -15,7 +14,16 @@ Policy Executor Introduction ============ -Work In Progress: This feature is not yet completed and does not affect current NCMP functionality. +The Policy Executor feature can be used to connect an external system that makes decisions on CM write operations. +When the feature is enabled, NCMP will first call the configured external system and, depending on the response, either return an error or continue. +The details of the interface can be found in the ':ref:`policy_executor_consumed_apis`' section. + +This feature is available on the 'legacy data interface' for operations on a single CM Handle: "/v1/ch/{cm-handle}/data/ds/{datastore-name}" and only applies to "ncmp-datastore:passthrough-running". + +By default, the feature is disabled. It is enabled with 'config.additional.ncmp.policy-executor.enabled' and the other deployment parameters in the same group. See :ref:`configuration-properties`. + +.. DO NOT CHANGE THIS LABEL - EVEN THOUGH IT GIVES A WARNING +.. _policy_executor_consumed_apis: Consumed APIs ------------- diff --git a/docs/release-notes.rst b/docs/release-notes.rst index 9c825e4d35..2499faf4a2 100644 --- a/docs/release-notes.rst +++ b/docs/release-notes.rst @@ -1,6 +1,6 @@ .. This work is licensed under a Creative Commons Attribution 4.0 International License. .. http://creativecommons.org/licenses/by/4.0 -.. Copyright (C) 2021-2024 Nordix Foundation +.. Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved. .. DO NOT CHANGE THIS LABEL FOR RELEASE NOTES - EVEN THOUGH IT GIVES A WARNING .. _release_notes: @@ -11,9 +11,79 @@ CPS Release Notes .. contents:: :depth: 2 .. -.. ==================== +.. ===================== .. * * * PARIS * * * -.. ==================== +.. 
===================== + +Version: 3.6.3 +============== + +Release Data +------------ + ++--------------------------------------+--------------------------------------------------------+ +| **CPS Project** | | +| | | ++--------------------------------------+--------------------------------------------------------+ +| **Docker images** | onap/cps-and-ncmp:3.6.3 | +| | | ++--------------------------------------+--------------------------------------------------------+ +| **Release designation** | 3.6.3 Paris | +| | | ++--------------------------------------+--------------------------------------------------------+ +| **Release date** | Not yet released | +| | | ++--------------------------------------+--------------------------------------------------------+ + +Version: 3.6.2 +============== + +Release Data +------------ + ++--------------------------------------+--------------------------------------------------------+ +| **CPS Project** | | +| | | ++--------------------------------------+--------------------------------------------------------+ +| **Docker images** | onap/cps-and-ncmp:3.6.2 | +| | | ++--------------------------------------+--------------------------------------------------------+ +| **Release designation** | 3.6.2 Paris | +| | | ++--------------------------------------+--------------------------------------------------------+ +| **Release date** | 2025 April 16 | +| | | ++--------------------------------------+--------------------------------------------------------+ + +Features +-------- + - `CPS-2169 <https://lf-onap.atlassian.net/browse/CPS-2169>`_ Support 50K CM Handles (performance improvements) + - `CPS-2416 <https://lf-onap.atlassian.net/browse/CPS-2416>`_ Implement XPath Attribute Axis in CPS + - `CPS-2725 <https://lf-onap.atlassian.net/browse/CPS-2725>`_ Uplift Spring Boot to 3.4.4 version + +Version: 3.6.1 +============== + +Release Data +------------ + ++--------------------------------------+--------------------------------------------------------+ +| **CPS Project** | | +| | | ++--------------------------------------+--------------------------------------------------------+ +| **Docker images** | onap/cps-and-ncmp:3.6.1 | +| | | ++--------------------------------------+--------------------------------------------------------+ +| **Release designation** | 3.6.1 Paris | +| | | ++--------------------------------------+--------------------------------------------------------+ +| **Release date** | 2025 March 03 | +| | | ++--------------------------------------+--------------------------------------------------------+ + +Features +-------- + - Several performance improvements Version: 3.6.0 ============== @@ -31,16 +101,18 @@ Release Data | **Release designation** | 3.6.0 Paris | | | | +--------------------------------------+--------------------------------------------------------+ -| **Release date** | Not yet released | +| **Release date** | 2025 January 29 | | | | +--------------------------------------+--------------------------------------------------------+ Bug Fixes --------- + - `CPS-2563 <https://lf-onap.atlassian.net/browse/CPS-2563>`_ Fix for internal error code during duplicated registration. + - `CPS-2576 <https://lf-onap.atlassian.net/browse/CPS-2576>`_ Fix for cm handle stuck in LOCKED state during registration. Features -------- - + - `CPS-2249 <https://lf-onap.atlassian.net/browse/CPS-2249>`_ NCMP to support Conflict Handling. .. ==================== .. 
* * * OSLO * * * @@ -68,21 +140,19 @@ Release Data Bug Fixes --------- -3.5.5 - `CPS-2509 <https://lf-onap.atlassian.net/browse/CPS-2509>`_ Fix module endpoints using alternate identifier. - `CPS-2517 <https://lf-onap.atlassian.net/browse/CPS-2517>`_ Make Content-Type header default to JSON for CPS APIs. - - `CPS-2530 <https://lf-onap.atlassian.net/browse/CPS-2530>`_ NCMP Modules API giving empty response on READY cm handles if two sub systems discovered in parallel. + - `CPS-2530 <https://lf-onap.atlassian.net/browse/CPS-2530>`_ NCMP Modules API giving empty response on READY CM Handles if two sub systems discovered in parallel. Features -------- -3.5.5 - `CPS-2009 <https://lf-onap.atlassian.net/browse/CPS-2009>`_ Update legacy NCMP APIs interfaces to support alternate id. - `CPS-2082 <https://lf-onap.atlassian.net/browse/CPS-2082>`_ Support XML content type to data node APIs in cps-core. - `CPS-2433 <https://lf-onap.atlassian.net/browse/CPS-2433>`_ Remove traces of unmaintained CPS-TBDMT repository. - `CPS-2436 <https://lf-onap.atlassian.net/browse/CPS-2436>`_ CM Avc Event to publish source key to target key while forwarding. - `CPS-2445 <https://lf-onap.atlassian.net/browse/CPS-2445>`_ Expose CPS and NCMP version information using git plugin. - `CPS-2451 <https://lf-onap.atlassian.net/browse/CPS-2451>`_ Removing oparent from CPS-NCMP and ONAP DMI Plugin repository. - - `CPS-2478 <https://lf-onap.atlassian.net/browse/CPS-2478>`_ Optimized Cm Handle Registration and De-Registration use case. + - `CPS-2478 <https://lf-onap.atlassian.net/browse/CPS-2478>`_ Optimized CM Handle Registration and De-Registration use case. - `CPS-2507 <https://lf-onap.atlassian.net/browse/CPS-2507>`_ Upgrade liquibase to 4.30.0 version. Performance @@ -111,12 +181,10 @@ Release Data Bug Fixes --------- -3.5.4 - `CPS-2403 <https://lf-onap.atlassian.net/browse/CPS-2403>`_ Improve lock handling and queue management during CM-handle Module Sync. Features -------- -3.5.4 - `CPS-2408 <https://lf-onap.atlassian.net/browse/CPS-2408>`_ One Hazelcast instance per JVM to manage the distributed data structures. 
Version: 3.5.3 @@ -141,7 +209,6 @@ Release Data Bug Fixes --------- -3.5.3 - `CPS-2353 <https://lf-onap.atlassian.net/browse/CPS-2353>`_ Slow cmHandle registration when we use moduleSetTag, alternateId and dataProducerIdentifier - `CPS-2395 <https://lf-onap.atlassian.net/browse/CPS-2395>`_ Retry mechanism (with back off algorithm) is removed with more frequent watchdog poll - `CPS-2409 <https://lf-onap.atlassian.net/browse/CPS-2409>`_ Return NONE for get effective trust level api if the trust level caches empty (restart case) @@ -150,9 +217,6 @@ Bug Fixes Features -------- -3.5.3 - - `CPS-2247 <https://lf-onap.atlassian.net/browse/CPS-2247>`_ Policy Executor: Invoke Policy Executor and handle 'deny' response - - `CPS-2412 <https://lf-onap.atlassian.net/browse/CPS-2412>`_ Policy Executor: handle errors - `CPS-2417 <https://lf-onap.atlassian.net/browse/CPS-2417>`_ Remove Hazelcast cache for prefix resolver @@ -178,14 +242,12 @@ Release Data Bug Fixes --------- -3.5.2 - `CPS-2306 <https://lf-onap.atlassian.net/browse/CPS-2306>`_ Update response message for data validation failure and make it consistent across APIs - `CPS-2319 <https://lf-onap.atlassian.net/browse/CPS-2319>`_ Fix "Create a node" and "Add List Elements" APIs response code - `CPS-2372 <https://lf-onap.atlassian.net/browse/CPS-2372>`_ Blank alternate ID overwrites existing one Features -------- -3.5.2 - `CPS-1812 <https://lf-onap.atlassian.net/browse/CPS-1812>`_ CM Data Subscriptions ( Create, Delete and Merging ) with positive scenarios - `CPS-2326 <https://lf-onap.atlassian.net/browse/CPS-2326>`_ Uplift liquibase-core dependency to 4.28.0 - `CPS-2353 <https://lf-onap.atlassian.net/browse/CPS-2353>`_ Improve registration performance with moduleSetTag @@ -213,14 +275,12 @@ Release Data Bug Fixes --------- -3.5.1 - `CPS-2302 <https://lf-onap.atlassian.net/browse/CPS-2302>`_ Fix handling of special characters in moduleSetTag. Features -------- -3.5.1 - `CPS-2121 <https://lf-onap.atlassian.net/browse/CPS-2121>`_ Enabled http client prometheus metrics and manage high cardinality using URL template. - - `CPS-2289 <https://lf-onap.atlassian.net/browse/CPS-2289>`_ Support for CPS Path Query in NCMP Inventory Cm Handle Search. + - `CPS-2289 <https://lf-onap.atlassian.net/browse/CPS-2289>`_ Support for CPS Path Query in NCMP Inventory CM Handle Search. Version: 3.5.0 ============== @@ -242,13 +302,8 @@ Release Data | | | +--------------------------------------+--------------------------------------------------------+ -Bug Fixes ---------- -3.5.0 - Features -------- -3.5.0 - `CPS-989 <https://lf-onap.atlassian.net/browse/CPS-989>`_ Replace RestTemplate with WebClient. - `CPS-2172 <https://lf-onap.atlassian.net/browse/CPS-2172>`_ Support for OpenTelemetry Tracing. @@ -278,12 +333,10 @@ Release Data Bug Fixes --------- -3.4.9 - `CPS-2211 <https://lf-onap.atlassian.net/browse/CPS-2211>`_ Toggle switch to disable CPS Core change events if not used by application. Set CPS_CHANGE_EVENT_NOTIFICATIONS_ENABLED environment variable for the same. Features -------- -3.4.9 - `CPS-1836 <https://lf-onap.atlassian.net/browse/CPS-1836>`_ Delta between anchor and JSON payload. 
Version: 3.4.8 @@ -308,15 +361,11 @@ Release Data Bug Fixes --------- -3.4.8 - `CPS-2186 <https://lf-onap.atlassian.net/browse/CPS-2186>`_ Report async task failures to client topic during data operations request - `CPS-2190 <https://lf-onap.atlassian.net/browse/CPS-2190>`_ Improve performance of NCMP module searches - `CPS-2194 <https://lf-onap.atlassian.net/browse/CPS-2194>`_ Added defaults for CPS and DMI username and password - `CPS-2204 <https://lf-onap.atlassian.net/browse/CPS-2204>`_ Added error handling for yang module upgrade operation -Features --------- - Version: 3.4.7 ============== @@ -339,12 +388,10 @@ Release Data Bug Fixes --------- -3.4.7 - `CPS-2150 <https://lf-onap.atlassian.net/browse/CPS-2150>`_ Fix for Async task execution failed by TimeoutException. Features -------- -3.4.7 - `CPS-2061 <https://lf-onap.atlassian.net/browse/CPS-2061>`_ Liquibase Steps Condensing and Cleanup. - `CPS-2101 <https://lf-onap.atlassian.net/browse/CPS-2101>`_ Uplift Spring Boot to 3.2.4 version. @@ -370,7 +417,6 @@ Release Data Bug Fixes --------- -3.4.6 - `CPS-2126 <https://lf-onap.atlassian.net/browse/CPS-2126>`_ Passing HTTP Authorization Bearer Token to DMI Plugins. @@ -406,10 +452,6 @@ Release Data | | | +--------------------------------------+--------------------------------------------------------+ -Bug Fixes ---------- -3.4.5 - Features -------- @@ -438,7 +480,6 @@ Release Data Bug Fixes --------- -3.4.4 - `CPS-2027 <https://lf-onap.atlassian.net/browse/CPS-2027>`_ Upgrade Yang modules using module set tag functionalities fix Features @@ -469,7 +510,6 @@ Release Data Bug Fixes --------- -3.4.3 - `CPS-2000 <https://lf-onap.atlassian.net/browse/CPS-2000>`_ Fix for Schema object cache not being distributed. - `CPS-2027 <https://lf-onap.atlassian.net/browse/CPS-2027>`_ Fixes for upgrade yang modules using module set tag. - `CPS-2070 <https://lf-onap.atlassian.net/browse/CPS-2070>`_ Add retry interval for Kafka consumer. @@ -506,11 +546,6 @@ Release Data | | | +--------------------------------------+--------------------------------------------------------+ -Bug Fixes ---------- -3.4.2 - - Features -------- - `CPS-1638 <https://lf-onap.atlassian.net/browse/CPS-1638>`_ Introduce trust level for CM handle. @@ -530,7 +565,7 @@ Known Limitations, Issues and Workarounds For upgrading, CPS uses Liquibase for database upgrades. In order to enable Hibernate write batching (`CPS-1795 <https://lf-onap.atlassian.net/browse/CPS-1795>`_), a change to the database entity ID generation is required. As such, *this release does not fully support In-Service Software Upgrade* - CPS will not store new DataNodes and -NCMP will not register new CM-handles during an upgrade with old and new versions of CPS running concurrently. +NCMP will not register new CM Handles during an upgrade with old and new versions of CPS running concurrently. Other operations (read, update, delete) are not impacted. @@ -556,7 +591,6 @@ Release Data Bug Fixes --------- -3.4.1 - `CPS-1979 <https://lf-onap.atlassian.net/browse/CPS-1979>`_ Bug fix for Invalid topic name suffix. Features @@ -594,7 +628,6 @@ Release Data Bug Fixes --------- -3.4.0 - `CPS-1956 <https://lf-onap.atlassian.net/browse/CPS-1956>`_ Bug fix for No yang resources stored during cmhandle discovery. .. ======================== @@ -623,13 +656,9 @@ Release Data Bug Fixes --------- -3.3.9 - `CPS-1923 <https://lf-onap.atlassian.net/browse/CPS-1923>`_ CPS and NCMP changed management endpoint and port from /manage to /actuator and port same as cps application port. 
- `CPS-1933 <https://lf-onap.atlassian.net/browse/CPS-1933>`_ Setting up the class loader explicitly in hazelcast config. -Features --------- - Version: 3.3.8 ============== @@ -650,10 +679,6 @@ Release Data | | | +--------------------------------------+--------------------------------------------------------+ -Bug Fixes ---------- -3.3.8 - Features -------- - `CPS-1888 <https://lf-onap.atlassian.net/browse/CPS-1888>`_ Uplift Spring Boot to 3.1.2. @@ -680,7 +705,6 @@ Release Data Bug Fixes --------- -3.3.7 - `CPS-1866 <https://lf-onap.atlassian.net/browse/CPS-1866>`_ Fix ClassDefNotFoundError in opendaylight Yang parser Features @@ -713,7 +737,6 @@ Release Data Bug Fixes --------- -3.3.6 - `CPS-1841 <https://lf-onap.atlassian.net/browse/CPS-1841>`_ Update of top-level data node fails with exception - `CPS-1842 <https://lf-onap.atlassian.net/browse/CPS-1842>`_ Replace event-id with correlation-id for data read operation cloud event @@ -743,10 +766,6 @@ Release Data | | | +--------------------------------------+--------------------------------------------------------+ -Bug Fixes ---------- -3.3.5 - Features -------- - `CPS-1760 <https://lf-onap.atlassian.net/browse/CPS-1760>`_ Improve handling of special characters in Cps Paths @@ -771,10 +790,6 @@ Release Data | | | +--------------------------------------+--------------------------------------------------------+ -Bug Fixes ---------- -3.3.4 - Features -------- - `CPS-1767 <https://lf-onap.atlassian.net/browse/CPS-1767>`_ Upgrade CPS to java 17 @@ -799,13 +814,9 @@ Release Data | | | +--------------------------------------+--------------------------------------------------------+ -Bug Fixes ---------- -3.3.3 - Features -------- - - `CPS-1515 <https://lf-onap.atlassian.net/browse/CPS-1515>`_ Support Multiple CM-Handles for NCMP Get Operation + - `CPS-1515 <https://lf-onap.atlassian.net/browse/CPS-1515>`_ Support Multiple CM Handles for NCMP Get Operation - `CPS-1675 <https://lf-onap.atlassian.net/browse/CPS-1675>`_ Persistence write performance improvement(s) - `CPS-1745 <https://lf-onap.atlassian.net/browse/CPS-1745>`_ Upgrade to Openapi 3.0.3 @@ -831,7 +842,6 @@ Release Data Bug Fixes --------- -3.3.2 - `CPS-1716 <https://lf-onap.atlassian.net/browse/CPS-1716>`_ NCMP: Java Heap OutOfMemory errors and slow registration in case of 20k cmhandles Features @@ -862,11 +872,6 @@ Release Data | | | +--------------------------------------+--------------------------------------------------------+ -Bug Fixes ---------- -3.3.1 - - None - Features -------- - `CPS-1272 <https://lf-onap.atlassian.net/browse/CPS-1272>`_ Add Contains operation to CPS Path @@ -895,11 +900,6 @@ Release Data | | | +--------------------------------------+--------------------------------------------------------+ -Bug Fixes ---------- -3.3.0 - - None - Features -------- - `CPS-1215 <https://lf-onap.atlassian.net/browse/CPS-1215>`_ Add OR operation for CPS Path @@ -931,7 +931,6 @@ Release Data Bug Fixes --------- -3.2.6 - `CPS-1526 <https://lf-onap.atlassian.net/browse/CPS-1526>`_ Fix response message for PATCH operation - `CPS-1563 <https://lf-onap.atlassian.net/browse/CPS-1563>`_ Fix 500 response error on id-searches with empty parameters @@ -961,13 +960,8 @@ Release Data Bug Fixes --------- -3.2.5 - `CPS-1537 <https://lf-onap.atlassian.net/browse/CPS-1537>`_ Introduce control switch for model loader functionality. 
-Features --------- - - None - Version: 3.2.4 ============== @@ -1265,7 +1259,7 @@ Features - `CPS-869 <https://lf-onap.atlassian.net/browse/CPS-869>`_ Apply Standardized logging fields to adhere to ONAP Best practice REQ-1072 - `CPS-870 <https://lf-onap.atlassian.net/browse/CPS-870>`_ Align CPS-Core output with SDN-C output (add module name) - `CPS-875 <https://lf-onap.atlassian.net/browse/CPS-875>`_ CM Handle State: Watchdog-process that syncs 'ADVISED' CM Handles - - `CPS-877 <https://lf-onap.atlassian.net/browse/CPS-877>`_ CM Handle State: Exclude any CM-Handles from queries/operations that are not in state 'READY' + - `CPS-877 <https://lf-onap.atlassian.net/browse/CPS-877>`_ CM Handle State: Exclude any CM Handles from queries/operations that are not in state 'READY' - `CPS-899 <https://lf-onap.atlassian.net/browse/CPS-899>`_ Start and stop sessions on Java API - `CPS-909 <https://lf-onap.atlassian.net/browse/CPS-909>`_ Separate NCMP endpoint for ch/{cm-handle}/properties and ch/{cm-handle}/state - `CPS-917 <https://lf-onap.atlassian.net/browse/CPS-917>`_ Structured Errors response for passthrough use-cases in NCMP @@ -1278,7 +1272,7 @@ Features - `CPS-1099 <https://lf-onap.atlassian.net/browse/CPS-1099>`_ Expose simplified 'external' lock reason enum state over REST interface - `CPS-1101 <https://lf-onap.atlassian.net/browse/CPS-1101>`_ Introducing the DELETING and DELETED Cmhandle State - `CPS-1102 <https://lf-onap.atlassian.net/browse/CPS-1102>`_ Register the Cmhandle Sends Advised State notification. - - `CPS-1133 <https://lf-onap.atlassian.net/browse/CPS-1133>`_ Enable/Disable Data Sync for Cm Handle + - `CPS-1133 <https://lf-onap.atlassian.net/browse/CPS-1133>`_ Enable/Disable Data Sync for CM Handle - `CPS-1136 <https://lf-onap.atlassian.net/browse/CPS-1136>`_ DMI Audit Support (get all CM Handles for a registered DMI) @@ -1379,27 +1373,27 @@ Features - `CPS-559 <https://lf-onap.atlassian.net/browse/CPS-559>`_ Define response objects (schemas) in cps-ncmp - `CPS-636 <https://lf-onap.atlassian.net/browse/CPS-636>`_ Update operation for datastore pass through running - `CPS-638 <https://lf-onap.atlassian.net/browse/CPS-638>`_ Delete operation for datastore pass through running - - `CPS-677 <https://lf-onap.atlassian.net/browse/CPS-677>`_ Support 'public' Cm Handle Properties - - `CPS-741 <https://lf-onap.atlassian.net/browse/CPS-741>`_ Re sync after removing cm handles + - `CPS-677 <https://lf-onap.atlassian.net/browse/CPS-677>`_ Support 'public' CM Handle Properties + - `CPS-741 <https://lf-onap.atlassian.net/browse/CPS-741>`_ Re sync after removing CM Handles - `CPS-777 <https://lf-onap.atlassian.net/browse/CPS-777>`_ Ensure all DMI operations use POST method - `CPS-780 <https://lf-onap.atlassian.net/browse/CPS-780>`_ Add examples for parameters, request and response in openapi yaml for cps-core - `CPS-789 <https://lf-onap.atlassian.net/browse/CPS-789>`_ CPS Data Updated Event Schema V2 to support delete operation - `CPS-791 <https://lf-onap.atlassian.net/browse/CPS-791>`_ CPS-Core sends delete notification event - - `CPS-817 <https://lf-onap.atlassian.net/browse/CPS-817>`_ Create Endpoint For Get Cm Handles (incl. public properties) By Name + - `CPS-817 <https://lf-onap.atlassian.net/browse/CPS-817>`_ Create Endpoint For Get CM Handles (incl. 
public properties) By Name - `CPS-837 <https://lf-onap.atlassian.net/browse/CPS-837>`_ Add Remove and Update properties (DMI and Public) as part of CM Handle Registration update Bug Fixes --------- - - `CPS-762 <https://lf-onap.atlassian.net/browse/CPS-762>`_ Query cm handles for module names returns incorrect cm handle identifiers + - `CPS-762 <https://lf-onap.atlassian.net/browse/CPS-762>`_ Query CM Handles for module names returns incorrect CM Handle identifiers - `CPS-788 <https://lf-onap.atlassian.net/browse/CPS-788>`_ Yang Resource formatting is incorrect - - `CPS-783 <https://lf-onap.atlassian.net/browse/CPS-783>`_ Remove cm handle does not completely remove all cm handle information + - `CPS-783 <https://lf-onap.atlassian.net/browse/CPS-783>`_ Remove CM Handle does not completely remove all CM Handle information - `CPS-841 <https://lf-onap.atlassian.net/browse/CPS-841>`_ Upgrade log4j to 2.17.1 as recommended by ONAP SECCOM - `CPS-856 <https://lf-onap.atlassian.net/browse/CPS-856>`_ Retry mechanism not working for concurrent CmHandle registration - `CPS-867 <https://lf-onap.atlassian.net/browse/CPS-867>`_ Database port made configurable through env variable DB_PORT - `CPS-886 <https://lf-onap.atlassian.net/browse/CPS-886>`_ Fragment handling decreasing performance for large number of cmHandles - `CPS-887 <https://lf-onap.atlassian.net/browse/CPS-887>`_ Increase performance of cmHandle registration for large number of schema sets in DB - - `CPS-892 <https://lf-onap.atlassian.net/browse/CPS-892>`_ Fixed the response code during CM-Handle Registration from 201 CREATED to 204 NO_CONTENT + - `CPS-892 <https://lf-onap.atlassian.net/browse/CPS-892>`_ Fixed the response code during CM Handle Registration from 201 CREATED to 204 NO_CONTENT - `CPS-893 <https://lf-onap.atlassian.net/browse/CPS-893>`_ NCMP Java API depends on NCMP-Rest-API (cyclic) through json properties on Java API Known Limitations, Issues and Workarounds @@ -1407,9 +1401,9 @@ Known Limitations, Issues and Workarounds *System Limitations* -Null can no longer be passed within the dmi plugin service names when registering a cm handle, as part of +Null can no longer be passed within the dmi plugin service names when registering a CM Handle, as part of `CPS-837 <https://lf-onap.atlassian.net/browse/CPS-837>`_ null is now used to indicate if a property should be removed as part -of cm handle registration. +of CM Handle registration. The Absolute path to list with integer key will not work. Please refer `CPS-961 <https://lf-onap.atlassian.net/browse/CPS-961>`_ for more information. 
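A minimal sketch of the behaviour described in the limitation above, where a ``null`` property value in a registration update is interpreted as a request to remove that property (CPS-837). The payload shape below is an assumption for illustration only; field names such as ``updatedCmHandles`` and ``publicCmHandleProperties`` should be verified against the published NCMP-Inventory OpenAPI specification:

.. code-block:: json

    {
      "dmiPlugin": "my-dmi-plugin",
      "updatedCmHandles": [
        {
          "cmHandle": "my-cm-handle-1",
          "publicCmHandleProperties": {
            "Colour": null
          }
        }
      ]
    }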
diff --git a/docs/schemas/dmidataavc/avc-event-schema-1.0.0.json b/docs/schemas/dmi/cm-events/avc-event-schema-1.0.0.json index 474520d142..474520d142 100644 --- a/docs/schemas/dmidataavc/avc-event-schema-1.0.0.json +++ b/docs/schemas/dmi/cm-events/avc-event-schema-1.0.0.json diff --git a/docs/schemas/lcm/lcm-event-header-v1.json b/docs/schemas/lcm/lcm-event-header-v1.json new file mode 100644 index 0000000000..8c9922ef7e --- /dev/null +++ b/docs/schemas/lcm/lcm-event-header-v1.json @@ -0,0 +1,56 @@ +{ + + "$schema": "https://json-schema.org/draft/2019-09/schema", + "$id": "urn:cps:org.onap.ncmp.cmhandle.lcm-event-header:v1", + "$ref": "#/definitions/LcmEventHeader", + + "definitions": { + "LcmEventHeader": { + "description": "The header for LCM event", + "type": "object", + "javaType" : "org.onap.cps.ncmp.events.lcm.v1.LcmEventHeader", + "properties": { + "eventId": { + "description": "The unique id identifying the event", + "type": "string" + }, + "eventCorrelationId": { + "description": "The id identifying the event", + "type": "string" + }, + "eventTime": { + "description": "The timestamp when original event occurred", + "type": "string" + }, + "eventSource": { + "description": "The source of the event", + "type": "string" + }, + "eventType": { + "description": "The type of the event", + "type": "string" + }, + "eventSchema": { + "description": "The schema that this event adheres to", + "type": "string" + }, + "eventSchemaVersion": { + "description": "The version of the schema that this event adheres to", + "type": "string" + } + }, + "required": [ + "eventId", + "eventCorrelationId", + "eventTime", + "eventSource", + "eventType", + "eventSchema", + "eventSchemaVersion", + "event" + ], + "additionalProperties": false + } + + } +} diff --git a/docs/schemas/lcm-event-schema-v1.json b/docs/schemas/lcm/lcm-event-schema-v1.json index 97c0fbee22..bd0d90d04a 100644 --- a/docs/schemas/lcm-event-schema-v1.json +++ b/docs/schemas/lcm/lcm-event-schema-v1.json @@ -39,6 +39,18 @@ "description": "cmHandle id", "type": "string" }, + "alternateId": { + "description": "alternative id for cmHandle (e.g. 
3GPP FDN)", + "type": "string" + }, + "moduleSetTag": { + "description": "module set tag for cmHandle", + "type": "string" + }, + "dataProducerIdentifier": { + "description": "data producer identifier for cmHandle", + "type": "string" + }, "oldValues": { "$ref": "#/definitions/Values" }, @@ -55,7 +67,7 @@ "LcmEvent": { "description": "The payload for LCM event", "type": "object", - "javaType" : "org.onap.ncmp.cmhandle.event.lcm.LcmEvent", + "javaType" : "org.onap.cps.ncmp.events.lcm.v1.LcmEvent", "properties": { "eventId": { "description": "The unique id identifying the event", diff --git a/docs/schemas/data-operation-event-schema-1.0.0.json b/docs/schemas/ncmp/async-m2m/data-operation-event-schema-1.0.0.json index c2915187c7..c2915187c7 100644 --- a/docs/schemas/data-operation-event-schema-1.0.0.json +++ b/docs/schemas/ncmp/async-m2m/data-operation-event-schema-1.0.0.json diff --git a/cps-ncmp-events/src/main/resources/schemas/ncmp-async-request-response-event-schema-v1.json b/docs/schemas/ncmp/async-m2m/ncmp-async-request-response-event-schema-v1.json index 51c2cf4d40..32b7becd05 100644 --- a/cps-ncmp-events/src/main/resources/schemas/ncmp-async-request-response-event-schema-v1.json +++ b/docs/schemas/ncmp/async-m2m/ncmp-async-request-response-event-schema-v1.json @@ -6,6 +6,7 @@ "NcmpAsyncRequestResponseEvent": { "description": "The payload for CPS async request response event.", "type": "object", + "javaType" : "org.onap.cps.ncmp.event.model.NcmpAsyncRequestResponseEvent", "properties": { "eventId": { "description": "The unique id identifying the event generated by DMI.", diff --git a/cps-ncmp-events/src/main/resources/schemas/cmnotificationsubscription/ncmp-in-event-schema-1.0.0.json b/docs/schemas/ncmp/avc/ncmp-in-event-schema-1.0.0.json index f8b6c2e680..d4e8519956 100644 --- a/cps-ncmp-events/src/main/resources/schemas/cmnotificationsubscription/ncmp-in-event-schema-1.0.0.json +++ b/docs/schemas/ncmp/avc/ncmp-in-event-schema-1.0.0.json @@ -1,5 +1,5 @@ { - "$id": "urn:cps:org.onap.cps.ncmp.events:cm-notification-subscription-ncmp-in-event:1.0.0", + "$id": "urn:cps:org.onap.ncmp.events.subscription:1.0.0", "$ref": "#/definitions/NcmpInEvent", "$schema": "https://json-schema.org/draft/2019-09/schema", "definitions": { diff --git a/docs/schemas/ncmp-out-event-schema-1.0.0.json b/docs/schemas/ncmp/avc/ncmp-out-event-schema-1.0.0.json index d6ef55d063..f1dabc17a3 100644 --- a/docs/schemas/ncmp-out-event-schema-1.0.0.json +++ b/docs/schemas/ncmp/avc/ncmp-out-event-schema-1.0.0.json @@ -1,6 +1,6 @@ { "$schema": "https://json-schema.org/draft/2019-09/schema", - "$id": "urn:cps:org.onap.cps.ncmp.events:cm-notification-subscription-ncmp-out-event-schema:1.0.0", + "$id": "urn:cps:org.onap.ncmp.events.subscription:1.0.0", "$ref": "#/definitions/NcmpOutEvent", "definitions": { "NcmpOutEvent": { @@ -28,25 +28,19 @@ "description": "The unique subscription id" }, "acceptedTargets": { - "type": "array", - "description": "List of accepted targets", - "items": { - "type": "string" - } + "type": "object", + "existingJavaType": "java.util.Collection<String>", + "description": "Collection of accepted targets" }, "rejectedTargets": { - "type": "array", - "description": "List of rejected targets", - "items": { - "type": "string" - } + "type": "object", + "existingJavaType": "java.util.Collection<String>", + "description": "Collection of rejected targets" }, "pendingTargets": { - "type": "array", - "description": "List of pending targets", - "items": { - "type": "string" - } + "type": "object", + 
"existingJavaType": "java.util.Collection<String>", + "description": "Collection of pending targets" } }, "required": [ diff --git a/docs/test_ScrapeMetrics.py b/docs/test_ScrapeMetrics.py new file mode 100644 index 0000000000..cbcc92d2a1 --- /dev/null +++ b/docs/test_ScrapeMetrics.py @@ -0,0 +1,104 @@ +# ============LICENSE_START======================================================= +# Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 +# ============LICENSE_END========================================================= + +import unittest +import os +import tempfile +import time +from ScrapeMetrics import ( + scrape_all_metrics_from_file +) + +class TestScrapeMetrics(unittest.TestCase): + + def setUp(self): + """Set up temporary directory and files for testing.""" + self.temp_dir = tempfile.TemporaryDirectory() + self.test_root = self.temp_dir.name + + def tearDown(self): + """Clean up temporary directory and files.""" + self.temp_dir.cleanup() + + def _create_java_file(self, relative_path, content): + """Helper function to create a test .java file.""" + file_path = os.path.join(self.test_root, relative_path) + os.makedirs(os.path.dirname(file_path), exist_ok=True) + with open(file_path, 'w') as f: + f.write(content) + return file_path + + def test_scrape_metrics_from_file(self): + """Test scraping all metrics from a single Java file.""" + file_content = """ + package com.example; + + @CountCmHandleSearchExecution( + description = "A description does not fit the a single line") + public void myMethod() {} + + @Timed(value="timed", description="A timed metric") + public void anotherMethod() {} + + @TimedCustom(name="custom counter name", description="A custom timed metric") + public void anotherMethod() {} + + @NotTimed + public void notTimedMethod() {} + """ + test_file = self._create_java_file("com/example/MyService.java", file_content) + expected_metrics = [ + '"cm_handle_search_invocation_total","A description does not fit the a single line"', + '"timed","A timed metric"', + '"custom counter name","A custom timed metric"' + ] + result = scrape_all_metrics_from_file(test_file) + self.assertEqual(len(result), 3) + self.assertEqual(result, expected_metrics) + + def test_verify_metrics_file(self): + """Test if metrics.csv was modified less than 1 minute ago and has 56 lines.""" + + # Get the absolute path of the current directory. 
+ current_directory = os.path.dirname(os.path.abspath(__file__)) + + metrics_file = os.path.join(current_directory, "csv/metrics.csv") + + # Check if the file exists + self.assertTrue(os.path.exists(metrics_file), "metrics.csv does not exist.") + + # Check modification time + modification_time_in_seconds = os.path.getmtime(metrics_file) + time_difference_in_seconds = time.time() - modification_time_in_seconds + self.assertLess(time_difference_in_seconds, 60, "metrics.csv was not modified in the last minute.") + + # Check number of lines + with open(metrics_file, 'r') as f: + lines = f.readlines() + + expected_number_of_metrics = 56 + expected_number_of_lines = expected_number_of_metrics + 1 # Header + self.assertEqual(len(lines), expected_number_of_lines, f"metrics.csv does not have {expected_number_of_lines} lines.") + +if __name__ == '__main__': + # Ensure the script's directory is in the Python path for importing + import sys + script_dir = os.path.dirname(os.path.abspath(__file__)) + if script_dir not in sys.path: + sys.path.insert(0, script_dir) + unittest.main()
\ No newline at end of file diff --git a/integration-test/pom.xml b/integration-test/pom.xml index 531d353b0c..b044640580 100644 --- a/integration-test/pom.xml +++ b/integration-test/pom.xml @@ -23,7 +23,7 @@ <parent> <groupId>org.onap.cps</groupId> <artifactId>cps-parent</artifactId> - <version>3.6.0-SNAPSHOT</version> + <version>3.6.3-SNAPSHOT</version> <relativePath>../cps-parent/pom.xml</relativePath> </parent> <modelVersion>4.0.0</modelVersion> diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/base/CpsIntegrationSpecBase.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/base/CpsIntegrationSpecBase.groovy index f3cca801e7..b432823158 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/base/CpsIntegrationSpecBase.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/base/CpsIntegrationSpecBase.groovy @@ -22,10 +22,13 @@ package org.onap.cps.integration.base import com.hazelcast.map.IMap +import java.time.OffsetDateTime +import java.util.concurrent.BlockingQueue import okhttp3.mockwebserver.MockWebServer import org.onap.cps.api.CpsAnchorService import org.onap.cps.api.CpsDataService import org.onap.cps.api.CpsDataspaceService +import org.onap.cps.api.CpsDeltaService import org.onap.cps.api.CpsModuleService import org.onap.cps.api.CpsQueryService import org.onap.cps.api.exceptions.DataspaceNotFoundException @@ -44,7 +47,9 @@ import org.onap.cps.ncmp.impl.inventory.sync.ModuleSyncService import org.onap.cps.ncmp.impl.inventory.sync.ModuleSyncWatchdog import org.onap.cps.ncmp.impl.utils.AlternateIdMatcher import org.onap.cps.ri.repository.DataspaceRepository +import org.onap.cps.ri.repository.SchemaSetRepository import org.onap.cps.ri.utils.SessionManager +import org.onap.cps.spi.CpsModulePersistenceService import org.onap.cps.utils.JsonObjectMapper import org.springframework.beans.factory.annotation.Autowired import org.springframework.beans.factory.annotation.Value @@ -59,10 +64,6 @@ import org.springframework.test.web.servlet.MockMvc import org.testcontainers.spock.Testcontainers import spock.lang.Shared import spock.lang.Specification -import spock.util.concurrent.PollingConditions - -import java.time.OffsetDateTime -import java.util.concurrent.BlockingQueue @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.MOCK, classes = [CpsDataspaceService]) @Testcontainers @@ -93,6 +94,9 @@ abstract class CpsIntegrationSpecBase extends Specification { CpsDataService cpsDataService @Autowired + CpsDeltaService cpsDeltaService + + @Autowired CpsModuleService cpsModuleService @Autowired @@ -102,6 +106,15 @@ abstract class CpsIntegrationSpecBase extends Specification { SessionManager sessionManager @Autowired + CpsModulePersistenceService cpsModulePersistenceService + + @Autowired + DataspaceRepository dataspaceRepository + + @Autowired + SchemaSetRepository schemaSetRepository + + @Autowired ParameterizedCmHandleQueryService networkCmProxyCmHandleQueryService @Autowired @@ -153,16 +166,16 @@ abstract class CpsIntegrationSpecBase extends Specification { static NO_ALTERNATE_ID = '' static GENERAL_TEST_DATASPACE = 'generalTestDataspace' static BOOKSTORE_SCHEMA_SET = 'bookstoreSchemaSet' - static MODULE_SYNC_WAIT_TIME_IN_SECONDS = 2 static initialized = false def now = OffsetDateTime.now() + enum ModuleNameStrategy { UNIQUE, OVERLAPPING } + def setup() { if (!initialized) { cpsDataspaceService.createDataspace(GENERAL_TEST_DATASPACE) createStandardBookStoreSchemaSet(GENERAL_TEST_DATASPACE) - 
cpsAnchorService.createAnchor(GENERAL_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET, 'owner-of-bookstore-schema-set-do-not-delete') initialized = true } mockDmiServer1.setDispatcher(dmiDispatcher1) @@ -182,7 +195,7 @@ abstract class CpsIntegrationSpecBase extends Specification { mockDmiServer1.shutdown() mockDmiServer2.shutdown() mockPolicyServer.shutdown() - cpsModuleService.deleteAllUnusedYangModuleData() + cpsModuleService.deleteAllUnusedYangModuleData('NFP-Operational') } def static readResourceDataFile(filename) { @@ -203,18 +216,18 @@ abstract class CpsIntegrationSpecBase extends Specification { return nodeCount } - def getBookstoreYangResourcesNameToContentMap() { + def getBookstoreyangResourceContentPerName() { def bookstoreModelFileContent = readResourceDataFile('bookstore/bookstore.yang') def bookstoreTypesFileContent = readResourceDataFile('bookstore/bookstore-types.yang') return [bookstore: bookstoreModelFileContent, bookstoreTypes: bookstoreTypesFileContent] } def createStandardBookStoreSchemaSet(targetDataspace) { - cpsModuleService.createSchemaSet(targetDataspace, BOOKSTORE_SCHEMA_SET, getBookstoreYangResourcesNameToContentMap()) + cpsModuleService.createSchemaSet(targetDataspace, BOOKSTORE_SCHEMA_SET, getBookstoreyangResourceContentPerName()) } def createStandardBookStoreSchemaSet(targetDataspace, targetSchemaSet) { - cpsModuleService.createSchemaSet(targetDataspace, targetSchemaSet, getBookstoreYangResourcesNameToContentMap()) + cpsModuleService.createSchemaSet(targetDataspace, targetSchemaSet, getBookstoreyangResourceContentPerName()) } def dataspaceExists(dataspaceName) { @@ -254,22 +267,35 @@ abstract class CpsIntegrationSpecBase extends Specification { def registerCmHandle(dmiPlugin, cmHandleId, moduleSetTag, alternateId) { registerCmHandleWithoutWaitForReady(dmiPlugin, cmHandleId, moduleSetTag, alternateId) moduleSyncWatchdog.moduleSyncAdvisedCmHandles() - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - CmHandleState.READY == networkCmProxyInventoryFacade.getCmHandleCompositeState(cmHandleId).cmHandleState - }) + CmHandleState.READY == networkCmProxyInventoryFacade.getCmHandleCompositeState(cmHandleId).cmHandleState } def registerCmHandleWithoutWaitForReady(dmiPlugin, cmHandleId, moduleSetTag, alternateId) { - def cmHandleToCreate = new NcmpServiceCmHandle(cmHandleId: cmHandleId, moduleSetTag: moduleSetTag, alternateId: alternateId) + def cmHandleToCreate = new NcmpServiceCmHandle(cmHandleId: cmHandleId, moduleSetTag: moduleSetTag, alternateId: alternateId, dataProducerIdentifier: 'some data producer id') networkCmProxyInventoryFacade.updateDmiRegistration(new DmiPluginRegistration(dmiPlugin: dmiPlugin, createdCmHandles: [cmHandleToCreate])) } def registerSequenceOfCmHandlesWithManyModuleReferencesButDoNotWaitForReady(dmiPlugin, moduleSetTag, numberOfCmHandles, offset) { + registerSequenceOfCmHandles(dmiPlugin, moduleSetTag, numberOfCmHandles, offset, ModuleNameStrategy.UNIQUE, { id -> "alt=${id}" }) + } + + def registerSequenceOfCmHandlesWithManyModuleReferencesButDoNotWaitForReady(dmiPlugin, moduleSetTag, numberOfCmHandles, offset, ModuleNameStrategy moduleNameStrategy) { + registerSequenceOfCmHandles(dmiPlugin, moduleSetTag, numberOfCmHandles, offset, moduleNameStrategy, { id -> "alt=${id}" }) + } + + def registerSequenceOfCmHandlesWithManyModuleReferencesButDoNotWaitForReady(dmiPlugin, moduleSetTag, numberOfCmHandles, offset, ModuleNameStrategy moduleNameStrategy, Closure<String> alternateIdGenerator) { + registerSequenceOfCmHandles(dmiPlugin, 
moduleSetTag, numberOfCmHandles, offset, moduleNameStrategy, alternateIdGenerator) + } + + def registerSequenceOfCmHandles(dmiPlugin, moduleSetTag, numberOfCmHandles, offset, ModuleNameStrategy moduleNameStrategy, Closure<String> alternateIdGenerator) { def cmHandles = [] def id = offset - def moduleReferences = (1..200).collect { "${moduleSetTag}Module${it}" } + def modulePrefix = moduleNameStrategy.OVERLAPPING.equals(moduleNameStrategy) ? 'same' : moduleSetTag + def moduleReferences = (1..200).collect { "${modulePrefix}Module${it}" } + (1..numberOfCmHandles).each { - def ncmpServiceCmHandle = new NcmpServiceCmHandle(cmHandleId: "ch-${id}", moduleSetTag: moduleSetTag, alternateId: NO_ALTERNATE_ID) + def alternateId = alternateIdGenerator(id) + def ncmpServiceCmHandle = new NcmpServiceCmHandle(cmHandleId: "ch-${id}", moduleSetTag: moduleSetTag, alternateId: alternateId) cmHandles.add(ncmpServiceCmHandle) dmiDispatcher1.moduleNamesPerCmHandleId[ncmpServiceCmHandle.cmHandleId] = moduleReferences dmiDispatcher2.moduleNamesPerCmHandleId[ncmpServiceCmHandle.cmHandleId] = moduleReferences diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/base/DmiDispatcher.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/base/DmiDispatcher.groovy index 35a7b6a7c2..556495e1ea 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/base/DmiDispatcher.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/base/DmiDispatcher.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the 'License'); * you may not use this file except in compliance with the License. 
@@ -21,6 +21,7 @@ package org.onap.cps.integration.base import groovy.json.JsonSlurper +import java.util.regex.Matcher import okhttp3.mockwebserver.Dispatcher import okhttp3.mockwebserver.MockResponse import okhttp3.mockwebserver.RecordedRequest @@ -28,8 +29,6 @@ import org.springframework.http.HttpHeaders import org.springframework.http.HttpStatus import org.springframework.http.MediaType -import java.util.regex.Matcher - import static org.onap.cps.integration.base.CpsIntegrationSpecBase.readResourceDataFile /** @@ -59,6 +58,7 @@ class DmiDispatcher extends Dispatcher { def jsonSlurper = new JsonSlurper() def moduleNamesPerCmHandleId = [:] def receivedSubJobs = [:] + def receivedDataOperationRequest = [:] def lastAuthHeaderReceived def dmiResourceDataUrl @@ -81,7 +81,7 @@ class DmiDispatcher extends Dispatcher { // get module resources for a CM-handle case ~'^/dmi/v1/ch/(.*)/moduleResources$': def cmHandleId = Matcher.lastMatcher[0][1] - return getModuleResourcesResponse(cmHandleId) + return getModuleResourcesResponse(request, cmHandleId) // pass-through data operation for a CM-handle case ~'^/dmi/v1/ch/(.*)/data/ds/(.*)$': @@ -89,8 +89,9 @@ class DmiDispatcher extends Dispatcher { return mockResponseWithBody(HttpStatus.OK, '{}') // legacy pass-through batch data operation - case ~'^/dmi/v1/data$': - return mockResponseWithBody(HttpStatus.ACCEPTED, '{}') + case ~'^/dmi/v1/data\\?requestId=(.*)&topic=(.*)$': + receivedDataOperationRequest = jsonSlurper.parseText(request.body.readUtf8()) + return mockResponse(HttpStatus.ACCEPTED) // get data job status case ~'^/dmi/v1/cmwriteJob/dataProducer/(.*)/dataProducerJob/(.*)/status$': @@ -111,9 +112,9 @@ class DmiDispatcher extends Dispatcher { def mockWriteJobResponse(request) { def destination = Matcher.lastMatcher[0][1] - def subJobWriteRequest = jsonSlurper.parseText(request.getBody().readUtf8()) + def subJobWriteRequest = jsonSlurper.parseText(request.body.readUtf8()) this.receivedSubJobs.put(destination, subJobWriteRequest) - def response = '{"subJobId":"some sub job id", "dmiServiceName":"some dmi service name", "dataProducerId":"some data producer id"}' + def response = '{"subJobId":"some sub job id"}' return mockResponseWithBody(HttpStatus.OK, response) } @@ -124,8 +125,13 @@ class DmiDispatcher extends Dispatcher { return mockResponseWithBody(HttpStatus.OK, moduleReferences) } - def getModuleResourcesResponse(cmHandleId) { - def moduleResources = '[' + getModuleNamesForCmHandle(cmHandleId).collect { + def getModuleResourcesResponse(request, cmHandleId) { + def moduleResourcesRequest = jsonSlurper.parseText(request.body.readUtf8()) + def requestedModuleNames = moduleResourcesRequest.data.modules.name + def candidateModuleNames = getModuleNamesForCmHandle(cmHandleId) + def moduleNames = candidateModuleNames.stream().filter(candidate -> requestedModuleNames.contains(candidate)).toList() + + def moduleResources = '[' + moduleNames.collect { MODULE_RESOURCES_RESPONSE_TEMPLATE.replaceAll("<MODULE_NAME>", it) }.join(',') + ']' return mockResponseWithBody(HttpStatus.OK, moduleResources) diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/AnchorServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/AnchorServiceIntegrationSpec.groovy index 2bd5a4a1be..ca321119ea 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/AnchorServiceIntegrationSpec.groovy +++ 
b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/AnchorServiceIntegrationSpec.groovy @@ -59,17 +59,17 @@ class AnchorServiceIntegrationSpec extends FunctionalSpecBase { and: '1 anchor with "other" schema set is created' createStandardBookStoreSchemaSet(GENERAL_TEST_DATASPACE, 'otherSchemaSet') objectUnderTest.createAnchor(GENERAL_TEST_DATASPACE, 'otherSchemaSet', 'anchor3') - then: 'there are 4 anchors in the general test database' - assert objectUnderTest.getAnchors(GENERAL_TEST_DATASPACE).size() == 4 - and: 'there are 3 anchors associated with bookstore schema set' - assert objectUnderTest.getAnchorsBySchemaSetName(GENERAL_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET).size() == 3 + then: 'there are 3 anchors in the general test database' + assert objectUnderTest.getAnchors(GENERAL_TEST_DATASPACE).size() == 3 + and: 'there are 2 anchors associated with bookstore schema set' + assert objectUnderTest.getAnchorsBySchemaSetName(GENERAL_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET).size() == 2 and: 'there is 1 anchor associated with other schema set' assert objectUnderTest.getAnchorsBySchemaSetName(GENERAL_TEST_DATASPACE, 'otherSchemaSet').size() == 1 } def 'Querying anchor(name)s (depends on previous test!).'() { - expect: 'there are now 4 anchors using the "stores" module (both schema sets use the same modules) ' - assert objectUnderTest.queryAnchorNames(GENERAL_TEST_DATASPACE, ['stores', 'bookstore-types']).size() == 4 + expect: 'there are now 3 anchors using the "stores" module (both schema sets use the same modules)' + assert objectUnderTest.queryAnchorNames(GENERAL_TEST_DATASPACE, ['stores', 'bookstore-types']).size() == 3 and: 'there are no anchors using both "stores" and a "unused-model"' assert objectUnderTest.queryAnchorNames(GENERAL_TEST_DATASPACE, ['stores', 'unused-model']).size() == 0 } diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DataServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DataServiceIntegrationSpec.groovy index 4823d58af9..6ecc3a509c 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DataServiceIntegrationSpec.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DataServiceIntegrationSpec.groovy @@ -26,13 +26,11 @@ import org.onap.cps.integration.base.FunctionalSpecBase import org.onap.cps.api.parameters.FetchDescendantsOption import org.onap.cps.api.exceptions.AlreadyDefinedException import org.onap.cps.api.exceptions.AnchorNotFoundException -import org.onap.cps.api.exceptions.CpsAdminException import org.onap.cps.api.exceptions.CpsPathException import org.onap.cps.api.exceptions.DataNodeNotFoundException import org.onap.cps.api.exceptions.DataNodeNotFoundExceptionBatch import org.onap.cps.api.exceptions.DataValidationException import org.onap.cps.api.exceptions.DataspaceNotFoundException -import org.onap.cps.api.model.DeltaReport import org.onap.cps.utils.ContentType import static org.onap.cps.api.parameters.FetchDescendantsOption.DIRECT_CHILDREN_ONLY @@ -523,191 +521,6 @@ class DataServiceIntegrationSpec extends FunctionalSpecBase { restoreBookstoreDataAnchor(2) } - def 'Get delta between 2 anchors'() { - when: 'attempt to get delta report between anchors' - def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, BOOKSTORE_ANCHOR_5, '/', OMIT_DESCENDANTS) - and: 'report is ordered based on xpath' - result = result.toList().sort { it.xpath } - 
then: 'delta report contains expected number of changes' - result.size() == 3 - and: 'delta report contains REPLACE action with expected xpath' - assert result[0].getAction() == 'replace' - assert result[0].getXpath() == '/bookstore' - and: 'delta report contains CREATE action with expected xpath' - assert result[1].getAction() == 'create' - assert result[1].getXpath() == "/bookstore-address[@bookstore-name='Crossword Bookstores']" - and: 'delta report contains REMOVE action with expected xpath' - assert result[2].getAction() == 'remove' - assert result[2].getXpath() == "/bookstore-address[@bookstore-name='Easons-1']" - } - - def 'Get delta between 2 anchors returns empty response when #scenario'() { - when: 'attempt to get delta report between anchors' - def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, targetAnchor, xpath, INCLUDE_ALL_DESCENDANTS) - then: 'delta report is empty' - assert result.isEmpty() - where: 'following data was used' - scenario | targetAnchor | xpath - 'anchors with identical data are queried' | BOOKSTORE_ANCHOR_4 | '/' - 'same anchor name is passed as parameter' | BOOKSTORE_ANCHOR_3 | '/' - 'non existing xpath' | BOOKSTORE_ANCHOR_5 | '/non-existing-xpath' - } - - def 'Get delta between anchors error scenario: #scenario'() { - when: 'attempt to get delta between anchors' - objectUnderTest.getDeltaByDataspaceAndAnchors(dataspaceName, sourceAnchor, targetAnchor, '/some-xpath', INCLUDE_ALL_DESCENDANTS) - then: 'expected exception is thrown' - thrown(expectedException) - where: 'following data was used' - scenario | dataspaceName | sourceAnchor | targetAnchor || expectedException - 'invalid dataspace name' | 'Invalid dataspace' | 'not-relevant' | 'not-relevant' || DataValidationException - 'invalid anchor 1 name' | FUNCTIONAL_TEST_DATASPACE_3 | 'invalid anchor' | 'not-relevant' || DataValidationException - 'invalid anchor 2 name' | FUNCTIONAL_TEST_DATASPACE_3 | BOOKSTORE_ANCHOR_3 | 'invalid anchor' || DataValidationException - 'non-existing dataspace' | 'non-existing' | 'not-relevant1' | 'not-relevant2' || DataspaceNotFoundException - 'non-existing dataspace with same anchor name' | 'non-existing' | 'not-relevant' | 'not-relevant' || DataspaceNotFoundException - 'non-existing anchor 1' | FUNCTIONAL_TEST_DATASPACE_3 | 'non-existing-anchor' | 'not-relevant' || AnchorNotFoundException - 'non-existing anchor 2' | FUNCTIONAL_TEST_DATASPACE_3 | BOOKSTORE_ANCHOR_3 | 'non-existing-anchor' || AnchorNotFoundException - } - - def 'Get delta between anchors for remove action, where source data node #scenario'() { - when: 'attempt to get delta between leaves of data nodes present in 2 anchors' - def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_5, BOOKSTORE_ANCHOR_3, parentNodeXpath, INCLUDE_ALL_DESCENDANTS) - then: 'expected action is present in delta report' - assert result.get(0).getAction() == 'remove' - where: 'following data was used' - scenario | parentNodeXpath - 'has leaves and child nodes' | "/bookstore/categories[@code='6']" - 'has leaves only' | "/bookstore/categories[@code='5']/books[@title='Book 11']" - 'has child data node only' | "/bookstore/support-info/contact-emails" - 'is empty' | "/bookstore/container-without-leaves" - } - - def 'Get delta between anchors for "create" action, where target data node #scenario'() { - when: 'attempt to get delta between leaves of data nodes present in 2 anchors' - def result = 
objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, BOOKSTORE_ANCHOR_5, parentNodeXpath, INCLUDE_ALL_DESCENDANTS) - then: 'the expected action is present in delta report' - result.get(0).getAction() == 'create' - and: 'the expected xapth is present in delta report' - result.get(0).getXpath() == parentNodeXpath - where: 'following data was used' - scenario | parentNodeXpath - 'has leaves and child nodes' | "/bookstore/categories[@code='6']" - 'has leaves only' | "/bookstore/categories[@code='5']/books[@title='Book 11']" - 'has child data node only' | "/bookstore/support-info/contact-emails" - 'is empty' | "/bookstore/container-without-leaves" - } - - def 'Get delta between anchors when leaves of existing data nodes are updated,: #scenario'() { - when: 'attempt to get delta between leaves of existing data nodes' - def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, sourceAnchor, targetAnchor, xpath, OMIT_DESCENDANTS) - then: 'expected action is "replace"' - assert result[0].getAction() == 'replace' - and: 'the payload has expected leaf values' - def sourceData = result[0].getSourceData() - def targetData = result[0].getTargetData() - assert sourceData == expectedSourceValue - assert targetData == expectedTargetValue - where: 'following data was used' - scenario | sourceAnchor | targetAnchor | xpath || expectedSourceValue | expectedTargetValue - 'leaf is updated in target anchor' | BOOKSTORE_ANCHOR_3 | BOOKSTORE_ANCHOR_5 | '/bookstore' || ['bookstore-name': 'Easons-1'] | ['bookstore-name': 'Crossword Bookstores'] - 'leaf is removed in target anchor' | BOOKSTORE_ANCHOR_3 | BOOKSTORE_ANCHOR_5 | "/bookstore/categories[@code='5']/books[@title='Book 1']" || [price:1] | null - 'leaf is added in target anchor' | BOOKSTORE_ANCHOR_5 | BOOKSTORE_ANCHOR_3 | "/bookstore/categories[@code='5']/books[@title='Book 1']" || null | [price:1] - } - - def 'Get delta between anchors when child data nodes under existing parent data nodes are updated: #scenario'() { - when: 'attempt to get delta between leaves of existing data nodes' - def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, sourceAnchor, targetAnchor, xpath, DIRECT_CHILDREN_ONLY) - then: 'expected action is "replace"' - assert result[0].getAction() == 'replace' - and: 'the delta report has expected child node xpaths' - def deltaReportEntities = getDeltaReportEntities(result) - def childNodeXpathsInDeltaReport = deltaReportEntities.get('xpaths') - assert childNodeXpathsInDeltaReport.contains(expectedChildNodeXpath) - where: 'following data was used' - scenario | sourceAnchor | targetAnchor | xpath || expectedChildNodeXpath - 'source and target anchors have child data nodes' | BOOKSTORE_ANCHOR_3 | BOOKSTORE_ANCHOR_5 | '/bookstore/premises' || '/bookstore/premises/addresses[@house-number=\'2\' and @street=\'Main Street\']' - 'removed child data nodes in target anchor' | BOOKSTORE_ANCHOR_5 | BOOKSTORE_ANCHOR_3 | '/bookstore' || '/bookstore/support-info' - 'added child data nodes in target anchor' | BOOKSTORE_ANCHOR_3 | BOOKSTORE_ANCHOR_5 | '/bookstore' || '/bookstore/support-info' - } - - def 'Get delta between anchors where source and target data nodes have leaves and child data nodes'() { - given: 'parent node xpath and expected data in delta report' - def parentNodeXpath = "/bookstore/categories[@code='1']" - def expectedSourceDataInParentNode = ['name':'Children'] - def expectedTargetDataInParentNode = ['name':'Kids'] - def 
expectedSourceDataInChildNode = [['lang' : 'English'],['price':20, 'editions':[1988, 2000]]] - def expectedTargetDataInChildNode = [['lang':'English/German'], ['price':200, 'editions':[1988, 2000, 2023]]] - when: 'attempt to get delta between leaves of existing data nodes' - def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, BOOKSTORE_ANCHOR_5, parentNodeXpath, INCLUDE_ALL_DESCENDANTS) - def deltaReportEntities = getDeltaReportEntities(result) - then: 'expected action is "replace"' - assert result[0].getAction() == 'replace' - and: 'the payload has expected parent node xpath' - assert deltaReportEntities.get('xpaths').contains(parentNodeXpath) - and: 'delta report has expected source and target data' - assert deltaReportEntities.get('sourcePayload').contains(expectedSourceDataInParentNode) - assert deltaReportEntities.get('targetPayload').contains(expectedTargetDataInParentNode) - and: 'the delta report also has expected child node xpaths' - assert deltaReportEntities.get('xpaths').containsAll(["/bookstore/categories[@code='1']/books[@title='The Gruffalo']", "/bookstore/categories[@code='1']/books[@title='Matilda']"]) - and: 'the delta report also has expected source and target data of child nodes' - assert deltaReportEntities.get('sourcePayload').containsAll(expectedSourceDataInChildNode) - assert deltaReportEntities.get('targetPayload').containsAll(expectedTargetDataInChildNode) - } - - def 'Get delta between anchor and JSON payload'() { - when: 'attempt to get delta report between anchor and JSON payload' - def jsonPayload = "{\"book-store:bookstore\":{\"bookstore-name\":\"Crossword Bookstores\"},\"book-store:bookstore-address\":{\"address\":\"Bangalore, India\",\"postal-code\":\"560062\",\"bookstore-name\":\"Crossword Bookstores\"}}" - def result = objectUnderTest.getDeltaByDataspaceAnchorAndPayload(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, '/', [:], jsonPayload, OMIT_DESCENDANTS) - then: 'delta report contains expected number of changes' - result.size() == 3 - and: 'delta report contains "replace" action with expected xpath' - assert result[0].getAction() == 'replace' - assert result[0].getXpath() == '/bookstore' - and: 'delta report contains "remove" action with expected xpath' - assert result[1].getAction() == 'remove' - assert result[1].getXpath() == "/bookstore-address[@bookstore-name='Easons-1']" - and: 'delta report contains "create" action with expected xpath' - assert result[2].getAction() == 'create' - assert result[2].getXpath() == "/bookstore-address[@bookstore-name='Crossword Bookstores']" - } - - def 'Get delta between anchor and payload returns empty response when JSON payload is identical to anchor data'() { - when: 'attempt to get delta report between anchor and JSON payload (replacing the string Easons with Easons-1 because the data in JSON file is modified, to append anchor number, during the setup process of the integration tests)' - def jsonPayload = readResourceDataFile('bookstore/bookstoreData.json').replace('Easons', 'Easons-1') - def result = objectUnderTest.getDeltaByDataspaceAnchorAndPayload(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, '/', [:], jsonPayload, INCLUDE_ALL_DESCENDANTS) - then: 'delta report is empty' - assert result.isEmpty() - } - - def 'Get delta between anchor and payload error scenario: #scenario'() { - when: 'attempt to get delta between anchor and json payload' - objectUnderTest.getDeltaByDataspaceAnchorAndPayload(dataspaceName, sourceAnchor, xpath, [:], jsonPayload, 
INCLUDE_ALL_DESCENDANTS) - then: 'expected exception is thrown' - thrown(expectedException) - where: 'following data was used' - scenario | dataspaceName | sourceAnchor | xpath | jsonPayload || expectedException - 'invalid dataspace name' | 'Invalid dataspace' | 'not-relevant' | '/' | '{some-json}' || DataValidationException - 'invalid anchor name' | FUNCTIONAL_TEST_DATASPACE_3 | 'invalid anchor' | '/' | '{some-json}' || DataValidationException - 'non-existing dataspace' | 'non-existing' | 'not-relevant' | '/' | '{some-json}' || DataspaceNotFoundException - 'non-existing anchor' | FUNCTIONAL_TEST_DATASPACE_3 | 'non-existing-anchor' | '/' | '{some-json}' || AnchorNotFoundException - 'empty json payload with root node xpath' | FUNCTIONAL_TEST_DATASPACE_3 | BOOKSTORE_ANCHOR_3 | '/' | '' || DataValidationException - 'empty json payload with non-root node xpath' | FUNCTIONAL_TEST_DATASPACE_3 | BOOKSTORE_ANCHOR_3 | '/bookstore' | '' || DataValidationException - } - - def getDeltaReportEntities(List<DeltaReport> deltaReport) { - def xpaths = [] - def action = [] - def sourcePayload = [] - def targetPayload = [] - deltaReport.each { - delta -> xpaths.add(delta.getXpath()) - action.add(delta.getAction()) - sourcePayload.add(delta.getSourceData()) - targetPayload.add(delta.getTargetData()) - } - return ['xpaths':xpaths, 'action':action, 'sourcePayload':sourcePayload, 'targetPayload':targetPayload] - } - def countDataNodesInBookstore() { return countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', INCLUDE_ALL_DESCENDANTS)) } diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DataspaceServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DataspaceServiceIntegrationSpec.groovy index 178b0227ca..47a332adc9 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DataspaceServiceIntegrationSpec.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DataspaceServiceIntegrationSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023 Nordix Foundation + * Copyright (C) 2023-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the 'License'); * you may not use this file except in compliance with the License. @@ -32,8 +32,6 @@ class DataspaceServiceIntegrationSpec extends FunctionalSpecBase { def setup() { objectUnderTest = cpsDataspaceService } - def cleanup() { cpsModuleService.deleteAllUnusedYangModuleData() } - def 'Dataspace CRUD operations.'() { when: 'a dataspace is created' objectUnderTest.createDataspace('newDataspace') diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DeltaServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DeltaServiceIntegrationSpec.groovy new file mode 100644 index 0000000000..691e71427c --- /dev/null +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DeltaServiceIntegrationSpec.groovy @@ -0,0 +1,245 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 TechMahindra Ltd. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.integration.functional.cps + +import org.onap.cps.api.CpsDeltaService +import org.onap.cps.api.exceptions.AnchorNotFoundException +import org.onap.cps.api.exceptions.DataValidationException +import org.onap.cps.api.exceptions.DataspaceNotFoundException +import org.onap.cps.api.model.DeltaReport +import org.onap.cps.api.parameters.FetchDescendantsOption +import org.onap.cps.integration.base.FunctionalSpecBase + +class DeltaServiceIntegrationSpec extends FunctionalSpecBase { + CpsDeltaService objectUnderTest + def originalCountBookstoreChildNodes + def originalCountXmlBookstoreChildNodes + def originalCountBookstoreTopLevelListNodes + + static def INCLUDE_ALL_DESCENDANTS = FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS + static def OMIT_DESCENDANTS = FetchDescendantsOption.OMIT_DESCENDANTS + static def DIRECT_CHILDREN_ONLY = FetchDescendantsOption.DIRECT_CHILDREN_ONLY + + def setup() { + objectUnderTest = cpsDeltaService + originalCountBookstoreChildNodes = countDataNodesInBookstore() + originalCountBookstoreTopLevelListNodes = countTopLevelListDataNodesInBookstore() + originalCountXmlBookstoreChildNodes = countXmlDataNodesInBookstore() + } + + def 'Get delta between 2 anchors'() { + when: 'attempt to get delta report between anchors' + def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, BOOKSTORE_ANCHOR_5, '/', OMIT_DESCENDANTS) + and: 'report is ordered based on xpath' + result = result.toList().sort { it.xpath } + then: 'delta report contains expected number of changes' + result.size() == 3 + and: 'delta report contains REPLACE action with expected xpath' + assert result[0].getAction() == 'replace' + assert result[0].getXpath() == '/bookstore' + and: 'delta report contains CREATE action with expected xpath' + assert result[1].getAction() == 'create' + assert result[1].getXpath() == '/bookstore-address[@bookstore-name=\'Crossword Bookstores\']' + and: 'delta report contains REMOVE action with expected xpath' + assert result[2].getAction() == 'remove' + assert result[2].getXpath() == '/bookstore-address[@bookstore-name=\'Easons-1\']' + } + + def 'Get delta between 2 anchors returns empty response when #scenario'() { + when: 'attempt to get delta report between anchors' + def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, targetAnchor, xpath, INCLUDE_ALL_DESCENDANTS) + then: 'delta report is empty' + assert result.isEmpty() + where: 'following data was used' + scenario | targetAnchor | xpath + 'anchors with identical data are queried' | BOOKSTORE_ANCHOR_4 | '/' + 'same anchor name is passed as parameter' | BOOKSTORE_ANCHOR_3 | '/' + 'non existing xpath' | BOOKSTORE_ANCHOR_5 | 
'/non-existing-xpath' + } + + def 'Get delta between anchors error scenario: #scenario'() { + when: 'attempt to get delta between anchors' + objectUnderTest.getDeltaByDataspaceAndAnchors(dataspaceName, sourceAnchor, targetAnchor, '/some-xpath', INCLUDE_ALL_DESCENDANTS) + then: 'expected exception is thrown' + thrown(expectedException) + where: 'following data was used' + scenario | dataspaceName | sourceAnchor | targetAnchor || expectedException + 'invalid dataspace name' | 'Invalid dataspace' | 'not-relevant' | 'not-relevant' || DataValidationException + 'invalid anchor 1 name' | FUNCTIONAL_TEST_DATASPACE_3 | 'invalid anchor' | 'not-relevant' || DataValidationException + 'invalid anchor 2 name' | FUNCTIONAL_TEST_DATASPACE_3 | BOOKSTORE_ANCHOR_3 | 'invalid anchor' || DataValidationException + 'non-existing dataspace' | 'non-existing' | 'not-relevant1' | 'not-relevant2' || DataspaceNotFoundException + 'non-existing dataspace with same anchor name' | 'non-existing' | 'not-relevant' | 'not-relevant' || DataspaceNotFoundException + 'non-existing anchor 1' | FUNCTIONAL_TEST_DATASPACE_3 | 'non-existing-anchor' | 'not-relevant' || AnchorNotFoundException + 'non-existing anchor 2' | FUNCTIONAL_TEST_DATASPACE_3 | BOOKSTORE_ANCHOR_3 | 'non-existing-anchor' || AnchorNotFoundException + } + + def 'Get delta between anchors for remove action, where source data node #scenario'() { + when: 'attempt to get delta between leaves of data nodes present in 2 anchors' + def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_5, BOOKSTORE_ANCHOR_3, parentNodeXpath, INCLUDE_ALL_DESCENDANTS) + then: 'expected action is present in delta report' + assert result.get(0).getAction() == 'remove' + where: 'following data was used' + scenario | parentNodeXpath + 'has leaves and child nodes' | '/bookstore/categories[@code=\'6\']' + 'has leaves only' | '/bookstore/categories[@code=\'5\']/books[@title=\'Book 11\']' + 'has child data node only' | '/bookstore/support-info/contact-emails' + 'is empty' | '/bookstore/container-without-leaves' + } + + def 'Get delta between anchors for "create" action, where target data node #scenario'() { + when: 'attempt to get delta between leaves of data nodes present in 2 anchors' + def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, BOOKSTORE_ANCHOR_5, parentNodeXpath, INCLUDE_ALL_DESCENDANTS) + then: 'the expected action is present in delta report' + result.get(0).getAction() == 'create' + and: 'the expected xpath is present in delta report' + result.get(0).getXpath() == parentNodeXpath + where: 'following data was used' + scenario | parentNodeXpath + 'has leaves and child nodes' | '/bookstore/categories[@code=\'6\']' + 'has leaves only' | '/bookstore/categories[@code=\'5\']/books[@title=\'Book 11\']' + 'has child data node only' | '/bookstore/support-info/contact-emails' + 'is empty' | '/bookstore/container-without-leaves' + } + + def 'Get delta between anchors when leaves of existing data nodes are updated: #scenario'() { + when: 'attempt to get delta between leaves of existing data nodes' + def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, sourceAnchor, targetAnchor, xpath, OMIT_DESCENDANTS) + then: 'expected action is "replace"' + assert result[0].getAction() == 'replace' + and: 'the payload has expected leaf values' + def sourceData = result[0].getSourceData() + def targetData = result[0].getTargetData() + assert sourceData == 
expectedSourceValue + assert targetData == expectedTargetValue + where: 'following data was used' + scenario | sourceAnchor | targetAnchor | xpath || expectedSourceValue | expectedTargetValue + 'leaf is updated in target anchor' | BOOKSTORE_ANCHOR_3 | BOOKSTORE_ANCHOR_5 | '/bookstore' || ['bookstore-name': 'Easons-1'] | ['bookstore-name': 'Crossword Bookstores'] + 'leaf is removed in target anchor' | BOOKSTORE_ANCHOR_3 | BOOKSTORE_ANCHOR_5 | '/bookstore/categories[@code=\'5\']/books[@title=\'Book 1\']' || [price:1] | null + 'leaf is added in target anchor' | BOOKSTORE_ANCHOR_5 | BOOKSTORE_ANCHOR_3 | '/bookstore/categories[@code=\'5\']/books[@title=\'Book 1\']' || null | [price:1] + } + + def 'Get delta between anchors when child data nodes under existing parent data nodes are updated: #scenario'() { + when: 'attempt to get delta between leaves of existing data nodes' + def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, sourceAnchor, targetAnchor, xpath, DIRECT_CHILDREN_ONLY) + then: 'expected action is "replace"' + assert result[0].getAction() == 'replace' + and: 'the delta report has expected child node xpaths' + def deltaReportEntities = getDeltaReportEntities(result) + def childNodeXpathsInDeltaReport = deltaReportEntities.get('xpaths') + assert childNodeXpathsInDeltaReport.contains(expectedChildNodeXpath) + where: 'following data was used' + scenario | sourceAnchor | targetAnchor | xpath || expectedChildNodeXpath + 'source and target anchors have child data nodes' | BOOKSTORE_ANCHOR_3 | BOOKSTORE_ANCHOR_5 | '/bookstore/premises' || '/bookstore/premises/addresses[@house-number=\'2\' and @street=\'Main Street\']' + 'removed child data nodes in target anchor' | BOOKSTORE_ANCHOR_5 | BOOKSTORE_ANCHOR_3 | '/bookstore' || '/bookstore/support-info' + 'added child data nodes in target anchor' | BOOKSTORE_ANCHOR_3 | BOOKSTORE_ANCHOR_5 | '/bookstore' || '/bookstore/support-info' + } + + def 'Get delta between anchors where source and target data nodes have leaves and child data nodes'() { + given: 'parent node xpath and expected data in delta report' + def parentNodeXpath = '/bookstore/categories[@code=\'1\']' + def expectedSourceDataInParentNode = ['name':'Children'] + def expectedTargetDataInParentNode = ['name':'Kids'] + def expectedSourceDataInChildNode = [['lang' : 'English'],['price':20, 'editions':[1988, 2000]]] + def expectedTargetDataInChildNode = [['lang':'English/German'], ['price':200, 'editions':[1988, 2000, 2023]]] + when: 'attempt to get delta between leaves of existing data nodes' + def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, BOOKSTORE_ANCHOR_5, parentNodeXpath, INCLUDE_ALL_DESCENDANTS) + def deltaReportEntities = getDeltaReportEntities(result) + then: 'expected action is "replace"' + assert result[0].getAction() == 'replace' + and: 'the payload has expected parent node xpath' + assert deltaReportEntities.get('xpaths').contains(parentNodeXpath) + and: 'delta report has expected source and target data' + assert deltaReportEntities.get('sourcePayload').contains(expectedSourceDataInParentNode) + assert deltaReportEntities.get('targetPayload').contains(expectedTargetDataInParentNode) + and: 'the delta report also has expected child node xpaths' + assert deltaReportEntities.get('xpaths').containsAll(['/bookstore/categories[@code=\'1\']/books[@title=\'The Gruffalo\']', '/bookstore/categories[@code=\'1\']/books[@title=\'Matilda\']']) + and: 'the delta report also has expected source and 
target data of child nodes' + assert deltaReportEntities.get('sourcePayload').containsAll(expectedSourceDataInChildNode) + assert deltaReportEntities.get('targetPayload').containsAll(expectedTargetDataInChildNode) + } + + def 'Get delta between anchor and JSON payload'() { + when: 'attempt to get delta report between anchor and JSON payload' + def jsonPayload = '{\"book-store:bookstore\":{\"bookstore-name\":\"Crossword Bookstores\"},\"book-store:bookstore-address\":{\"address\":\"Bangalore, India\",\"postal-code\":\"560062\",\"bookstore-name\":\"Crossword Bookstores\"}}' + def result = objectUnderTest.getDeltaByDataspaceAnchorAndPayload(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, '/', [:], jsonPayload, OMIT_DESCENDANTS) + then: 'delta report contains expected number of changes' + result.size() == 3 + and: 'delta report contains "replace" action with expected xpath' + assert result[0].getAction() == 'replace' + assert result[0].getXpath() == '/bookstore' + and: 'delta report contains "remove" action with expected xpath' + assert result[1].getAction() == 'remove' + assert result[1].getXpath() == '/bookstore-address[@bookstore-name=\'Easons-1\']' + and: 'delta report contains "create" action with expected xpath' + assert result[2].getAction() == 'create' + assert result[2].getXpath() == '/bookstore-address[@bookstore-name=\'Crossword Bookstores\']' + } + + def 'Get delta between anchor and payload returns empty response when JSON payload is identical to anchor data'() { + when: 'attempt to get delta report between anchor and JSON payload (replacing the string Easons with Easons-1 because the data in JSON file is modified, to append anchor number, during the setup process of the integration tests)' + def jsonPayload = readResourceDataFile('bookstore/bookstoreData.json').replace('Easons', 'Easons-1') + def result = objectUnderTest.getDeltaByDataspaceAnchorAndPayload(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, '/', [:], jsonPayload, INCLUDE_ALL_DESCENDANTS) + then: 'delta report is empty' + assert result.isEmpty() + } + + def 'Get delta between anchor and payload error scenario: #scenario'() { + when: 'attempt to get delta between anchor and json payload' + objectUnderTest.getDeltaByDataspaceAnchorAndPayload(dataspaceName, sourceAnchor, xpath, [:], jsonPayload, INCLUDE_ALL_DESCENDANTS) + then: 'expected exception is thrown' + thrown(expectedException) + where: 'following data was used' + scenario | dataspaceName | sourceAnchor | xpath | jsonPayload || expectedException + 'invalid dataspace name' | 'Invalid dataspace' | 'not-relevant' | '/' | '{some-json}' || DataValidationException + 'invalid anchor name' | FUNCTIONAL_TEST_DATASPACE_3 | 'invalid anchor' | '/' | '{some-json}' || DataValidationException + 'non-existing dataspace' | 'non-existing' | 'not-relevant' | '/' | '{some-json}' || DataspaceNotFoundException + 'non-existing anchor' | FUNCTIONAL_TEST_DATASPACE_3 | 'non-existing-anchor' | '/' | '{some-json}' || AnchorNotFoundException + 'empty json payload with root node xpath' | FUNCTIONAL_TEST_DATASPACE_3 | BOOKSTORE_ANCHOR_3 | '/' | '' || DataValidationException + 'empty json payload with non-root node xpath' | FUNCTIONAL_TEST_DATASPACE_3 | BOOKSTORE_ANCHOR_3 | '/bookstore' | '' || DataValidationException + } + + def getDeltaReportEntities(List<DeltaReport> deltaReport) { + def xpaths = [] + def action = [] + def sourcePayload = [] + def targetPayload = [] + deltaReport.each { + delta -> xpaths.add(delta.getXpath()) + action.add(delta.getAction()) + 
sourcePayload.add(delta.getSourceData()) + targetPayload.add(delta.getTargetData()) + } + return ['xpaths':xpaths, 'action':action, 'sourcePayload':sourcePayload, 'targetPayload':targetPayload] + } + + def countDataNodesInBookstore() { + return countDataNodesInTree(cpsDataService.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', INCLUDE_ALL_DESCENDANTS)) + } + + def countTopLevelListDataNodesInBookstore() { + return countDataNodesInTree(cpsDataService.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/', INCLUDE_ALL_DESCENDANTS)) + } + + def countXmlDataNodesInBookstore() { + return countDataNodesInTree(cpsDataService.getDataNodes(FUNCTIONAL_TEST_DATASPACE_4, BOOKSTORE_ANCHOR_6, '/bookstore', INCLUDE_ALL_DESCENDANTS)) + } + +} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/ModuleServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/ModuleServiceIntegrationSpec.groovy index d8010875c1..9a48dd72f3 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/ModuleServiceIntegrationSpec.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/ModuleServiceIntegrationSpec.groovy @@ -21,8 +21,6 @@ package org.onap.cps.integration.functional.cps import org.onap.cps.api.CpsModuleService -import org.onap.cps.integration.base.FunctionalSpecBase -import org.onap.cps.api.parameters.CascadeDeleteAllowed import org.onap.cps.api.exceptions.AlreadyDefinedException import org.onap.cps.api.exceptions.DataspaceNotFoundException import org.onap.cps.api.exceptions.ModelValidationException @@ -30,6 +28,8 @@ import org.onap.cps.api.exceptions.SchemaSetInUseException import org.onap.cps.api.exceptions.SchemaSetNotFoundException import org.onap.cps.api.model.ModuleDefinition import org.onap.cps.api.model.ModuleReference +import org.onap.cps.api.parameters.CascadeDeleteAllowed +import org.onap.cps.integration.base.FunctionalSpecBase class ModuleServiceIntegrationSpec extends FunctionalSpecBase { @@ -53,25 +53,23 @@ class ModuleServiceIntegrationSpec extends FunctionalSpecBase { } """ - def newYangResourcesNameToContentMap = [:] - def moduleReferences = [] + def yangResourceContentPerName = [:] + def allModuleReferences = [] def noNewModules = [:] def bookstoreModelFileContent = readResourceDataFile('bookstore/bookstore.yang') def bookstoreTypesFileContent = readResourceDataFile('bookstore/bookstore-types.yang') def setup() { objectUnderTest = cpsModuleService } - def cleanup() { objectUnderTest.deleteAllUnusedYangModuleData() } - /* C R E A T E S C H E M A S E T U S E - C A S E S */ def 'Create new schema set from yang resources with #scenario'() { given: 'a new schema set with #numberOfModules modules' - populateNewYangResourcesNameToContentMapAndAllModuleReferences(numberOfNewModules) + populateYangResourceContentPerNameAndAllModuleReferences(numberOfNewModules) when: 'the new schema set is created' - objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet', newYangResourcesNameToContentMap) + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet', yangResourceContentPerName) then: 'the number of module references has increased by #numberOfNewModules' def yangResourceModuleReferences = objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1) originalNumberOfModuleReferences + numberOfNewModules == yangResourceModuleReferences.size() @@ -95,49 +93,53 @@ class 
ModuleServiceIntegrationSpec extends FunctionalSpecBase { def 'Create new schema set from modules with #scenario'() { given: 'a new schema set with #numberOfNewModules modules' - populateNewYangResourcesNameToContentMapAndAllModuleReferences(numberOfNewModules) + populateYangResourceContentPerNameAndAllModuleReferences(numberOfNewModules) and: 'add existing module references (optional)' - moduleReferences.addAll(existingModuleReferences) + allModuleReferences.addAll(existingModuleReferences) when: 'the new schema set is created' def schemaSetName = "NewSchemaWith${numberOfNewModules}Modules" - objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, schemaSetName, newYangResourcesNameToContentMap, moduleReferences) + objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, schemaSetName, yangResourceContentPerName, allModuleReferences) and: 'associated with a new anchor' cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, schemaSetName, 'newAnchor') then: 'the new anchor has the correct number of modules' def yangResourceModuleReferences = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'newAnchor') assert expectedNumberOfModulesForAnchor == yangResourceModuleReferences.size() + and: 'the schema set has the correct number of modules too' + def dataspaceEntity = dataspaceRepository.getByName(FUNCTIONAL_TEST_DATASPACE_1) + def schemaSetEntity = schemaSetRepository.getByDataspaceAndName(dataspaceEntity, schemaSetName) + assert expectedNumberOfModulesForAnchor == schemaSetEntity.yangResources.size() cleanup: objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, [ schemaSetName.toString() ]) where: 'the following module references are provided' scenario | numberOfNewModules | existingModuleReferences || expectedNumberOfModulesForAnchor 'empty schema set' | 0 | [ ] || 0 - 'one existing module' | 0 | [bookStoreModuleReference ] || 1 + 'one existing module' | 0 | [ bookStoreModuleReference ] || 1 'two new modules' | 2 | [ ] || 2 - 'two new modules, one existing' | 2 | [bookStoreModuleReference ] || 3 + 'two new modules, one existing' | 2 | [ bookStoreModuleReference ] || 3 'over max batch size #modules' | 101 | [ ] || 101 'two valid, one invalid module' | 2 | [ new ModuleReference('NOT EXIST','IRRELEVANT') ] || 2 } def 'Duplicate schema content.'() { given: 'a map of yang resources' - populateNewYangResourcesNameToContentMapAndAllModuleReferences(1) + populateYangResourceContentPerNameAndAllModuleReferences(1) when: 'a new schema set is created' - objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchema1', newYangResourcesNameToContentMap) + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchema1', yangResourceContentPerName) then: 'the dataspace has one new module (reference)' def numberOfModuleReferencesAfterFirstSchemaSetHasBeenAdded = objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1).size() assert numberOfModuleReferencesAfterFirstSchemaSetHasBeenAdded == originalNumberOfModuleReferences + 1 when: 'a second new schema set is created' - objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchema2', newYangResourcesNameToContentMap) + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchema2', yangResourceContentPerName) then: 'the dataspace has no additional module (reference)' assert numberOfModuleReferencesAfterFirstSchemaSetHasBeenAdded == objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1).size() 
- cleanup: + cleanup: 'the data created in this test' objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, [ 'newSchema1', 'newSchema2']) } def 'Attempt to create schema set, error scenario: #scenario.'() { when: 'attempt to store schema set #schemaSetName in dataspace #dataspaceName' - populateNewYangResourcesNameToContentMapAndAllModuleReferences(0) - objectUnderTest.createSchemaSet(dataspaceName, schemaSetName, newYangResourcesNameToContentMap) + populateYangResourceContentPerNameAndAllModuleReferences(0) + objectUnderTest.createSchemaSet(dataspaceName, schemaSetName, yangResourceContentPerName) then: 'an #expectedException is thrown' thrown(expectedException) where: 'the following data is used' @@ -148,7 +150,7 @@ class ModuleServiceIntegrationSpec extends FunctionalSpecBase { def 'Attempt to create duplicate schema set from modules.'() { when: 'attempt to store duplicate schema set from modules' - objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_SCHEMA_SET, newYangResourcesNameToContentMap, []) + objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_SCHEMA_SET, yangResourceContentPerName, []) then: 'an Already Defined Exception is thrown' thrown(AlreadyDefinedException) } @@ -194,12 +196,12 @@ class ModuleServiceIntegrationSpec extends FunctionalSpecBase { def 'Identifying new module references with #scenario'() { when: 'identifyNewModuleReferences is called' - def result = objectUnderTest.identifyNewModuleReferences(moduleReferences) + def result = objectUnderTest.identifyNewModuleReferences(allModuleReferences) then: 'the correct module references are returned' assert result.size() == expectedResult.size() assert result.containsAll(expectedResult) where: 'the following data is used' - scenario | moduleReferences || expectedResult + scenario | allModuleReferences || expectedResult 'just new module references' | [new ModuleReference('new1', 'r1'), new ModuleReference('new2', 'r1')] || [new ModuleReference('new1', 'r1'), new ModuleReference('new2', 'r1')] 'one new module,one existing reference' | [new ModuleReference('new1', 'r1'), bookStoreModuleReference] || [new ModuleReference('new1', 'r1')] 'no new module references' | [bookStoreModuleReference] || [] @@ -214,29 +216,71 @@ class ModuleServiceIntegrationSpec extends FunctionalSpecBase { assert result.name == 'bookstoreSchemaSet' assert result.moduleReferences.size() == 2 assert result.moduleReferences.containsAll(bookStoreModuleReferenceWithNamespace, bookStoreTypesModuleReferenceWithNamespace) + and: 'the yang resource is stored with the normalized filename' + def fileName = cpsModulePersistenceService.getYangSchemaResources(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_SCHEMA_SET).keySet()[0] + assert fileName == 'bookstore-types@2024-01-30.yang' } def 'Retrieve all schema sets.'() { given: 'an extra schema set is stored' - populateNewYangResourcesNameToContentMapAndAllModuleReferences(1) - objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchema1', newYangResourcesNameToContentMap) + populateYangResourceContentPerNameAndAllModuleReferences(1) + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchema1', yangResourceContentPerName) when: 'all schema sets are retrieved' def result = objectUnderTest.getSchemaSets(FUNCTIONAL_TEST_DATASPACE_1) then: 'the result contains all expected schema sets' assert result.name.size() == 2 assert result.name.containsAll('bookstoreSchemaSet', 'newSchema1') - cleanup: + cleanup: 'the data created 
in this test' objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['newSchema1']) } + def 'Create schema set with duplicate module filename [CPS-138].'() { + given: 'store the original number of sets and modules' + def numberOfSchemaSets = objectUnderTest.getSchemaSets(FUNCTIONAL_TEST_DATASPACE_1).size() + def numberOfModuleReferences = objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1).size() + and: 'create a new schema set using a module with filename identical to a previously stored module (e.g. bookstore)' + populateYangResourceContentPerNameAndAllModuleReferences('otherModule', 1) + def otherModuleContent = yangResourceContentPerName.values()[0] + def mapWithDuplicateName = ['bookstore' : otherModuleContent] + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchema', mapWithDuplicateName) + when: 'the yang resource details are retrieved' + def yangSchemaResources = cpsModulePersistenceService.getYangSchemaResources(FUNCTIONAL_TEST_DATASPACE_1, 'newSchema') + then: 'the file name of the resource has been normalized' + def fileName = yangSchemaResources.keySet()[0] + assert fileName == 'otherModule_0@2000-01-01.yang' + and: 'the yang resource has the correct content' + assert yangSchemaResources.get(fileName) == otherModuleContent + and: 'the number of schema sets and modules has increased as expected' + assert objectUnderTest.getSchemaSets(FUNCTIONAL_TEST_DATASPACE_1).size() == numberOfSchemaSets + 1 + assert objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1).size() == numberOfModuleReferences + 1 + cleanup: 'the data created in this test' + objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['newSchema']) + } + + def 'Create schema set with RFC-6020 filename pattern but incorrect details [CPS-138].'() { + given: 'create a new schema set using a module with filename identical to a previously stored module (e.g. 
bookstore)' + populateYangResourceContentPerNameAndAllModuleReferences('otherModule', 1) + def otherModuleContent = yangResourceContentPerName.values()[0] + def mapIncorrectName = ['wrongModuleAndRevision@1999-08-08.yang': otherModuleContent] + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchema', mapIncorrectName) + when: 'the yang resource details are retrieved' + def yangSchemaResources = cpsModulePersistenceService.getYangSchemaResources(FUNCTIONAL_TEST_DATASPACE_1, 'newSchema') + then: 'the file name of the resource has been normalized' + def fileName = yangSchemaResources.keySet()[0] + assert fileName == 'otherModule_0@2000-01-01.yang' + cleanup: 'the data created in this test' + objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['newSchema']) + } + + /* D E L E T E S C H E M A S E T U S E - C A S E S */ def 'Delete schema sets with(out) cascade.'() { given: 'a schema set' - populateNewYangResourcesNameToContentMapAndAllModuleReferences(1) - objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet', newYangResourcesNameToContentMap) + populateYangResourceContentPerNameAndAllModuleReferences(1) + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet', yangResourceContentPerName) and: 'optionally create anchor for the schema set' if (associateWithAnchor) { cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet', 'newAnchor') @@ -251,7 +295,7 @@ class ModuleServiceIntegrationSpec extends FunctionalSpecBase { then: 'check if the dataspace still contains the new schema set or not' def remainingSchemaSetNames = objectUnderTest.getSchemaSets(FUNCTIONAL_TEST_DATASPACE_1).name assert remainingSchemaSetNames.contains('newSchemaSet') == expectSchemaSetStillPresent - cleanup: + cleanup: 'the data created in this test' objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['newSchemaSet']) where: 'the following options are used' associateWithAnchor | cascadeDeleteAllowedOption || expectSchemaSetStillPresent @@ -263,11 +307,11 @@ class ModuleServiceIntegrationSpec extends FunctionalSpecBase { def 'Delete schema sets with shared resources.'() { given: 'a new schema set' - populateNewYangResourcesNameToContentMapAndAllModuleReferences(1) - objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet1', newYangResourcesNameToContentMap) + populateYangResourceContentPerNameAndAllModuleReferences(1) + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet1', yangResourceContentPerName) and: 'another schema set which shares one yang resource (module)' - populateNewYangResourcesNameToContentMapAndAllModuleReferences(2) - objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet2', newYangResourcesNameToContentMap) + populateYangResourceContentPerNameAndAllModuleReferences(2) + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet2', yangResourceContentPerName) when: 'all schema sets are retrieved' def moduleRevisions = objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1).revision then: 'both modules (revisions) are present' @@ -282,7 +326,7 @@ class ModuleServiceIntegrationSpec extends FunctionalSpecBase { def remainingModuleRevisions = objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1).revision assert remainingModuleRevisions.contains('2000-01-01') assert !remainingModuleRevisions.contains('2001-01-01') - cleanup: + cleanup: 'the data created in this test' 
objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['newSchemaSet1']) } @@ -301,44 +345,45 @@ class ModuleServiceIntegrationSpec extends FunctionalSpecBase { U P G R A D E */ - def 'Upgrade schema set (with existing and new modules, no matching module set tag in NCMP)'() { + def 'Upgrade schema set [with existing and new modules, no matching module set tag in NCMP]'() { given: 'an anchor and schema set with 2 modules (to be upgraded)' - populateNewYangResourcesNameToContentMapAndAllModuleReferences('original', 2) - objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', newYangResourcesNameToContentMap, []) + populateYangResourceContentPerNameAndAllModuleReferences('original', 2) + objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', yangResourceContentPerName, allModuleReferences) cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', 'targetAnchor') def yangResourceModuleReferencesBeforeUpgrade = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'targetAnchor') assert yangResourceModuleReferencesBeforeUpgrade.size() == 2 assert yangResourceModuleReferencesBeforeUpgrade.containsAll([new ModuleReference('original_0','2000-01-01'),new ModuleReference('original_1','2001-01-01')]) and: 'two new 2 modules (from node)' - populateNewYangResourcesNameToContentMapAndAllModuleReferences('new', 2) + populateYangResourceContentPerNameAndAllModuleReferences('new', 2) def newModuleReferences = [new ModuleReference('new_0','2000-01-01'),new ModuleReference('new_1','2001-01-01')] and: 'a list of all module references (normally retrieved from node)' - def allModuleReferences = [] - allModuleReferences.add(bookStoreModuleReference) - allModuleReferences.addAll(newModuleReferences) + def allOtherModuleReferences = [] + allOtherModuleReferences.add(bookStoreModuleReference) + allOtherModuleReferences.addAll(newModuleReferences) when: 'the schema set is upgraded' - objectUnderTest.upgradeSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', newYangResourcesNameToContentMap, allModuleReferences) + objectUnderTest.upgradeSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', yangResourceContentPerName, allOtherModuleReferences) then: 'the new anchor has the correct new and existing modules' def yangResourceModuleReferencesAfterUpgrade = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'targetAnchor') assert yangResourceModuleReferencesAfterUpgrade.size() == 3 assert yangResourceModuleReferencesAfterUpgrade.contains(bookStoreModuleReference) assert yangResourceModuleReferencesAfterUpgrade.containsAll(newModuleReferences); - cleanup: + cleanup: 'the data created in this test' objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['targetSchema']) } - def 'Upgrade existing schema set from another anchor (used in NCMP for matching module set tag)'() { + def 'Upgrade existing schema set from another anchor [used in NCMP for matching module set tag]'() { given: 'an anchor and schema set with 1 module (target)' - populateNewYangResourcesNameToContentMapAndAllModuleReferences('target', 1) - objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', newYangResourcesNameToContentMap, []) + populateYangResourceContentPerNameAndAllModuleReferences('target', 1) + objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', yangResourceContentPerName, 
allModuleReferences) cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', 'targetAnchor') def moduleReferencesBeforeUpgrade = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'targetAnchor') assert moduleReferencesBeforeUpgrade.size() == 1 and: 'another anchor and schema set with 2 other modules (source for upgrade)' - populateNewYangResourcesNameToContentMapAndAllModuleReferences('source', 2) - objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'sourceSchema', newYangResourcesNameToContentMap, []) + populateYangResourceContentPerNameAndAllModuleReferences('source', 2) + objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'sourceSchema', yangResourceContentPerName, allModuleReferences) cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, 'sourceSchema', 'sourceAnchor') - assert objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'sourceAnchor').size() == 2 + def yangResourcesModuleReferences = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'sourceAnchor') + assert yangResourcesModuleReferences.size() == 2 when: 'the target schema is upgraded using the module references from the source anchor' def moduleReferencesFromSourceAnchor = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'sourceAnchor') objectUnderTest.upgradeSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', noNewModules, moduleReferencesFromSourceAnchor) @@ -348,7 +393,7 @@ class ModuleServiceIntegrationSpec extends FunctionalSpecBase { and: 'the associated target anchor has the same module references (without namespace but that is a legacy issue)' def anchorModuleReferencesAfterUpgrade = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'targetAnchor') assert anchorModuleReferencesAfterUpgrade.containsAll([new ModuleReference('source_0','2000-01-01'),new ModuleReference('source_1','2001-01-01')]); - cleanup: + cleanup: 'the data created in this test' objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['sourceSchema', 'targetSchema']) } @@ -356,17 +401,19 @@ class ModuleServiceIntegrationSpec extends FunctionalSpecBase { H E L P E R M E T H O D S */ - def populateNewYangResourcesNameToContentMapAndAllModuleReferences(numberOfModules) { - populateNewYangResourcesNameToContentMapAndAllModuleReferences('name', numberOfModules) + def populateYangResourceContentPerNameAndAllModuleReferences(numberOfModules) { + populateYangResourceContentPerNameAndAllModuleReferences('name', numberOfModules) } - def populateNewYangResourcesNameToContentMapAndAllModuleReferences(namePrefix, numberOfModules) { + def populateYangResourceContentPerNameAndAllModuleReferences(namePrefix, numberOfModules) { + yangResourceContentPerName.clear() + allModuleReferences.clear() numberOfModules.times { def uniqueName = namePrefix + '_' + it def uniqueRevision = String.valueOf(2000 + it) + '-01-01' - moduleReferences.add(new ModuleReference(uniqueName, uniqueRevision)) + allModuleReferences.add(new ModuleReference(uniqueName, uniqueRevision)) def uniqueContent = NEW_RESOURCE_CONTENT.replace(NEW_RESOURCE_REVISION, uniqueRevision).replace('module test_module', 'module '+uniqueName) - newYangResourcesNameToContentMap.put(uniqueRevision, uniqueContent) + yangResourceContentPerName.put(uniqueName, uniqueContent) } } diff --git 
a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/QueryServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/QueryServiceIntegrationSpec.groovy index e4d75aa378..212686e917 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/QueryServiceIntegrationSpec.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/QueryServiceIntegrationSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * Modifications Copyright (C) 2023-2025 TechMahindra Ltd * ================================================================================ * Licensed under the Apache License, Version 2.0 (the 'License'); @@ -21,18 +21,17 @@ package org.onap.cps.integration.functional.cps +import static org.onap.cps.api.parameters.FetchDescendantsOption.DIRECT_CHILDREN_ONLY +import static org.onap.cps.api.parameters.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS +import static org.onap.cps.api.parameters.FetchDescendantsOption.OMIT_DESCENDANTS +import static org.onap.cps.api.parameters.PaginationOption.NO_PAGINATION + import java.time.OffsetDateTime import org.onap.cps.api.CpsQueryService import org.onap.cps.integration.base.FunctionalSpecBase import org.onap.cps.api.parameters.FetchDescendantsOption import org.onap.cps.api.parameters.PaginationOption import org.onap.cps.api.exceptions.CpsPathException -import spock.lang.Ignore - -import static org.onap.cps.api.parameters.FetchDescendantsOption.DIRECT_CHILDREN_ONLY -import static org.onap.cps.api.parameters.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS -import static org.onap.cps.api.parameters.FetchDescendantsOption.OMIT_DESCENDANTS -import static org.onap.cps.api.parameters.PaginationOption.NO_PAGINATION class QueryServiceIntegrationSpec extends FunctionalSpecBase { @@ -57,7 +56,6 @@ class QueryServiceIntegrationSpec extends FunctionalSpecBase { 'the AND is used where result does not exist' | '//books[@lang="English" and @price=1000]' || 0 | [] } - @Ignore // TODO will be implemented in CPS-2416 def 'Query data leaf using CPS path for #scenario.'() { when: 'query data leaf for bookstore container' def result = objectUnderTest.queryDataLeaf(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, Object.class) @@ -68,9 +66,9 @@ class QueryServiceIntegrationSpec extends FunctionalSpecBase { 'all books' | '//books/@title' || 19 'all books in a category' | '/bookstore/categories[@code=5]/books/@title' || 10 'non-existing path' | '/non-existing/@title' || 0 + 'non-existing attribute' | '//books/@non-existing' || 0 } - @Ignore def 'Query data leaf with type #leafType using CPS path.'() { given: 'a cps path query for two books, returning only #leafName' def cpsPath = '//books[@title="Matilda" or @title="Good Omens"]/@' + leafName @@ -81,11 +79,10 @@ class QueryServiceIntegrationSpec extends FunctionalSpecBase { where: leafName | leafType || expectedResults 'lang' | String.class || ['English'] - 'price' | Number.class || [13, 20] + 'price' | Integer.class || [13, 20] 'editions' | List.class || [[1988, 2000], [2006]] } - @Ignore def 'Query data leaf using CPS path with ancestor axis.'() { given: 'a cps path query that will return the names of the categories of two books' def cpsPath = '//books[@title="Matilda" or @title="Good Omens"]/ancestor::categories/@name' @@ 
-95,6 +92,15 @@ class QueryServiceIntegrationSpec extends FunctionalSpecBase { assert result == ['Children', 'Comedy'] as Set } + def 'Attempt to query data leaf without specifying leaf name gives an error.'() { + given: 'a cps path without an attribute axis' + def cpsPathWithoutAttributeAxis = '//books' + when: 'query data leaf is called without attribute axis in cps path' + objectUnderTest.queryDataLeaf(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPathWithoutAttributeAxis, String.class) + then: 'illegal argument exception is thrown' + thrown(IllegalArgumentException) + } + def 'Cps Path query using comparative and boolean operators.'() { given: 'a cps path query in the discount category' def cpsPath = "/bookstore/categories[@code='5']/books" + leafCondition @@ -461,4 +467,30 @@ class QueryServiceIntegrationSpec extends FunctionalSpecBase { and: 'the queried nodes have expected bookstore names' assert result.anchorName.toSet() == [BOOKSTORE_ANCHOR_1, BOOKSTORE_ANCHOR_2].toSet() } + + def 'Query data nodes with a limit of #limit.' () { + when: 'a query for data nodes is executed with a result limit' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories', OMIT_DESCENDANTS, limit) + then: 'the expected number of nodes is returned' + assert countDataNodesInTree(result) == expectedNumberOfResults + where: 'the following parameters are used' + limit || expectedNumberOfResults + 1 || 1 + 2 || 2 + 0 || 5 + -1 || 5 + } + + def 'Query data leaf with a limit of #limit.' () { + when: 'a query for data leaf is executed with a result limit' + def result = objectUnderTest.queryDataLeaf(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories/@name', limit, String) + then: 'the expected number of leaf values is returned' + assert result.size() == expectedNumberOfResults + where: 'the following parameters are used' + limit || expectedNumberOfResults + 1 || 1 + 2 || 2 + 0 || 5 + -1 || 5 + } } diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/AlternateIdSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/AlternateIdSpec.groovy index 222b3c0f6f..b1b777c79f 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/AlternateIdSpec.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/AlternateIdSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the 'License'); * you may not use this file except in compliance with the License. 
@@ -30,16 +30,13 @@ class AlternateIdSpec extends CpsIntegrationSpecBase { def setup() { dmiDispatcher1.moduleNamesPerCmHandleId['ch-1'] = ['M1', 'M2'] - registerCmHandle(DMI1_URL, 'ch-1', NO_MODULE_SET_TAG, 'alternateId') } - def cleanup() { - deregisterCmHandle(DMI1_URL, 'ch-1') - } - - def 'AlternateId in pass-through data operations should return OK status.'() { - given: 'the URL for the pass-through data request' - def url = '/ncmp/v1/ch/alternateId/data/ds/ncmp-datastore:passthrough-running' + def 'Pass-through data operations using #scenario as reference.'() { + given: 'a cm handle with an alternate id' + registerCmHandle(DMI1_URL, 'ch-1', NO_MODULE_SET_TAG, alternateId) + and: 'the URL for the pass-through data request' + def url = "/ncmp/v1/ch/${cmHandleReference}/data/ds/ncmp-datastore:passthrough-running" when: 'a pass-through data request is sent to NCMP' def response = mvc.perform(get(url) .queryParam('resourceIdentifier', 'my-resource-id') @@ -47,8 +44,13 @@ class AlternateIdSpec extends CpsIntegrationSpecBase { .andReturn().response then: 'response status is Ok' assert response.status == HttpStatus.OK.value() + cleanup: 'remove the test cm handle' + deregisterCmHandle(DMI1_URL, 'ch-1') + where: 'the following ids are used' + scenario | alternateId | cmHandleReference + 'standard id' | 'dont care' | 'ch-1' + 'alt-id with =' | 'alt=1' | 'alt=1' + 'alt-id without =' | 'alt-1' | 'alt-1' } - - } diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleCreateSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleCreateSpec.groovy index 6f063fb222..d3a5c9a2ec 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleCreateSpec.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleCreateSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the 'License'); * you may not use this file except in compliance with the License. 
@@ -20,6 +20,7 @@ package org.onap.cps.integration.functional.ncmp +import java.time.Duration import org.apache.kafka.clients.consumer.KafkaConsumer import org.apache.kafka.common.serialization.StringDeserializer import org.onap.cps.integration.KafkaTestContainer @@ -32,9 +33,6 @@ import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle import org.onap.cps.ncmp.events.lcm.v1.LcmEvent import org.onap.cps.ncmp.api.inventory.models.CmHandleState import org.onap.cps.ncmp.api.inventory.models.LockReasonCategory -import spock.util.concurrent.PollingConditions - -import java.time.Duration class CmHandleCreateSpec extends CpsIntegrationSpecBase { @@ -48,7 +46,7 @@ class CmHandleCreateSpec extends CpsIntegrationSpecBase { subscribeAndClearPreviousMessages() } - def cleanupSpec() { + def cleanup() { kafkaConsumer.unsubscribe() kafkaConsumer.close() } @@ -73,9 +71,7 @@ class CmHandleCreateSpec extends CpsIntegrationSpecBase { moduleSyncWatchdog.moduleSyncAdvisedCmHandles() then: 'CM-handle goes to READY state after module sync' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState(uniqueId).cmHandleState - }) + assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState(uniqueId).cmHandleState and: 'the CM-handle has expected modules' assert ['M1', 'M2'] == objectUnderTest.getYangResourcesModuleReferences(uniqueId).moduleName.sort() @@ -113,11 +109,9 @@ class CmHandleCreateSpec extends CpsIntegrationSpecBase { moduleSyncWatchdog.moduleSyncAdvisedCmHandles() then: 'CM-handle goes to LOCKED state with reason MODULE_SYNC_FAILED' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - def cmHandleCompositeState = objectUnderTest.getCmHandleCompositeState('ch-1') - assert cmHandleCompositeState.cmHandleState == CmHandleState.LOCKED - assert cmHandleCompositeState.lockReason.lockReasonCategory == LockReasonCategory.MODULE_SYNC_FAILED - }) + def cmHandleCompositeState = objectUnderTest.getCmHandleCompositeState('ch-1') + assert cmHandleCompositeState.cmHandleState == CmHandleState.LOCKED + assert cmHandleCompositeState.lockReason.lockReasonCategory == LockReasonCategory.MODULE_SYNC_FAILED and: 'CM-handle has no modules' assert objectUnderTest.getYangResourcesModuleReferences('ch-1').empty @@ -141,9 +135,7 @@ class CmHandleCreateSpec extends CpsIntegrationSpecBase { moduleSyncWatchdog.moduleSyncAdvisedCmHandles() then: 'the CM-handle goes to READY state' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState('ch-3').cmHandleState - }) + assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState('ch-3').cmHandleState and: 'the CM-handle has expected moduleSetTag' assert objectUnderTest.getNcmpServiceCmHandle('ch-3').moduleSetTag == 'B' @@ -178,9 +170,9 @@ class CmHandleCreateSpec extends CpsIntegrationSpecBase { assert dmiPluginRegistrationResponse.createdCmHandles.sort { it.cmHandle } == [ CmHandleRegistrationResponse.createSuccessResponse('ch-3'), CmHandleRegistrationResponse.createSuccessResponse('ch-4'), - CmHandleRegistrationResponse.createFailureResponse('ch-5', NcmpResponseStatus.ALTERNATE_ID_ALREADY_ASSOCIATED), + CmHandleRegistrationResponse.createFailureResponse('ch-5', NcmpResponseStatus.CM_HANDLE_ALREADY_EXIST), CmHandleRegistrationResponse.createSuccessResponse('ch-6'), - CmHandleRegistrationResponse.createFailureResponse('ch-7', 
NcmpResponseStatus.ALTERNATE_ID_ALREADY_ASSOCIATED), + CmHandleRegistrationResponse.createFailureResponse('ch-7', NcmpResponseStatus.CM_HANDLE_ALREADY_EXIST), ] cleanup: 'deregister CM handles' @@ -200,9 +192,7 @@ class CmHandleCreateSpec extends CpsIntegrationSpecBase { moduleSyncWatchdog.moduleSyncAdvisedCmHandles() then: 'CM-handles go to LOCKED state' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - assert objectUnderTest.getCmHandleCompositeState('ch-1').cmHandleState == CmHandleState.LOCKED - }) + assert objectUnderTest.getCmHandleCompositeState('ch-1').cmHandleState == CmHandleState.LOCKED when: 'DMI is available for retry' dmiDispatcher1.moduleNamesPerCmHandleId = ['ch-1': ['M1', 'M2'], 'ch-2': ['M1', 'M2']] @@ -212,11 +202,9 @@ class CmHandleCreateSpec extends CpsIntegrationSpecBase { 2.times { moduleSyncWatchdog.moduleSyncAdvisedCmHandles() } then: 'Both CM-handles go to READY state' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - ['ch-1', 'ch-2'].each { cmHandleId -> - assert objectUnderTest.getCmHandleCompositeState(cmHandleId).cmHandleState == CmHandleState.READY - } - }) + ['ch-1', 'ch-2'].each { cmHandleId -> + assert objectUnderTest.getCmHandleCompositeState(cmHandleId).cmHandleState == CmHandleState.READY + } and: 'Both CM-handles have expected modules' ['ch-1', 'ch-2'].each { cmHandleId -> diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleUpdateSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleUpdateSpec.groovy index f2593ce587..22bbaa81df 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleUpdateSpec.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleUpdateSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the 'License'); * you may not use this file except in compliance with the License. 
@@ -76,8 +76,8 @@ class CmHandleUpdateSpec extends CpsIntegrationSpecBase { def dmiPluginRegistrationResponse = objectUnderTest.updateDmiRegistration(new DmiPluginRegistration(dmiPlugin: DMI1_URL, updatedCmHandles: [cmHandleToUpdate])) - then: 'registration gives failure response, due to alternate ID being already associated' - assert dmiPluginRegistrationResponse.updatedCmHandles == [CmHandleRegistrationResponse.createFailureResponse('ch-1', NcmpResponseStatus.ALTERNATE_ID_ALREADY_ASSOCIATED)] + then: 'registration gives failure response, due to cm-handle already existing' + assert dmiPluginRegistrationResponse.updatedCmHandles == [CmHandleRegistrationResponse.createFailureResponse('ch-1', NcmpResponseStatus.CM_HANDLE_ALREADY_EXIST)] and: 'the CM-handle still has the old alternate ID' assert objectUnderTest.getNcmpServiceCmHandle('ch-1').alternateId == 'original' diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleUpgradeSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleUpgradeSpec.groovy index 28714fd123..43540a9675 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleUpgradeSpec.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleUpgradeSpec.groovy @@ -27,7 +27,6 @@ import org.onap.cps.ncmp.api.inventory.models.DmiPluginRegistration import org.onap.cps.ncmp.api.inventory.models.LockReasonCategory import org.onap.cps.ncmp.api.inventory.models.UpgradedCmHandles import org.onap.cps.ncmp.impl.NetworkCmProxyInventoryFacadeImpl -import spock.util.concurrent.PollingConditions class CmHandleUpgradeSpec extends CpsIntegrationSpecBase { @@ -67,9 +66,7 @@ class CmHandleUpgradeSpec extends CpsIntegrationSpecBase { 2.times { moduleSyncWatchdog.moduleSyncAdvisedCmHandles() } then: 'CM-handle goes to READY state' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState(cmHandleId).cmHandleState - }) + assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState(cmHandleId).cmHandleState and: 'the CM-handle has expected moduleSetTag' assert objectUnderTest.getNcmpServiceCmHandle(cmHandleId).moduleSetTag == updatedModuleSetTag @@ -79,7 +76,6 @@ class CmHandleUpgradeSpec extends CpsIntegrationSpecBase { cleanup: 'deregister CM-handle and remove all associated module resources' deregisterCmHandle(DMI1_URL, cmHandleId) - cpsModuleService.deleteAllUnusedYangModuleData() where: 'following module set tags are used' initialModuleSetTag | updatedModuleSetTag @@ -112,9 +108,7 @@ class CmHandleUpgradeSpec extends CpsIntegrationSpecBase { 2.times { moduleSyncWatchdog.moduleSyncAdvisedCmHandles() } and: 'CM-handle goes to READY state' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState(cmHandleId).cmHandleState - }) + assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState(cmHandleId).cmHandleState and: 'the CM-handle has expected moduleSetTag' assert objectUnderTest.getNcmpServiceCmHandle(cmHandleId).moduleSetTag == updatedModuleSetTag @@ -171,11 +165,9 @@ class CmHandleUpgradeSpec extends CpsIntegrationSpecBase { 2.times { moduleSyncWatchdog.moduleSyncAdvisedCmHandles() } then: 'CM-handle goes to LOCKED state with reason MODULE_UPGRADE_FAILED' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - def 
cmHandleCompositeState = objectUnderTest.getCmHandleCompositeState(cmHandleId) - assert cmHandleCompositeState.cmHandleState == CmHandleState.LOCKED - assert cmHandleCompositeState.lockReason.lockReasonCategory == LockReasonCategory.MODULE_UPGRADE_FAILED - }) + def cmHandleCompositeState = objectUnderTest.getCmHandleCompositeState(cmHandleId) + assert cmHandleCompositeState.cmHandleState == CmHandleState.LOCKED + assert cmHandleCompositeState.lockReason.lockReasonCategory == LockReasonCategory.MODULE_UPGRADE_FAILED and: 'the CM-handle has same moduleSetTag as before' assert objectUnderTest.getNcmpServiceCmHandle(cmHandleId).moduleSetTag == 'oldTag' diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/DataJobStatusServiceSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/DataJobStatusServiceSpec.groovy index 6e5c0e40c2..162b51844c 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/DataJobStatusServiceSpec.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/DataJobStatusServiceSpec.groovy @@ -18,6 +18,6 @@ class DataJobStatusServiceSpec extends CpsIntegrationSpecBase { when: 'the data job status checked' def result = dataJobStatusService.getDataJobStatus(authorization, dmiServiceName, dataProducerId, dataProducerJobId) then: 'the status is that defined in the mock service.' - assert result == 'status details from mock service' + assert result == '{"status":"status details from mock service"}' } } diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/LegacyBatchDataOperationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/LegacyBatchDataOperationSpec.groovy new file mode 100644 index 0000000000..83f91226be --- /dev/null +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/LegacyBatchDataOperationSpec.groovy @@ -0,0 +1,138 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.integration.functional.ncmp + +import io.cloudevents.CloudEvent +import io.cloudevents.kafka.CloudEventDeserializer +import java.time.Duration +import org.apache.kafka.clients.consumer.KafkaConsumer +import org.onap.cps.integration.KafkaTestContainer +import org.onap.cps.integration.base.CpsIntegrationSpecBase +import org.onap.cps.ncmp.events.async1_0_0.Data +import org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent +import org.onap.cps.ncmp.events.async1_0_0.Response +import org.springframework.http.MediaType +import spock.util.concurrent.PollingConditions + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status + +class LegacyBatchDataOperationSpec extends CpsIntegrationSpecBase { + + KafkaConsumer kafkaConsumer + + def setup() { + kafkaConsumer = KafkaTestContainer.getConsumer('test-group', CloudEventDeserializer.class) + kafkaConsumer.subscribe(['legacy-batch-topic']) + kafkaConsumer.poll(Duration.ofMillis(500)) + dmiDispatcher1.moduleNamesPerCmHandleId['ch-1'] = ['M1', 'M2'] + registerCmHandle(DMI1_URL, 'ch-1', 'tagA', 'alt-1') + registerCmHandleWithoutWaitForReady(DMI1_URL, 'not-ready-ch', NO_MODULE_SET_TAG, 'alt-3') + } + + def cleanup() { + deregisterCmHandles(DMI1_URL, ['ch-1', 'not-ready-ch']) + kafkaConsumer.unsubscribe() + kafkaConsumer.close() + } + + def 'Batch pass-through data operation is forwarded to DMI plugin.'() { + given: 'a request body containing a data read operation for an existing and ready CM-handle' + def dataOperationRequestBody = makeDataOperationRequestBody('ch-1') + + when: 'a pass-through batch data request sent to NCMP is successful' + mvc.perform(post('/ncmp/v1/data') + .queryParam('topic', 'legacy-batch-topic') + .contentType(MediaType.APPLICATION_JSON) + .content(dataOperationRequestBody) + ).andExpect(status().is2xxSuccessful()) + + then: 'DMI will receive the async request' + new PollingConditions().within(2, () -> { + assert dmiDispatcher1.receivedDataOperationRequest.isEmpty() == false + }) + and: 'the request has one operation' + assert dmiDispatcher1.receivedDataOperationRequest.operations.size() == 1 + def operation = dmiDispatcher1.receivedDataOperationRequest.operations[0] + and: 'the operation has the expected ID' + assert operation.operationId == '12' + and: 'the operation is for the expected CM-handles' + assert operation.cmHandles.id == ['ch-1'] + } + + def 'Batch pass-through data operation reports errors on kafka topic.'() { + given: 'a request body containing a data read operation for #cmHandleId' + def dataOperationRequestBody = makeDataOperationRequestBody(cmHandleId) + + when: 'a pass-through batch data request specifying a kafka topic is sent to NCMP successfully' + mvc.perform(post('/ncmp/v1/data') + .queryParam('topic', 'legacy-batch-topic') + .contentType(MediaType.APPLICATION_JSON) + .content(dataOperationRequestBody)) + .andExpect(status().is2xxSuccessful()) + + then: 'there is one kafka message' + def consumerRecords = kafkaConsumer.poll(Duration.ofMillis(1000)) + assert consumerRecords.size() == 1 + + and: 'it is a cloud event' + assert consumerRecords[0].value() instanceof CloudEvent + + and: 'it contains the data operation event with the expected error status' + def jsonData = new String(consumerRecords[0].value().data.toBytes()) + def
dataOperationEvent = jsonObjectMapper.convertJsonString(jsonData, DataOperationEvent) + assert dataOperationEvent == new DataOperationEvent(data: + new Data(responses: [ + new Response( + operationId: 12, + resourceIdentifier: 'ManagedElement=NRNode1/GNBDUFunction=1', + options: '(fields=NRCellDU/attributes/cellLocalId)', + ids: [cmHandleId], + statusCode: expectedStatusCode, + statusMessage: expectedStatusMessage, + result: null), + ])) + + where: + scenario | cmHandleId || expectedStatusCode | expectedStatusMessage + 'CM handle not ready' | 'not-ready-ch' || 101 | 'cm handle(s) not ready' + // FIXME BUG CPS-2769: CM handle not found causes batch to fail + // 'CM handle not found' | 'not-found-ch' || 100 | 'cm handle reference(s) not found' + } + + def makeDataOperationRequestBody(cmHandleId) { + return """ + { + "operations": [ + { + "operation": "read", + "operationId": "12", + "datastore": "ncmp-datastore:passthrough-operational", + "resourceIdentifier": "ManagedElement=NRNode1/GNBDUFunction=1", + "options": "(fields=NRCellDU/attributes/cellLocalId)", + "targetIds": ["%s"] + } + ] + } + """.formatted(cmHandleId) + } + +} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/ModuleSyncWatchdogIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/ModuleSyncWatchdogIntegrationSpec.groovy index 43db9b208e..8bbe0ca194 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/ModuleSyncWatchdogIntegrationSpec.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/ModuleSyncWatchdogIntegrationSpec.groovy @@ -20,17 +20,18 @@ package org.onap.cps.integration.functional.ncmp +import com.hazelcast.map.IMap import io.micrometer.core.instrument.MeterRegistry -import spock.lang.Ignore - -import java.util.concurrent.Executors -import java.util.concurrent.TimeUnit import org.onap.cps.integration.base.CpsIntegrationSpecBase import org.onap.cps.ncmp.impl.inventory.sync.ModuleSyncWatchdog import org.springframework.beans.factory.annotation.Autowired import org.springframework.util.StopWatch +import spock.lang.Ignore import spock.util.concurrent.PollingConditions +import java.util.concurrent.Executors +import java.util.concurrent.TimeUnit + class ModuleSyncWatchdogIntegrationSpec extends CpsIntegrationSpecBase { ModuleSyncWatchdog objectUnderTest @@ -38,11 +39,15 @@ class ModuleSyncWatchdogIntegrationSpec extends CpsIntegrationSpecBase { @Autowired MeterRegistry meterRegistry + @Autowired + IMap<String, Integer> cmHandlesByState + def executorService = Executors.newFixedThreadPool(2) def PARALLEL_SYNC_SAMPLE_SIZE = 100 def setup() { objectUnderTest = moduleSyncWatchdog + clearCmHandleStateGauge() } def cleanup() { @@ -64,18 +69,16 @@ class ModuleSyncWatchdogIntegrationSpec extends CpsIntegrationSpecBase { deregisterSequenceOfCmHandles(DMI1_URL, PARALLEL_SYNC_SAMPLE_SIZE, 1) } - @Ignore - /** this test has intermittent failures, due to timeouts. + /** this test has intermittent failures, due to race conditions * Ignored but left here as it might be valuable to further optimization investigations. 
**/ - + @Ignore def 'CPS-2478 Highlight (and improve) module sync inefficiencies.'() { given: 'register 250 cm handles with module set tag cps-2478-A' def numberOfTags = 2 def cmHandlesPerTag = 250 def totalCmHandles = numberOfTags * cmHandlesPerTag def offset = 1 - def minimumBatches = totalCmHandles / 100 registerSequenceOfCmHandlesWithManyModuleReferencesButDoNotWaitForReady(DMI1_URL, 'cps-2478-A', cmHandlesPerTag, offset) and: 'register another 250 cm handles with module set tag cps-2478-B' offset += cmHandlesPerTag @@ -85,23 +88,21 @@ class ModuleSyncWatchdogIntegrationSpec extends CpsIntegrationSpecBase { when: 'sync all advised cm handles' objectUnderTest.moduleSyncAdvisedCmHandles() Thread.sleep(100) - then: 'retry until both schema sets are stored in db (1 schema set for each module set tag)' - def dbSchemaSetStorageTimer = meterRegistry.get('cps.module.persistence.schemaset.store').timer() - new PollingConditions().within(10, () -> { - objectUnderTest.moduleSyncAdvisedCmHandles() - Thread.sleep(100) - assert dbSchemaSetStorageTimer.count() == 2 - }) - then: 'wait till at least 5 batches of state updates are done (often more because of retries of locked cm handles)' - def dbStateUpdateTimer = meterRegistry.get('cps.ncmp.cmhandle.state.update.batch').timer() + then: 'Keep processing until there are no more LOCKED or ADVISED cm handles' new PollingConditions().within(10, () -> { - assert dbStateUpdateTimer.count() >= minimumBatches + def advised = cmHandlesByState.get('advisedCmHandlesCount') + def locked = cmHandlesByState.get('lockedCmHandlesCount') + if ( locked > 0 || advised > 0 ) { + println "CPS-2576 Need to retry ${locked} LOCKED / ${advised} ADVISED cm Handles" + objectUnderTest.moduleSyncAdvisedCmHandles() + Thread.sleep(100) + } + assert cmHandlesByState.get('lockedCmHandlesCount') + cmHandlesByState.get('advisedCmHandlesCount') == 0 }) - and: 'one call to DMI per module set tag to get module references (may be more due to parallel processing of batches)' - def dmiModuleRetrievalTimer = meterRegistry.get('cps.ncmp.inventory.module.references.from.dmi').timer() - assert dmiModuleRetrievalTimer.count() >= numberOfTags && dmiModuleRetrievalTimer.count() <= minimumBatches - and: 'log the relevant instrumentation' + def dmiModuleRetrievalTimer = meterRegistry.get('cps.ncmp.inventory.module.references.from.dmi').timer() + def dbSchemaSetStorageTimer = meterRegistry.get('cps.module.persistence.schemaset.createFromNewAndExistingModules').timer() + def dbStateUpdateTimer = meterRegistry.get('cps.ncmp.cmhandle.state.update.batch').timer() logInstrumentation(dmiModuleRetrievalTimer, 'get modules from DMI ') logInstrumentation(dbSchemaSetStorageTimer, 'store schema sets ') logInstrumentation(dbStateUpdateTimer, 'batch state updates ') @@ -110,7 +111,6 @@ def stopWatch = new StopWatch() stopWatch.start() deregisterSequenceOfCmHandles(DMI1_URL, totalCmHandles, 1) - cpsModuleService.deleteAllUnusedYangModuleData() stopWatch.stop() println "*** CPS-2478, Deletion of $totalCmHandles cm handles took ${stopWatch.getTotalTimeMillis()} milliseconds" } @@ -131,6 +131,28 @@ class ModuleSyncWatchdogIntegrationSpec extends CpsIntegrationSpecBase { deregisterSequenceOfCmHandles(DMI1_URL, PARALLEL_SYNC_SAMPLE_SIZE, 1) } + /** this test has intermittent failures, due to race conditions + * Ignored but left here as it might be valuable to further optimization investigations.
+ **/ + @Ignore + def 'Schema sets with overlapping modules processed at the same time (DB constraint violation).'() { + given: 'register one batch (100) cm handles of tag A (with overlapping module names)' + registerSequenceOfCmHandlesWithManyModuleReferencesButDoNotWaitForReady(DMI1_URL, 'tagA', 100, 1, ModuleNameStrategy.OVERLAPPING) + and: 'register another batch cm handles of tag B (with overlapping module names)' + registerSequenceOfCmHandlesWithManyModuleReferencesButDoNotWaitForReady(DMI1_URL, 'tagB', 100, 101, ModuleNameStrategy.OVERLAPPING) + and: 'populate the work queue with both batches' + objectUnderTest.populateWorkQueueIfNeeded() + when: 'advised cm handles are processed on 2 threads (exactly one batch for each)' + objectUnderTest.moduleSyncAdvisedCmHandles() + executorService.execute(moduleSyncAdvisedCmHandles) + then: 'all cm handles have been processed' + assert getNumberOfProcessedCmHandles() == 200 + then: 'at least 1 cm handle is in state LOCKED' + assert cmHandlesByState.get('lockedCmHandlesCount') >= 1 + cleanup: 'remove all test cm handles' + deregisterSequenceOfCmHandles(DMI1_URL, 200, 1) + } + def 'Populate module sync work queue on two parallel threads with a slight difference in start time.'() { // This test proved that the issue in CPS-2403 did not arise if the the queue was populated and given time to be distributed given: 'the queue is empty at the start' @@ -169,4 +191,21 @@ class ModuleSyncWatchdogIntegrationSpec extends CpsIntegrationSpecBase { } } + def moduleSyncAdvisedCmHandles = () -> { + try { + objectUnderTest.moduleSyncAdvisedCmHandles() + } catch (InterruptedException e) { + e.printStackTrace() + } + } + + def clearCmHandleStateGauge() { + cmHandlesByState.keySet().each { cmHandlesByState.put(it, 0)} + } + + def getNumberOfProcessedCmHandles() { + return cmHandlesByState.get('readyCmHandlesCount') + cmHandlesByState.get('lockedCmHandlesCount') + } + + } diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/RestApiSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/RestApiSpec.groovy index 7ce3cf5e17..77349fe0a5 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/RestApiSpec.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/RestApiSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the 'License'); * you may not use this file except in compliance with the License. @@ -29,7 +29,6 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers. 
import org.onap.cps.integration.base.CpsIntegrationSpecBase import org.springframework.http.MediaType -import spock.util.concurrent.PollingConditions class RestApiSpec extends CpsIntegrationSpecBase { @@ -47,13 +46,11 @@ class RestApiSpec extends CpsIntegrationSpecBase { and: 'the module sync watchdog is triggered' moduleSyncWatchdog.moduleSyncAdvisedCmHandles() then: 'CM-handles go to READY state' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - (1..3).each { - mvc.perform(get('/ncmp/v1/ch/ch-'+it)) - .andExpect(status().isOk()) - .andExpect(jsonPath('$.state.cmHandleState').value('READY')) - } - }) + (1..3).each { + mvc.perform(get('/ncmp/v1/ch/ch-'+it)) + .andExpect(status().isOk()) + .andExpect(jsonPath('$.state.cmHandleState').value('READY')) + } } def 'Search for CM Handles by module using REST API.'() { diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/WriteSubJobSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/WriteSubJobSpec.groovy index 834e1399e3..46c641cd23 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/WriteSubJobSpec.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/WriteSubJobSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the 'License'); * you may not use this file except in compliance with the License. @@ -59,9 +59,9 @@ class WriteSubJobSpec extends CpsIntegrationSpecBase { then: 'each DMI received the expected sub-jobs and the response has the expected values' assert response.size() == 2 assert response[0].class == SubJobWriteResponse.class - assert response[0].subJobId == "some sub job id" - assert response[0].dmiServiceName == "some dmi service name" - assert response[0].dataProducerId == "some data producer id" + assert response[0].subJobId == 'some sub job id' + assert response[0].dmiServiceName.startsWith('http://localhost:') || response[0].dmiServiceName().startsWith('http://kubernetes') + assert response[0].dataProducerId == 'some data producer id' and: 'dmi 1 received the correct job details' def receivedSubJobsForDispatcher1 = dmiDispatcher1.receivedSubJobs['?destination=d1']['data'].collect() assert receivedSubJobsForDispatcher1.size() == 2 diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsModuleServicePerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsModuleServicePerfTest.groovy index a37bb6ad4d..d8553419ce 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsModuleServicePerfTest.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsModuleServicePerfTest.groovy @@ -47,15 +47,15 @@ class CpsModuleServicePerfTest extends CpsPerfTestBase { def 'Store new schema set with many modules'() { when: 'a new schema set with 200 modules is stored' - def newYangResourcesNameToContentMap = [:] + def newYangResourceContentPerName = [:] (1..200).each { def year = 2000 + it def resourceName = "module${it}".toString() def moduleName = "stores${it}" def content = NEW_RESOURCE_CONTENT.replace('2020',String.valueOf(year)).replace('stores',moduleName) - 
newYangResourcesNameToContentMap.put(resourceName, content) + newYangResourceContentPerName.put(resourceName, content) } - objectUnderTest.createSchemaSet(CPS_PERFORMANCE_TEST_DATASPACE, 'perfSchemaSet', newYangResourcesNameToContentMap) + objectUnderTest.createSchemaSet(CPS_PERFORMANCE_TEST_DATASPACE, 'perfSchemaSet', newYangResourceContentPerName) then: 'the schema set is persisted correctly' def result = cpsModuleService.getSchemaSet(CPS_PERFORMANCE_TEST_DATASPACE, 'perfSchemaSet') result.moduleReferences.size() == 200 diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/ModuleQueryPerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/ModuleQueryPerfTest.groovy index 613f760b0c..e52d3f819c 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/ModuleQueryPerfTest.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/ModuleQueryPerfTest.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -100,7 +100,7 @@ class ModuleQueryPerfTest extends CpsPerfTestBase { cpsModuleService.deleteSchemaSetsWithCascade(CPS_PERFORMANCE_TEST_DATASPACE, (i..i+100).collect {SCHEMA_SET_PREFIX + it}) } cpsModuleService.deleteSchemaSetsWithCascade(CPS_PERFORMANCE_TEST_DATASPACE, [SCHEMA_SET_PREFIX + '0']) - cpsModuleService.deleteAllUnusedYangModuleData() + cpsModuleService.deleteAllUnusedYangModuleData(CPS_PERFORMANCE_TEST_DATASPACE) } // This makes a Yang module of approximately target length in bytes by padding the description field with many '*' diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/QueryPerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/QueryPerfTest.groovy index 364127f388..8c429b3a30 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/QueryPerfTest.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/QueryPerfTest.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023 Nordix Foundation + * Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the 'License'); * you may not use this file except in compliance with the License. 
@@ -20,14 +20,14 @@ package org.onap.cps.integration.performance.cps -import org.onap.cps.api.CpsQueryService -import org.onap.cps.integration.performance.base.CpsPerfTestBase -import org.onap.cps.api.parameters.PaginationOption - import static org.onap.cps.api.parameters.FetchDescendantsOption.DIRECT_CHILDREN_ONLY import static org.onap.cps.api.parameters.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS import static org.onap.cps.api.parameters.FetchDescendantsOption.OMIT_DESCENDANTS +import org.onap.cps.api.CpsQueryService +import org.onap.cps.integration.performance.base.CpsPerfTestBase +import org.onap.cps.api.parameters.PaginationOption + class QueryPerfTest extends CpsPerfTestBase { CpsQueryService objectUnderTest @@ -41,7 +41,7 @@ class QueryPerfTest extends CpsPerfTestBase { def durationInSeconds = resourceMeter.getTotalTimeInSeconds() then: 'the expected number of nodes is returned' assert countDataNodesInTree(result) == expectedNumberOfDataNodes - and: 'all data is read within #durationLimit ms and memory used is within limit' + and: 'all data is read within #durationLimit seconds and memory used is within limit' recordAndAssertResourceUsage("Query 1 anchor ${scenario}", durationLimit, durationInSeconds, memoryLimit, resourceMeter.getTotalMemoryUsageInMB()) where: 'the following parameters are used' scenario | cpsPath || durationLimit | memoryLimit | expectedNumberOfDataNodes @@ -60,7 +60,7 @@ class QueryPerfTest extends CpsPerfTestBase { def durationInSeconds = resourceMeter.getTotalTimeInSeconds() then: 'the expected number of nodes is returned' assert countDataNodesInTree(result) == expectedNumberOfDataNodes - and: 'all data is read within #durationLimit ms and memory used is within limit' + and: 'all data is read within #durationLimit seconds and memory used is within limit' recordAndAssertResourceUsage("Query across anchors ${scenario}", durationLimit, durationInSeconds, memoryLimit, resourceMeter.getTotalMemoryUsageInMB()) where: 'the following parameters are used' scenario | cpspath || durationLimit | memoryLimit | expectedNumberOfDataNodes @@ -78,7 +78,7 @@ class QueryPerfTest extends CpsPerfTestBase { def durationInSeconds = resourceMeter.getTotalTimeInSeconds() then: 'the expected number of nodes is returned' assert countDataNodesInTree(result) == expectedNumberOfDataNodes - and: 'all data is read within #durationLimit ms and memory used is within limit' + and: 'all data is read within #durationLimit seconds and memory used is within limit' recordAndAssertResourceUsage("Query with ${scenario}", durationLimit, durationInSeconds, memoryLimit, resourceMeter.getTotalMemoryUsageInMB()) where: 'the following parameters are used' scenario | fetchDescendantsOption || durationLimit | memoryLimit | expectedNumberOfDataNodes @@ -95,7 +95,7 @@ class QueryPerfTest extends CpsPerfTestBase { def durationInSeconds = resourceMeter.getTotalTimeInSeconds() then: 'the expected number of nodes is returned' assert countDataNodesInTree(result) == expectedNumberOfDataNodes - and: 'all data is read within #durationLimit ms and memory used is within limit' + and: 'all data is read within #durationLimit seconds and memory used is within limit' recordAndAssertResourceUsage("Query ancestors with ${scenario}", durationLimit, durationInSeconds, memoryLimit, resourceMeter.getTotalMemoryUsageInMB()) where: 'the following parameters are used' scenario | fetchDescendantsOption || durationLimit | memoryLimit | expectedNumberOfDataNodes @@ -104,4 +104,21 @@ class QueryPerfTest extends CpsPerfTestBase { 'all 
descendants' | INCLUDE_ALL_DESCENDANTS || 1.34 | 400 | 1 + OPENROADM_DEVICES_PER_ANCHOR * OPENROADM_DATANODES_PER_DEVICE } + def 'Query data leaf with #scenario.'() { + when: 'query data leaf is called' + resourceMeter.start() + def result = objectUnderTest.queryDataLeaf(CPS_PERFORMANCE_TEST_DATASPACE, 'openroadm1', cpsPath, String) + resourceMeter.stop() + def durationInSeconds = resourceMeter.getTotalTimeInSeconds() + then: 'the expected number of results is returned' + assert result.size() == expectedNumberOfValues + and: 'all data is read within #durationLimit seconds and memory used is within limit' + recordAndAssertResourceUsage("Query data leaf ${scenario}", durationLimit, durationInSeconds, memoryLimit, resourceMeter.getTotalMemoryUsageInMB()) + where: 'the following parameters are used' + scenario | cpsPath || durationLimit | memoryLimit | expectedNumberOfValues + 'unique leaf value' | '/openroadm-devices/openroadm-device/@device-id' || 0.05 | 0.1 | OPENROADM_DEVICES_PER_ANCHOR + 'common leaf value' | '/openroadm-devices/openroadm-device/@ne-state' || 0.02 | 0.1 | 1 + 'non-existing data leaf' | '/openroadm-devices/openroadm-device/@non-existing' || 0.01 | 0.1 | 0 + } + } diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/AlternateIdPerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/AlternateIdPerfTest.groovy new file mode 100644 index 0000000000..b9d57cf14d --- /dev/null +++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/AlternateIdPerfTest.groovy @@ -0,0 +1,47 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.integration.performance.ncmp + +import org.onap.cps.integration.ResourceMeter +import org.onap.cps.integration.base.CpsIntegrationSpecBase + +/** + * This test does not depend on common performance test data. Hence it just extends the integration spec base. + */ +class AlternateIdPerfTest extends CpsIntegrationSpecBase { + + def resourceMeter = new ResourceMeter() + + def 'Alternate Id Lookup Performance.'() { + given: 'register 1,000 cm handles (with alternative ids)' + registerSequenceOfCmHandlesWithManyModuleReferencesButDoNotWaitForReady(DMI1_URL, 'tagA', 1000, 1) + when: 'perform a 1,000 lookups by alternate id' + resourceMeter.start() + (1..1000).each { + networkCmProxyInventoryFacade.getNcmpServiceCmHandle("alt=${it}") + } + resourceMeter.stop() + then: 'record the result. 
Not asserted, just recorded; see https://lf-onap.atlassian.net/browse/CPS-2605' + println "*** CPS-2605 Execution time: ${resourceMeter.totalTimeInSeconds} seconds" + cleanup: 'deregister test cm handles' + deregisterSequenceOfCmHandles(DMI1_URL, 1000, 1) + } +} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/CmHandleQueryByAlternateIdPerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/CmHandleQueryByAlternateIdPerfTest.groovy deleted file mode 100644 index cd2fc6ed7e..0000000000 --- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/CmHandleQueryByAlternateIdPerfTest.groovy +++ /dev/null @@ -1,55 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.integration.performance.ncmp - -import org.onap.cps.integration.ResourceMeter -import org.onap.cps.integration.performance.base.NcmpPerfTestBase -import org.onap.cps.ncmp.impl.utils.AlternateIdMatcher - -import java.util.stream.Collectors - -import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DATASPACE_NAME -import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DMI_REGISTRY_ANCHOR -import static org.onap.cps.api.parameters.FetchDescendantsOption.OMIT_DESCENDANTS - -class CmHandleQueryByAlternateIdPerfTest extends NcmpPerfTestBase { - - AlternateIdMatcher objectUnderTest - ResourceMeter resourceMeter = new ResourceMeter() - - def setup() { objectUnderTest = alternateIdMatcher } - - def 'Query cm handle by longest match alternate id'() { - when: 'an alternate id as cps path query' - resourceMeter.start() - def cpsPath = "/a/b/c/d-5/e/f/g/h/i" - def dataNodes = objectUnderTest.getYangModelCmHandleByLongestMatchingAlternateId(cpsPath, '/') - and: 'the ids of the result are extracted and converted to xpath' - def cpsXpaths = dataNodes.stream().map(dataNode -> "/dmi-registry/cm-handles[@id='${dataNode.leaves.id}']".toString() ).collect(Collectors.toSet()) - and: 'a single get is executed to get all the parent objects and their descendants' - cpsDataService.getDataNodesForMultipleXpaths(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, cpsXpaths, OMIT_DESCENDANTS) - resourceMeter.stop() - def durationInSeconds = resourceMeter.getTotalTimeInSeconds() - print 'Total time in seconds to query ch handle by alternate id: ' + durationInSeconds - then: 'the required operations are performed within required time and memory limit' - recordAndAssertResourceUsage('Look up cm-handle by longest match alternate-id', 1, durationInSeconds, 300, resourceMeter.getTotalMemoryUsageInMB()) - } -} diff --git
a/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/CmHandleQueryPerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/CmHandleQueryPerfTest.groovy index 5389732181..dbf7e71710 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/CmHandleQueryPerfTest.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/CmHandleQueryPerfTest.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation + * Copyright (C) 2023-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the 'License'); * you may not use this file except in compliance with the License. @@ -46,7 +46,7 @@ class CmHandleQueryPerfTest extends NcmpPerfTestBase { cpsDataService.getDataNodes(NCMP_PERFORMANCE_TEST_DATASPACE, REGISTRY_ANCHOR, '/dmi-registry/cm-handles[@id="cm-' + it + '"]', OMIT_DESCENDANTS) objectUnderTest.queryDataNodes(NCMP_PERFORMANCE_TEST_DATASPACE, REGISTRY_ANCHOR, - '/dmi-registry/cm-handles[@alternate-id="alt-' + it + '"]', OMIT_DESCENDANTS) + '/dmi-registry/cm-handles[@alternate-id="alt=' + it + '"]', OMIT_DESCENDANTS) } resourceMeter.stop() then: 'resource usage is as expected' @@ -100,7 +100,7 @@ class CmHandleQueryPerfTest extends NcmpPerfTestBase { resourceMeter.start() (1..100).each { count += cpsQueryService.queryDataNodes(NCMP_PERFORMANCE_TEST_DATASPACE, REGISTRY_ANCHOR, - '/dmi-registry/cm-handles[@alternate-id="alt-' + it + '"]', OMIT_DESCENDANTS).size() + '/dmi-registry/cm-handles[@alternate-id="alt=' + it + '"]', OMIT_DESCENDANTS).size() } resourceMeter.stop() then: @@ -116,7 +116,7 @@ class CmHandleQueryPerfTest extends NcmpPerfTestBase { def 'A batch of CM-handles is looked up by alternate-id.'() { given: 'a CPS Path Query to look up 100 alternate-ids in a single operation' - def cpsPathQuery = '/dmi-registry/cm-handles[' + (1..100).collect { "@alternate-id='alt-${it}'" }.join(' or ') + ']' + def cpsPathQuery = '/dmi-registry/cm-handles[' + (1..100).collect { "@alternate-id='alt=${it}'" }.join(' or ') + ']' when: 'CM-handles are looked up by alternate-ids in a single query' resourceMeter.start() def count = cpsQueryService.queryDataNodes(NCMP_PERFORMANCE_TEST_DATASPACE, REGISTRY_ANCHOR, cpsPathQuery, OMIT_DESCENDANTS).size() diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/WriteDataJobPerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/WriteDataJobPerfTest.groovy new file mode 100644 index 0000000000..de7ffabe5e --- /dev/null +++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/ncmp/WriteDataJobPerfTest.groovy @@ -0,0 +1,112 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.integration.performance.ncmp + +import org.onap.cps.integration.ResourceMeter +import org.onap.cps.integration.base.CpsIntegrationSpecBase +import org.onap.cps.ncmp.api.datajobs.DataJobService +import org.onap.cps.ncmp.api.datajobs.models.DataJobMetadata +import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest +import org.onap.cps.ncmp.api.datajobs.models.WriteOperation +import org.springframework.beans.factory.annotation.Autowired +import spock.lang.Ignore +import java.util.concurrent.CompletableFuture +import java.util.concurrent.Executors + +/** + * This test does not depend on common performance test data. Hence it just extends the integration spec base. + */ +class WriteDataJobPerfTest extends CpsIntegrationSpecBase { + + @Autowired + DataJobService dataJobService + + def populateDataJobWriteRequests(int numberOfWriteOperations) { + def writeOperations = [] + for (int i = 1; i <= numberOfWriteOperations; i++) { + def basePath = "/SubNetwork=Europe/SubNetwork=Ireland/MeContext=MyRadioNode${i}/ManagedElement=MyManagedElement${i}" + writeOperations.add(new WriteOperation("${basePath}/SomeChild=child-1", 'operation1', '1', null)) + writeOperations.add(new WriteOperation("${basePath}/SomeChild=child-2", 'operation2', '2', null)) + writeOperations.add(new WriteOperation(basePath, 'operation3', '3', null)) + } + return new DataJobWriteRequest(writeOperations) + } + + @Ignore // CPS-2691 + def 'Performance test for writeDataJob method'() { + given: 'register 10_000 cm handles (with alternate ids)' + registerTestCmHandles(10_000) + def dataJobWriteRequest = populateDataJobWriteRequests(10_000) + when: 'sending a write job to NCMP with dynamically generated write operations' + def executionResult = executeWriteJob('d1', dataJobWriteRequest) + then: 'record the result. 
Not asserted, just recorded; see https://lf-onap.atlassian.net/browse/CPS-2691' + println "*** CPS-2691 Execution time: ${executionResult.executionTime} seconds | Memory usage: ${executionResult.memoryUsage} MB" + cleanup: 'deregister test cm handles' + deregisterTestCmHandles(10_000) + } + + @Ignore // CPS-2692 + def 'Performance test for writeDataJob method with 10 parallel requests'() { + given: 'register 1_000 cm handles (with alternate ids)' + registerTestCmHandles(1_000) + when: 'sending 10 parallel write jobs to NCMP' + def executionResults = executeParallelWriteJobs(10, 1_000) + then: 'record execution times' + executionResults.eachWithIndex { result, index -> + logExecutionResults("CPS-2692 Job-${index + 1}", result) + } + cleanup: 'deregister test cm handles' + deregisterSequenceOfCmHandles(DMI1_URL, 1_000, 1) + } + + def registerTestCmHandles(numberOfCmHandles) { + registerSequenceOfCmHandlesWithManyModuleReferencesButDoNotWaitForReady( + DMI1_URL, "tagA", numberOfCmHandles, 1, ModuleNameStrategy.UNIQUE, + { "/SubNetwork=Europe/SubNetwork=Ireland/MeContext=MyRadioNode${it}/ManagedElement=MyManagedElement${it}" } + ) + } + + def executeParallelWriteJobs(numberOfJobs, numberOfWriteOperations) { + def executorService = Executors.newFixedThreadPool(numberOfJobs) + def futures = (0..<numberOfJobs).collect { jobId -> + CompletableFuture.supplyAsync({ -> executeWriteJob(jobId, populateDataJobWriteRequests(numberOfWriteOperations)) }, executorService) + } + def executionResults = futures.collect { it.join() } + executorService.shutdown() + return executionResults + } + + def executeWriteJob(jobId, dataJobWriteRequest) { + def localMeter = new ResourceMeter() + localMeter.start() + dataJobService.writeDataJob('', '', new DataJobMetadata("job-${jobId}", '', ''), dataJobWriteRequest) + localMeter.stop() + ['executionTime': localMeter.totalTimeInSeconds, 'memoryUsage': localMeter.totalMemoryUsageInMB] + } + + def logExecutionResults(jobId, result) { + println "*** ${jobId} Execution time: ${result.executionTime} seconds | Memory usage: ${result.memoryUsage} MB" + } + + def deregisterTestCmHandles(numberOfCmHandles) { + deregisterSequenceOfCmHandles(DMI1_URL, numberOfCmHandles, 1) + } +} diff --git a/integration-test/src/test/java/org/onap/cps/integration/KafkaTestContainer.java b/integration-test/src/test/java/org/onap/cps/integration/KafkaTestContainer.java index ff4aec4175..60c1637c5a 100644 --- a/integration-test/src/test/java/org/onap/cps/integration/KafkaTestContainer.java +++ b/integration-test/src/test/java/org/onap/cps/integration/KafkaTestContainer.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation. + * Copyright (C) 2024-2025 Nordix Foundation. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,8 +25,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.common.serialization.StringDeserializer; -import org.testcontainers.containers.KafkaContainer; -import org.testcontainers.utility.DockerImageName; +import org.testcontainers.kafka.ConfluentKafkaContainer; /** * The Apache Kafka test container wrapper.
@@ -35,14 +34,14 @@ import org.testcontainers.utility.DockerImageName; * Avoid unnecessary resource and time consumption. */ @Slf4j -public class KafkaTestContainer extends KafkaContainer { +public class KafkaTestContainer extends ConfluentKafkaContainer { - private static final String IMAGE_NAME_AND_VERSION = "registry.nordix.org/onaptest/confluentinc/cp-kafka:6.2.1"; + private static final String IMAGE_NAME_AND_VERSION = "confluentinc/cp-kafka:7.8.0"; private static volatile KafkaTestContainer kafkaTestContainer; private KafkaTestContainer() { - super(DockerImageName.parse(IMAGE_NAME_AND_VERSION).asCompatibleSubstituteFor("confluentinc/cp-kafka")); + super(IMAGE_NAME_AND_VERSION); } /** diff --git a/integration-test/src/test/resources/application-module-sync-delayed.yml b/integration-test/src/test/resources/application-module-sync-delayed.yml index 7b9c6aea4f..27c99e93b2 100644 --- a/integration-test/src/test/resources/application-module-sync-delayed.yml +++ b/integration-test/src/test/resources/application-module-sync-delayed.yml @@ -1,3 +1,4 @@ + # ============LICENSE_START======================================================= # Copyright (C) 2024 Nordix Foundation. # ================================================================================ @@ -14,7 +15,6 @@ # # SPDX-License-Identifier: Apache-2.0 # ============LICENSE_END========================================================= - test: ncmp: timers: diff --git a/integration-test/src/test/resources/application.yml b/integration-test/src/test/resources/application.yml index 30598dfb90..104034a92e 100644 --- a/integration-test/src/test/resources/application.yml +++ b/integration-test/src/test/resources/application.yml @@ -1,5 +1,5 @@ # ============LICENSE_START======================================================= -# Copyright (C) 2023-2024 Nordix Foundation. +# Copyright (C) 2023-2025 OpenInfra Foundation Europe. All rights reserved. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -102,6 +102,7 @@ app: cm-subscription-dmi-out: ${CM_SUBSCRIPTION_DMI_OUT_TOPIC:dmi-ncmp-cm-avc-subscription} cm-subscription-ncmp-out: ${CM_SUBSCRIPTION_NCMP_OUT_TOPIC:subscription-response} cm-events-topic: ${NCMP_CM_EVENTS_TOPIC:cm-events} + inventory-events-topic: ncmp-inventory-events lcm: events: topic: ${LCM_EVENTS_TOPIC:ncmp-events} @@ -134,13 +135,6 @@ springdoc: - name: cps-ncmp-inventory url: /api-docs/cps-ncmp/openapi-inventory.yaml -security: - # comma-separated uri patterns which do not require authorization - permit-uri: /actuator/**,/swagger-ui.html,/swagger-ui/**,/swagger-resources/**,/api-docs/**,/v3/api-docs/** - auth: - username: cps - password: cpsr0cks! 
- # Actuator management: endpoints: @@ -179,6 +173,7 @@ ncmp: timers: advised-modules-sync: + initial-delay-ms: 0 sleep-time-ms: 1000000 cm-handle-data-sync: sleep-time-ms: 30000 @@ -189,10 +184,6 @@ ncmp: trust-level: dmi-availability-watchdog-ms: 30000 - modules-sync-watchdog: - async-executor: - parallelism-level: 2 - model-loader: maximum-attempt-count: 20 diff --git a/integration-test/src/test/resources/hibernate.cfg.xml b/integration-test/src/test/resources/hibernate.cfg.xml deleted file mode 100644 index 8d5139b605..0000000000 --- a/integration-test/src/test/resources/hibernate.cfg.xml +++ /dev/null @@ -1,16 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<!DOCTYPE hibernate-configuration PUBLIC - "-//Hibernate/Hibernate Configuration DTD 3.0//EN" - "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd"> - -<hibernate-configuration> - <session-factory> - <property name="hibernate.connection.driver_class">org.postgresql.Driver</property> - <property name="hibernate.connection.url">${DB_URL}</property> - <property name="hibernate.connection.username">${DB_USERNAME}</property> - <property name="hibernate.connection.password">${DB_PASSWORD}</property> - <property name="hibernate.dialect">org.hibernate.dialect.PostgreSQLDialect</property> - <property name="show_sql">true</property> - <property name="hibernate.hbm2ddl.auto">none</property> - </session-factory> -</hibernate-configuration>
\ No newline at end of file diff --git a/jacoco-report/pom.xml b/jacoco-report/pom.xml index be1d42788d..d1a18ff453 100644 --- a/jacoco-report/pom.xml +++ b/jacoco-report/pom.xml @@ -5,7 +5,7 @@ <parent> <groupId>org.onap.cps</groupId> <artifactId>cps-parent</artifactId> - <version>3.6.0-SNAPSHOT</version> + <version>3.6.3-SNAPSHOT</version> <relativePath>../cps-parent/pom.xml</relativePath> </parent> <modelVersion>4.0.0</modelVersion> diff --git a/k6-tests/install-deps.sh b/k6-tests/install-deps.sh index bb5deb93dd..393a255ed4 100755 --- a/k6-tests/install-deps.sh +++ b/k6-tests/install-deps.sh @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright 2024 Nordix Foundation. +# Copyright 2024-2025 OpenInfra Foundation Europe. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -28,7 +28,7 @@ export PATH="$(pwd)/bin:$PATH" # Download docker-compose. if [ ! -x bin/docker-compose ]; then echo " Downloading docker-compose" - curl -s -L https://github.com/docker/compose/releases/download/v2.29.2/docker-compose-linux-x86_64 > bin/docker-compose + curl -s -L https://github.com/docker/compose/releases/download/v2.35.1/docker-compose-linux-x86_64 > bin/docker-compose chmod +x bin/docker-compose else echo " docker-compose already installed" diff --git a/k6-tests/make-logs.sh b/k6-tests/make-logs.sh new file mode 100644 index 0000000000..60976247e5 --- /dev/null +++ b/k6-tests/make-logs.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# +# Copyright 2025 Nordix Foundation. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +SERVICE_NAME="cps-and-ncmp" +TIMESTAMP=$(date +"%Y%m%d%H%M%S") +LOG_DIR="${WORKSPACE:-.}/logs" +TEMP_DIR="$LOG_DIR/temp_$TIMESTAMP" +ZIP_FILE="$LOG_DIR/${SERVICE_NAME}_logs_$TIMESTAMP.zip" + +mkdir -p "$LOG_DIR" +mkdir -p "$TEMP_DIR" + +# Store logs for cps-and-ncmp containers to temp directory +CONTAINER_IDS=$(docker ps --filter "name=$SERVICE_NAME" --format "{{.ID}}") +for CONTAINER_ID in $CONTAINER_IDS; do + CONTAINER_NAME=$(docker inspect --format="{{.Name}}" "$CONTAINER_ID" | sed 's/\///g') + LOG_FILE="$TEMP_DIR/${CONTAINER_NAME}_logs_$TIMESTAMP.log" + docker logs "$CONTAINER_ID" > "$LOG_FILE" +done + +# Zip the logs +zip -r "$ZIP_FILE" "$TEMP_DIR" +echo "Logs saved to $ZIP_FILE inside workspace" + +# Clean temp files +rm -r "$TEMP_DIR" + +# Delete logs older than 2 weeks +find "$LOG_DIR" -name "${SERVICE_NAME}_logs_*.zip" -mtime +14 -delete diff --git a/k6-tests/ncmp/common/cmhandle-crud.js b/k6-tests/ncmp/common/cmhandle-crud.js index 285028f13c..3b6c3ff7b7 100644 --- a/k6-tests/ncmp/common/cmhandle-crud.js +++ b/k6-tests/ncmp/common/cmhandle-crud.js @@ -51,19 +51,30 @@ export function waitForAllCmHandlesToBeReady() { function createCmHandlePayload(cmHandleIds) { return { "dmiPlugin": DMI_PLUGIN_URL, - "createdCmHandles": cmHandleIds.map((cmHandleId, index) => ({ - "cmHandle": cmHandleId, - "alternateId": cmHandleId.replace('ch-', 'Subnetwork=Europe,ManagedElement='), - "moduleSetTag": MODULE_SET_TAGS[index % MODULE_SET_TAGS.length], - "cmHandleProperties": { - "id": "123" - }, - "publicCmHandleProperties": { - "Color": "yellow", - "Size": "small", - "Shape": "cube" - } - })), + "createdCmHandles": cmHandleIds.map((cmHandleId, index) => { + // Pick a random networkSegment within range 1-10 + let networkSegmentId = Math.floor(Math.random() * 10) + 1; // Random between 1-10 + let moduleTag = MODULE_SET_TAGS[index % MODULE_SET_TAGS.length]; + + return { + "cmHandle": cmHandleId, + "alternateId": cmHandleId.replace('ch-', 'Region=NorthAmerica,Segment='), + "moduleSetTag": moduleTag, + "cmHandleProperties": { + "segmentId": index + 1, + "networkSegment": `Region=NorthAmerica,Segment=${networkSegmentId}`, // Random within range 1-10 + "deviceIdentifier": `Element=RadioBaseStation_5G_${index + 1000}`, // Unique per cmHandle + "hardwareVersion": `HW-${moduleTag}`, // Derived from moduleSetTag + "softwareVersion": `Firmware_${moduleTag}`, // Derived from moduleSetTag + "syncStatus": "ACTIVE", + "nodeCategory": "VirtualNode" + }, + "publicCmHandleProperties": { + "systemId": index + 1, + "systemName": "ncmp" + } + }; + }), }; } diff --git a/k6-tests/ncmp/common/passthrough-crud.js b/k6-tests/ncmp/common/passthrough-crud.js index 251ddf8890..c6732571ba 100644 --- a/k6-tests/ncmp/common/passthrough-crud.js +++ b/k6-tests/ncmp/common/passthrough-crud.js @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,18 +18,17 @@ * ============LICENSE_END========================================================= */ -import { randomIntBetween } from 'https://jslib.k6.io/k6-utils/1.2.0/index.js'; import { performPostRequest, performGetRequest, NCMP_BASE_URL, LEGACY_BATCH_TOPIC_NAME, - TOTAL_CM_HANDLES, + getRandomCmHandleReference, } from './utils.js'; export function passthroughRead(useAlternateId) { const cmHandleReference = getRandomCmHandleReference(useAlternateId); - const resourceIdentifier = 'NRCellDU/attributes/cellLocalId'; + const resourceIdentifier = 'ManagedElement=NRNode1/GNBDUFunction=1'; const datastoreName = 'ncmp-datastore:passthrough-operational'; const includeDescendants = true; const url = generatePassthroughUrl(cmHandleReference, datastoreName, resourceIdentifier, includeDescendants); @@ -38,7 +37,7 @@ export function passthroughRead(useAlternateId) { export function passthroughWrite(useAlternateId) { const cmHandleReference = getRandomCmHandleReference(useAlternateId); - const resourceIdentifier = 'NRCellDU/attributes/cellLocalId'; + const resourceIdentifier = 'ManagedElement=NRNode1/GNBDUFunction=1'; const datastoreName = 'ncmp-datastore:passthrough-running'; const includeDescendants = false; const url = generatePassthroughUrl(cmHandleReference, datastoreName, resourceIdentifier, includeDescendants); @@ -54,7 +53,7 @@ export function legacyBatchRead(cmHandleIds) { const payload = JSON.stringify({ "operations": [ { - "resourceIdentifier": "NRCellDU/attributes/cellLocalId", + "resourceIdentifier": "ManagedElement=NRNode1/GNBDUFunction=1", "targetIds": cmHandleIds, "datastore": "ncmp-datastore:passthrough-operational", "options": "(fields=NRCellDU/attributes/cellLocalId)", @@ -66,11 +65,6 @@ export function legacyBatchRead(cmHandleIds) { return performPostRequest(url, payload, 'batchRead'); } -function getRandomCmHandleReference(useAlternateId) { - const prefix = useAlternateId ? 'Subnetwork=Europe,ManagedElement=' : 'ch-'; - return `${prefix}${randomIntBetween(1, TOTAL_CM_HANDLES)}`; -} - function generatePassthroughUrl(cmHandleReference, datastoreName, resourceIdentifier, includeDescendants) { const descendantsParam = includeDescendants ? `&include-descendants=${includeDescendants}` : ''; return `${NCMP_BASE_URL}/ncmp/v1/ch/${cmHandleReference}/data/ds/${datastoreName}?resourceIdentifier=${resourceIdentifier}${descendantsParam}`; diff --git a/k6-tests/once-off-test/kafka/produce-avc-event.js b/k6-tests/ncmp/common/produce-avc-event.js index db222f6a4a..220265c6e7 100644 --- a/k6-tests/once-off-test/kafka/produce-avc-event.js +++ b/k6-tests/ncmp/common/produce-avc-event.js @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,12 +18,14 @@ * ============LICENSE_END========================================================= */ -import { crypto } from 'k6/experimental/webcrypto'; -import { check } from 'k6'; -import { Writer, SchemaRegistry, SCHEMA_TYPE_STRING } from 'k6/x/kafka'; +import {crypto} from 'k6/experimental/webcrypto'; +import {check} from 'k6'; +import {Writer, SchemaRegistry, SCHEMA_TYPE_STRING} from 'k6/x/kafka'; const testEventPayload = JSON.stringify(JSON.parse(open('../../resources/sampleAvcInputEvent.json'))); const schemaRegistry = new SchemaRegistry(); +let messagesSent = 0; + const kafkaProducer = new Writer({ brokers: ['localhost:9092'], topic: 'dmi-cm-events', @@ -33,26 +35,8 @@ const kafkaProducer = new Writer({ requestTimeout: 30000 }); -const TOTAL_MESSAGES = 100000; -const VIRTUAL_USERS = 1000; - -export const options = { - setupTimeout: '1m', - teardownTimeout: '1m', - scenarios: { - produceKafkaMessages: { - executor: 'shared-iterations', - exec: 'sendKafkaMessages', - vus: VIRTUAL_USERS, - iterations: TOTAL_MESSAGES, - maxDuration: '10m', - } - } -}; - const getRandomNetworkElement = () => { - const networkElementIds = Array.from({ length: 10 }, (_, i) => `neType-${i + 1}`); - return networkElementIds[Math.floor(Math.random() * networkElementIds.length)]; + return `neType-${Math.floor(Math.random() * 10) + 1}`; }; function getCloudEventHeaders() { @@ -69,6 +53,7 @@ function getCloudEventHeaders() { } export function sendKafkaMessages() { + const cloudEventHeaders = getCloudEventHeaders(); const networkElementId = getRandomNetworkElement(); @@ -85,8 +70,8 @@ export function sendKafkaMessages() { }; try { - kafkaProducer.produce({ messages: [avcCloudEvent] }); - + kafkaProducer.produce({messages: [avcCloudEvent]}); + messagesSent++; const isMessageSent = check(kafkaProducer, { 'Message sent successfully': (producer) => producer != null, }); @@ -94,9 +79,8 @@ export function sendKafkaMessages() { if (!isMessageSent) { console.error('Failed to send message:', avcCloudEvent); } - } catch (error) { - console.error('Error during message production:', error, avcCloudEvent); + console.error(`Error during message production: ${error.message}`, avcCloudEvent); } } diff --git a/k6-tests/ncmp/common/search-base.js b/k6-tests/ncmp/common/search-base.js index af2caf71ec..91369e818f 100644 --- a/k6-tests/ncmp/common/search-base.js +++ b/k6-tests/ncmp/common/search-base.js @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -31,7 +31,7 @@ export function executeCmHandleIdSearch(scenario) { function executeSearchRequest(searchType, scenario) { const searchParameters = SEARCH_PARAMETERS_PER_SCENARIO[scenario]; const payload = JSON.stringify(searchParameters); - const url = `${NCMP_BASE_URL}/ncmp/v1/ch/${searchType}`; + const url = `${NCMP_BASE_URL}/ncmp/v1/ch/${searchType}?outputAlternateId=true`; return performPostRequest(url, payload, searchType); } @@ -51,7 +51,7 @@ const SEARCH_PARAMETERS_PER_SCENARIO = { "cmHandleQueryParameters": [ { "conditionName": "hasAllProperties", - "conditionParameters": [{"Color": "yellow"}] + "conditionParameters": [{"systemName": "ncmp"}] } ] }, diff --git a/k6-tests/ncmp/common/utils.js b/k6-tests/ncmp/common/utils.js index 66d2dfe448..ea77aae176 100644 --- a/k6-tests/ncmp/common/utils.js +++ b/k6-tests/ncmp/common/utils.js @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,6 +18,7 @@ * ============LICENSE_END========================================================= */ +import { randomIntBetween } from 'https://jslib.k6.io/k6-utils/1.2.0/index.js'; import http from 'k6/http'; export const testConfig = JSON.parse(open(`../config/${__ENV.TEST_PROFILE}.json`)); @@ -26,8 +27,8 @@ export const NCMP_BASE_URL = testConfig.hosts.ncmpBaseUrl; export const DMI_PLUGIN_URL = testConfig.hosts.dmiStubUrl; export const CONTAINER_UP_TIME_IN_SECONDS = testConfig.hosts.containerUpTimeInSeconds; export const LEGACY_BATCH_TOPIC_NAME = 'legacy_batch_topic'; -export const TOTAL_CM_HANDLES = 20000; -export const REGISTRATION_BATCH_SIZE = 100; +export const TOTAL_CM_HANDLES = 50000; +export const REGISTRATION_BATCH_SIZE = 2000; export const READ_DATA_FOR_CM_HANDLE_DELAY_MS = 300; // must have same value as in docker-compose.yml export const WRITE_DATA_FOR_CM_HANDLE_DELAY_MS = 670; // must have same value as in docker-compose.yml export const CONTENT_TYPE_JSON_PARAM = {'Content-Type': 'application/json'}; @@ -48,6 +49,32 @@ export function makeBatchOfCmHandleIds(batchSize, batchNumber) { } /** + * Generates an unordered batch of Alternate IDs. + * The batch size is determined by `LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE`, + * and the IDs are generated within the range of `TOTAL_CM_HANDLES`. + * + * @returns {string[]} Array of Alternate IDs, for example, + * ['Region=NorthAmerica,Segment=8', 'Region=NorthAmerica,Segment=2' ... 'Region=NorthAmerica,Segment=32432'] + */ +export function makeRandomBatchOfAlternateIds() { + const alternateIds = new Set(); + while (alternateIds.size < LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE) { + alternateIds.add(getRandomCmHandleReference(true)); + } + return Array.from(alternateIds) +} + +/** + * Generates a random CM Handle reference based on the provided flag. + * @param useAlternateId + * @returns {string} CM Handle reference representing a CM handle ID or an alternate ID. + */ +export function getRandomCmHandleReference(useAlternateId) { + const prefix = useAlternateId ? 'Region=NorthAmerica,Segment=' : 'ch-'; + return `${prefix}${randomIntBetween(1, TOTAL_CM_HANDLES)}`; +} + +/** * Helper function to perform POST requests with JSON payload and content type. 
* @param {string} url - The URL to send the POST request to. * @param {Object} payload - The JSON payload to send in the POST request. @@ -100,9 +127,7 @@ export function makeCustomSummaryReport(testResults, scenarioConfig) { makeSummaryCsvLine('4c', 'CM-handle search with Property filter', 'milliseconds', 'cm_search_property_duration', 4500, testResults, scenarioConfig), makeSummaryCsvLine('4d', 'CM-handle search with Cps Path filter', 'milliseconds', 'cm_search_cpspath_duration', 4500, testResults, scenarioConfig), makeSummaryCsvLine('4e', 'CM-handle search with Trust Level filter', 'milliseconds', 'cm_search_trustlevel_duration', 7000, testResults, scenarioConfig), - makeSummaryCsvLine('5a', 'NCMP overhead for Synchronous single CM-handle pass-through read', 'milliseconds', 'ncmp_overhead_passthrough_read', 20, testResults, scenarioConfig), makeSummaryCsvLine('5b', 'NCMP overhead for Synchronous single CM-handle pass-through read with alternate id', 'milliseconds', 'ncmp_overhead_passthrough_read_alt_id', 40, testResults, scenarioConfig), - makeSummaryCsvLine('6a', 'NCMP overhead for Synchronous single CM-handle pass-through write', 'milliseconds', 'ncmp_overhead_passthrough_write', 20, testResults, scenarioConfig), makeSummaryCsvLine('6b', 'NCMP overhead for Synchronous single CM-handle pass-through write with alternate id', 'milliseconds', 'ncmp_overhead_passthrough_write_alt_id', 40, testResults, scenarioConfig), makeSummaryCsvLine('7', 'Legacy batch read operation', 'events/second', 'legacy_batch_read_cmhandles_per_second', 300, testResults, scenarioConfig), ]; diff --git a/k6-tests/ncmp/config/endurance.json b/k6-tests/ncmp/config/endurance.json index d4893a45cc..8f65b81bb8 100644 --- a/k6-tests/ncmp/config/endurance.json +++ b/k6-tests/ncmp/config/endurance.json @@ -6,28 +6,16 @@ "containerUpTimeInSeconds": 420 }, "scenarios": { - "passthrough_read_scenario": { - "executor": "constant-vus", - "exec": "passthroughReadScenario", - "vus": 2, - "duration": "2h" - }, "passthrough_read_alt_id_scenario": { "executor": "constant-vus", "exec": "passthroughReadAltIdScenario", - "vus": 2, - "duration": "2h" - }, - "passthrough_write_scenario": { - "executor": "constant-vus", - "exec": "passthroughWriteScenario", - "vus": 2, + "vus": 4, "duration": "2h" }, "passthrough_write_alt_id_scenario": { "executor": "constant-vus", "exec": "passthroughWriteAltIdScenario", - "vus": 2, + "vus": 4, "duration": "2h" }, "cm_handle_id_search_nofilter_scenario": { diff --git a/k6-tests/ncmp/config/kpi.json b/k6-tests/ncmp/config/kpi.json index eed041de85..aa93091b67 100644 --- a/k6-tests/ncmp/config/kpi.json +++ b/k6-tests/ncmp/config/kpi.json @@ -6,44 +6,24 @@ "containerUpTimeInSeconds": 300 }, "scenarios": { - "passthrough_read_scenario": { - "executor": "constant-arrival-rate", - "exec": "passthroughReadScenario", - "rate": 5, - "timeUnit": "1s", - "duration": "15m", - "preAllocatedVUs": 5, - "startTime": "0ms" - }, "passthrough_read_alt_id_scenario": { "executor": "constant-arrival-rate", "exec": "passthroughReadAltIdScenario", - "rate": 5, + "rate": 25, "timeUnit": "1s", "duration": "15m", - "preAllocatedVUs": 5, + "preAllocatedVUs": 10, "startTime": "200ms" }, - - "passthrough_write_scenario": { - "executor": "constant-arrival-rate", - "exec": "passthroughWriteScenario", - "rate": 5, - "timeUnit": "1s", - "duration": "15m", - "preAllocatedVUs": 5, - "startTime": "400ms" - }, "passthrough_write_alt_id_scenario": { "executor": "constant-arrival-rate", "exec": "passthroughWriteAltIdScenario", - "rate": 5, 
+ "rate": 13, "timeUnit": "1s", "duration": "15m", - "preAllocatedVUs": 5, + "preAllocatedVUs": 10, "startTime": "600ms" }, - "cm_handle_id_search_nofilter_scenario": { "executor": "constant-arrival-rate", "exec": "cmHandleIdSearchNoFilterScenario", @@ -89,7 +69,6 @@ "preAllocatedVUs": 1, "startTime": "1600ms" }, - "cm_handle_search_nofilter_scenario": { "executor": "constant-arrival-rate", "exec": "cmHandleSearchNoFilterScenario", @@ -135,7 +114,6 @@ "preAllocatedVUs": 1, "startTime": "12s" }, - "legacy_batch_produce_scenario": { "executor": "shared-iterations", "exec": "legacyBatchProduceScenario", @@ -147,14 +125,22 @@ "exec": "legacyBatchConsumeScenario", "vus": 1, "iterations": 1 + }, + "produceKafkaMessages": { + "executor": "constant-arrival-rate", + "rate": 500, + "timeUnit": "1s", + "duration": "15m", + "preAllocatedVUs": 10, + "maxVUs": 10, + "exec": "produceAvcEventsScenario", + "gracefulStop": "10s" } }, "thresholds": { "http_req_failed": ["rate == 0"], "cmhandles_created_per_second": ["avg >= 22"], "cmhandles_deleted_per_second": ["avg >= 22"], - "ncmp_overhead_passthrough_read": ["avg <= 40"], - "ncmp_overhead_passthrough_write": ["avg <= 40"], "ncmp_overhead_passthrough_read_alt_id": ["avg <= 40"], "ncmp_overhead_passthrough_write_alt_id": ["avg <= 40"], "id_search_nofilter_duration": ["avg <= 2000"], diff --git a/k6-tests/ncmp/ncmp-test-runner.js b/k6-tests/ncmp/ncmp-test-runner.js index 9ab326c44c..1c53139991 100644 --- a/k6-tests/ncmp/ncmp-test-runner.js +++ b/k6-tests/ncmp/ncmp-test-runner.js @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation + * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -23,19 +23,18 @@ import { Trend } from 'k6/metrics'; import { Reader } from 'k6/x/kafka'; import { TOTAL_CM_HANDLES, READ_DATA_FOR_CM_HANDLE_DELAY_MS, WRITE_DATA_FOR_CM_HANDLE_DELAY_MS, - makeCustomSummaryReport, makeBatchOfCmHandleIds, LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE, - REGISTRATION_BATCH_SIZE, LEGACY_BATCH_THROUGHPUT_TEST_NUMBER_OF_REQUESTS, KAFKA_BOOTSTRAP_SERVERS, - LEGACY_BATCH_TOPIC_NAME, CONTAINER_UP_TIME_IN_SECONDS, testConfig + makeCustomSummaryReport, makeBatchOfCmHandleIds, makeRandomBatchOfAlternateIds, + LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE, REGISTRATION_BATCH_SIZE, LEGACY_BATCH_THROUGHPUT_TEST_NUMBER_OF_REQUESTS, + KAFKA_BOOTSTRAP_SERVERS, LEGACY_BATCH_TOPIC_NAME, CONTAINER_UP_TIME_IN_SECONDS, testConfig } from './common/utils.js'; import { createCmHandles, deleteCmHandles, waitForAllCmHandlesToBeReady } from './common/cmhandle-crud.js'; import { executeCmHandleSearch, executeCmHandleIdSearch } from './common/search-base.js'; import { passthroughRead, passthroughWrite, legacyBatchRead } from './common/passthrough-crud.js'; +import { sendKafkaMessages } from './common/produce-avc-event.js'; let cmHandlesCreatedPerSecondTrend = new Trend('cmhandles_created_per_second', false); let cmHandlesDeletedPerSecondTrend = new Trend('cmhandles_deleted_per_second', false); -let passthroughReadNcmpOverheadTrend = new Trend('ncmp_overhead_passthrough_read', true); let passthroughReadNcmpOverheadTrendWithAlternateId = new Trend('ncmp_overhead_passthrough_read_alt_id', true); -let passthroughWriteNcmpOverheadTrend = new Trend('ncmp_overhead_passthrough_write', true); let passthroughWriteNcmpOverheadTrendWithAlternateId = new Trend('ncmp_overhead_passthrough_write_alt_id', true); let idSearchNoFilterDurationTrend = new Trend('id_search_nofilter_duration', true); let idSearchModuleDurationTrend = new Trend('id_search_module_duration', true); @@ -101,14 +100,6 @@ export function teardown() { sleep(CONTAINER_UP_TIME_IN_SECONDS); } -export function passthroughReadScenario() { - const response = passthroughRead(false); - if (check(response, { 'passthrough read status equals 200': (r) => r.status === 200 })) { - const overhead = response.timings.duration - READ_DATA_FOR_CM_HANDLE_DELAY_MS; - passthroughReadNcmpOverheadTrend.add(overhead); - } -} - export function passthroughReadAltIdScenario() { const response = passthroughRead(true); if (check(response, { 'passthrough read with alternate Id status equals 200': (r) => r.status === 200 })) { @@ -117,14 +108,6 @@ export function passthroughReadAltIdScenario() { } } -export function passthroughWriteScenario() { - const response = passthroughWrite(false); - if (check(response, { 'passthrough write status equals 201': (r) => r.status === 201 })) { - const overhead = response.timings.duration - WRITE_DATA_FOR_CM_HANDLE_DELAY_MS; - passthroughWriteNcmpOverheadTrend.add(overhead); - } -} - export function passthroughWriteAltIdScenario() { const response = passthroughWrite(true); if (check(response, { 'passthrough write with alternate Id status equals 201': (r) => r.status === 201 })) { @@ -136,7 +119,7 @@ export function passthroughWriteAltIdScenario() { export function cmHandleIdSearchNoFilterScenario() { const response = executeCmHandleIdSearch('no-filter'); if (check(response, { 'CM handle ID no-filter search status equals 200': (r) => r.status === 200 }) - && check(response, { 'CM handle ID no-filter search returned expected CM-handles': (r) => r.json('#') === TOTAL_CM_HANDLES })) { + && check(response, { 'CM handle ID no-filter search 
returned the correct number of ids': (r) => r.json('#') === TOTAL_CM_HANDLES })) { idSearchNoFilterDurationTrend.add(response.timings.duration); } } @@ -152,7 +135,7 @@ export function cmHandleSearchNoFilterScenario() { export function cmHandleIdSearchModuleScenario() { const response = executeCmHandleIdSearch('module'); if (check(response, { 'CM handle ID module search status equals 200': (r) => r.status === 200 }) - && check(response, { 'CM handle ID module search returned expected CM-handles': (r) => r.json('#') === TOTAL_CM_HANDLES })) { + && check(response, { 'CM handle ID module search returned the correct number of ids': (r) => r.json('#') === TOTAL_CM_HANDLES })) { idSearchModuleDurationTrend.add(response.timings.duration); } } @@ -168,7 +151,7 @@ export function cmHandleSearchModuleScenario() { export function cmHandleIdSearchPropertyScenario() { const response = executeCmHandleIdSearch('property'); if (check(response, { 'CM handle ID property search status equals 200': (r) => r.status === 200 }) - && check(response, { 'CM handle ID property search returned expected CM-handles': (r) => r.json('#') === TOTAL_CM_HANDLES })) { + && check(response, { 'CM handle ID property search returned the correct number of ids': (r) => r.json('#') === TOTAL_CM_HANDLES })) { idSearchPropertyDurationTrend.add(response.timings.duration); } } @@ -184,7 +167,7 @@ export function cmHandleSearchPropertyScenario() { export function cmHandleIdSearchCpsPathScenario() { const response = executeCmHandleIdSearch('cps-path-for-ready-cm-handles'); if (check(response, { 'CM handle ID cps path search status equals 200': (r) => r.status === 200 }) - && check(response, { 'CM handle ID cps path search returned expected CM-handles': (r) => r.json('#') === TOTAL_CM_HANDLES })) { + && check(response, { 'CM handle ID cps path search returned the correct number of ids': (r) => r.json('#') === TOTAL_CM_HANDLES })) { idSearchCpsPathDurationTrend.add(response.timings.duration); } } @@ -200,7 +183,7 @@ export function cmHandleSearchCpsPathScenario() { export function cmHandleIdSearchTrustLevelScenario() { const response = executeCmHandleIdSearch('trust-level'); if (check(response, { 'CM handle ID trust level search status equals 200': (r) => r.status === 200 }) - && check(response, { 'CM handle ID trust level search returned expected CM-handles': (r) => r.json('#') === TOTAL_CM_HANDLES })) { + && check(response, { 'CM handle ID trust level search returned the correct number of cm handle references': (r) => r.json('#') === TOTAL_CM_HANDLES })) { idSearchTrustLevelDurationTrend.add(response.timings.duration); } } @@ -214,11 +197,15 @@ export function cmHandleSearchTrustLevelScenario() { } export function legacyBatchProduceScenario() { - const nextBatchOfCmHandleIds = makeBatchOfCmHandleIds(LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE, 0); - const response = legacyBatchRead(nextBatchOfCmHandleIds); + const nextBatchOfAlternateIds = makeRandomBatchOfAlternateIds(); + const response = legacyBatchRead(nextBatchOfAlternateIds); check(response, { 'data operation batch read status equals 200': (r) => r.status === 200 }); } +export function produceAvcEventsScenario() { + sendKafkaMessages(); +} + export function legacyBatchConsumeScenario() { const TOTAL_MESSAGES_TO_CONSUME = LEGACY_BATCH_THROUGHPUT_TEST_NUMBER_OF_REQUESTS * LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE; try { diff --git a/k6-tests/ncmp/register-cmhandles-only.js b/k6-tests/ncmp/register-cmhandles-only.js new file mode 100644 index 0000000000..18c2f85c06 --- /dev/null +++ 
b/k6-tests/ncmp/register-cmhandles-only.js @@ -0,0 +1,44 @@ +/* + * ============LICENSE_START======================================================= + * Copyright 2025 OpenInfra Foundation Europe. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +/** + * To run this script, ensure docker-compose is started, then run this k6 script: + * docker-compose -f docker-compose/docker-compose.yml --profile dmi-stub --project-name kpi up --wait + * k6 run register-cmhandles-only.js -e TEST_PROFILE=kpi + * After, the system will be running with 50,000 CM-handles created. + */ + +import { check } from 'k6'; +import { TOTAL_CM_HANDLES, REGISTRATION_BATCH_SIZE, makeBatchOfCmHandleIds } from './common/utils.js'; +import { createCmHandles, waitForAllCmHandlesToBeReady } from './common/cmhandle-crud.js'; + +/** + * This function registers CM-handles in batches and waits until all are in READY state. + * The number of handles to be registered is TOTAL_CM_HANDLES defined in common/utils.js + */ +export default function () { + const TOTAL_BATCHES = Math.ceil(TOTAL_CM_HANDLES / REGISTRATION_BATCH_SIZE); + for (let batchNumber = 0; batchNumber < TOTAL_BATCHES; batchNumber++) { + const nextBatchOfCmHandleIds = makeBatchOfCmHandleIds(REGISTRATION_BATCH_SIZE, batchNumber); + const response = createCmHandles(nextBatchOfCmHandleIds); + check(response, { 'create CM-handles status equals 200': (r) => r.status === 200 }); + } + waitForAllCmHandlesToBeReady(); +} diff --git a/k6-tests/setup.sh b/k6-tests/setup.sh index c794c64dd0..d990475522 100755 --- a/k6-tests/setup.sh +++ b/k6-tests/setup.sh @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright 2024 Nordix Foundation. +# Copyright 2024-2025 Nordix Foundation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,24 +18,20 @@ testProfile=$1 echo "Spinning off the CPS and NCMP containers for $testProfile testing..." -if [[ "$testProfile" == "endurance" ]]; then - docker-compose -f ../docker-compose/docker-compose.yml --profile dmi-stub --project-name "$testProfile" --env-file ../docker-compose/config/endurance.env up --quiet-pull -d - CONTAINER_IDS=$(docker ps --filter "name=endurance-cps-and-ncmp" --format "{{.ID}}") -else - docker-compose -f ../docker-compose/docker-compose.yml --profile dmi-stub --project-name "$testProfile" up --quiet-pull -d - CONTAINER_IDS=$(docker ps --filter "name=kpi-cps-and-ncmp" --format "{{.ID}}") -fi - -echo "Waiting for CPS to start..." 
-READY_MESSAGE="Inventory Model updated successfully" +ENV_FILE="../docker-compose/env/${testProfile}.env" +docker-compose \ + --file "../docker-compose/docker-compose.yml" \ + --env-file "$ENV_FILE" \ + --project-name "$testProfile" \ + --profile dmi-stub \ + up --quiet-pull --detach --wait || exit 1 -# Check the logs for each container -for CONTAINER_ID in $CONTAINER_IDS; do - echo "Checking logs for container: $CONTAINER_ID" - docker logs "$CONTAINER_ID" -f | grep -m 1 "$READY_MESSAGE" >/dev/null && echo "CPS is ready in container: $CONTAINER_ID" || true -done +if [[ "$testProfile" == "kpi" ]]; then + ACTUATOR_PORT=8883 +elif [[ "$testProfile" == "endurance" ]]; then + ACTUATOR_PORT=8884 +fi -# Output build information including git commit info echo "Build information:" -curl http://localhost:8883/actuator/info +curl --silent --show-error http://localhost:$ACTUATOR_PORT/actuator/info echo diff --git a/k6-tests/teardown.sh b/k6-tests/teardown.sh index c3233919dc..7804a73286 100755 --- a/k6-tests/teardown.sh +++ b/k6-tests/teardown.sh @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright 2024 Nordix Foundation. +# Copyright 2024-2025 Nordix Foundation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,6 +18,10 @@ echo '================================== docker info ==========================' docker ps -a +# Zip and store logs for the containers +chmod +x make-logs.sh +./make-logs.sh + testProfile=$1 docker_compose_shutdown_cmd="docker-compose -f ../docker-compose/docker-compose.yml --profile dmi-stub --project-name $testProfile down --volumes" diff --git a/policy-executor-stub/pom.xml b/policy-executor-stub/pom.xml index bdd21e191f..40c684b4d8 100644 --- a/policy-executor-stub/pom.xml +++ b/policy-executor-stub/pom.xml @@ -6,7 +6,7 @@ <parent> <groupId>org.onap.cps</groupId> <artifactId>cps-parent</artifactId> - <version>3.6.0-SNAPSHOT</version> + <version>3.6.3-SNAPSHOT</version> <relativePath>../cps-parent/pom.xml</relativePath> </parent> @@ -27,7 +27,7 @@ <modelVersion>4.0.0</modelVersion>
<groupId>org.onap.cps</groupId>
<artifactId>cps-aggregator</artifactId>
- <version>3.6.0-SNAPSHOT</version>
+ <version>3.6.3-SNAPSHOT</version>
<packaging>pom</packaging>
<name>cps</name>
diff --git a/postman-collections/CPS-CORE.postman_collection.json b/postman-collections/CPS-CORE.postman_collection.json deleted file mode 100644 index 730f69eac5..0000000000 --- a/postman-collections/CPS-CORE.postman_collection.json +++ /dev/null @@ -1,822 +0,0 @@ -{ - "info": { - "_postman_id": "e864733f-4781-45b6-8ea2-0b841a703dae", - "name": "CPS-CORE", - "description": "A collection of the endpoints in CPS-CORE. This is not an exhaustive collection but captures the main functionality.\n\nTo perform this functionality execute the API calls in order to create a dataspace, schema set (file is provided), anchor, and data node. Then GET the nodes to retrieve the information. The APIs in CPS-ADMIN can be used after initial POST requests.\n\nbookstore-model.yang can be found at cps/postman-collections/bookstore-model.yang", - "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", - "_exporter_id": "17907116" - }, - "item": [ - { - "name": "CPS-ADMIN", - "item": [ - { - "name": "Get all dataspaces", - "request": { - "method": "GET", - "header": [ - { - "key": "Accept", - "value": "application/json" - } - ], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/admin/dataspaces", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "admin", - "dataspaces" - ] - }, - "description": "Read all dataspaces" - }, - "response": [] - }, - { - "name": "Get a dataspace", - "request": { - "method": "GET", - "header": [ - { - "key": "Accept", - "value": "application/json" - } - ], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/admin/dataspaces/my-store", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "admin", - "dataspaces", - "my-store" - ] - }, - "description": "Read a dataspace given a dataspace name" - }, - "response": [] - }, - { - "name": "Get anchors", - "request": { - "method": "GET", - "header": [ - { - "key": "Accept", - "value": "application/json" - } - ], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "anchors" - ] - }, - "description": "Read all anchors, given a dataspace" - }, - "response": [] - }, - { - "name": "Get an anchor", - "request": { - "method": "GET", - "header": [ - { - "key": "Accept", - "value": "application/json" - } - ], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "anchors", - "bookstore-anchor" - ] - }, - "description": "Read all anchors, given a dataspace" - }, - "response": [] - }, - { - "name": "Get schema sets", - "request": { - "method": "GET", - "header": [ - { - "key": "Accept", - "value": "application/json" - } - ], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/schema-sets", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "schema-sets" - ] - }, - "description": "Read all schema sets, given a dataspace" - }, - "response": [] - }, - { - "name": "Get a schema set", - "request": { - "method": "GET", - "header": [ - { - 
"key": "Accept", - "value": "application/json" - } - ], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/schema-sets/stores-module", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "schema-sets", - "stores-module" - ] - }, - "description": "Read a schema set given a schema set name and a dataspace" - }, - "response": [] - } - ], - "description": "Some endpoints of CPS-ADMIN not mentioed in the CPS-CORE folder" - }, - { - "name": "Create Dataspace", - "request": { - "method": "POST", - "header": [], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces?dataspace-name=my-store", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces" - ], - "query": [ - { - "key": "dataspace-name", - "value": "my-store" - } - ] - } - }, - "response": [] - }, - { - "name": "Create Schema Set", - "request": { - "method": "POST", - "header": [ - { - "key": "Content-Type", - "value": "multipart/form-data", - "type": "text", - "disabled": true - } - ], - "body": { - "mode": "formdata", - "formdata": [ - { - "key": "file", - "type": "file", - "src": "cps/postman-collections/bookstore.zip" - } - ] - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/schema-sets?schema-set-name=stores-module", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "schema-sets" - ], - "query": [ - { - "key": "schema-set-name", - "value": "stores-module" - } - ] - }, - "description": "Make sure in the body there is form data. key = file and value will be the zip/yang file." 
- }, - "response": [] - }, - { - "name": "Create Anchor", - "request": { - "method": "POST", - "header": [], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors?schema-set-name=stores-module&anchor-name=bookstore-anchor", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "anchors" - ], - "query": [ - { - "key": "schema-set-name", - "value": "stores-module" - }, - { - "key": "anchor-name", - "value": "bookstore-anchor" - } - ] - } - }, - "response": [] - }, - { - "name": "Create DataNodes", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"bookstore-address\": [\n {\n \"bookstore-name\": \"Easons\",\n \"address\": \"Dublin,Ireland\",\n \"postal-code\": \"D02HA21\"\n }\n ],\n \"bookstore\": {\n \"bookstore-name\": \"Easons\",\n \"premises\": {\n \"addresses\": [\n {\n \"house-number\": 2,\n \"street\": \"Main Street\",\n \"town\": \"Maynooth\",\n \"county\": \"Kildare\"\n },\n {\n \"house-number\": 24,\n \"street\": \"Grafton Street\",\n \"town\": \"Dublin\",\n \"county\": \"Dublin\"\n }\n ]\n },\n \"categories\": [\n {\n \"code\": 1,\n \"name\": \"Children\",\n \"books\" : [\n {\n \"title\": \"Matilda\",\n \"lang\": \"English\",\n \"authors\": [\"Roald Dahl\"],\n \"editions\": [1988, 2000],\n \"price\": 20\n },\n {\n \"title\": \"The Gruffalo\",\n \"lang\": \"English\",\n \"authors\": [\"Julia Donaldson\"],\n \"editions\": [1999],\n \"price\": 15\n }\n ]\n },\n {\n \"code\": 2,\n \"name\": \"Thriller\",\n \"books\" : [\n {\n \"title\": \"Annihilation\",\n \"lang\": \"English\",\n \"authors\": [\"Jeff VanderMeer\"],\n \"editions\": [2014],\n \"price\": 15\n }\n ]\n },\n {\n \"code\": 3,\n \"name\": \"Comedy\",\n \"books\" : [\n {\n \"title\": \"Good Omens\",\n \"lang\": \"English\",\n \"authors\": [\"Neil Gaiman\", \"Terry Pratchett\"],\n \"editions\": [2006],\n \"price\": 13\n },\n {\n \"title\": \"The Colour of Magic\",\n \"lang\": \"English\",\n \"authors\": [\"Terry Pratchett\"],\n \"editions\": [1983],\n \"price\": 12\n },\n {\n \"title\": \"The Light Fantastic\",\n \"lang\": \"English\",\n \"authors\": [\"Terry Pratchett\"],\n \"editions\": [1986],\n \"price\": 14\n },\n {\n \"title\": \"A Book with No Language\",\n \"lang\": \"\",\n \"authors\": [\"Joe Bloggs\"],\n \"editions\": [2023],\n \"price\": 20\n }\n ]\n },\n {\n \"code\": 4,\n \"name\": \"Computing\",\n \"books\" : [\n {\n \"title\": \"Debian GNU/Linux\",\n \"lang\": \"German\",\n \"authors\": [\"Peter H. 
Ganten\", \"Wulf Alex\"],\n \"editions\": [2007, 2013, 2021],\n \"price\": 39\n },\n {\n \"title\": \"Logarithm tables\",\n \"lang\": \"N/A\",\n \"authors\": [\"Joe Bloggs\"],\n \"editions\": [2009],\n \"price\": 11\n }\n ]\n },\n {\n \"code\": 5,\n \"name\": \"Discount books\",\n \"books\" : [\n {\n \"title\": \"Book 1\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 1\n },\n {\n \"title\": \"Book 2\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 2\n },\n {\n \"title\": \"Book 3\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 3\n },\n {\n \"title\": \"Book 4\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 4\n },\n {\n \"title\": \"Book 5\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 5\n },\n {\n \"title\": \"Book 6\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 6\n },\n {\n \"title\": \"Book 7\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 7\n },\n {\n \"title\": \"Book 8\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 8\n },\n {\n \"title\": \"Book 9\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 9\n },\n {\n \"title\": \"Book 10\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 10\n }\n ]\n }\n ]\n }\n}\n", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/nodes", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "anchors", - "bookstore-anchor", - "nodes" - ] - } - }, - "response": [] - }, - { - "name": "Get Root Node All Descendants", - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/node?xpath=/&descendants=all", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "anchors", - "bookstore-anchor", - "node" - ], - "query": [ - { - "key": "xpath", - "value": "/" - }, - { - "key": "descendants", - "value": "all" - } - ] - } - }, - "response": [] - }, - { - "name": "Get Node Categories No Descendants", - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/node?xpath=/bookstore/categories&descendants=none", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "anchors", - "bookstore-anchor", - "node" - ], - "query": [ - { - "key": "xpath", - "value": "/bookstore/categories" - }, - { - "key": "descendants", - "value": "none" - } - ] - } - }, - "response": [] - }, - { - "name": "Get Node Premises All Descendants", - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/node?xpath=/bookstore/premises&descendants=all", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "anchors", - "bookstore-anchor", - "node" - ], - "query": [ - { - "key": "xpath", - "value": "/bookstore/premises" 
- }, - { - "key": "descendants", - "value": "all" - } - ] - } - }, - "response": [] - }, - { - "name": "Get Node Categories with code 2 Direct Descendants", - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/node?xpath=/bookstore/categories[@code='2']&descendants=direct", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "anchors", - "bookstore-anchor", - "node" - ], - "query": [ - { - "key": "xpath", - "value": "/bookstore/categories[@code='2']" - }, - { - "key": "descendants", - "value": "direct" - } - ] - } - }, - "response": [] - }, - { - "name": "Query Path Leaf Condition All Descendants", - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/nodes/query?cps-path=/bookstore/categories[@name=\"Discount books\"]&descendants=-1", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "anchors", - "bookstore-anchor", - "nodes", - "query" - ], - "query": [ - { - "key": "cps-path", - "value": "/bookstore/categories[@name=\"Discount books\"]" - }, - { - "key": "descendants", - "value": "-1" - } - ] - } - }, - "response": [] - }, - { - "name": "Query Path Ancestor No Descendants", - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/nodes/query?cps-path=//books[@lang=\"German\"]/ancestor::categories&descendants=0", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "anchors", - "bookstore-anchor", - "nodes", - "query" - ], - "query": [ - { - "key": "cps-path", - "value": "//books[@lang=\"German\"]/ancestor::categories" - }, - { - "key": "descendants", - "value": "0" - } - ] - } - }, - "response": [] - }, - { - "name": "Patch Nodes", - "request": { - "method": "PATCH", - "header": [], - "body": { - "mode": "raw", - "raw": "{\r\n \"bookstore\": {\r\n \"bookstore-name\": \"Chapters\"\r\n }\r\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/nodes?xpath=/", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "anchors", - "bookstore-anchor", - "nodes" - ], - "query": [ - { - "key": "xpath", - "value": "/" - } - ] - } - }, - "response": [] - }, - { - "name": "Put Nodes", - "request": { - "method": "PUT", - "header": [], - "body": { - "mode": "raw", - "raw": "\r\n {\r\n \"premises\": {\r\n \"addresses\": [\r\n {\r\n \"town\": \"Maynooth\",\r\n \"county\": \"Kildare\",\r\n \"street\": \"Main Street\",\r\n \"house-number\": 2\r\n },\r\n {\r\n \"town\": \"Dublin\",\r\n \"county\": \"Dublin\",\r\n \"street\": \"Grafton Street\",\r\n \"house-number\": 24\r\n },\r\n {\r\n \"house-number\": 13,\r\n \"street\": \"Church Street\",\r\n \"town\": \"Athlone\",\r\n \"county\": \"Westmeath\"\r\n }\r\n ]\r\n }\r\n }\r\n", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": 
"http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/nodes?xpath=/bookstore", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "anchors", - "bookstore-anchor", - "nodes" - ], - "query": [ - { - "key": "xpath", - "value": "/bookstore" - } - ] - } - }, - "response": [] - }, - { - "name": "Get Node Premises Direct Descendants", - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/node?xpath=/bookstore/premises&descendants=1", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "anchors", - "bookstore-anchor", - "node" - ], - "query": [ - { - "key": "xpath", - "value": "/bookstore/premises" - }, - { - "key": "descendants", - "value": "1" - } - ] - } - }, - "response": [] - }, - { - "name": "Delete DataNodes", - "request": { - "method": "DELETE", - "header": [], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/nodes?xpath=/", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "anchors", - "bookstore-anchor", - "nodes" - ], - "query": [ - { - "key": "xpath", - "value": "/" - } - ] - } - }, - "response": [] - }, - { - "name": "Delete Anchor", - "request": { - "method": "DELETE", - "header": [], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "anchors", - "bookstore-anchor" - ] - } - }, - "response": [] - }, - { - "name": "Delete Schema Set", - "request": { - "method": "DELETE", - "header": [ - { - "key": "Content-Type", - "value": "multipart/form-data", - "type": "text", - "disabled": true - } - ], - "body": { - "mode": "formdata", - "formdata": [ - { - "key": "file", - "type": "file", - "src": "/home/jeff/Downloads/bookstore-model.yang" - } - ] - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/schema-sets/stores-module", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces", - "my-store", - "schema-sets", - "stores-module" - ] - }, - "description": "Make sure in the body there is form data. key = file and value will be the zip/yang file." - }, - "response": [] - }, - { - "name": "Delete Dataspace", - "request": { - "method": "DELETE", - "header": [], - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces?dataspace-name=my-store", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "cps", - "api", - "v2", - "dataspaces" - ], - "query": [ - { - "key": "dataspace-name", - "value": "my-store" - } - ] - } - }, - "response": [] - } - ], - "auth": { - "type": "basic", - "basic": [ - { - "key": "password", - "value": "cpsr0cks!", - "type": "string" - }, - { - "key": "username", - "value": "cpsuser", - "type": "string" - } - ] - } -}
\ No newline at end of file diff --git a/postman-collections/CPS.postman_collection.json b/postman-collections/CPS.postman_collection.json new file mode 100644 index 0000000000..ca630e2e17 --- /dev/null +++ b/postman-collections/CPS.postman_collection.json @@ -0,0 +1,1715 @@ +{ + "info": { + "_postman_id": "284e78f0-74c9-45f5-bdd5-25869a51028c", + "name": "CPS", + "description": "<img src=\"https://content.pstmn.io/bfbd86c2-7aa4-4afd-bd4a-207de1305bb7/Q1BTTG9nby5wbmc=\">\n\n# [CPS ONAP Documentation](https://docs.onap.org/projects/onap-cps/en/latest/index.html)\n\n# [DMI ONAP Documentation](https://docs.onap.org/projects/onap-cps-ncmp-dmi-plugin/en/latest/index.html)\n\n### How-to\n\nBuild CPS docker images with maven:\n\n`mvn clean install`\n\nor\n\n`mvn clean install -DskipTests`\n\nNavigate to docker-compose folder:\n\n`cd .\\docker-compose\\`\n\nBring up docker containers using commands found in docker-compose.yaml or run CPS services including dmi plugin\n\n`docker-compose --profile dmi-service up -d`\n\n### CPS-Enviroment\n\nTo enable the CPS enviroment, go to the top right of the screen in Postman and click envrioments. Import the CPS-envrioment file from the Postman Collections file in CPS.\n\n**To utilise NCMP-DMI-Stub endpoints begin the docker containers with:**\n\n`docker-compose --profile dmi-stub --profile monitoring up -d`\n\n# CPS Core Example\n\n- Create dataspace\n \n- Verify dataspace exists\n \n- etc...\n \n\n# CPS-NCMP Example\n\n- Create CM handles\n \n- Verify CM handles exist\n \n- etc...", + "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", + "_exporter_id": "26733783" + }, + "item": [ + { + "name": "CPS-Core", + "item": [ + { + "name": "CPS-Admin", + "item": [ + { + "name": "Create a dataspace", + "request": { + "method": "POST", + "header": [], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces?dataspace-name=my-store", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces" + ], + "query": [ + { + "key": "dataspace-name", + "value": "my-store" + } + ] + } + }, + "response": [] + }, + { + "name": "Get a dataspace", + "request": { + "method": "GET", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/admin/dataspaces/my-store", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "admin", + "dataspaces", + "my-store" + ] + }, + "description": "Read a dataspace given a dataspace name" + }, + "response": [] + }, + { + "name": "Get all dataspaces", + "request": { + "method": "GET", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/admin/dataspaces", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "admin", + "dataspaces" + ] + }, + "description": "Read all dataspaces" + }, + "response": [] + }, + { + "name": "Create a schema set", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "multipart/form-data", + "type": "text", + "disabled": true + } + ], + "body": { + "mode": "formdata", + "formdata": [ + { + "key": "file", + "type": "file", + "src": 
"cps/postman-collections/bookstore.zip" + } + ] + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/schema-sets?schema-set-name=stores-module", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "schema-sets" + ], + "query": [ + { + "key": "schema-set-name", + "value": "stores-module" + } + ] + }, + "description": "Make sure in the body there is form data. key = file and value will be the zip/yang file." + }, + "response": [] + }, + { + "name": "Get a schema set", + "request": { + "method": "GET", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/schema-sets/stores-module", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "schema-sets", + "stores-module" + ] + }, + "description": "Read a schema set given a schema set name and a dataspace" + }, + "response": [] + }, + { + "name": "Get schema sets", + "request": { + "method": "GET", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/schema-sets", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "schema-sets" + ] + }, + "description": "Read all schema sets, given a dataspace" + }, + "response": [] + }, + { + "name": "Create an anchor", + "request": { + "method": "POST", + "header": [], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors?schema-set-name=stores-module&anchor-name=bookstore-anchor", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "anchors" + ], + "query": [ + { + "key": "schema-set-name", + "value": "stores-module" + }, + { + "key": "anchor-name", + "value": "bookstore-anchor" + } + ] + } + }, + "response": [] + }, + { + "name": "Get an anchor", + "request": { + "method": "GET", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "anchors", + "bookstore-anchor" + ] + }, + "description": "Read all anchors, given a dataspace" + }, + "response": [] + }, + { + "name": "Get anchors", + "request": { + "method": "GET", + "header": [ + { + "key": "Accept", + "value": "application/json" + } + ], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "anchors" + ] + }, + "description": "Read all anchors, given a dataspace" + }, + "response": [] + }, + { + "name": "Delete a schema set", + "request": { + "method": "DELETE", + "header": [ + { + "key": "Content-Type", + "value": "multipart/form-data", + "type": "text", + "disabled": true + } + ], + "body": { + "mode": "formdata", + "formdata": [ + { + "key": "file", + "type": "file", + "src": 
"/home/jeff/Downloads/bookstore-model.yang" + } + ] + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/schema-sets/stores-module", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "schema-sets", + "stores-module" + ] + }, + "description": "Make sure in the body there is form data. key = file and value will be the zip/yang file." + }, + "response": [] + }, + { + "name": "Delete an anchor", + "request": { + "method": "DELETE", + "header": [], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "anchors", + "bookstore-anchor" + ] + } + }, + "response": [] + }, + { + "name": "Delete a dataspace", + "request": { + "method": "DELETE", + "header": [], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces?dataspace-name=my-store", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces" + ], + "query": [ + { + "key": "dataspace-name", + "value": "my-store" + } + ] + } + }, + "response": [] + } + ], + "description": "Some endpoints of CPS-ADMIN not mentioned in the CPS-CORE folder" + }, + { + "name": "CPS-Data", + "item": [ + { + "name": "Create data nodes", + "request": { + "method": "POST", + "header": [], + "body": { + "mode": "raw", + "raw": "{\n \"bookstore-address\": [\n {\n \"bookstore-name\": \"Easons\",\n \"address\": \"Dublin,Ireland\",\n \"postal-code\": \"D02HA21\"\n }\n ],\n \"bookstore\": {\n \"bookstore-name\": \"Easons\",\n \"premises\": {\n \"addresses\": [\n {\n \"house-number\": 2,\n \"street\": \"Main Street\",\n \"town\": \"Maynooth\",\n \"county\": \"Kildare\"\n },\n {\n \"house-number\": 24,\n \"street\": \"Grafton Street\",\n \"town\": \"Dublin\",\n \"county\": \"Dublin\"\n }\n ]\n },\n \"categories\": [\n {\n \"code\": 1,\n \"name\": \"Children\",\n \"books\" : [\n {\n \"title\": \"Matilda\",\n \"lang\": \"English\",\n \"authors\": [\"Roald Dahl\"],\n \"editions\": [1988, 2000],\n \"price\": 20\n },\n {\n \"title\": \"The Gruffalo\",\n \"lang\": \"English\",\n \"authors\": [\"Julia Donaldson\"],\n \"editions\": [1999],\n \"price\": 15\n }\n ]\n },\n {\n \"code\": 2,\n \"name\": \"Thriller\",\n \"books\" : [\n {\n \"title\": \"Annihilation\",\n \"lang\": \"English\",\n \"authors\": [\"Jeff VanderMeer\"],\n \"editions\": [2014],\n \"price\": 15\n }\n ]\n },\n {\n \"code\": 3,\n \"name\": \"Comedy\",\n \"books\" : [\n {\n \"title\": \"Good Omens\",\n \"lang\": \"English\",\n \"authors\": [\"Neil Gaiman\", \"Terry Pratchett\"],\n \"editions\": [2006],\n \"price\": 13\n },\n {\n \"title\": \"The Colour of Magic\",\n \"lang\": \"English\",\n \"authors\": [\"Terry Pratchett\"],\n \"editions\": [1983],\n \"price\": 12\n },\n {\n \"title\": \"The Light Fantastic\",\n \"lang\": \"English\",\n \"authors\": [\"Terry Pratchett\"],\n \"editions\": [1986],\n \"price\": 14\n },\n {\n \"title\": \"A Book with No Language\",\n \"lang\": \"\",\n \"authors\": [\"Joe Bloggs\"],\n \"editions\": [2023],\n \"price\": 20\n }\n ]\n },\n {\n \"code\": 4,\n \"name\": \"Computing\",\n \"books\" : [\n {\n \"title\": \"Debian GNU/Linux\",\n \"lang\": \"German\",\n \"authors\": [\"Peter H. 
Ganten\", \"Wulf Alex\"],\n \"editions\": [2007, 2013, 2021],\n \"price\": 39\n },\n {\n \"title\": \"Logarithm tables\",\n \"lang\": \"N/A\",\n \"authors\": [\"Joe Bloggs\"],\n \"editions\": [2009],\n \"price\": 11\n }\n ]\n },\n {\n \"code\": 5,\n \"name\": \"Discount books\",\n \"books\" : [\n {\n \"title\": \"Book 1\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 1\n },\n {\n \"title\": \"Book 2\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 2\n },\n {\n \"title\": \"Book 3\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 3\n },\n {\n \"title\": \"Book 4\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 4\n },\n {\n \"title\": \"Book 5\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 5\n },\n {\n \"title\": \"Book 6\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 6\n },\n {\n \"title\": \"Book 7\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 7\n },\n {\n \"title\": \"Book 8\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 8\n },\n {\n \"title\": \"Book 9\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 9\n },\n {\n \"title\": \"Book 10\",\n \"lang\": \"blah\",\n \"authors\": [],\n \"editions\": [],\n \"price\": 10\n }\n ]\n }\n ]\n }\n}\n", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/nodes", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "anchors", + "bookstore-anchor", + "nodes" + ] + } + }, + "response": [] + }, + { + "name": "Get root node, all descendants", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/node?xpath=/&descendants=all", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "anchors", + "bookstore-anchor", + "node" + ], + "query": [ + { + "key": "xpath", + "value": "/" + }, + { + "key": "descendants", + "value": "all" + } + ] + } + }, + "response": [] + }, + { + "name": "Get node categories, no descendants", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/node?xpath=/bookstore/categories&descendants=none", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "anchors", + "bookstore-anchor", + "node" + ], + "query": [ + { + "key": "xpath", + "value": "/bookstore/categories" + }, + { + "key": "descendants", + "value": "none" + } + ] + } + }, + "response": [] + }, + { + "name": "Get node category by key, direct descendants only", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/node?xpath=/bookstore/categories[@code='2']&descendants=direct", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "anchors", + "bookstore-anchor", + "node" + ], + "query": [ + { + "key": "xpath", 
+ "value": "/bookstore/categories[@code='2']" + }, + { + "key": "descendants", + "value": "direct" + } + ] + } + }, + "response": [] + }, + { + "name": "Get node premises, all descendants", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/node?xpath=/bookstore/premises&descendants=all", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "anchors", + "bookstore-anchor", + "node" + ], + "query": [ + { + "key": "xpath", + "value": "/bookstore/premises" + }, + { + "key": "descendants", + "value": "all" + } + ] + } + }, + "response": [] + }, + { + "name": "Patch nodes", + "request": { + "method": "PATCH", + "header": [], + "body": { + "mode": "raw", + "raw": "{\r\n \"bookstore\": {\r\n \"bookstore-name\": \"Chapters\"\r\n }\r\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/nodes?xpath=/", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "anchors", + "bookstore-anchor", + "nodes" + ], + "query": [ + { + "key": "xpath", + "value": "/" + } + ] + } + }, + "response": [] + }, + { + "name": "Put nodes", + "request": { + "method": "PUT", + "header": [], + "body": { + "mode": "raw", + "raw": "\r\n {\r\n \"premises\": {\r\n \"addresses\": [\r\n {\r\n \"town\": \"Maynooth\",\r\n \"county\": \"Kildare\",\r\n \"street\": \"Main Street\",\r\n \"house-number\": 2\r\n },\r\n {\r\n \"town\": \"Dublin\",\r\n \"county\": \"Dublin\",\r\n \"street\": \"Grafton Street\",\r\n \"house-number\": 24\r\n },\r\n {\r\n \"house-number\": 13,\r\n \"street\": \"Church Street\",\r\n \"town\": \"Athlone\",\r\n \"county\": \"Westmeath\"\r\n }\r\n ]\r\n }\r\n }\r\n", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/nodes?xpath=/bookstore", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "anchors", + "bookstore-anchor", + "nodes" + ], + "query": [ + { + "key": "xpath", + "value": "/bookstore" + } + ] + } + }, + "response": [] + }, + { + "name": "Delete data nodes", + "request": { + "method": "DELETE", + "header": [], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/nodes?xpath=/", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "anchors", + "bookstore-anchor", + "nodes" + ], + "query": [ + { + "key": "xpath", + "value": "/" + } + ] + } + }, + "response": [] + } + ] + }, + { + "name": "CPS-Query", + "item": [ + { + "name": "Query path leaf condition, all descendants", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/nodes/query?cps-path=/bookstore/categories[@name=\"Discount books\"]&descendants=-1", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "anchors", + "bookstore-anchor", + "nodes", + "query" + 
], + "query": [ + { + "key": "cps-path", + "value": "/bookstore/categories[@name=\"Discount books\"]" + }, + { + "key": "descendants", + "value": "-1" + } + ] + } + }, + "response": [] + }, + { + "name": "Query path ancestor, no descendants", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/cps/api/v2/dataspaces/my-store/anchors/bookstore-anchor/nodes/query?cps-path=//books[@lang=\"German\"]/ancestor::categories&descendants=0", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "cps", + "api", + "v2", + "dataspaces", + "my-store", + "anchors", + "bookstore-anchor", + "nodes", + "query" + ], + "query": [ + { + "key": "cps-path", + "value": "//books[@lang=\"German\"]/ancestor::categories" + }, + { + "key": "descendants", + "value": "0" + } + ] + } + }, + "response": [] + } + ] + } + ], + "description": "CPS-Core endpoints as defined by Rest OpenApi Specification [here](https://docs.onap.org/projects/onap-cps/en/latest/design.html#cps-core)\n\nTo perform this functionality execute the API calls in order to create a dataspace, schema set (file is provided), anchor, and data node. Then GET the nodes to retrieve the information. The APIs in CPS-ADMIN can be used after initial POST requests.\n\n## Running the collection\n\nTo run the requests in the collection simply select the request and click send. \"Create Schema Set\" in \"CPS-CORE\" requires a file to send the request. Example files are provided: \"bookstore.yang\" and \"bookstore-types.yang\" (these files must be zipped before adding them to the request)\n\nbookstore-model.yang can be found at cps/postman-collections/bookstore-model.yang" + }, + { + "name": "CPS-NCMP", + "item": [ + { + "name": "Inventory", + "item": [ + { + "name": "CM handle registration (discovery)", + "event": [ + { + "listen": "prerequest", + "script": { + "exec": [ + "" + ], + "type": "text/javascript", + "packages": {} + } + } + ], + "request": { + "method": "POST", + "header": [], + "body": { + "mode": "raw", + "raw": "{\r\n \"dmiPlugin\": \"http://ncmp-dmi-plugin-demo-and-csit-stub:8092\",\r\n \"createdCmHandles\": [\r\n {\r\n \"cmHandle\": \"cm-handle-1\",\r\n \"alternateId\": \"Subnetwork=Europe,ManagedElement=X1\",\r\n \"dataProducerIdentifier\": \"my-data-producer-identifier\",\r\n \"cmHandleProperties\": {},\r\n \"publicCmHandleProperties\": {\r\n \"id\": \"123\",\r\n \"userLabel\": \"test\"\r\n },\r\n \"moduleSetTag\": \"my-module-set-tag\",\r\n \"trustLevel\": \"COMPLETE\"\r\n },\r\n {\r\n \"cmHandle\": \"cm-handle-2\",\r\n \"alternateId\": \"Subnetwork=Europe,ManagedElement=X2\",\r\n \"dataProducerIdentifier\": \"my-data-producer-identifier\",\r\n \"cmHandleProperties\": {},\r\n \"publicCmHandleProperties\": {\r\n \"id\": \"456\",\r\n \"userLabel\": \"test\"\r\n },\r\n \"moduleSetTag\": \"my-module-set-tag\",\r\n \"trustLevel\": \"COMPLETE\"\r\n }\r\n ]\r\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmpInventory/v1/ch", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmpInventory", + "v1", + "ch" + ] + }, + "description": "DMI notifies NCMP of new CM handles" + }, + "response": [] + }, + { + "name": "Get registered CM handle references", + "protocolProfileBehavior": { + "disableBodyPruning": true + }, + "request": { + "method": "GET", + "header": [ + { + "key": "Authorization", + "value": "Basic Y3BzdXNlcjpjcHNyMGNrcyE=", + "type": 
"text" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmpInventory/v1/ch/cmHandles?dmi-plugin-identifier=http://ncmp-dmi-plugin-demo-and-csit-stub:8092", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmpInventory", + "v1", + "ch", + "cmHandles" + ], + "query": [ + { + "key": "dmi-plugin-identifier", + "value": "http://ncmp-dmi-plugin-demo-and-csit-stub:8092" + }, + { + "key": "outputAlternateId", + "value": "true", + "description": "Returns alternate ids rather then cm handle ids", + "disabled": true + } + ] + } + }, + "response": [] + }, + { + "name": "Query for CM handle ids", + "request": { + "method": "POST", + "header": [], + "body": { + "mode": "raw", + "raw": "{\r\n \"cmHandleQueryParameters\": [\r\n {\r\n \"conditionName\": \"hasAllProperties\",\r\n \"conditionParameters\": [\r\n {\r\n \"id\": \"123\"\r\n }\r\n ]\r\n }\r\n ]\r\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmpInventory/v1/ch/searches", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmpInventory", + "v1", + "ch", + "searches" + ], + "query": [ + { + "key": "outputAlternateId", + "value": "true", + "description": "Returns alternate ids rather then cm handle ids", + "disabled": true + } + ] + } + }, + "response": [] + }, + { + "name": "Get module references", + "protocolProfileBehavior": { + "disableBodyPruning": true + }, + "request": { + "method": "GET", + "header": [ + { + "key": "Authorization", + "value": "Basic Y3BzdXNlcjpjcHNyMGNrcyE=", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/cm-handle-1/modules", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmp", + "v1", + "ch", + "cm-handle-1", + "modules" + ] + } + }, + "response": [] + }, + { + "name": "Get module definitions", + "protocolProfileBehavior": { + "disableBodyPruning": true + }, + "request": { + "method": "GET", + "header": [ + { + "key": "Authorization", + "value": "Basic Y3BzdXNlcjpjcHNyMGNrcyE=", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/cm-handle-1/modules/definitions", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmp", + "v1", + "ch", + "cm-handle-1", + "modules", + "definitions" + ] + } + }, + "response": [] + }, + { + "name": "CM handle id search", + "request": { + "method": "POST", + "header": [ + { + "key": "Authorization", + "value": "Basic Y3BzdXNlcjpjcHNyMGNrcyE=", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n \"cmHandleQueryParameters\": [\r\n {\r\n \"conditionName\": \"hasAllProperties\",\r\n \"conditionParameters\": [\r\n {\r\n \"id\": \"123\"\r\n }\r\n ]\r\n }\r\n ]\r\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/id-searches", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmp", + "v1", + "ch", + "id-searches" + ], + "query": [ + { + "key": "outputAlternateId", + "value": "true", + "description": "Returns alternate ids rather then cm handle ids", + 
"disabled": true + } + ] + } + }, + "response": [] + }, + { + "name": "CM handle object search", + "request": { + "method": "POST", + "header": [ + { + "key": "Authorization", + "value": "Basic Y3BzdXNlcjpjcHNyMGNrcyE=", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n \"cmHandleQueryParameters\": [\r\n {\r\n \"conditionName\": \"hasAllProperties\",\r\n \"conditionParameters\": [\r\n {\r\n \"id\": \"123\"\r\n }\r\n ]\r\n }\r\n ]\r\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/searches", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmp", + "v1", + "ch", + "searches" + ] + } + }, + "response": [] + }, + { + "name": "Get CM handle details", + "request": { + "method": "GET", + "header": [ + { + "key": "Authorization", + "value": "Basic Y3BzdXNlcjpjcHNyMGNrcyE=", + "type": "text" + } + ], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/cm-handle-1", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmp", + "v1", + "ch", + "cm-handle-1" + ] + } + }, + "response": [] + }, + { + "name": "Get CM handle details using alternate-id", + "request": { + "method": "GET", + "header": [ + { + "key": "Authorization", + "value": "Basic Y3BzdXNlcjpjcHNyMGNrcyE=", + "type": "text" + } + ], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/Subnetwork%3DEurope%2CManagedElement%3DX1", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmp", + "v1", + "ch", + "Subnetwork%3DEurope%2CManagedElement%3DX1" + ] + } + }, + "response": [] + }, + { + "name": "Get CM handle properties", + "request": { + "method": "GET", + "header": [ + { + "key": "Authorization", + "value": "Basic Y3BzdXNlcjpjcHNyMGNrcyE=", + "type": "text" + } + ], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/cm-handle-1/properties", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmp", + "v1", + "ch", + "cm-handle-1", + "properties" + ] + } + }, + "response": [] + }, + { + "name": "Get CM handle state", + "protocolProfileBehavior": { + "disableBodyPruning": true + }, + "request": { + "method": "GET", + "header": [ + { + "key": "Authorization", + "value": "Basic Y3BzdXNlcjpjcHNyMGNrcyE=", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/cm-handle-1/state", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmp", + "v1", + "ch", + "cm-handle-1", + "state" + ] + } + }, + "response": [] + }, + { + "name": "Set CM handle 'dataSync' flag", + "request": { + "method": "PUT", + "header": [ + { + "key": "Authorization", + "value": "Basic Y3BzdXNlcjpjcHNyMGNrcyE=", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/cm-handle-1/data-sync?dataSyncEnabled=false", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmp", + "v1", + "ch", + "cm-handle-1", + "data-sync" + ], + "query": [ + { + "key": "dataSyncEnabled", + "value": "false" + } + ] + } + }, + "response": [] + }, + { + "name": "CM handle de-registration", + "request": { + "method": "POST", 
+ "header": [], + "body": { + "mode": "raw", + "raw": "{\r\n \"dmiPlugin\": \"http://ncmp-dmi-plugin-demo-and-csit-stub:8092\",\r\n \"removedCmHandles\": [ \"cm-handle-1\", \"cm-handle-2\"]\r\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmpInventory/v1/ch", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmpInventory", + "v1", + "ch" + ] + }, + "description": "DMI notifies NCMP of new CM handles" + }, + "response": [] + } + ], + "description": "CPS-NCMP-Inventory endpoints as defined by Rest OpenApi Specification [here](https://docs.onap.org/projects/onap-cps/en/latest/design.html#cps-ncmp-inventory)" + }, + { + "name": "Data", + "item": [ + { + "name": "Create resource data", + "request": { + "method": "POST", + "header": [], + "body": { + "mode": "raw", + "raw": "{\r\n \"ietf-network:networks\": {\r\n \"network\": [\r\n {\r\n \"network-types\": {},\r\n \"network-id\": \"otn-hc\",\r\n \"node\": [\r\n {\r\n \"node-id\": \"D1\",\r\n \"termination-point\": [\r\n {\r\n \"tp-id\": \"1-0-1\"\r\n },\r\n {\r\n \"tp-id\": \"1-2-1\"\r\n },\r\n {\r\n \"tp-id\": \"1-3-1\"\r\n }\r\n ]\r\n },\r\n {\r\n \"node-id\": \"D2\",\r\n \"termination-point\": [\r\n {\r\n \"tp-id\": \"2-0-1\"\r\n },\r\n {\r\n \"tp-id\": \"2-1-1\"\r\n },\r\n {\r\n \"tp-id\": \"2-3-1\"\r\n }\r\n ]\r\n },\r\n {\r\n \"node-id\": \"D3\",\r\n \"termination-point\": [\r\n {\r\n \"tp-id\": \"3-1-1\"\r\n },\r\n {\r\n \"tp-id\": \"3-2-1\"\r\n }\r\n ]\r\n }\r\n ],\r\n \"ietf-network-topology:link\": [\r\n {\r\n \"link-id\": \"D1,1-2-1,D2,2-1-1\",\r\n \"source\": {\r\n \"source-node\": \"D1\",\r\n \"source-tp\": \"1-2-1\"\r\n },\r\n \"destination\": {\r\n \"dest-node\": \"D2\",\r\n \"dest-tp\": \"2-1-1\"\r\n }\r\n },\r\n {\r\n \"link-id\": \"D2,2-1-1,D1,1-2-1\",\r\n \"source\": {\r\n \"source-node\": \"D2\",\r\n \"source-tp\": \"2-1-1\"\r\n },\r\n \"destination\": {\r\n \"dest-node\": \"D1\",\r\n \"dest-tp\": \"1-2-1\"\r\n }\r\n }\r\n ]\r\n }\r\n ]\r\n }\r\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/cm-handle-1/data/ds/ncmp-datastore:passthrough-running?resourceIdentifier=NRCellDU/attributes/cellLocalId", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmp", + "v1", + "ch", + "cm-handle-1", + "data", + "ds", + "ncmp-datastore:passthrough-running" + ], + "query": [ + { + "key": "resourceIdentifier", + "value": "NRCellDU/attributes/cellLocalId" + } + ] + } + }, + "response": [] + }, + { + "name": "Get resource data", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/cm-handle-1/data/ds/ncmp-datastore:passthrough-running?resourceIdentifier=NRCellDU/attributes/cellLocalId", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmp", + "v1", + "ch", + "cm-handle-1", + "data", + "ds", + "ncmp-datastore:passthrough-running" + ], + "query": [ + { + "key": "resourceIdentifier", + "value": "NRCellDU/attributes/cellLocalId" + } + ] + } + }, + "response": [] + }, + { + "name": "Query resource data", + "protocolProfileBehavior": { + "disableBodyPruning": true + }, + "request": { + "method": "GET", + "header": [ + { + "key": "Authorization", + "value": "Basic Y3BzdXNlcjpjcHNyMGNrcyE=", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": 
"http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/cm-handle-1/data/ds/ncmp-datastore:operational/query", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmp", + "v1", + "ch", + "cm-handle-1", + "data", + "ds", + "ncmp-datastore:operational", + "query" + ], + "query": [ + { + "key": "options", + "value": "{ \"options\": \"(depth=3)\"}", + "disabled": true + }, + { + "key": "topic", + "value": "{ \"topic\": \"my-topic-name\"}", + "disabled": true + }, + { + "key": "include-descendants", + "value": "true", + "disabled": true + }, + { + "key": "cps-path", + "value": "", + "disabled": true + } + ] + } + }, + "response": [] + }, + { + "name": "Update resource data", + "request": { + "method": "PUT", + "header": [], + "body": { + "mode": "raw", + "raw": "{\r\n \"ietf-network:networks\": {\r\n \"network\": [\r\n {\r\n \"network-types\": {},\r\n \"network-id\": \"otn-hc\",\r\n \"node\": [\r\n {\r\n \"node-id\": \"D1\",\r\n \"termination-point\": [\r\n {\r\n \"tp-id\": \"1-0-1\"\r\n },\r\n {\r\n \"tp-id\": \"1-2-1\"\r\n },\r\n {\r\n \"tp-id\": \"1-3-1\"\r\n }\r\n ]\r\n },\r\n {\r\n \"node-id\": \"D2\",\r\n \"termination-point\": [\r\n {\r\n \"tp-id\": \"2-0-1\"\r\n },\r\n {\r\n \"tp-id\": \"2-1-1\"\r\n },\r\n {\r\n \"tp-id\": \"2-3-1\"\r\n }\r\n ]\r\n },\r\n {\r\n \"node-id\": \"D3\",\r\n \"termination-point\": [\r\n {\r\n \"tp-id\": \"3-1-1\"\r\n },\r\n {\r\n \"tp-id\": \"3-2-1\"\r\n }\r\n ]\r\n }\r\n ],\r\n \"ietf-network-topology:link\": [\r\n {\r\n \"link-id\": \"D1,1-2-1,D2,2-1-1\",\r\n \"source\": {\r\n \"source-node\": \"D1\",\r\n \"source-tp\": \"1-2-1\"\r\n },\r\n \"destination\": {\r\n \"dest-node\": \"D2\",\r\n \"dest-tp\": \"2-1-1\"\r\n }\r\n },\r\n {\r\n \"link-id\": \"D2,2-1-1,D1,1-2-1\",\r\n \"source\": {\r\n \"source-node\": \"D2\",\r\n \"source-tp\": \"2-1-1\"\r\n },\r\n \"destination\": {\r\n \"dest-node\": \"D1\",\r\n \"dest-tp\": \"1-2-1\"\r\n }\r\n }\r\n ]\r\n }\r\n ]\r\n }\r\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/cm-handle-1/data/ds/ncmp-datastore:passthrough-running?resourceIdentifier=NRCellDU/attributes/cellLocalId", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmp", + "v1", + "ch", + "cm-handle-1", + "data", + "ds", + "ncmp-datastore:passthrough-running" + ], + "query": [ + { + "key": "resourceIdentifier", + "value": "NRCellDU/attributes/cellLocalId" + } + ] + } + }, + "response": [] + }, + { + "name": "Patch resource data", + "request": { + "method": "PATCH", + "header": [], + "body": { + "mode": "raw", + "raw": "{\r\n \"ietf-restconf:yang-patch\": {\r\n \"patch-id\": \"patch-1\",\r\n \"edit\": [\r\n {\r\n \"edit-id\": \"edit1\",\r\n \"operation\": \"merge\",\r\n \"target\": \"/\",\r\n \"value\": {\r\n \"test:bookstore\": {\r\n \"bookstore-name\": \"Chapters\",\r\n \"categories\": [\r\n {\r\n \"code\": \"01\",\r\n \"name\": \"Science\",\r\n \"books\": [\r\n {\r\n \"authors\": [\r\n \"Author1\",\r\n \"Author2\"\r\n ]\r\n }\r\n ]\r\n },\r\n {\r\n \"code\": \"02\",\r\n \"name\": \"Arts\",\r\n \"books\": [\r\n {\r\n \"authors\": [\r\n \"Author3\"\r\n ]\r\n }\r\n ]\r\n }\r\n ]\r\n }\r\n }\r\n },\r\n {\r\n \"edit-id\": \"edit2\",\r\n \"operation\": \"merge\",\r\n \"target\": \"/\",\r\n \"value\": {\r\n \"test:bookstore\": {\r\n \"bookstore-name\": \"Novels\",\r\n \"categories\": [\r\n {\r\n \"code\": \"03\",\r\n \"name\": \"History\",\r\n \"books\": [\r\n {\r\n \"authors\": [\r\n \"Iain M. 
Banks\",\r\n \"Ursula K. Le Guin\"\r\n ]\r\n }\r\n ]\r\n },\r\n {\r\n \"code\": \"04\",\r\n \"name\": \"Fiction\",\r\n \"books\": [\r\n {\r\n \"authors\": [\r\n \"Philip Pullman\"\r\n ]\r\n }\r\n ]\r\n }\r\n ]\r\n }\r\n }\r\n }\r\n ]\r\n }\r\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/cm-handle-1/data/ds/ncmp-datastore:passthrough-running?resourceIdentifier=NRCellDU/attributes/cellLocalId", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmp", + "v1", + "ch", + "cm-handle-1", + "data", + "ds", + "ncmp-datastore:passthrough-running" + ], + "query": [ + { + "key": "resourceIdentifier", + "value": "NRCellDU/attributes/cellLocalId" + } + ] + } + }, + "response": [] + }, + { + "name": "Execute a batch data operation", + "request": { + "method": "POST", + "header": [ + { + "key": "Authorization", + "value": "Basic Y3BzdXNlcjpjcHNyMGNrcyE=", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n \"operations\": [\r\n {\r\n \"resourceIdentifier\": \"NRCellDU/attributes/cellLocalId\",\r\n \"targetIds\": [\r\n \"cm-handle-1\",\"cm-handle-2\"\r\n ],\r\n \"datastore\": \"ncmp-datastore:passthrough-operational\",\r\n \"options\": \"(fields=NRCellDU/attributes/cellLocalId)\",\r\n \"operationId\": \"12\",\r\n \"operation\": \"read\"\r\n }\r\n ]\r\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/data?topic=batch-read-topic", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmp", + "v1", + "data" + ], + "query": [ + { + "key": "topic", + "value": "batch-read-topic" + } + ] + } + }, + "response": [] + }, + { + "name": "Delete resource data", + "request": { + "method": "DELETE", + "header": [], + "url": { + "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/cm-handle-1/data/ds/ncmp-datastore:passthrough-running?resourceIdentifier=NRCellDU/attributes/cellLocalId", + "protocol": "http", + "host": [ + "{{CPS_HOST}}" + ], + "port": "{{CPS_PORT}}", + "path": [ + "ncmp", + "v1", + "ch", + "cm-handle-1", + "data", + "ds", + "ncmp-datastore:passthrough-running" + ], + "query": [ + { + "key": "resourceIdentifier", + "value": "NRCellDU/attributes/cellLocalId" + } + ] + } + }, + "response": [] + } + ], + "description": "CPS-NCMP endpoints as defined by Rest OpenApi Specification [here](https://docs.onap.org/projects/onap-cps/en/latest/design.html#cps-ncmp)\n\nDepends on the ncmp-dmi-stub for some endpoints returns static content" + } + ], + "description": "CPS-NCMP endpoints as defined by Rest OpenApi Specification [here](https://docs.onap.org/projects/onap-cps/en/latest/design.html#cps-ncmp)\n\nCPS-NCMP-Inventory endpoints as defined by Rest OpenApi Specification [here](https://docs.onap.org/projects/onap-cps/en/latest/design.html#cps-ncmp-inventory)" + }, + { + "name": "NCMP-DMI-Stub", + "item": [ + { + "name": "Execute a batch data operation (DMI)", + "request": { + "method": "POST", + "header": [], + "body": { + "mode": "raw", + "raw": "{ \"operations\":\n [\n {\n \"resourceIdentifier\": \"NRCellDU/attributes/cellLocalId\",\n \"datastore\": \"ncmp-datastore:passthrough-operational\",\n \"options\": \"some option\",\n \"operationId\": \"12\",\n \"cmHandles\": [\n {\n \"id\": \"cmHandle123\",\n \"cmHandleProperties\": {\n \"myProp\": \"some value\",\n \"otherProp\": \"other value\"\n }\n },\n {\n \"id\": \"cmHandle123\",\n 
\"cmHandleProperties\": {\n \"myProp\": \"some value\",\n \"otherProp\": \"other value\"\n }\n }\n ],\n \"operation\": \"read\"\n }\n ]\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{DMI_HOST}}:{{DMI_PORT}}/dmi/v1/data?topic=ncmp-async-m2m&requestId=4753fc1f-7de2-449a-b306-a6204b5370b33", + "protocol": "http", + "host": [ + "{{DMI_HOST}}" + ], + "port": "{{DMI_PORT}}", + "path": [ + "dmi", + "v1", + "data" + ], + "query": [ + { + "key": "topic", + "value": "ncmp-async-m2m" + }, + { + "key": "requestId", + "value": "4753fc1f-7de2-449a-b306-a6204b5370b33" + } + ] + } + }, + "response": [] + }, + { + "name": "Get module resources", + "request": { + "method": "POST", + "header": [], + "body": { + "mode": "raw", + "raw": "{}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{DMI_HOST}}:{{DMI_PORT}}/dmi/v1/ch/cm/moduleResources", + "protocol": "http", + "host": [ + "{{DMI_HOST}}" + ], + "port": "{{DMI_PORT}}", + "path": [ + "dmi", + "v1", + "ch", + "cm", + "moduleResources" + ] + } + }, + "response": [] + }, + { + "name": "Get all modules (DMI)", + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "method": "POST", + "header": [ + { + "key": "Accept", + "value": "application/json", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "http://{{DMI_HOST}}:{{DMI_PORT}}/dmi/v1/ch/cm-bookStore/modules", + "protocol": "http", + "host": [ + "{{DMI_HOST}}" + ], + "port": "{{DMI_PORT}}", + "path": [ + "dmi", + "v1", + "ch", + "cm-bookStore", + "modules" + ] + } + }, + "response": [] + } + ], + "description": "NCMP-DMI-Stub endpoints as defined by Rest OpenApi Specification [here](https://docs.onap.org/projects/onap-cps-ncmp-dmi-plugin/en/latest/design.html#offered-apis)\n\n**To utilise NCMP-DMI-Stub begin the docker containers with:**\n\n`docker-compose --profile dmi-stub --profile monitoring up -d`" + } + ] +}
\ No newline at end of file diff --git a/postman-collections/Data Operational Performance Test.postman_collection.json b/postman-collections/Data Operational Performance Test.postman_collection.json deleted file mode 100644 index 668c541d91..0000000000 --- a/postman-collections/Data Operational Performance Test.postman_collection.json +++ /dev/null @@ -1,339 +0,0 @@ -{ - "info": { - "_postman_id": "23a07cf3-5e20-48d5-8af3-8aa65c6a681f", - "name": "Data Operational Performance Test", - "description": "A collection to test performance of registering and getting cmHandles (20000 is the default)", - "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", - "_exporter_id": "17907116" - }, - "item": [ - { - "name": "Register CM Handles", - "event": [ - { - "listen": "prerequest", - "script": { - "exec": [ - "const uuid = require(\"uuid\");", - "", - "const TOTAL_CMHANDLES = Number(pm.collectionVariables.get(\"TOTAL_CMHANDLES\"));", - "", - "var cmHandleIds = [];", - "var createdCmHandles = []", - "", - "for (i = 0; i < TOTAL_CMHANDLES; i++) {", - " let cmHandleId = uuid.v4().replace(/-/g, \"\");", - " cmHandleIds.push(cmHandleId);", - " createdCmHandles.push({", - " \"cmHandle\": cmHandleId,", - " \"cmHandleProperties\": { \"neType\": \"RadioNode\" },", - " \"publicCmHandleProperties\": {", - " \"Book\": \"Harry Potter\",", - " \"Author\": \"JK Rowling\"", - " }", - " });", - "}", - "", - "pm.collectionVariables.set(\"createdCmHandleIds\", JSON.stringify(cmHandleIds));", - "pm.collectionVariables.set(\"createdCmHandles\", JSON.stringify(createdCmHandles));", - "pm.collectionVariables.set(\"targetIds\", cmHandleIds);" - ], - "type": "text/javascript" - } - }, - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"HTTP Status is 200\", function () {\r", - " pm.response.to.be.ok;\r", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\r\n \"dmiPlugin\": \"http://{{DMI_HOST}}:{{DMI_PORT}}\",\r\n \"createdCmHandles\": {{createdCmHandles}}\r\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmpInventory/v1/ch", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmpInventory", - "v1", - "ch" - ] - } - }, - "response": [] - }, - { - "name": "Get All Cm Handle IDs", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "// Convert the response body to a JSON object\r", - "var responseBodyAsJsonArray = JSON.parse(responseBody);\r", - "\r", - "var cmHandleInReadyState = [];\r", - "var cmHandleInNonReadyState = [];\r", - "\r", - "function countCmHandleStateInReady() {\r", - "for(var index = 0; index < responseBodyAsJsonArray.length; index++) {\r", - " if(typeof responseBodyAsJsonArray[index].state.cmHandleState !== 'undefined'\r", - " && responseBodyAsJsonArray[index].state.cmHandleState == 'READY') {\r", - " cmHandleInReadyState.push(responseBodyAsJsonArray[index].state.cmHandleState);\r", - " }else{\r", - " cmHandleInNonReadyState.push(responseBodyAsJsonArray[index].state.cmHandleState);\r", - " }\r", - "}\r", - "}\r", - "\r", - "countCmHandleStateInReady();\r", - "\r", - "pm.collectionVariables.set(\"numberOfCmHandlesInReadyState\", cmHandleInReadyState.length);\r", - "pm.collectionVariables.set(\"numberOfCmHandlesInNonReadyState\", cmHandleInNonReadyState.length);\r", - "\r", - "pm.test(\"HTTP Status is 200 with valid JSON response\", function () {\r", 
- " pm.response.to.be.ok;\r", - " pm.response.to.be.withBody;\r", - " pm.response.to.be.json;\r", - "});\r", - "\r", - "pm.test(\"20000 cm handles are in Ready state\", function () {\r", - " pm.expect(cmHandleInReadyState.length).to.eql(20000);\r", - "});\r", - "" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\r\n \"conditions\": [\r\n {\r\n \"name\": \"hasAllModules\"\r\n }\r\n ]\r\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/searches", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "searches" - ], - "query": [ - { - "key": "resourceIdentifier", - "value": "stores:bookstore", - "disabled": true - }, - { - "key": "options", - "value": "(fields=schemas/schema/location,depth=5)", - "disabled": true - } - ] - } - }, - "response": [] - }, - { - "name": "ReadResourceDataForCmHandles", - "event": [ - { - "listen": "prerequest", - "script": { - "exec": [ - "const cmHandleIds = pm.collectionVariables.get(\"targetIds\"); \r", - "\r", - "var dataOperationRequest = []\r", - "var targetIds = [];\r", - "\r", - "for (index = 0; index < cmHandleIds.length ; index++) {\r", - "\r", - " targetIds.push(cmHandleIds[index]);\r", - " var dataOperationTargetIds = [];\r", - " if (targetIds.length == 5){\r", - " dataOperationTargetIds = targetIds;\r", - " dataOperationRequest.push({\r", - " \"operation\": \"read\",\r", - " \"operationId\": \"operational-\"+index,\r", - " \"datastore\": \"ncmp-datastore:passthrough-running\",\r", - " \"resourceIdentifier\": \"parent/child\",\r", - " \"targetIds\": dataOperationTargetIds\r", - " });\r", - "\r", - " pm.collectionVariables.unset(\"dataOperationTargetIds\");\r", - " // pm.globals.unset(\"variable_key\");\r", - " // pm.environment.unset(\"variable_key\");\r", - " // pm.collectionVariables.set(\"variable_key\", \"variable_value\");\r", - " // pm.sendRequest(\"https://postman-echo.com/get\", function (err, response) {\r", - " // console.log(response.json());\r", - " // });\r", - " // for (index = 0; index < dataOperationTargetIds.length ; index++) {\r", - " // targetIds.pop(dataOperationTargetIds[index]);\r", - " // }\r", - " }\r", - "}\r", - "\r", - " pm.collectionVariables.set(\"dataOperationRequest\", JSON.stringify(dataOperationRequest));" - ], - "type": "text/javascript" - } - }, - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"HTTP Status is 200 with valid JSON response\", function () {\r", - " pm.response.to.be.ok;\r", - " pm.response.to.be.withBody;\r", - " pm.response.to.be.json;\r", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "POST", - "header": [ - { - "key": "topic", - "value": "client-topic", - "type": "default", - "disabled": true - } - ], - "body": { - "mode": "raw", - "raw": "{\r\n \"operations\":\r\n {{dataOperationRequest}}\r\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/data?topic=my-client-topic", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "data" - ], - "query": [ - { - "key": "topic", - "value": "my-client-topic" - } - ] - } - }, - "response": [] - } - ], - "auth": { - "type": "basic", - "basic": [ - { - "key": "password", - "value": "cpsr0cks!", - "type": "string" - }, - { - "key": "username", - "value": 
"cpsuser", - "type": "string" - } - ] - }, - "event": [ - { - "listen": "prerequest", - "script": { - "type": "text/javascript", - "exec": [ - "" - ] - } - }, - { - "listen": "test", - "script": { - "type": "text/javascript", - "exec": [ - "" - ] - } - } - ], - "variable": [ - { - "key": "createdCmHandleIds", - "value": "" - }, - { - "key": "createdCmHandles", - "value": "" - }, - { - "key": "TOTAL_CMHANDLES", - "value": "20000", - "type": "default" - }, - { - "key": "numberOfCmHandlesInReadyState", - "value": "", - "type": "default" - }, - { - "key": "numberOfCmHandlesInNonReadyState", - "value": "", - "type": "default" - }, - { - "key": "dataOperationRequest", - "value": "", - "type": "default" - }, - { - "key": "targetIds", - "value": "", - "type": "default" - } - ] -}
\ No newline at end of file diff --git a/postman-collections/NCMP Register CmHandle and Query.postman_collection.json b/postman-collections/NCMP Register CmHandle and Query.postman_collection.json deleted file mode 100644 index fb14287241..0000000000 --- a/postman-collections/NCMP Register CmHandle and Query.postman_collection.json +++ /dev/null @@ -1,649 +0,0 @@ -{ - "info": { - "_postman_id": "4e232e66-914a-4b5e-8f7f-f9ae0d476f54", - "name": "NCMP Register CmHandle and Query", - "description": "A collection to register a cmHandle then query. There are \"Valid Queries\" and \"Invalid Queries\" sub-folders containing examples. These are by no means exhaustive.", - "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", - "_exporter_id": "17907116" - }, - "item": [ - { - "name": "Valid Queries", - "item": [ - { - "name": "search cmHandles without conditions", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/searches", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "searches" - ] - } - }, - "response": [] - }, - { - "name": "search cmHandles with modules query", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"cmHandleQueryParameters\": [\n {\n \"conditionName\": \"hasAllModules\",\n \"conditionParameters\": [ {\"moduleName\": \"notifications\"} ]\n }\n ]\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/searches", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "searches" - ] - } - }, - "response": [] - }, - { - "name": "search cmHandles with public properties", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"cmHandleQueryParameters\": [\n {\n \"conditionName\": \"hasAllProperties\",\n \"conditionParameters\": [ {\"Color\": \"yellow\"} ]\n }\n ]\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/searches", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "searches" - ] - } - }, - "response": [] - }, - { - "name": "search cmHandles with multiple public properties", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"cmHandleQueryParameters\": [\n {\n \"conditionName\": \"hasAllProperties\",\n \"conditionParameters\": [ {\"Color\": \"yellow\"}, {\"Size\": \"small\"} ]\n }\n ]\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/searches", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "searches" - ] - } - }, - "response": [] - }, - { - "name": "search cmHandles with modules & pubprop query", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"cmHandleQueryParameters\": [\n {\n \"conditionName\": \"hasAllModules\",\n \"conditionParameters\": [ {\"moduleName\": \"notifications\"} ]\n },\n {\n \"conditionName\": \"hasAllProperties\",\n \"conditionParameters\": [ {\"Color\": \"yellow\"} ]\n }\n ]\n}", 
- "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/searches", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "searches" - ] - } - }, - "response": [] - } - ] - }, - { - "name": "Invalid Queries", - "item": [ - { - "name": "get cmHandles with wrong condition name", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"cmHandleQueryParameters\": [\n {\n \"conditionName\": \"wrong\",\n \"conditionParameters\": [ {\"Color\": \"yellow\"} ]\n }\n ]\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/searches", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "searches" - ] - } - }, - "response": [] - }, - { - "name": "get cmHandles with empty condition name", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"cmHandleQueryParameters\": [\n {\n \"conditionName\": \"\",\n \"conditionParameters\": [ {\"Color\": \"yellow\"} ]\n }\n ]\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/searches", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "searches" - ] - } - }, - "response": [] - }, - { - "name": "get cmHandles with null condition name", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"cmHandleQueryParameters\": [\n {\n \"conditionParameters\": [ {\"Color\": \"yellow\"} ]\n }\n ]\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/searches", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "searches" - ] - } - }, - "response": [] - }, - { - "name": "get cmHandles with empty key", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"cmHandleQueryParameters\": [\n {\n \"conditionName\": \"hasAllProperties\",\n \"conditionParameters\": [ {\"\": \"yellow\"} ]\n }\n ]\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/searches", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "searches" - ] - } - }, - "response": [] - }, - { - "name": "get cmHandles with empty condition", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"cmHandleQueryParameters\": [\n {\n \"conditionName\": \"hasAllProperties\",\n \"conditionParameters\": [ {} ]\n }\n ]\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/searches", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "searches" - ] - } - }, - "response": [] - }, - { - "name": "get cmHandles with empty condition list", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"cmHandleQueryParameters\": [\n {\n \"conditionName\": \"hasAllProperties\",\n \"conditionParameters\": [ ]\n }\n ]\n}", - "options": { - "raw": { - 
"language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/searches", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "searches" - ] - } - }, - "response": [] - }, - { - "name": "get cmHandles with null condition list", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"cmHandleQueryParameters\": [\n {\n \"conditionName\": \"hasAllProperties\"\n }\n ]\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/searches", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "searches" - ] - } - }, - "response": [] - }, - { - "name": "get cmHandles with empty module value", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"cmHandleQueryParameters\": [\n {\n \"conditionName\": \"hasAllModules\",\n \"conditionParameters\": [ {\"moduleName\": \"\"} ]\n }\n ]\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/searches", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "searches" - ] - } - }, - "response": [] - }, - { - "name": "get cmHandles with wrong module name", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"cmHandleQueryParameters\": [\n {\n \"conditionName\": \"hasAllModules\",\n \"conditionParameters\": [ {\"moduleName2\": \"mudule-1\"} ]\n }\n ]\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/searches", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "searches" - ] - } - }, - "response": [] - } - ] - }, - { - "name": "Register cmHandle for Node", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"dmiPlugin\": \"http://{{DMI_HOST}}:{{DMI_PORT}}\",\n \"createdCmHandles\": [\n {\n \"cmHandle\": \"CmHandle0\",\n \"cmHandleProperties\": {\n \"Books\": \"Sci-Fi Book\"\n },\n \"publicCmHandleProperties\": {\n \"Color\": \"yellow\",\n \"Size\": \"small\",\n \"Shape\": \"cube\"\n }\n }\n ]\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmpInventory/v1/ch", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmpInventory", - "v1", - "ch" - ] - } - }, - "response": [] - }, - { - "name": "get cmHandle", - "protocolProfileBehavior": { - "disableBodyPruning": true - }, - "request": { - "method": "GET", - "header": [], - "body": { - "mode": "raw", - "raw": "", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/CmHandle0", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "CmHandle0" - ] - } - }, - "response": [] - }, - { - "name": "get modules for cmHandle", - "protocolProfileBehavior": { - "disableBodyPruning": true - }, - "request": { - "method": "GET", - "header": [], - "body": { - "mode": "raw", - "raw": "", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": 
"http://{{CPS_HOST}}:{{CPS_PORT}}/ncmp/v1/ch/CmHandle0/modules", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmp", - "v1", - "ch", - "CmHandle0", - "modules" - ] - } - }, - "response": [] - }, - { - "name": "Update cmHandle for Node", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"dmiPlugin\": \"http://{{DMI_HOST}}:{{DMI_PORT}}\",\n \"updatedCmHandles\": [\n {\n \"cmHandle\": \"CmHandle0\",\n \"cmHandleProperties\": {\n \"Books\": \"Sci-Fi Book\",\n \"Some property\": \"Some value\"\n },\n \"publicCmHandleProperties\": {\n \"Color\": \"yellow\",\n \"Size\": \"small\",\n \"Shape\": \"cube\",\n \"Some public property\": \"Some public value\"\n }\n }\n ]\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmpInventory/v1/ch", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmpInventory", - "v1", - "ch" - ] - } - }, - "response": [] - }, - { - "name": "De-register cmHandle for Node", - "request": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"dmiPlugin\": \"http://{{DMI_HOST}}:{{DMI_PORT}}\",\n \"removedCmHandles\": [\n \"CmHandle0\"\n ]\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "http://{{CPS_HOST}}:{{CPS_PORT}}/ncmpInventory/v1/ch", - "protocol": "http", - "host": [ - "{{CPS_HOST}}" - ], - "port": "{{CPS_PORT}}", - "path": [ - "ncmpInventory", - "v1", - "ch" - ] - } - }, - "response": [] - } - ], - "auth": { - "type": "basic", - "basic": [ - { - "key": "password", - "value": "cpsr0cks!", - "type": "string" - }, - { - "key": "username", - "value": "cpsuser", - "type": "string" - } - ] - }, - "event": [ - { - "listen": "prerequest", - "script": { - "type": "text/javascript", - "exec": [ - "" - ] - } - }, - { - "listen": "test", - "script": { - "type": "text/javascript", - "exec": [ - "" - ] - } - } - ] -}
\ No newline at end of file diff --git a/postman-collections/README.md b/postman-collections/README.md index 004ef5b744..d4a1c63adf 100644 --- a/postman-collections/README.md +++ b/postman-collections/README.md @@ -27,8 +27,11 @@ To import the CPS collections and environment: 4. The collections and environment should now be imported 5. Set the current environment to "CPS Environment" (usually at the top right. Default is "No Environment") This will provide the necessary variables such as "CPS_HOST" and "CPS_PORT" to allow the requests to be run +## Using collections in Postman +A how-to guide is provided in the CPS collection. To access this guide click on the parent CPS folder icon at the top of the collection and follow the provided instructions. + ## Running the collections -To run the requests in the collections simply select the request and click send. "Create Schema Set" in "CPS-CORE" requires a file to send the request. Example files are provided: "bookstore.yang" and "bookstore-types.yang" (these files must be zipped before adding them to the request) +To run the requests in the collections in CPS-CORE simply select the request and click send. "Create Schema Set" in "CPS-CORE" requires a file to send the request. Example files are provided: "bookstore.yang" and "bookstore-types.yang" (these files must be zipped before adding them to the request) ## Notes When exporting postman collections tabs are used for spacing, so replacing all the tabs is necessary diff --git a/postman-collections/bookstore-types.yang b/postman-collections/bookstore-types.yang deleted file mode 100644 index 5ad7b6e130..0000000000 --- a/postman-collections/bookstore-types.yang +++ /dev/null @@ -1,18 +0,0 @@ -module bookstore-types { - yang-version 1.1; - namespace "org:onap:cps:types:sample"; - - prefix types; - - revision "2024-01-30" { - description - "Sample Types"; - } - - typedef year { - type uint16 { - range "1000..9999"; - } - } - -}
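The README change above points at a recurring gotcha: the "Create a schema set" request expects a single zipped archive rather than loose YANG files. A short sketch of producing and using such an archive, assuming the bookstore.yang and bookstore-types.yang sources are available locally (their content now ships inside the checked-in bookstore.zip):

# Bundle the YANG sources into the archive the collection expects
zip bookstore.zip bookstore.yang bookstore-types.yang

# Attach it as the 'file' form field of the schema-set upload
curl -X POST -F "file=@bookstore.zip" \
  "http://$CPS_HOST:$CPS_PORT/cps/api/v2/dataspaces/my-store/schema-sets?schema-set-name=stores-module"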
\ No newline at end of file diff --git a/postman-collections/bookstore.yang b/postman-collections/bookstore.yang deleted file mode 100644 index 0d093ea36c..0000000000 --- a/postman-collections/bookstore.yang +++ /dev/null @@ -1,127 +0,0 @@ -module stores { - yang-version 1.1; - namespace "org:onap:cps:sample"; - - prefix book-store; - - import bookstore-types { - prefix "types"; - revision-date 2024-01-30; - } - - revision "2024-02-08" { - description - "Order of book authors is preserved"; - } - - revision "2024-01-30" { - description - "Extracted bookstore types"; - } - - revision "2020-09-15" { - description - "Sample Model"; - } - - list bookstore-address { - key "bookstore-name"; - leaf bookstore-name { - type string; - description - "Name of bookstore. Example: My Bookstore"; - } - leaf address { - type string; - description - "Address of store"; - } - leaf postal-code { - type string; - description - "Postal code of store"; - } - } - - container bookstore { - - leaf bookstore-name { - type string; - } - - container webinfo { - leaf domain-name { - type string; - } - leaf contact-email { - type string; - } - } - - container support-info { - leaf support-office { - type string; - } - container contact-emails { - leaf email { - type string; - } - } - } - - container container-without-leaves { } - - container premises { - list addresses { - key "house-number street"; - - leaf house-number { - type uint16; - } - leaf street { - type string; - } - leaf town { - type string; - } - leaf county { - type string; - } - } - } - - list categories { - - key "code"; - - leaf code { - type string; - } - - leaf name { - type string; - } - - list books { - key title; - - leaf title { - type string; - } - leaf lang { - type string; - } - leaf-list authors { - ordered-by user; - type string; - } - leaf-list editions { - type types:year; - } - leaf price { - type uint64; - } - } - } - } -} diff --git a/postman-collections/bookstore.zip b/postman-collections/bookstore.zip Binary files differnew file mode 100644 index 0000000000..2d5e15b6ea --- /dev/null +++ b/postman-collections/bookstore.zip diff --git a/releases/3.6.0-container.yaml b/releases/3.6.0-container.yaml new file mode 100644 index 0000000000..f97a5184fc --- /dev/null +++ b/releases/3.6.0-container.yaml @@ -0,0 +1,8 @@ +distribution_type: container +container_release_tag: 3.6.0 +project: cps +log_dir: cps-maven-docker-stage-master/949/ +ref: 851ad00169168fab91d81f1fd23d7f84f7a1005d +containers: + - name: 'cps-and-ncmp' + version: '3.6.0-20250129T162415Z' diff --git a/releases/3.6.0.yaml b/releases/3.6.0.yaml new file mode 100644 index 0000000000..7476bd47a4 --- /dev/null +++ b/releases/3.6.0.yaml @@ -0,0 +1,4 @@ +distribution_type: maven +log_dir: cps-maven-stage-master/957/ +project: cps +version: 3.6.0 diff --git a/releases/3.6.1-container.yaml b/releases/3.6.1-container.yaml new file mode 100644 index 0000000000..6525c046e6 --- /dev/null +++ b/releases/3.6.1-container.yaml @@ -0,0 +1,8 @@ +distribution_type: container +container_release_tag: 3.6.1 +project: cps +log_dir: cps-maven-docker-stage-master/950/ +ref: 2ba07ae6a944621fc1a4b86b46aaf12338eb6d69 +containers: + - name: 'cps-and-ncmp' + version: '3.6.1-20250303T143305Z' diff --git a/releases/3.6.1.yaml b/releases/3.6.1.yaml new file mode 100644 index 0000000000..1f52cdd375 --- /dev/null +++ b/releases/3.6.1.yaml @@ -0,0 +1,4 @@ +distribution_type: maven +log_dir: cps-maven-stage-master/958/ +project: cps +version: 3.6.1 diff --git a/releases/3.6.2-container.yaml 
diff --git a/releases/3.6.2-container.yaml b/releases/3.6.2-container.yaml
new file mode 100644
index 0000000000..f23cb94738
--- /dev/null
+++ b/releases/3.6.2-container.yaml
@@ -0,0 +1,8 @@
+distribution_type: container
+container_release_tag: 3.6.2
+project: cps
+log_dir: cps-maven-docker-stage-master/951/
+ref: c6bee6fce3ee8f5face6208ff40d42cb537af4f6
+containers:
+    - name: 'cps-and-ncmp'
+      version: '3.6.2-20250416T111958Z'
diff --git a/releases/3.6.2.yaml b/releases/3.6.2.yaml
new file mode 100644
index 0000000000..87d7fb4328
--- /dev/null
+++ b/releases/3.6.2.yaml
@@ -0,0 +1,4 @@
+distribution_type: maven
+log_dir: cps-maven-stage-master/959/
+project: cps
+version: 3.6.2
diff --git a/spotbugs/pom.xml b/spotbugs/pom.xml
index 8c685e95bc..b579db7d8d 100644
--- a/spotbugs/pom.xml
+++ b/spotbugs/pom.xml
@@ -25,7 +25,7 @@
     <modelVersion>4.0.0</modelVersion>
     <groupId>org.onap.cps</groupId>
     <artifactId>spotbugs</artifactId>
-    <version>3.6.0-SNAPSHOT</version>
+    <version>3.6.3-SNAPSHOT</version>

     <properties>
         <onap.nexus.url>https://nexus.onap.org</onap.nexus.url>
diff --git a/test-tools/generate-metrics-report.sh b/test-tools/generate-metrics-report.sh
index 7d94e5b49f..4d99adfdff 100755
--- a/test-tools/generate-metrics-report.sh
+++ b/test-tools/generate-metrics-report.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 #
-# Copyright 2023 Nordix Foundation.
+# Copyright 2023-2025 Nordix Foundation.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -75,13 +75,13 @@ function generate_report() {
     grep --invert-match "^#" $TEMP_DIR/metrics-raw.txt | sort | sed 's/,[}]/}\t/' >$TEMP_DIR/metrics-all.txt

     # Extract useful metrics.
-    grep -E "^cps_|^spring_data_" $TEMP_DIR/metrics-all.txt >$TEMP_DIR/metrics-cps.txt
+    grep -E "^cps_|^spring_data_|^http_server_|^http_client_|^tasks_scheduled_execution_|^spring_kafka_template_|^spring_kafka_listener_" $TEMP_DIR/metrics-all.txt >$TEMP_DIR/metrics-cps.txt

     # Extract into columns.
-    grep "_count" $TEMP_DIR/metrics-cps.txt | sed 's/_count//' | cut -f 1 >$TEMP_DIR/column1.txt
-    grep "_count" $TEMP_DIR/metrics-cps.txt | cut -f 2 >$TEMP_DIR/column2.txt
-    grep "_sum" $TEMP_DIR/metrics-cps.txt | cut -f 2 >$TEMP_DIR/column3.txt
-    grep "_max" $TEMP_DIR/metrics-cps.txt | cut -f 2 >$TEMP_DIR/column4.txt
+    grep "_count" $TEMP_DIR/metrics-cps.txt | sed 's/_count//' | cut -d ' ' -f 1 >$TEMP_DIR/column1.txt
+    grep "_count" $TEMP_DIR/metrics-cps.txt | cut -d ' ' -f 2 >$TEMP_DIR/column2.txt
+    grep "_sum" $TEMP_DIR/metrics-cps.txt | cut -d ' ' -f 2 >$TEMP_DIR/column3.txt
+    grep "_max" $TEMP_DIR/metrics-cps.txt | cut -d ' ' -f 2 >$TEMP_DIR/column4.txt

     # Combine columns into report.
     paste $TEMP_DIR/column{1,2,3,4}.txt >$TEMP_DIR/report.txt
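The switch above from the default tab delimiter to cut -d ' ' suggests that, in the massaged metrics lines, the metric name/label part and its value are separated by a single space (and that label values contain no spaces). A minimal sketch of that extraction, using a made-up metric line:

    # Made-up Prometheus-style line: name{labels}, a space, then the value (assumed layout).
    line='cps_example_timer_seconds_count{method="getDataNodes"} 42'
    echo "$line" | cut -d ' ' -f 1    # prints the name/label column
    echo "$line" | cut -d ' ' -f 2    # prints the value column: 42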
diff --git a/test-tools/perf-test-ncmp-passthrough-read.sh b/test-tools/perf-test-ncmp-passthrough-read.sh
index 21b031ce95..3460dddcaa 100755
--- a/test-tools/perf-test-ncmp-passthrough-read.sh
+++ b/test-tools/perf-test-ncmp-passthrough-read.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 #
-# Copyright 2024 Nordix Foundation.
+# Copyright 2024-2025 OpenInfra Foundation Europe. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -28,8 +28,6 @@ set -o pipefail # Use last non-zero exit code in a pipeline
 ############################
 CPS_HOST=localhost
 CPS_PORT=8883
-CPS_USERNAME=cpsuser
-CPS_PASSWORD=cpsr0cks!
 PARALLEL_REQUESTS=12
 WARMUP_REQUESTS=600
 MEASUREMENT_REQUESTS=240
@@ -40,7 +38,7 @@ DMI_DATA_DELAY=$(grep 'DATA_FOR_CM_HANDLE_DELAY_MS:' "$SCRIPT_DIR"/../docker-com

 function cmHandleExists() {
     local cmHandleId=$1
-    curl --silent --fail --output /dev/null --user "$CPS_USERNAME:$CPS_PASSWORD" --basic "http://$CPS_HOST:$CPS_PORT/ncmp/v1/ch/$cmHandleId"
+    curl --silent --fail --output /dev/null "http://$CPS_HOST:$CPS_PORT/ncmp/v1/ch/$cmHandleId"
 }

 function failIfCmHandlesNotFound() {
@@ -66,7 +64,6 @@ function measureAverageResponseTimeInMillis() {
     curl --show-error --fail --fail-early \
         --output /dev/null --write-out '%{time_total}\n' \
         --parallel --parallel-max $PARALLEL_REQUESTS --parallel-immediate \
-        --user "$CPS_USERNAME:$CPS_PASSWORD" --basic \
         --request POST "http://$CPS_HOST:$CPS_PORT/ncmp/v1/ch/ch-[1-$totalRequests]/data/ds/ncmp-datastore%3Apassthrough-operational?resourceIdentifier=x&include-descendants=true" |
         awk '{ sum += $1; n++ } END { if (n > 0) print (sum / n) * 1000; }'
 }
diff --git a/version.properties b/version.properties
index 81372a7e58..60e2178e78 100644
--- a/version.properties
+++ b/version.properties
@@ -22,7 +22,7 @@
 major=3
 minor=6
-patch=0
+patch=3

 base_version=${major}.${minor}.${patch}
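With basic authentication removed from perf-test-ncmp-passthrough-read.sh above, its requests need no credentials. A minimal sketch of the cm-handle existence check, assuming the script's default host and port (localhost:8883) and an illustrative cm-handle id:

    # Succeeds only if cm-handle 'ch-1' is known to NCMP; no --user/--basic options are required.
    curl --silent --fail --output /dev/null "http://localhost:8883/ncmp/v1/ch/ch-1" \
        && echo "cm-handle ch-1 exists"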