-rw-r--r--  checkstyle/pom.xml  2
-rwxr-xr-x  cps-application/pom.xml  2
-rw-r--r--  cps-application/src/main/resources/application.yml  4
-rw-r--r--  cps-bom/pom.xml  2
-rwxr-xr-x  cps-dependencies/pom.xml  9
-rw-r--r--  cps-events/pom.xml  2
-rw-r--r--  cps-ncmp-events/pom.xml  2
-rw-r--r--  cps-ncmp-rest-stub/pom.xml  2
-rw-r--r--  cps-ncmp-rest/pom.xml  2
-rw-r--r--  cps-ncmp-service/pom.xml  22
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/kafka/KafkaTemplateConfig.java  127
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/events/EventsPublisher.java  13
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy  10
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncBatchEventConsumerSpec.groovy  9
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/kafka/KafkaTemplateConfigSpec.groovy  62
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/avc/AvcEventConsumerSpec.groovy  9
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy  9
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy  29
-rw-r--r--  cps-ncmp-service/src/test/resources/application.yml  8
-rwxr-xr-x  cps-parent/pom.xml  2
-rw-r--r--  cps-path-parser/pom.xml  2
-rwxr-xr-x  cps-rest/pom.xml  2
-rw-r--r--  cps-ri/pom.xml  16
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java  97
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java  54
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepository.java (renamed from cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentExtract.java)  21
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepositoryImpl.java  127
-rwxr-xr-x  cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java  57
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy  56
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy  109
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy  17
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistenceSpecBase.groovy  74
-rwxr-xr-x  cps-ri/src/test/java/org/onap/cps/DatabaseTestContainer.java  72
-rw-r--r--  cps-ri/src/test/java/org/onap/cps/TestApplication.java  33
-rw-r--r--  cps-ri/src/test/resources/application.yml  38
-rw-r--r--  cps-ri/src/test/resources/data/anchor.sql  39
-rw-r--r--  cps-ri/src/test/resources/data/anchors-schemaset-modules.sql  49
-rw-r--r--  cps-ri/src/test/resources/data/clear-all.sql  28
-rwxr-xr-x  cps-ri/src/test/resources/data/fragment.sql  116
-rw-r--r--  cps-ri/src/test/resources/data/perf-test.sql  28
-rw-r--r--  cps-ri/src/test/resources/data/schemaset.sql  57
-rw-r--r--  cps-ri/src/test/resources/hibernate.cfg.xml  16
-rw-r--r--  cps-service/pom.xml  2
-rw-r--r--  docs/api/swagger/ncmp/openapi.yaml  93
-rwxr-xr-x  docs/release-notes.rst  35
-rw-r--r--  integration-test/pom.xml  2
-rw-r--r--  integration-test/src/test/groovy/org/onap/cps/integration/base/CpsIntegrationSpecBase.groovy  12
-rw-r--r--  integration-test/src/test/groovy/org/onap/cps/integration/base/TestConfig.groovy  15
-rw-r--r--  integration-test/src/test/groovy/org/onap/cps/integration/functional/SessionManagerIntegrationSpec.groovy (renamed from cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerIntegrationSpec.groovy)  38
-rw-r--r--  jacoco-report/pom.xml  2
-rw-r--r--  pom.xml  2
-rw-r--r--  releases/3.3.2-container.yaml  8
-rw-r--r--  releases/3.3.2.yaml  4
-rw-r--r--  spotbugs/pom.xml  2
-rwxr-xr-x  version.properties  2
55 files changed, 661 insertions, 991 deletions
diff --git a/checkstyle/pom.xml b/checkstyle/pom.xml
index cc07fce027..2129244dfa 100644
--- a/checkstyle/pom.xml
+++ b/checkstyle/pom.xml
@@ -26,7 +26,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.onap.cps</groupId>
<artifactId>checkstyle</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<profiles>
<profile>
diff --git a/cps-application/pom.xml b/cps-application/pom.xml
index f6a56ce222..2e7b12dfb9 100755
--- a/cps-application/pom.xml
+++ b/cps-application/pom.xml
@@ -28,7 +28,7 @@
<parent>
<groupId>org.onap.cps</groupId>
<artifactId>cps-parent</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<relativePath>../cps-parent/pom.xml</relativePath>
</parent>
diff --git a/cps-application/src/main/resources/application.yml b/cps-application/src/main/resources/application.yml
index 802da9e87c..ed71339f93 100644
--- a/cps-application/src/main/resources/application.yml
+++ b/cps-application/src/main/resources/application.yml
@@ -75,7 +75,7 @@ spring:
security:
protocol: PLAINTEXT
producer:
- value-serializer: org.springframework.kafka.support.serializer.JsonSerializer
+ value-serializer: io.cloudevents.kafka.CloudEventSerializer
client-id: cps-core
consumer:
group-id: ${NCMP_CONSUMER_GROUP_ID:ncmp-group}
@@ -83,7 +83,7 @@ spring:
value-deserializer: org.springframework.kafka.support.serializer.ErrorHandlingDeserializer
properties:
spring.deserializer.key.delegate.class: org.apache.kafka.common.serialization.StringDeserializer
- spring.deserializer.value.delegate.class: org.springframework.kafka.support.serializer.JsonDeserializer
+ spring.deserializer.value.delegate.class: io.cloudevents.kafka.CloudEventDeserializer
spring.json.use.type.headers: false
jackson:
diff --git a/cps-bom/pom.xml b/cps-bom/pom.xml
index 4c99fcb86d..a87b34ba51 100644
--- a/cps-bom/pom.xml
+++ b/cps-bom/pom.xml
@@ -25,7 +25,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.onap.cps</groupId>
<artifactId>cps-bom</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<packaging>pom</packaging>
<description>This artifact contains dependencyManagement declarations of all published CPS components.</description>
diff --git a/cps-dependencies/pom.xml b/cps-dependencies/pom.xml
index e06bbd7a1f..8003d30a4a 100755
--- a/cps-dependencies/pom.xml
+++ b/cps-dependencies/pom.xml
@@ -27,7 +27,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.onap.cps</groupId>
<artifactId>cps-dependencies</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<packaging>pom</packaging>
<name>${project.groupId}:${project.artifactId}</name>
@@ -147,6 +147,13 @@
<version>3.1</version>
</dependency>
<dependency>
+ <groupId>io.cloudevents</groupId>
+ <artifactId>cloudevents-bom</artifactId>
+ <version>2.5.0</version>
+ <type>pom</type>
+ <scope>import</scope>
+ </dependency>
+ <dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.11</version>
diff --git a/cps-events/pom.xml b/cps-events/pom.xml
index b8ddb20095..66c4fe53db 100644
--- a/cps-events/pom.xml
+++ b/cps-events/pom.xml
@@ -24,7 +24,7 @@
<parent>
<groupId>org.onap.cps</groupId>
<artifactId>cps-parent</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<relativePath>../cps-parent/pom.xml</relativePath>
</parent>
diff --git a/cps-ncmp-events/pom.xml b/cps-ncmp-events/pom.xml
index 52ca77e936..1dfb7eba83 100644
--- a/cps-ncmp-events/pom.xml
+++ b/cps-ncmp-events/pom.xml
@@ -23,7 +23,7 @@
<parent>
<groupId>org.onap.cps</groupId>
<artifactId>cps-parent</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<relativePath>../cps-parent/pom.xml</relativePath>
</parent>
diff --git a/cps-ncmp-rest-stub/pom.xml b/cps-ncmp-rest-stub/pom.xml
index f434863c7e..0a6684fc0b 100644
--- a/cps-ncmp-rest-stub/pom.xml
+++ b/cps-ncmp-rest-stub/pom.xml
@@ -26,7 +26,7 @@
<parent>
<groupId>org.onap.cps</groupId>
<artifactId>cps-parent</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<relativePath>../cps-parent/pom.xml</relativePath>
</parent>
diff --git a/cps-ncmp-rest/pom.xml b/cps-ncmp-rest/pom.xml
index b3ac659782..8c84546b23 100644
--- a/cps-ncmp-rest/pom.xml
+++ b/cps-ncmp-rest/pom.xml
@@ -27,7 +27,7 @@
<parent>
<groupId>org.onap.cps</groupId>
<artifactId>cps-parent</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<relativePath>../cps-parent/pom.xml</relativePath>
</parent>
diff --git a/cps-ncmp-service/pom.xml b/cps-ncmp-service/pom.xml
index b87fe64366..19ef988d30 100644
--- a/cps-ncmp-service/pom.xml
+++ b/cps-ncmp-service/pom.xml
@@ -27,7 +27,7 @@
<parent>
<groupId>org.onap.cps</groupId>
<artifactId>cps-parent</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<relativePath>../cps-parent/pom.xml</relativePath>
</parent>
@@ -42,6 +42,18 @@
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
+ <groupId>io.cloudevents</groupId>
+ <artifactId>cloudevents-json-jackson</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>io.cloudevents</groupId>
+ <artifactId>cloudevents-kafka</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>io.cloudevents</groupId>
+ <artifactId>cloudevents-spring</artifactId>
+ </dependency>
+ <dependency>
<groupId>${project.groupId}</groupId>
<artifactId>cps-service</artifactId>
</dependency>
@@ -54,8 +66,8 @@
<artifactId>cps-path-parser</artifactId>
</dependency>
<dependency>
- <groupId>org.springframework</groupId>
- <artifactId>spring-web</artifactId>
+ <groupId>com.hazelcast</groupId>
+ <artifactId>hazelcast-spring</artifactId>
</dependency>
<dependency>
<groupId>org.mapstruct</groupId>
@@ -66,8 +78,8 @@
<artifactId>mapstruct-processor</artifactId>
</dependency>
<dependency>
- <groupId>com.hazelcast</groupId>
- <artifactId>hazelcast-spring</artifactId>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-web</artifactId>
</dependency>
<!-- T E S T - D E P E N D E N C I E S -->
<dependency>
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/kafka/KafkaTemplateConfig.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/kafka/KafkaTemplateConfig.java
new file mode 100644
index 0000000000..b76f86ebeb
--- /dev/null
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/kafka/KafkaTemplateConfig.java
@@ -0,0 +1,127 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.ncmp.api.impl.config.kafka;
+
+import io.cloudevents.CloudEvent;
+import java.util.Map;
+import lombok.RequiredArgsConstructor;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Primary;
+import org.springframework.kafka.annotation.EnableKafka;
+import org.springframework.kafka.core.ConsumerFactory;
+import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.core.DefaultKafkaProducerFactory;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.core.ProducerFactory;
+import org.springframework.kafka.support.serializer.JsonDeserializer;
+import org.springframework.kafka.support.serializer.JsonSerializer;
+
+/**
+ * Kafka configuration for legacy and cloud events.
+ *
+ * @param <T> the type of legacy event to be published over the wire.
+ */
+@Configuration
+@EnableKafka
+@RequiredArgsConstructor
+public class KafkaTemplateConfig<T> {
+
+ private final KafkaProperties kafkaProperties;
+
+ /**
+ * Sets the strategy for creating legacy Kafka producer instances from the kafka properties defined in
+ * application.yml, replacing the value-serializer with JsonSerializer.
+ *
+ * @return legacy event producer instance.
+ */
+ @Bean
+ public ProducerFactory<String, T> legacyEventProducerFactory() {
+ final Map<String, Object> producerConfigProperties = kafkaProperties.buildProducerProperties();
+ producerConfigProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
+ return new DefaultKafkaProducerFactory<>(producerConfigProperties);
+ }
+
+ /**
+ * Creates the consumer factory for legacy events from the kafka properties defined in
+ * application.yml, replacing the delegate value-deserializer with JsonDeserializer.
+ *
+ * @return an instance of legacy consumer factory.
+ */
+ @Bean
+ public ConsumerFactory<String, T> legacyEventConsumerFactory() {
+ final Map<String, Object> consumerConfigProperties = kafkaProperties.buildConsumerProperties();
+ consumerConfigProperties.put("spring.deserializer.value.delegate.class", JsonDeserializer.class);
+ return new DefaultKafkaConsumerFactory<>(consumerConfigProperties);
+ }
+
+ /**
+ * Sets the strategy for creating cloud event Kafka producer instances from the kafka properties
+ * defined in application.yml, which already configure CloudEventSerializer.
+ *
+ * @return cloud event producer instance.
+ */
+ @Bean
+ public ProducerFactory<String, CloudEvent> cloudEventProducerFactory() {
+ final Map<String, Object> producerConfigProperties = kafkaProperties.buildProducerProperties();
+ return new DefaultKafkaProducerFactory<>(producerConfigProperties);
+ }
+
+ /**
+ * Creates the consumer factory for cloud events from the kafka properties defined in
+ * application.yml, which already configure CloudEventDeserializer as the delegate value-deserializer.
+ *
+ * @return an instance of cloud consumer factory.
+ */
+ @Bean
+ public ConsumerFactory<String, CloudEvent> cloudEventConsumerFactory() {
+ final Map<String, Object> consumerConfigProperties = kafkaProperties.buildConsumerProperties();
+ return new DefaultKafkaConsumerFactory<>(consumerConfigProperties);
+ }
+
+ /**
+ * A legacy event Kafka template for executing high-level operations, backed by the legacy producer factory.
+ *
+ * @return an instance of legacy Kafka template.
+ */
+ @Bean
+ @Primary
+ public KafkaTemplate<String, T> legacyEventKafkaTemplate() {
+ final KafkaTemplate<String, T> kafkaTemplate = new KafkaTemplate<>(legacyEventProducerFactory());
+ kafkaTemplate.setConsumerFactory(legacyEventConsumerFactory());
+ return kafkaTemplate;
+ }
+
+ /**
+ * A cloud event Kafka template for executing high-level operations, backed by the cloud event producer factory.
+ *
+ * @return an instance of cloud Kafka template.
+ */
+ @Bean
+ public KafkaTemplate<String, CloudEvent> cloudEventKafkaTemplate() {
+ final KafkaTemplate<String, CloudEvent> kafkaTemplate = new KafkaTemplate<>(cloudEventProducerFactory());
+ kafkaTemplate.setConsumerFactory(cloudEventConsumerFactory());
+ return kafkaTemplate;
+ }
+
+}
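
With this configuration in place, services inject the two templates by bean type. A minimal usage sketch, assuming a hypothetical LegacyDemoEvent payload and a 'demo-topic' topic (both illustrative, not part of this commit):

    import io.cloudevents.CloudEvent;
    import io.cloudevents.core.builder.CloudEventBuilder;
    import java.net.URI;
    import org.springframework.kafka.core.KafkaTemplate;

    class KafkaTemplateUsageSketch {

        record LegacyDemoEvent(String id) { } // hypothetical legacy payload, JSON-serialized

        // Both templates would normally be constructor-injected Spring beans;
        // the legacy one is resolved as the @Primary KafkaTemplate.
        void send(final KafkaTemplate<String, LegacyDemoEvent> legacyEventKafkaTemplate,
                  final KafkaTemplate<String, CloudEvent> cloudEventKafkaTemplate) {
            legacyEventKafkaTemplate.send("demo-topic", "demo-key", new LegacyDemoEvent("1"));
            final CloudEvent cloudEvent = CloudEventBuilder.v1()
                    .withId("1")
                    .withSource(URI.create("urn:cps:demo"))
                    .withType("demo.event")
                    .build();
            cloudEventKafkaTemplate.send("demo-topic", "demo-key", cloudEvent);
        }
    }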
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/events/EventsPublisher.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/events/EventsPublisher.java
index d92316dc58..7b28b4cd5f 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/events/EventsPublisher.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/events/EventsPublisher.java
@@ -20,6 +20,7 @@
package org.onap.cps.ncmp.api.impl.events;
+import io.cloudevents.CloudEvent;
import java.util.Map;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@@ -42,7 +43,12 @@ import org.springframework.util.concurrent.ListenableFutureCallback;
@RequiredArgsConstructor
public class EventsPublisher<T> {
- private final KafkaTemplate<String, T> eventKafkaTemplate;
+ /** Once all CPS events have been made CloudEvents-compliant, legacyKafkaEventTemplate and its
+ Java configuration class KafkaTemplateConfig will be removed. */
+ @Deprecated(forRemoval = true)
+ private final KafkaTemplate<String, T> legacyKafkaEventTemplate;
+
+ private final KafkaTemplate<String, CloudEvent> cloudEventKafkaTemplate;
/**
* Generic Event publisher.
@@ -54,7 +60,8 @@ public class EventsPublisher<T> {
*/
@Deprecated
public void publishEvent(final String topicName, final String eventKey, final T event) {
- final ListenableFuture<SendResult<String, T>> eventFuture = eventKafkaTemplate.send(topicName, eventKey, event);
+ final ListenableFuture<SendResult<String, T>> eventFuture
+ = legacyKafkaEventTemplate.send(topicName, eventKey, event);
eventFuture.addCallback(handleCallback(topicName));
}
@@ -70,7 +77,7 @@ public class EventsPublisher<T> {
final ProducerRecord<String, T> producerRecord =
new ProducerRecord<>(topicName, null, eventKey, event, eventHeaders);
- final ListenableFuture<SendResult<String, T>> eventFuture = eventKafkaTemplate.send(producerRecord);
+ final ListenableFuture<SendResult<String, T>> eventFuture = legacyKafkaEventTemplate.send(producerRecord);
eventFuture.addCallback(handleCallback(topicName));
}
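
The hunks above keep both legacy send paths on legacyKafkaEventTemplate; a matching publish method for the new cloudEventKafkaTemplate would sit alongside them, roughly like this sketch (not part of this commit, and assuming handleCallback is generalised over the value type):

    public void publishCloudEvent(final String topicName, final String eventKey, final CloudEvent event) {
        // Same fire-and-forget pattern as the legacy paths, on the CloudEvent template.
        final ListenableFuture<SendResult<String, CloudEvent>> eventFuture =
                cloudEventKafkaTemplate.send(topicName, eventKey, event);
        eventFuture.addCallback(handleCallback(topicName));
    }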
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy
index bcf75a29b2..fe7b3f11cb 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy
@@ -22,6 +22,7 @@ package org.onap.cps.ncmp.api.impl.async
import com.fasterxml.jackson.databind.ObjectMapper
import org.apache.kafka.clients.consumer.KafkaConsumer
+import org.apache.kafka.common.serialization.StringDeserializer
import org.mapstruct.factory.Mappers
import org.onap.cps.ncmp.api.impl.events.EventsPublisher
import org.onap.cps.ncmp.api.kafka.MessagingBaseSpec
@@ -34,7 +35,6 @@ import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.test.annotation.DirtiesContext
import org.testcontainers.spock.Testcontainers
-
import java.time.Duration
@SpringBootTest(classes = [EventsPublisher, NcmpAsyncRequestResponseEventConsumer, ObjectMapper, JsonObjectMapper])
@@ -44,7 +44,7 @@ class NcmpAsyncRequestResponseEventProducerIntegrationSpec extends MessagingBase
@SpringBean
EventsPublisher cpsAsyncRequestResponseEventPublisher =
- new EventsPublisher<NcmpAsyncRequestResponseEvent>(kafkaTemplate);
+ new EventsPublisher<NcmpAsyncRequestResponseEvent>(legacyEventKafkaTemplate, cloudEventKafkaTemplate);
@SpringBean
@@ -59,18 +59,18 @@ class NcmpAsyncRequestResponseEventProducerIntegrationSpec extends MessagingBase
@Autowired
JsonObjectMapper jsonObjectMapper
- def kafkaConsumer = new KafkaConsumer<>(consumerConfigProperties('test'))
+ def legacyEventKafkaConsumer = new KafkaConsumer<>(eventConsumerConfigProperties('test', StringDeserializer))
def 'Consume and forward valid message'() {
given: 'consumer has a subscription'
- kafkaConsumer.subscribe(['test-topic'] as List<String>)
+ legacyEventKafkaConsumer.subscribe(['test-topic'] as List<String>)
and: 'an event is sent'
def jsonData = TestUtils.getResourceFileContent('dmiAsyncRequestResponseEvent.json')
def testEventSent = jsonObjectMapper.convertJsonString(jsonData, DmiAsyncRequestResponseEvent.class)
when: 'the event is consumed'
ncmpAsyncRequestResponseEventConsumer.consumeAndForward(testEventSent)
and: 'the topic is polled'
- def records = kafkaConsumer.poll(Duration.ofMillis(1500))
+ def records = legacyEventKafkaConsumer.poll(Duration.ofMillis(1500))
then: 'poll returns one record'
assert records.size() == 1
and: 'consumed forwarded event id is the same as sent event id'
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncBatchEventConsumerSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncBatchEventConsumerSpec.groovy
index 28464bb91c..02071cd8cf 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncBatchEventConsumerSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncBatchEventConsumerSpec.groovy
@@ -25,6 +25,7 @@ import org.apache.commons.lang3.SerializationUtils
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.common.header.internals.RecordHeader
+import org.apache.kafka.common.serialization.StringDeserializer
import org.onap.cps.ncmp.api.impl.events.EventsPublisher
import org.onap.cps.ncmp.api.kafka.MessagingBaseSpec
import org.onap.cps.ncmp.events.async.BatchDataResponseEventV1
@@ -46,7 +47,7 @@ import java.time.Duration
class NcmpAsyncBatchEventConsumerSpec extends MessagingBaseSpec {
@SpringBean
- EventsPublisher asyncBatchEventPublisher = new EventsPublisher<BatchDataResponseEventV1>(kafkaTemplate)
+ EventsPublisher asyncBatchEventPublisher = new EventsPublisher<BatchDataResponseEventV1>(legacyEventKafkaTemplate, cloudEventKafkaTemplate)
@SpringBean
NcmpAsyncBatchEventConsumer asyncBatchEventConsumer = new NcmpAsyncBatchEventConsumer(asyncBatchEventPublisher)
@@ -57,19 +58,19 @@ class NcmpAsyncBatchEventConsumerSpec extends MessagingBaseSpec {
@Autowired
RecordFilterStrategy<String, BatchDataResponseEventV1> recordFilterStrategy
- def kafkaConsumer = new KafkaConsumer<>(consumerConfigProperties('test'))
+ def legacyEventKafkaConsumer = new KafkaConsumer<>(eventConsumerConfigProperties('test', StringDeserializer))
def static clientTopic = 'client-topic'
def static batchEventType = 'org.onap.cps.ncmp.events.async.BatchDataResponseEventV1'
def 'Consume and publish event to client specified topic'() {
given: 'consumer subscribing to client topic'
- kafkaConsumer.subscribe([clientTopic])
+ legacyEventKafkaConsumer.subscribe([clientTopic])
and: 'consumer record for batch event'
def consumerRecordIn = createConsumerRecord(batchEventType)
when: 'the batch event is consumed and published to client specified topic'
asyncBatchEventConsumer.consumeAndPublish(consumerRecordIn)
and: 'the client specified topic is polled'
- def consumerRecordOut = kafkaConsumer.poll(Duration.ofMillis(1500))[0]
+ def consumerRecordOut = legacyEventKafkaConsumer.poll(Duration.ofMillis(1500))[0]
then: 'verifying consumed event operationID is same as published event operationID'
def operationIdIn = consumerRecordIn.value.event.batchResponses[0].operationId
def operationIdOut = jsonObjectMapper.convertJsonString((String)consumerRecordOut.value(), BatchDataResponseEventV1.class).event.batchResponses[0].operationId
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/kafka/KafkaTemplateConfigSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/kafka/KafkaTemplateConfigSpec.groovy
new file mode 100644
index 0000000000..ed5f161258
--- /dev/null
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/kafka/KafkaTemplateConfigSpec.groovy
@@ -0,0 +1,62 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.ncmp.api.impl.config.kafka;
+
+import io.cloudevents.CloudEvent
+import io.cloudevents.kafka.CloudEventDeserializer
+import io.cloudevents.kafka.CloudEventSerializer
+import org.spockframework.spring.EnableSharedInjection
+import org.springframework.beans.factory.annotation.Autowired
+import org.springframework.boot.autoconfigure.kafka.KafkaProperties
+import org.springframework.boot.context.properties.EnableConfigurationProperties
+import org.springframework.boot.test.context.SpringBootTest
+import org.springframework.kafka.core.KafkaTemplate
+import org.springframework.kafka.support.serializer.JsonDeserializer
+import org.springframework.kafka.support.serializer.JsonSerializer
+import spock.lang.Shared
+import spock.lang.Specification
+
+@SpringBootTest(classes = [KafkaProperties, KafkaTemplateConfig])
+@EnableSharedInjection
+@EnableConfigurationProperties
+class KafkaTemplateConfigSpec extends Specification {
+
+ @Shared
+ @Autowired
+ KafkaTemplate<String, String> legacyEventKafkaTemplate
+
+ @Shared
+ @Autowired
+ KafkaTemplate<String, CloudEvent> cloudEventKafkaTemplate
+
+ def 'Verify kafka template serializer and deserializer configuration for #eventType.'() {
+ expect: 'kafka template is instantiated'
+ assert kafkaTemplateInstance.properties['beanName'] == beanName
+ and: 'verify event key and value serializer'
+ assert kafkaTemplateInstance.properties['producerFactory'].configs['value.serializer'].asType(String.class).contains(valueSerializer.getCanonicalName())
+ and: 'verify event key and value deserializer'
+ assert kafkaTemplateInstance.properties['consumerFactory'].configs['spring.deserializer.value.delegate.class'].asType(String.class).contains(delegateDeserializer.getCanonicalName())
+ where: 'the following event type is used'
+ eventType | kafkaTemplateInstance || beanName | valueSerializer | delegateDeserializer
+ 'legacy event' | legacyEventKafkaTemplate || 'legacyEventKafkaTemplate' | JsonSerializer | JsonDeserializer
+ 'cloud event' | cloudEventKafkaTemplate || 'cloudEventKafkaTemplate' | CloudEventSerializer | CloudEventDeserializer
+ }
+}
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/avc/AvcEventConsumerSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/avc/AvcEventConsumerSpec.groovy
index 5f54bbe3dd..3dffac714b 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/avc/AvcEventConsumerSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/avc/AvcEventConsumerSpec.groovy
@@ -24,6 +24,7 @@ import com.fasterxml.jackson.databind.ObjectMapper
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.common.header.internals.RecordHeader
+import org.apache.kafka.common.serialization.StringDeserializer
import org.mapstruct.factory.Mappers
import org.onap.cps.ncmp.api.impl.events.EventsPublisher
import org.onap.cps.ncmp.api.kafka.MessagingBaseSpec
@@ -48,7 +49,7 @@ class AvcEventConsumerSpec extends MessagingBaseSpec {
AvcEventMapper avcEventMapper = Mappers.getMapper(AvcEventMapper.class)
@SpringBean
- EventsPublisher eventsPublisher = new EventsPublisher<AvcEvent>(kafkaTemplate)
+ EventsPublisher eventsPublisher = new EventsPublisher<AvcEvent>(legacyEventKafkaTemplate, cloudEventKafkaTemplate)
@SpringBean
AvcEventConsumer acvEventConsumer = new AvcEventConsumer(eventsPublisher, avcEventMapper)
@@ -56,13 +57,13 @@ class AvcEventConsumerSpec extends MessagingBaseSpec {
@Autowired
JsonObjectMapper jsonObjectMapper
- def kafkaConsumer = new KafkaConsumer<>(consumerConfigProperties('ncmp-group'))
+ def legacyEventKafkaConsumer = new KafkaConsumer<>(eventConsumerConfigProperties('ncmp-group', StringDeserializer))
def 'Consume and forward valid message'() {
given: 'consumer has a subscription on a topic'
def cmEventsTopicName = 'cm-events'
acvEventConsumer.cmEventsTopicName = cmEventsTopicName
- kafkaConsumer.subscribe([cmEventsTopicName] as List<String>)
+ legacyEventKafkaConsumer.subscribe([cmEventsTopicName] as List<String>)
and: 'an event is sent'
def jsonData = TestUtils.getResourceFileContent('sampleAvcInputEvent.json')
def testEventSent = jsonObjectMapper.convertJsonString(jsonData, AvcEvent.class)
@@ -73,7 +74,7 @@ class AvcEventConsumerSpec extends MessagingBaseSpec {
when: 'the event is consumed'
acvEventConsumer.consumeAndForward(consumerRecord)
and: 'the topic is polled'
- def records = kafkaConsumer.poll(Duration.ofMillis(1500))
+ def records = legacyEventKafkaConsumer.poll(Duration.ofMillis(1500))
then: 'poll returns one record'
assert records.size() == 1
and: 'record can be converted to AVC event'
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy
index 93741261f6..4c6880421b 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy
@@ -22,6 +22,7 @@ package org.onap.cps.ncmp.api.impl.events.lcm
import com.fasterxml.jackson.databind.ObjectMapper
import org.apache.kafka.clients.consumer.KafkaConsumer
+import org.apache.kafka.common.serialization.StringDeserializer
import org.onap.cps.ncmp.api.impl.events.EventsPublisher
import org.onap.cps.ncmp.api.kafka.MessagingBaseSpec
import org.onap.cps.ncmp.events.lcm.v1.Event
@@ -42,12 +43,12 @@ import java.time.Duration
@DirtiesContext
class LcmEventsPublisherSpec extends MessagingBaseSpec {
- def kafkaConsumer = new KafkaConsumer<>(consumerConfigProperties('ncmp-group'))
+ def legacyEventKafkaConsumer = new KafkaConsumer<>(eventConsumerConfigProperties('ncmp-group', StringDeserializer))
def testTopic = 'ncmp-events-test'
@SpringBean
- EventsPublisher<LcmEvent> lcmEventsPublisher = new EventsPublisher(kafkaTemplate)
+ EventsPublisher<LcmEvent> lcmEventsPublisher = new EventsPublisher(legacyEventKafkaTemplate, cloudEventKafkaTemplate)
@Autowired
JsonObjectMapper jsonObjectMapper
@@ -82,11 +83,11 @@ class LcmEventsPublisherSpec extends MessagingBaseSpec {
eventSchema : eventSchema,
eventSchemaVersion: eventSchemaVersion]
and: 'consumer has a subscription'
- kafkaConsumer.subscribe([testTopic] as List<String>)
+ legacyEventKafkaConsumer.subscribe([testTopic] as List<String>)
when: 'an event is published'
lcmEventsPublisher.publishEvent(testTopic, eventKey, eventHeader, eventData)
and: 'topic is polled'
- def records = kafkaConsumer.poll(Duration.ofMillis(1500))
+ def records = legacyEventKafkaConsumer.poll(Duration.ofMillis(1500))
then: 'poll returns one record'
assert records.size() == 1
and: 'record key matches the expected event key'
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy
index 337178e128..603b8cdda6 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy
@@ -20,6 +20,8 @@
package org.onap.cps.ncmp.api.kafka
+import io.cloudevents.CloudEvent
+import io.cloudevents.kafka.CloudEventSerializer
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.kafka.common.serialization.StringSerializer
import org.spockframework.spring.SpringBean
@@ -44,30 +46,33 @@ class MessagingBaseSpec extends Specification {
static kafkaTestContainer = new KafkaContainer(DockerImageName.parse('registry.nordix.org/onaptest/confluentinc/cp-kafka:6.2.1').asCompatibleSubstituteFor('confluentinc/cp-kafka'))
- def producerConfigProperties() {
+ @SpringBean
+ KafkaTemplate legacyEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<Integer, String>(eventProducerConfigProperties(JsonSerializer)))
+
+ @SpringBean
+ KafkaTemplate cloudEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<String, CloudEvent>(eventProducerConfigProperties(CloudEventSerializer)))
+
+ @DynamicPropertySource
+ static void registerKafkaProperties(DynamicPropertyRegistry dynamicPropertyRegistry) {
+ dynamicPropertyRegistry.add('spring.kafka.bootstrap-servers', kafkaTestContainer::getBootstrapServers)
+ }
+
+ def eventProducerConfigProperties(valueSerializer) {
return [('bootstrap.servers'): kafkaTestContainer.getBootstrapServers().split(',')[0],
('retries') : 0,
('batch-size') : 16384,
('linger.ms') : 1,
('buffer.memory') : 33554432,
('key.serializer') : StringSerializer,
- ('value.serializer') : JsonSerializer]
+ ('value.serializer') : valueSerializer]
}
- def consumerConfigProperties(consumerGroupId) {
+ def eventConsumerConfigProperties(consumerGroupId, valueSerializer) {
return [('bootstrap.servers') : kafkaTestContainer.getBootstrapServers().split(',')[0],
('key.deserializer') : StringDeserializer,
- ('value.deserializer'): StringDeserializer,
+ ('value.deserializer'): valueSerializer,
('auto.offset.reset') : 'earliest',
('group.id') : consumerGroupId
]
}
-
- @SpringBean
- KafkaTemplate kafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<Integer, String>(producerConfigProperties()))
-
- @DynamicPropertySource
- static void registerKafkaProperties(DynamicPropertyRegistry dynamicPropertyRegistry) {
- dynamicPropertyRegistry.add('spring.kafka.bootstrap-servers', kafkaTestContainer::getBootstrapServers)
- }
}
diff --git a/cps-ncmp-service/src/test/resources/application.yml b/cps-ncmp-service/src/test/resources/application.yml
index 1016f2b033..197bfda19c 100644
--- a/cps-ncmp-service/src/test/resources/application.yml
+++ b/cps-ncmp-service/src/test/resources/application.yml
@@ -16,6 +16,14 @@
# SPDX-License-Identifier: Apache-2.0
# ============LICENSE_END=========================================================
+spring:
+ kafka:
+ producer:
+ value-serializer: io.cloudevents.kafka.CloudEventSerializer
+ consumer:
+ properties:
+ spring.deserializer.value.delegate.class: io.cloudevents.kafka.CloudEventDeserializer
+
app:
ncmp:
avc:
diff --git a/cps-parent/pom.xml b/cps-parent/pom.xml
index 17c96c577c..0f58fbba76 100755
--- a/cps-parent/pom.xml
+++ b/cps-parent/pom.xml
@@ -32,7 +32,7 @@
<groupId>org.onap.cps</groupId>
<artifactId>cps-parent</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<packaging>pom</packaging>
<properties>
diff --git a/cps-path-parser/pom.xml b/cps-path-parser/pom.xml
index c388c36e79..bd8b47ca94 100644
--- a/cps-path-parser/pom.xml
+++ b/cps-path-parser/pom.xml
@@ -23,7 +23,7 @@
<parent>
<groupId>org.onap.cps</groupId>
<artifactId>cps-parent</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<relativePath>../cps-parent/pom.xml</relativePath>
</parent>
diff --git a/cps-rest/pom.xml b/cps-rest/pom.xml
index c9d07973d1..f6f81006ed 100755
--- a/cps-rest/pom.xml
+++ b/cps-rest/pom.xml
@@ -28,7 +28,7 @@
<parent>
<groupId>org.onap.cps</groupId>
<artifactId>cps-parent</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<relativePath>../cps-parent/pom.xml</relativePath>
</parent>
diff --git a/cps-ri/pom.xml b/cps-ri/pom.xml
index 66b89de926..89e60dbfbe 100644
--- a/cps-ri/pom.xml
+++ b/cps-ri/pom.xml
@@ -26,15 +26,15 @@
<parent>
<groupId>org.onap.cps</groupId>
<artifactId>cps-parent</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<relativePath>../cps-parent/pom.xml</relativePath>
</parent>
<artifactId>cps-ri</artifactId>
<properties>
- <minimum-coverage>0.34</minimum-coverage>
- <!-- Additional coverage is provided by the integration-test module -->
+ <minimum-coverage>0.29</minimum-coverage>
+ <!-- Additional coverage is provided by integration-test module -->
</properties>
<dependencies>
@@ -125,16 +125,6 @@
</exclusion>
</exclusions>
</dependency>
- <dependency>
- <groupId>org.testcontainers</groupId>
- <artifactId>postgresql</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.testcontainers</groupId>
- <artifactId>spock</artifactId>
- <scope>test</scope>
- </dependency>
</dependencies>
<profiles>
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java b/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java
deleted file mode 100644
index 697eb8de00..0000000000
--- a/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * Copyright (C) 2022-2023 Nordix Foundation
- * Modifications Copyright (C) 2023 TechMahindra Ltd.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.cps.spi.entities;
-
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import lombok.AccessLevel;
-import lombok.NoArgsConstructor;
-
-@NoArgsConstructor(access = AccessLevel.PRIVATE)
-public class FragmentEntityArranger {
-
- /**
- * Convert a collection of (related) FragmentExtracts into FragmentEntities (trees) with descendants.
- *
- * @param anchorEntity the anchor(entity) all the fragments belong to
- * @param fragmentExtracts FragmentExtracts to convert
- * @return a collection of FragmentEntities (trees) with descendants.
- */
- public static Collection<FragmentEntity> toFragmentEntityTrees(final AnchorEntity anchorEntity,
- final Collection<FragmentExtract> fragmentExtracts) {
- final Map<Long, FragmentEntity> fragmentEntityPerId = new HashMap<>();
- if (fragmentExtracts != null) {
- for (final FragmentExtract fragmentExtract : fragmentExtracts) {
- final FragmentEntity fragmentEntity = toFragmentEntity(anchorEntity, fragmentExtract);
- fragmentEntityPerId.put(fragmentEntity.getId(), fragmentEntity);
- }
- }
- return reuniteChildrenWithTheirParents(fragmentEntityPerId);
- }
-
- /**
- * Convert a collection of (related) FragmentExtracts into FragmentEntities (trees) with descendants.
- *
- * @param anchorEntityPerId the anchor(entities) the fragments belong to
- * @param fragmentExtracts FragmentExtracts to convert
- * @return a collection of FragmentEntities (trees) with descendants.
- */
- public static Collection<FragmentEntity> toFragmentEntityTreesAcrossAnchors(
- final Map<Long, AnchorEntity> anchorEntityPerId, final Collection<FragmentExtract> fragmentExtracts) {
- final Map<Long, FragmentEntity> fragmentEntityPerId = new HashMap<>();
- for (final FragmentExtract fragmentExtract : fragmentExtracts) {
- final AnchorEntity anchorEntity = anchorEntityPerId.get(fragmentExtract.getAnchorId());
- final FragmentEntity fragmentEntity = toFragmentEntity(anchorEntity, fragmentExtract);
- fragmentEntityPerId.put(fragmentEntity.getId(), fragmentEntity);
- }
- return reuniteChildrenWithTheirParents(fragmentEntityPerId);
- }
-
- private static FragmentEntity toFragmentEntity(final AnchorEntity anchorEntity,
- final FragmentExtract fragmentExtract) {
- final FragmentEntity fragmentEntity = new FragmentEntity();
- fragmentEntity.setAnchor(anchorEntity);
- fragmentEntity.setId(fragmentExtract.getId());
- fragmentEntity.setXpath(fragmentExtract.getXpath());
- fragmentEntity.setAttributes(fragmentExtract.getAttributes());
- fragmentEntity.setParentId(fragmentExtract.getParentId());
- fragmentEntity.setChildFragments(new HashSet<>());
- return fragmentEntity;
- }
-
- private static Collection<FragmentEntity> reuniteChildrenWithTheirParents(
- final Map<Long, FragmentEntity> fragmentEntityPerId) {
- final Collection<FragmentEntity> fragmentEntitiesWithoutParentInResultSet = new HashSet<>();
- for (final FragmentEntity fragmentEntity : fragmentEntityPerId.values()) {
- final FragmentEntity parentFragmentEntity = fragmentEntityPerId.get(fragmentEntity.getParentId());
- if (parentFragmentEntity == null) {
- fragmentEntitiesWithoutParentInResultSet.add(fragmentEntity);
- } else {
- parentFragmentEntity.getChildFragments().add(fragmentEntity);
- }
- }
- return fragmentEntitiesWithoutParentInResultSet;
- }
-
-}
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
index 02f723029d..e6e250f082 100644
--- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
@@ -36,7 +36,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@@ -52,8 +51,6 @@ import org.onap.cps.spi.FetchDescendantsOption;
import org.onap.cps.spi.entities.AnchorEntity;
import org.onap.cps.spi.entities.DataspaceEntity;
import org.onap.cps.spi.entities.FragmentEntity;
-import org.onap.cps.spi.entities.FragmentEntityArranger;
-import org.onap.cps.spi.entities.FragmentExtract;
import org.onap.cps.spi.exceptions.AlreadyDefinedException;
import org.onap.cps.spi.exceptions.AlreadyDefinedExceptionBatch;
import org.onap.cps.spi.exceptions.ConcurrencyException;
@@ -248,7 +245,7 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName);
final Collection<FragmentEntity> fragmentEntities =
getFragmentEntities(anchorEntity, xpaths, fetchDescendantsOption);
- return toDataNodes(fragmentEntities, fetchDescendantsOption);
+ return createDataNodesFromFragmentEntities(fetchDescendantsOption, fragmentEntities);
}
private Collection<FragmentEntity> getFragmentEntities(final AnchorEntity anchorEntity,
@@ -269,19 +266,16 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
normalizedXpaths.addAll(fragmentRepository.findAllXpathByAnchorAndParentIdIsNull(anchorEntity));
}
- final List<FragmentExtract> fragmentExtracts =
- fragmentRepository.findExtractsWithDescendants(anchorEntity.getId(), normalizedXpaths,
- fetchDescendantsOption.getDepth());
+ final List<FragmentEntity> fragmentEntities = fragmentRepository.findByAnchorAndXpathIn(anchorEntity,
+ normalizedXpaths);
- return FragmentEntityArranger.toFragmentEntityTrees(anchorEntity, fragmentExtracts);
+ return fragmentRepository.prefetchDescendantsOfFragmentEntities(fetchDescendantsOption, fragmentEntities);
}
private FragmentEntity getFragmentEntity(final AnchorEntity anchorEntity, final String xpath) {
final FragmentEntity fragmentEntity;
if (isRootXpath(xpath)) {
- final List<FragmentExtract> fragmentExtracts = fragmentRepository.findAllExtractsByAnchor(anchorEntity);
- fragmentEntity = FragmentEntityArranger.toFragmentEntityTrees(anchorEntity, fragmentExtracts)
- .stream().findFirst().orElse(null);
+ fragmentEntity = fragmentRepository.findOneByAnchorId(anchorEntity.getId()).orElse(null);
} else {
fragmentEntity = fragmentRepository.getByAnchorAndXpath(anchorEntity, getNormalizedXpath(xpath));
}
@@ -320,8 +314,8 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
fragmentEntities = fragmentRepository.findByAnchorAndXpathIn(anchorEntity, ancestorXpaths);
}
}
- fragmentEntities = prefetchDescendantsForFragmentEntities(fetchDescendantsOption, anchorEntity,
- fragmentEntities);
+ fragmentEntities = fragmentRepository.prefetchDescendantsOfFragmentEntities(fetchDescendantsOption,
+ fragmentEntities);
return createDataNodesFromFragmentEntities(fetchDescendantsOption, fragmentEntities);
}
@@ -331,31 +325,6 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
return queryDataNodes(dataspaceName, QUERY_ACROSS_ANCHORS, cpsPath, fetchDescendantsOption);
}
- private Collection<FragmentEntity> prefetchDescendantsForFragmentEntities(
- final FetchDescendantsOption fetchDescendantsOption,
- final AnchorEntity anchorEntity,
- final Collection<FragmentEntity> proxiedFragmentEntities) {
- if (FetchDescendantsOption.OMIT_DESCENDANTS.equals(fetchDescendantsOption)) {
- return proxiedFragmentEntities;
- }
-
- final List<Long> fragmentEntityIds = proxiedFragmentEntities.stream()
- .map(FragmentEntity::getId).collect(Collectors.toList());
-
- final List<FragmentExtract> fragmentExtracts =
- fragmentRepository.findExtractsWithDescendantsByIds(fragmentEntityIds, fetchDescendantsOption.getDepth());
-
- if (anchorEntity == ALL_ANCHORS) {
- final Collection<Long> anchorIds = fragmentExtracts.stream()
- .map(FragmentExtract::getAnchorId).collect(Collectors.toSet());
- final List<AnchorEntity> anchorEntities = anchorRepository.findAllById(anchorIds);
- final Map<Long, AnchorEntity> anchorEntityPerId = anchorEntities.stream()
- .collect(Collectors.toMap(AnchorEntity::getId, Function.identity()));
- return FragmentEntityArranger.toFragmentEntityTreesAcrossAnchors(anchorEntityPerId, fragmentExtracts);
- }
- return FragmentEntityArranger.toFragmentEntityTrees(anchorEntity, fragmentExtracts);
- }
-
private List<DataNode> createDataNodesFromFragmentEntities(final FetchDescendantsOption fetchDescendantsOption,
final Collection<FragmentEntity> fragmentEntities) {
final List<DataNode> dataNodes = new ArrayList<>(fragmentEntities.size());
@@ -422,15 +391,6 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
.withChildDataNodes(childDataNodes).build();
}
- private Collection<DataNode> toDataNodes(final Collection<FragmentEntity> fragmentEntities,
- final FetchDescendantsOption fetchDescendantsOption) {
- final Collection<DataNode> dataNodes = new ArrayList<>(fragmentEntities.size());
- for (final FragmentEntity fragmentEntity : fragmentEntities) {
- dataNodes.add(toDataNode(fragmentEntity, fetchDescendantsOption));
- }
- return dataNodes;
- }
-
private List<DataNode> getChildDataNodes(final FragmentEntity fragmentEntity,
final FetchDescendantsOption fetchDescendantsOption) {
if (fetchDescendantsOption.hasNext()) {
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentExtract.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepository.java
index 50be3c7b7a..2460db869a 100644
--- a/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentExtract.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepository.java
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2022-2023 Nordix Foundation.
+ * Copyright (C) 2023 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,17 +18,14 @@
* ============LICENSE_END=========================================================
*/
-package org.onap.cps.spi.entities;
+package org.onap.cps.spi.repository;
-public interface FragmentExtract {
+import java.util.Collection;
+import org.onap.cps.spi.FetchDescendantsOption;
+import org.onap.cps.spi.entities.FragmentEntity;
- Long getId();
-
- Long getAnchorId();
-
- String getXpath();
-
- Long getParentId();
-
- String getAttributes();
+public interface FragmentPrefetchRepository {
+ Collection<FragmentEntity> prefetchDescendantsOfFragmentEntities(
+ final FetchDescendantsOption fetchDescendantsOption,
+ final Collection<FragmentEntity> proxiedFragmentEntities);
}
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepositoryImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepositoryImpl.java
new file mode 100644
index 0000000000..4f056c8f6e
--- /dev/null
+++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepositoryImpl.java
@@ -0,0 +1,127 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.spi.repository;
+
+import java.sql.Connection;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import lombok.RequiredArgsConstructor;
+import org.onap.cps.spi.FetchDescendantsOption;
+import org.onap.cps.spi.entities.AnchorEntity;
+import org.onap.cps.spi.entities.FragmentEntity;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.core.PreparedStatementSetter;
+import org.springframework.jdbc.core.RowMapper;
+import org.springframework.stereotype.Repository;
+
+@Repository
+@RequiredArgsConstructor
+public class FragmentPrefetchRepositoryImpl implements FragmentPrefetchRepository {
+
+ private final JdbcTemplate jdbcTemplate;
+
+ @Override
+ public Collection<FragmentEntity> prefetchDescendantsOfFragmentEntities(
+ final FetchDescendantsOption fetchDescendantsOption,
+ final Collection<FragmentEntity> proxiedFragmentEntities) {
+
+ if (FetchDescendantsOption.OMIT_DESCENDANTS.equals(fetchDescendantsOption)) {
+ return proxiedFragmentEntities;
+ }
+
+ final List<Long> fragmentEntityIds = proxiedFragmentEntities.stream()
+ .map(FragmentEntity::getId).collect(Collectors.toList());
+
+ final Map<Long, AnchorEntity> anchorEntityPerId = proxiedFragmentEntities.stream()
+ .map(FragmentEntity::getAnchor)
+ .collect(Collectors.toMap(AnchorEntity::getId, anchor -> anchor, (anchor1, anchor2) -> anchor1));
+
+ final int maxDepth = fetchDescendantsOption.equals(FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS)
+ ? Integer.MAX_VALUE
+ : fetchDescendantsOption.getDepth();
+ return findFragmentEntitiesWithDescendantsByIds(fragmentEntityIds, anchorEntityPerId, maxDepth);
+ }
+
+ private Collection<FragmentEntity> findFragmentEntitiesWithDescendantsByIds(
+ final Collection<Long> fragmentEntityIds,
+ final Map<Long, AnchorEntity> anchorEntityPerId,
+ final int maxDepth) {
+ final String sql
+ = "WITH RECURSIVE parent_search AS ("
+ + " SELECT id, 0 AS depth "
+ + " FROM fragment "
+ + " WHERE id = ANY (?) "
+ + " UNION "
+ + " SELECT child.id, depth + 1 "
+ + " FROM fragment child INNER JOIN parent_search parent ON child.parent_id = parent.id"
+ + " WHERE depth < ?"
+ + ") "
+ + "SELECT fragment.id, anchor_id AS anchorId, xpath, parent_id AS parentId, "
+ + " CAST(attributes AS TEXT) AS attributes "
+ + "FROM fragment INNER JOIN parent_search ON fragment.id = parent_search.id";
+
+ final PreparedStatementSetter preparedStatementSetter = preparedStatement -> {
+ final Connection connection = preparedStatement.getConnection();
+ final java.sql.Array idArray = connection.createArrayOf("bigint", fragmentEntityIds.toArray());
+ preparedStatement.setArray(1, idArray);
+ preparedStatement.setInt(2, maxDepth);
+ };
+
+ final RowMapper<FragmentEntity> fragmentEntityRowMapper = (resultSet, rowNum) -> {
+ final FragmentEntity fragmentEntity = new FragmentEntity();
+ fragmentEntity.setId(resultSet.getLong("id"));
+ fragmentEntity.setXpath(resultSet.getString("xpath"));
+ fragmentEntity.setParentId(resultSet.getLong("parentId"));
+ fragmentEntity.setAttributes(resultSet.getString("attributes"));
+ fragmentEntity.setAnchor(anchorEntityPerId.get(resultSet.getLong("anchorId")));
+ fragmentEntity.setChildFragments(new HashSet<>());
+ return fragmentEntity;
+ };
+
+ final Map<Long, FragmentEntity> fragmentEntityPerId;
+ try (final Stream<FragmentEntity> fragmentEntityStream = jdbcTemplate.queryForStream(sql,
+ preparedStatementSetter, fragmentEntityRowMapper)) {
+ fragmentEntityPerId = fragmentEntityStream.collect(
+ Collectors.toMap(FragmentEntity::getId, Function.identity()));
+ }
+ return reuniteChildrenWithTheirParents(fragmentEntityPerId);
+ }
+
+ private static Collection<FragmentEntity> reuniteChildrenWithTheirParents(
+ final Map<Long, FragmentEntity> fragmentEntityPerId) {
+ final Collection<FragmentEntity> fragmentEntitiesWithoutParent = new HashSet<>();
+ for (final FragmentEntity fragmentEntity : fragmentEntityPerId.values()) {
+ final FragmentEntity parentFragmentEntity = fragmentEntityPerId.get(fragmentEntity.getParentId());
+ if (parentFragmentEntity == null) {
+ fragmentEntitiesWithoutParent.add(fragmentEntity);
+ } else {
+ parentFragmentEntity.getChildFragments().add(fragmentEntity);
+ }
+ }
+ return fragmentEntitiesWithoutParent;
+ }
+
+}
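
The recursive CTE returns the requested fragments plus all their descendants down to maxDepth as flat rows; reuniteChildrenWithTheirParents then stitches those rows back into trees, treating any row whose parent is absent from the result set as a root. A self-contained sketch of that stitching over plain (id, parentId) pairs, for illustration only:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ReuniteSketch {

        record Row(long id, Long parentId) { }

        public static void main(final String[] args) {
            // Flat result set: fragment 1 is a root; 2 is its child; 3 is a child of 2.
            final List<Row> rows = List.of(new Row(1, null), new Row(2, 1L), new Row(3, 2L));
            final Map<Long, List<Long>> childrenPerId = new HashMap<>();
            rows.forEach(row -> childrenPerId.put(row.id(), new ArrayList<>()));
            final List<Long> roots = new ArrayList<>();
            for (final Row row : rows) {
                // A row whose parent is not in the result set becomes a tree root.
                if (row.parentId() == null || !childrenPerId.containsKey(row.parentId())) {
                    roots.add(row.id());
                } else {
                    childrenPerId.get(row.parentId()).add(row.id());
                }
            }
            System.out.println("roots=" + roots + ", children=" + childrenPerId);
            // e.g. roots=[1], children={1=[2], 2=[3], 3=[]}
        }
    }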
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java
index 82c422f6fd..03de95eb8d 100755
--- a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java
@@ -29,7 +29,6 @@ import java.util.Optional;
import org.onap.cps.spi.entities.AnchorEntity;
import org.onap.cps.spi.entities.DataspaceEntity;
import org.onap.cps.spi.entities.FragmentEntity;
-import org.onap.cps.spi.entities.FragmentExtract;
import org.onap.cps.spi.exceptions.DataNodeNotFoundException;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
@@ -38,7 +37,8 @@ import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
@Repository
-public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>, FragmentRepositoryCpsPathQuery {
+public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>, FragmentRepositoryCpsPathQuery,
+ FragmentPrefetchRepository {
Optional<FragmentEntity> findByAnchorAndXpath(AnchorEntity anchorEntity, String xpath);
@@ -47,7 +47,10 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>,
new DataNodeNotFoundException(anchorEntity.getDataspace().getName(), anchorEntity.getName(), xpath));
}
- List<FragmentEntity> findByAnchorIdAndXpathIn(long anchorId, String[] xpaths);
+ @Query(value = "SELECT * FROM fragment WHERE anchor_id = :anchorId AND xpath = ANY (:xpaths)",
+ nativeQuery = true)
+ List<FragmentEntity> findByAnchorIdAndXpathIn(@Param("anchorId") long anchorId,
+ @Param("xpaths") String[] xpaths);
default List<FragmentEntity> findByAnchorAndXpathIn(final AnchorEntity anchorEntity,
final Collection<String> xpaths) {
@@ -66,8 +69,8 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>,
boolean existsByAnchorId(long anchorId);
- @Query("SELECT f FROM FragmentEntity f WHERE anchor = :anchor")
- List<FragmentExtract> findAllExtractsByAnchor(@Param("anchor") AnchorEntity anchorEntity);
+ @Query(value = "SELECT * FROM fragment WHERE anchor_id = :anchorId LIMIT 1", nativeQuery = true)
+ Optional<FragmentEntity> findOneByAnchorId(@Param("anchorId") long anchorId);
@Modifying
@Query(value = "DELETE FROM fragment WHERE anchor_id = ANY (:anchorIds)", nativeQuery = true)
@@ -111,48 +114,4 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>,
@Query("SELECT xpath FROM FragmentEntity WHERE anchor = :anchor AND parentId IS NULL")
List<String> findAllXpathByAnchorAndParentIdIsNull(@Param("anchor") AnchorEntity anchorEntity);
- @Query(value
- = "WITH RECURSIVE parent_search AS ("
- + " SELECT id, 0 AS depth "
- + " FROM fragment "
- + " WHERE anchor_id = :anchorId AND xpath = ANY (:xpaths) "
- + " UNION "
- + " SELECT c.id, depth + 1 "
- + " FROM fragment c INNER JOIN parent_search p ON c.parent_id = p.id"
- + " WHERE depth < (SELECT CASE WHEN :maxDepth = -1 THEN " + Integer.MAX_VALUE + " ELSE :maxDepth END) "
- + ") "
- + "SELECT f.id, anchor_id AS anchorId, xpath, f.parent_id AS parentId, CAST(attributes AS TEXT) AS attributes "
- + "FROM fragment f INNER JOIN parent_search p ON f.id = p.id",
- nativeQuery = true
- )
- List<FragmentExtract> findExtractsWithDescendants(@Param("anchorId") long anchorId,
- @Param("xpaths") String[] xpaths,
- @Param("maxDepth") int maxDepth);
-
- default List<FragmentExtract> findExtractsWithDescendants(final long anchorId, final Collection<String> xpaths,
- final int maxDepth) {
- return findExtractsWithDescendants(anchorId, xpaths.toArray(new String[0]), maxDepth);
- }
-
- @Query(value
- = "WITH RECURSIVE parent_search AS ("
- + " SELECT id, 0 AS depth "
- + " FROM fragment "
- + " WHERE id = ANY (:ids) "
- + " UNION "
- + " SELECT c.id, depth + 1 "
- + " FROM fragment c INNER JOIN parent_search p ON c.parent_id = p.id"
- + " WHERE depth < (SELECT CASE WHEN :maxDepth = -1 THEN " + Integer.MAX_VALUE + " ELSE :maxDepth END) "
- + ") "
- + "SELECT f.id, anchor_id AS anchorId, xpath, f.parent_id AS parentId, CAST(attributes AS TEXT) AS attributes "
- + "FROM fragment f INNER JOIN parent_search p ON f.id = p.id",
- nativeQuery = true
- )
- List<FragmentExtract> findExtractsWithDescendantsByIds(@Param("ids") long[] ids,
- @Param("maxDepth") int maxDepth);
-
- default List<FragmentExtract> findExtractsWithDescendantsByIds(final Collection<Long> ids, final int maxDepth) {
- return findExtractsWithDescendantsByIds(ids.stream().mapToLong(id -> id).toArray(), maxDepth);
- }
-
}
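
The recursive-CTE queries removed above walked down from seed fragments, bounding recursion depth with maxDepth (-1 meaning unlimited); descendant prefetching now lives in FragmentPrefetchRepositoryImpl. As a hedged sketch of the same CTE shape issued through Spring's JdbcTemplate (assuming the fragment table columns shown in the removed queries; not the real implementation):

    import java.util.List;
    import java.util.Map;
    import org.springframework.jdbc.core.JdbcTemplate;

    class DescendantQuerySketch {
        private final JdbcTemplate jdbcTemplate;

        DescendantQuerySketch(final JdbcTemplate jdbcTemplate) {
            this.jdbcTemplate = jdbcTemplate;
        }

        List<Map<String, Object>> fetchWithDescendants(final long anchorId,
                                                       final String xpath,
                                                       final int maxDepth) {
            // -1 means unlimited, mirroring the removed queries' CASE expression
            final int effectiveMaxDepth = maxDepth == -1 ? Integer.MAX_VALUE : maxDepth;
            final String sql = "WITH RECURSIVE parent_search AS ("
                + " SELECT id, 0 AS depth FROM fragment WHERE anchor_id = ? AND xpath = ?"
                + " UNION"
                + " SELECT c.id, p.depth + 1 FROM fragment c"
                + "   INNER JOIN parent_search p ON c.parent_id = p.id"
                + "   WHERE p.depth < ?"
                + ") "
                + "SELECT f.* FROM fragment f INNER JOIN parent_search p ON f.id = p.id";
            return jdbcTemplate.queryForList(sql, anchorId, xpath, effectiveMaxDepth);
        }
    }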
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
index e8921b3ed0..cb554faee8 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
@@ -26,7 +26,7 @@ import org.onap.cps.spi.FetchDescendantsOption
import org.onap.cps.spi.entities.AnchorEntity
import org.onap.cps.spi.entities.DataspaceEntity
import org.onap.cps.spi.entities.FragmentEntity
-import org.onap.cps.spi.entities.FragmentExtract
+
import org.onap.cps.spi.exceptions.ConcurrencyException
import org.onap.cps.spi.exceptions.DataValidationException
import org.onap.cps.spi.model.DataNode
@@ -55,6 +55,7 @@ class CpsDataPersistenceServiceSpec extends Specification {
def setup() {
mockAnchorRepository.getByDataspaceAndName(_, _) >> anchorEntity
+ mockFragmentRepository.prefetchDescendantsOfFragmentEntities(_, _) >> { fetchDescendantsOption, fragmentEntities -> fragmentEntities }
}
def 'Storing data nodes individually when batch operation fails'(){
@@ -93,20 +94,20 @@ class CpsDataPersistenceServiceSpec extends Specification {
def 'Batch update data node leaves and descendants: #scenario'(){
given: 'the fragment repository returns fragment entities related to the xpath inputs'
- mockFragmentRepository.findExtractsWithDescendants(_, [] as Set, _) >> []
- mockFragmentRepository.findExtractsWithDescendants(_, ['/test/xpath'] as Set, _) >> [
- mockFragmentExtract(1, null, 123, '/test/xpath', "{\"id\":\"testId1\"}")
+ mockFragmentRepository.findByAnchorAndXpathIn(_, [] as Set) >> []
+ mockFragmentRepository.findByAnchorAndXpathIn(_, ['/test/xpath'] as Set) >> [
+ new FragmentEntity(1, '/test/xpath', null, "{\"id\":\"testId\"}", anchorEntity, [] as Set)
]
- mockFragmentRepository.findExtractsWithDescendants(123, ['/test/xpath1', '/test/xpath2'] as Set, _) >> [
- mockFragmentExtract(1, null, 123, '/test/xpath1', "{\"id\":\"testId1\"}"),
- mockFragmentExtract(2, null, 123, '/test/xpath2', "{\"id\":\"testId1\"}")
+ mockFragmentRepository.findByAnchorAndXpathIn(_, ['/test/xpath1', '/test/xpath2'] as Set) >> [
+ new FragmentEntity(1, '/test/xpath1', null, "{\"id\":\"testId1\"}", anchorEntity, [] as Set),
+ new FragmentEntity(2, '/test/xpath2', null, "{\"id\":\"testId2\"}", anchorEntity, [] as Set)
]
when: 'replace data node tree'
objectUnderTest.batchUpdateDataLeaves('dataspaceName', 'anchorName',
dataNodes.stream().collect(Collectors.toMap(DataNode::getXpath, DataNode::getLeaves)))
then: 'call fragment repository save all method'
1 * mockFragmentRepository.saveAll({fragmentEntities ->
- assert fragmentEntities as List == expectedFragmentEntities
+ assert fragmentEntities.sort() == expectedFragmentEntities.sort()
assert fragmentEntities.size() == expectedSize
})
where: 'the following Data Type is passed'
@@ -172,9 +173,9 @@ class CpsDataPersistenceServiceSpec extends Specification {
def 'Retrieving multiple data nodes.'() {
given: 'fragment repository returns a collection of fragments'
- mockFragmentRepository.findExtractsWithDescendants(123, ['/xpath1', '/xpath2'] as Set, _) >> [
- mockFragmentExtract(1, null, 123, '/xpath1', null),
- mockFragmentExtract(2, null, 123, '/xpath2', null)
+ mockFragmentRepository.findByAnchorAndXpathIn(anchorEntity, ['/xpath1', '/xpath2'] as Set) >> [
+ new FragmentEntity(1, '/xpath1', null, null, anchorEntity, [] as Set),
+ new FragmentEntity(2, '/xpath2', null, null, anchorEntity, [] as Set)
]
when: 'getting data nodes for 2 xpaths'
def result = objectUnderTest.getDataNodesForMultipleXpaths('some-dataspace', 'some-anchor', ['/xpath1', '/xpath2'], FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS)
@@ -207,9 +208,9 @@ class CpsDataPersistenceServiceSpec extends Specification {
def 'Replace data node and descendants: #scenario'(){
given: 'the fragment repository returns fragment entities related to the xpath inputs'
- mockFragmentRepository.findExtractsWithDescendants(_, [] as Set, _) >> []
- mockFragmentRepository.findExtractsWithDescendants(_, ['/test/xpath'] as Set, _) >> [
- mockFragmentExtract(1, null, 123, '/test/xpath', null)
+ mockFragmentRepository.findByAnchorAndXpathIn(_, [] as Set) >> []
+ mockFragmentRepository.findByAnchorAndXpathIn(_, ['/test/xpath'] as Set) >> [
+ new FragmentEntity(1, '/test/xpath', null, '{"id":"testId"}', anchorEntity, [] as Set)
]
when: 'replace data node tree'
objectUnderTest.updateDataNodesAndDescendants('dataspaceName', 'anchorName', dataNodes)
@@ -223,9 +224,9 @@ class CpsDataPersistenceServiceSpec extends Specification {
def 'Replace data nodes and descendants'() {
given: 'the fragment repository returns fragment entities related to the xpath inputs'
- mockFragmentRepository.findExtractsWithDescendants(_, ['/test/xpath1', '/test/xpath2'] as Set, _) >> [
- mockFragmentExtract(1, null, 123, '/test/xpath1', null),
- mockFragmentExtract(2, null, 123, '/test/xpath2', null)
+ mockFragmentRepository.findByAnchorAndXpathIn(_, ['/test/xpath1', '/test/xpath2'] as Set) >> [
+ new FragmentEntity(1, '/test/xpath1', null, null, anchorEntity, [] as Set),
+ new FragmentEntity(2, '/test/xpath2', null, null, anchorEntity, [] as Set)
]
and: 'some data nodes with descendants'
def dataNode1 = new DataNode(xpath: '/test/xpath1', leaves: ['id': 'testId1'], childDataNodes: [new DataNode(xpath: '/test/xpath1/child', leaves: ['id': 'childTestId1'])])
@@ -253,38 +254,27 @@ class CpsDataPersistenceServiceSpec extends Specification {
def createDataNodesAndMockRepositoryMethodSupportingThem(Map<String, String> xpathToScenarioMap) {
def dataNodes = []
- def fragmentExtracts = []
+ def fragmentEntities = []
def fragmentId = 1
xpathToScenarioMap.each {
def xpath = it.key
def scenario = it.value
def dataNode = new DataNodeBuilder().withXpath(xpath).build()
dataNodes.add(dataNode)
- def fragmentExtract = mockFragmentExtract(fragmentId, null, 123, xpath, null)
- fragmentExtracts.add(fragmentExtract)
def fragmentEntity = new FragmentEntity(id: fragmentId, anchor: anchorEntity, xpath: xpath, childFragments: [])
+ fragmentEntities.add(fragmentEntity)
if ('EXCEPTION' == scenario) {
mockFragmentRepository.save(fragmentEntity) >> { throw new StaleStateException("concurrent updates") }
}
fragmentId++
}
- mockFragmentRepository.findExtractsWithDescendants(_, xpathToScenarioMap.keySet(), _) >> fragmentExtracts
+ mockFragmentRepository.findByAnchorAndXpathIn(_, xpathToScenarioMap.keySet()) >> fragmentEntities
return dataNodes
}
def mockFragmentWithJson(json) {
- def fragmentExtract = mockFragmentExtract(456, null, 123, '/parent-01', json)
- mockFragmentRepository.findExtractsWithDescendants(123, ['/parent-01'] as Set, _) >> [fragmentExtract]
- }
-
- def mockFragmentExtract(id, parentId, anchorId, xpath, attributes) {
- def fragmentExtract = Mock(FragmentExtract)
- fragmentExtract.getId() >> id
- fragmentExtract.getParentId() >> parentId
- fragmentExtract.getAnchorId() >> anchorId
- fragmentExtract.getXpath() >> xpath
- fragmentExtract.getAttributes() >> attributes
- return fragmentExtract
+ def fragmentEntity = new FragmentEntity(456, '/parent-01', null, json, anchorEntity, [] as Set)
+ mockFragmentRepository.findByAnchorAndXpathIn(_, ['/parent-01'] as Set) >> [fragmentEntity]
}
}
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy
index 65d63dfe3b..2e4dba2e9b 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy
@@ -20,81 +20,126 @@
*/
package org.onap.cps.spi.impl
-
import org.hibernate.exception.ConstraintViolationException
+import org.onap.cps.spi.CpsAdminPersistenceService
import org.onap.cps.spi.CpsModulePersistenceService
import org.onap.cps.spi.entities.DataspaceEntity
+import org.onap.cps.spi.entities.SchemaSetEntity
import org.onap.cps.spi.exceptions.DuplicatedYangResourceException
import org.onap.cps.spi.model.ModuleReference
import org.onap.cps.spi.repository.DataspaceRepository
+import org.onap.cps.spi.repository.ModuleReferenceRepository
+import org.onap.cps.spi.repository.SchemaSetRepository
import org.onap.cps.spi.repository.YangResourceRepository
import org.spockframework.spring.SpringBean
import org.springframework.beans.factory.annotation.Autowired
+import org.springframework.boot.test.context.SpringBootTest
import org.springframework.dao.DataIntegrityViolationException
-import spock.lang.Shared
+import org.springframework.retry.annotation.EnableRetry
+import spock.lang.Specification
import java.sql.SQLException
-class CpsModulePersistenceServiceConcurrencySpec extends CpsPersistenceSpecBase {
+@SpringBootTest(classes = [CpsModulePersistenceServiceImpl])
+@EnableRetry
+class CpsModulePersistenceServiceConcurrencySpec extends Specification {
@Autowired
CpsModulePersistenceService objectUnderTest
@SpringBean
- YangResourceRepository yangResourceRepositoryMock = Mock()
+ DataspaceRepository dataspaceRepository = Mock()
+
+ @SpringBean
+ YangResourceRepository yangResourceRepository = Mock()
+
+ @SpringBean
+ SchemaSetRepository schemaSetRepository = Mock()
@SpringBean
- DataspaceRepository dataspaceRepositoryMock = Mock()
+ CpsAdminPersistenceService cpsAdminPersistenceService = Mock()
- static final String DATASPACE_NAME = 'DATASPACE-001'
- static final String SCHEMA_SET_NAME_NEW = 'SCHEMA-SET-NEW'
- static final String NEW_RESOURCE_NAME = 'some new resource'
- static final String NEW_RESOURCE_CONTENT = 'module stores {\n' +
+ @SpringBean
+ ModuleReferenceRepository moduleReferenceRepository = Mock()
+
+ def NEW_RESOURCE_NAME = 'some new resource'
+ def NEW_RESOURCE_CONTENT = 'module stores {\n' +
' yang-version 1.1;\n' +
' namespace "org:onap:ccsdk:sample";\n' +
'}'
def newYangResourcesNameToContentMap = [(NEW_RESOURCE_NAME):NEW_RESOURCE_CONTENT]
- @Shared
- yangResourceChecksum = 'b13faef573ed1374139d02c40d8ce09c80ea1dc70e63e464c1ed61568d48d539'
+ def yangResourceChecksum = 'b13faef573ed1374139d02c40d8ce09c80ea1dc70e63e464c1ed61568d48d539'
- @Shared
- yangResourceChecksumDbConstraint = 'yang_resource_checksum_key'
+ def yangResourceChecksumDbConstraint = 'yang_resource_checksum_key'
- @Shared
- sqlExceptionMessage = String.format('(checksum)=(%s)', yangResourceChecksum)
+ def sqlExceptionMessage = String.format('(checksum)=(%s)', yangResourceChecksum)
- @Shared
- checksumIntegrityException =
- new DataIntegrityViolationException("checksum integrity exception",
+ def checksumIntegrityException = new DataIntegrityViolationException("checksum integrity exception",
new ConstraintViolationException('', new SQLException(sqlExceptionMessage), yangResourceChecksumDbConstraint))
- def 'Store new schema set, retry mechanism'() {
+ def 'Store new schema set, maximum retries.'() {
given: 'no pre-existing schemaset in database'
- dataspaceRepositoryMock.getByName(_) >> new DataspaceEntity()
- yangResourceRepositoryMock.findAllByChecksumIn(_) >> Collections.emptyList()
+ dataspaceRepository.getByName(_) >> new DataspaceEntity()
+ yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList()
when: 'a new schemaset is stored'
- objectUnderTest.storeSchemaSet(DATASPACE_NAME, SCHEMA_SET_NAME_NEW, newYangResourcesNameToContentMap)
- then: ' duplicated yang resource exception is thrown '
- def e = thrown(DuplicatedYangResourceException)
+ objectUnderTest.storeSchemaSet('some dataspace', 'some new schema set', newYangResourcesNameToContentMap)
+ then: 'a duplicated yang resource exception is thrown'
+ thrown(DuplicatedYangResourceException)
and: 'the system will attempt to save the data 5 times (because checksum integrity exception is thrown each time)'
- 5 * yangResourceRepositoryMock.saveAll(_) >> { throw checksumIntegrityException }
+ 5 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException }
+ }
+
+ def 'Store new schema set, succeed on third attempt.'() {
+ given: 'no pre-existing schemaset in database'
+ dataspaceRepository.getByName(_) >> new DataspaceEntity()
+ yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList()
+ when: 'a new schemaset is stored'
+ objectUnderTest.storeSchemaSet('some dataspace', 'some new schema set', newYangResourcesNameToContentMap)
+ then: 'no exception is thrown'
+ noExceptionThrown()
+ and: 'the system will attempt to save the data twice, failing with a checksum integrity exception each time, before succeeding on the third attempt'
+ 2 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException }
+ 1 * yangResourceRepository.saveAll(_) >> []
}
- def 'Store schema set using modules, retry mechanism'() {
+ def 'Store schema set using modules, maximum retries.'() {
given: 'map of new modules, a list of existing modules, module reference'
def mapOfNewModules = [newModule1: 'module newmodule { yang-version 1.1; revision "2021-10-12" { } }']
def moduleReferenceForExistingModule = new ModuleReference("test","2021-10-12")
def listOfExistingModulesModuleReference = [moduleReferenceForExistingModule]
and: 'no pre-existing schemaset in database'
- dataspaceRepositoryMock.getByName(_) >> new DataspaceEntity()
- yangResourceRepositoryMock.findAllByChecksumIn(_) >> Collections.emptyList()
+ dataspaceRepository.getByName(_) >> new DataspaceEntity()
+ yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList()
when: 'a new schemaset is stored from a module'
- objectUnderTest.storeSchemaSetFromModules(DATASPACE_NAME, "newSchemaSetName" , mapOfNewModules, listOfExistingModulesModuleReference)
- then: ' duplicated yang resource exception is thrown '
- def e = thrown(DuplicatedYangResourceException)
+ objectUnderTest.storeSchemaSetFromModules('some dataspace', 'some new schema set', mapOfNewModules, listOfExistingModulesModuleReference)
+ then: 'a duplicated yang resource exception is thrown'
+ thrown(DuplicatedYangResourceException)
and: 'the system will attempt to save the data 5 times (because checksum integrity exception is thrown each time)'
- 5 * yangResourceRepositoryMock.saveAll(_) >> { throw checksumIntegrityException }
+ 5 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException }
+ }
+
+ def 'Store schema set using modules, succeed on third attempt.'() {
+ given: 'map of new modules, a list of existing modules, module reference'
+ def mapOfNewModules = [newModule1: 'module newmodule { yang-version 1.1; revision "2021-10-12" { } }']
+ def moduleReferenceForExistingModule = new ModuleReference("test","2021-10-12")
+ def listOfExistingModulesModuleReference = [moduleReferenceForExistingModule]
+ and: 'no pre-existing schemaset in database'
+ def dataspaceEntity = new DataspaceEntity()
+ dataspaceRepository.getByName(_) >> dataspaceEntity
+ yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList()
+ yangResourceRepository.getResourceIdsByModuleReferences(_) >> []
+ and: 'schema set details can be retrieved after storing'
+ def schemaSetEntity = new SchemaSetEntity()
+ schemaSetRepository.getByDataspaceAndName(dataspaceEntity, 'new schema set') >> schemaSetEntity
+ when: 'a new schemaset is stored from a module'
+ objectUnderTest.storeSchemaSetFromModules('some dataspace', 'new schema set', mapOfNewModules, listOfExistingModulesModuleReference)
+ then: 'no exception is thrown'
+ noExceptionThrown()
+ and: 'the system will attempt to save the data twice, failing with a checksum integrity exception each time, before succeeding on the third attempt'
+ 2 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException }
+ 1 * yangResourceRepository.saveAll(_) >> []
}
+
}
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy
index 5e42ce04e7..52651c6b18 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy
@@ -1,7 +1,7 @@
/*
* ============LICENSE_START=======================================================
* Copyright (c) 2021 Bell Canada.
- * Modifications Copyright (C) 2022 Nordix Foundation
+ * Modifications Copyright (C) 2022-2023 Nordix Foundation
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -28,7 +28,6 @@ import org.onap.cps.spi.repository.ModuleReferenceRepository
import org.onap.cps.spi.repository.SchemaSetRepository
import org.onap.cps.spi.repository.YangResourceRepository
import org.springframework.dao.DataIntegrityViolationException
-import spock.lang.Shared
import spock.lang.Specification
import java.sql.SQLException
@@ -38,17 +37,14 @@ import java.sql.SQLException
*/
class CpsModulePersistenceServiceSpec extends Specification {
- // Instance to test
CpsModulePersistenceService objectUnderTest
- // Mocks
def dataspaceRepositoryMock = Mock(DataspaceRepository)
def yangResourceRepositoryMock = Mock(YangResourceRepository)
def schemaSetRepositoryMock = Mock(SchemaSetRepository)
def cpsAdminPersistenceServiceMock = Mock(CpsAdminPersistenceService)
def moduleReferenceRepositoryMock = Mock(ModuleReferenceRepository)
- // Constants
def yangResourceName = 'my-yang-resource-name'
def yangResourceContent = 'module stores {\n' +
' yang-version 1.1;\n' +
@@ -62,17 +58,14 @@ class CpsModulePersistenceServiceSpec extends Specification {
' }' +
'}'
- // Scenario data
static yangResourceChecksum = 'b13faef573ed1374139d02c40d8ce09c80ea1dc70e63e464c1ed61568d48d539'
static yangResourceChecksumDbConstraint = 'yang_resource_checksum_key'
static sqlExceptionMessage = String.format('(checksum)=(%s)', yangResourceChecksum)
- static checksumIntegrityException = new DataIntegrityViolationException(
- "checksum integrity exception",
+ static checksumIntegrityException = new DataIntegrityViolationException('checksum integrity exception',
new ConstraintViolationException('', new SQLException(sqlExceptionMessage), yangResourceChecksumDbConstraint))
- static checksumIntegrityExceptionWithoutChecksum = new DataIntegrityViolationException(
- "checksum integrity exception",
+ static checksumIntegrityExceptionWithoutChecksum = new DataIntegrityViolationException('checksum integrity exception',
new ConstraintViolationException('', new SQLException('no checksum'), yangResourceChecksumDbConstraint))
- static anotherIntegrityException = new DataIntegrityViolationException("another integrity exception")
+ static otherIntegrityException = new DataIntegrityViolationException('another integrity exception')
def setup() {
objectUnderTest = new CpsModulePersistenceServiceImpl(yangResourceRepositoryMock, schemaSetRepositoryMock,
@@ -94,7 +87,7 @@ class CpsModulePersistenceServiceSpec extends Specification {
scenario | dbException || expectedThrownException | expectedThrownExceptionMessage
'checksum data failure' | checksumIntegrityException || DuplicatedYangResourceException | yangResourceChecksum
'checksum failure without checksum' | checksumIntegrityExceptionWithoutChecksum || DuplicatedYangResourceException | 'no checksum found'
- 'other data failure' | anotherIntegrityException || DataIntegrityViolationException | 'another integrity exception'
+ 'other data failure' | otherIntegrityException || DataIntegrityViolationException | 'another integrity exception'
}
}
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistenceSpecBase.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistenceSpecBase.groovy
deleted file mode 100644
index 34a040e604..0000000000
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistenceSpecBase.groovy
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * Copyright (C) 2021-2022 Nordix Foundation
- * Modifications Copyright (C) 2021 Pantheon.tech
- * Modifications Copyright (C) 2021 Bell Canada.
- * Modifications Copyright (C) 2023 TechMahindra Ltd.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the 'License');
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an 'AS IS' BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.cps.spi.impl
-
-import com.fasterxml.jackson.databind.ObjectMapper
-import org.onap.cps.DatabaseTestContainer
-import org.onap.cps.spi.repository.AnchorRepository
-import org.onap.cps.spi.repository.DataspaceRepository
-import org.onap.cps.spi.repository.FragmentRepository
-import org.onap.cps.spi.repository.YangResourceRepository
-import org.onap.cps.utils.JsonObjectMapper
-import org.spockframework.spring.SpringBean
-import org.springframework.beans.factory.annotation.Autowired
-import org.springframework.boot.test.context.SpringBootTest
-import org.testcontainers.spock.Testcontainers
-import spock.lang.Shared
-import spock.lang.Specification
-
-@SpringBootTest
-@Testcontainers
-class CpsPersistenceSpecBase extends Specification {
-
- @Shared
- DatabaseTestContainer databaseTestContainer = DatabaseTestContainer.getInstance()
-
- @Autowired
- DataspaceRepository dataspaceRepository
-
- @Autowired
- YangResourceRepository yangResourceRepository
-
- @Autowired
- AnchorRepository anchorRepository
-
- @Autowired
- FragmentRepository fragmentRepository
-
- @SpringBean
- JsonObjectMapper jsonObjectMapper = new JsonObjectMapper(new ObjectMapper())
-
- protected static final String CLEAR_DATA = '/data/clear-all.sql'
-
- static def DATASPACE_NAME = 'DATASPACE-001'
- static def SCHEMA_SET_NAME1 = 'SCHEMA-SET-001'
- static def SCHEMA_SET_NAME2 = 'SCHEMA-SET-002'
- static def ANCHOR_NAME1 = 'ANCHOR-001'
- static def ANCHOR_NAME2 = 'ANCHOR-002'
- static def ANCHOR_NAME3 = 'ANCHOR-003'
- static def ANCHOR_FOR_DATA_NODES_WITH_LEAVES = 'ANCHOR-003'
- static def ANCHOR_FOR_SHOP_EXAMPLE = 'ANCHOR-004'
- static def ANCHOR_HAVING_SINGLE_TOP_LEVEL_FRAGMENT = 'ANCHOR-005'
- static def ANCHOR_WITH_MULTIPLE_TOP_LEVEL_FRAGMENTS = 'ANCHOR-006'
-}
diff --git a/cps-ri/src/test/java/org/onap/cps/DatabaseTestContainer.java b/cps-ri/src/test/java/org/onap/cps/DatabaseTestContainer.java
deleted file mode 100755
index 61a5c042a6..0000000000
--- a/cps-ri/src/test/java/org/onap/cps/DatabaseTestContainer.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * Copyright (C) 2020 Pantheon.tech
- * Modifications Copyright (C) 2022 Nordix Foundation.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.cps;
-
-import org.testcontainers.containers.PostgreSQLContainer;
-import org.testcontainers.utility.DockerImageName;
-
-/**
- * The Postgresql database test container wrapper.
- * Singleton implementation allows saving time on database initialization which otherwise would occur on each test.
- * for debugging/developing purposes you can suspend any test and connect to this database:
- * docker exec -it {container-id} sh
- * psql -d test -U test
- */
-public class DatabaseTestContainer extends PostgreSQLContainer<DatabaseTestContainer> {
- private static final String IMAGE_VERSION = "registry.nordix.org/onaptest/postgres:14.1";
- private static DatabaseTestContainer databaseTestContainer;
-
- private DatabaseTestContainer() {
- super(DockerImageName.parse(IMAGE_VERSION).asCompatibleSubstituteFor("postgres"));
- }
-
- /**
- * Provides an instance of test container wrapper.
- * The returned value expected to be assigned to static variable annotated with @ClassRule.
- * This will allow to initialize DB connection env variables before DataSource object
- * is initialized by Spring framework.
- *
- */
- public static DatabaseTestContainer getInstance() {
- if (databaseTestContainer == null) {
- databaseTestContainer = new DatabaseTestContainer();
- Runtime.getRuntime().addShutdownHook(new Thread(databaseTestContainer::terminate));
- }
- return databaseTestContainer;
- }
-
- @Override
- public void start() {
- super.start();
- System.setProperty("DB_URL", databaseTestContainer.getJdbcUrl());
- System.setProperty("DB_USERNAME", databaseTestContainer.getUsername());
- System.setProperty("DB_PASSWORD", databaseTestContainer.getPassword());
- }
-
- @Override
- public void stop() {
- // do nothing on test completion, image removal will be performed via terminate() on JVM shutdown
- }
-
- private void terminate() {
- super.stop();
- }
-}
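
The singleton-container pattern this deleted class implemented (one PostgreSQL container per JVM, exposing its connection details via system properties before Spring builds the DataSource) lives on in the integration-test module. A condensed sketch of the idea, with illustrative image name and property keys:

    import org.testcontainers.containers.PostgreSQLContainer;
    import org.testcontainers.utility.DockerImageName;

    class SingletonPostgresContainer extends PostgreSQLContainer<SingletonPostgresContainer> {
        private static SingletonPostgresContainer instance;

        private SingletonPostgresContainer() {
            super(DockerImageName.parse("postgres:14.1"));
        }

        static synchronized SingletonPostgresContainer getInstance() {
            if (instance == null) {
                instance = new SingletonPostgresContainer();
                instance.start();
            }
            return instance;
        }

        @Override
        public void start() {
            super.start();
            // expose connection details so the DataSource configuration can pick them up
            System.setProperty("DB_URL", getJdbcUrl());
            System.setProperty("DB_USERNAME", getUsername());
            System.setProperty("DB_PASSWORD", getPassword());
        }

        @Override
        public void stop() {
            // intentionally empty: the container lives for the whole JVM and is
            // reaped at shutdown, so every test class reuses the same database
        }
    }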
diff --git a/cps-ri/src/test/java/org/onap/cps/TestApplication.java b/cps-ri/src/test/java/org/onap/cps/TestApplication.java
deleted file mode 100644
index 075a241fc7..0000000000
--- a/cps-ri/src/test/java/org/onap/cps/TestApplication.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * Copyright (C) 2020 Pantheon.tech
- * Modifications Copyright (C) 2021 Bell Canada.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.cps;
-
-import org.springframework.boot.autoconfigure.SpringBootApplication;
-import org.springframework.retry.annotation.EnableRetry;
-
-/**
- * The @SpringBootApplication annotated class is required in order to run tests
- * marked with @SpringBootTest annotation.
- */
-@SpringBootApplication(scanBasePackages = "org.onap.cps.spi")
-@EnableRetry
-public class TestApplication {
-}
diff --git a/cps-ri/src/test/resources/application.yml b/cps-ri/src/test/resources/application.yml
deleted file mode 100644
index 4f40aeaa06..0000000000
--- a/cps-ri/src/test/resources/application.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-# ============LICENSE_START=======================================================
-# Copyright (C) 2021 Pantheon.tech
-# Modifications Copyright (C) 2022 Nordix Foundation.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-# ============LICENSE_END=========================================================
-
-spring:
- jpa:
- ddl-auto: create
- show-sql: false
- properties:
- hibernate:
- enable_lazy_load_no_trans: true
- dialect: org.hibernate.dialect.PostgreSQLDialect
- format_sql: true
- show_sql: false
-
- datasource:
- url: ${DB_URL}
- username: ${DB_USERNAME}
- password: ${DB_PASSWORD}
- driverClassName: org.postgresql.Driver
- initialization-mode: always
-
- liquibase:
- change-log: classpath:changelog/changelog-master.yaml
diff --git a/cps-ri/src/test/resources/data/anchor.sql b/cps-ri/src/test/resources/data/anchor.sql
deleted file mode 100644
index a15d5aed21..0000000000
--- a/cps-ri/src/test/resources/data/anchor.sql
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- ============LICENSE_START=======================================================
- Copyright (C) 2020 Pantheon.tech
- Modifications Copyright (C) 2020-2023 Nordix Foundation.
- Modifications Copyright (C) 2021-2022 Bell Canada.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- SPDX-License-Identifier: Apache-2.0
- ============LICENSE_END=========================================================
-*/
-
-INSERT INTO DATASPACE (ID, NAME) VALUES
- (1001, 'DATASPACE-001'),
- (1002, 'DATASPACE-002-NO-DATA'),
- (1003, 'DATASPACE-003');
-
-INSERT INTO SCHEMA_SET (ID, NAME, DATASPACE_ID) VALUES
- (2001, 'SCHEMA-SET-001', 1001),
- (2002, 'SCHEMA-SET-002', 1001),
- (2003, 'SCHEMA-SET-002-NO-ANCHORS', 1003);
-
-INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES
- (3001, 'ANCHOR-001', 1001, 2001),
- (3002, 'ANCHOR-002', 1001, 2002),
- (3003, 'ANCHOR-003', 1001, 2002);
-
-INSERT INTO FRAGMENT (ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
- (4001, 3001, null, '/xpath', '{}');
diff --git a/cps-ri/src/test/resources/data/anchors-schemaset-modules.sql b/cps-ri/src/test/resources/data/anchors-schemaset-modules.sql
deleted file mode 100644
index 65b3a48ca5..0000000000
--- a/cps-ri/src/test/resources/data/anchors-schemaset-modules.sql
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- ============LICENSE_START=======================================================
- Copyright (C) 2021-2022 Nordix Foundation.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- SPDX-License-Identifier: Apache-2.0
- ============LICENSE_END=========================================================
-*/
-
-INSERT INTO DATASPACE (ID, NAME) VALUES
- (1001, 'dataspace-1'), (1002, 'dataspace-2');
-
-INSERT INTO SCHEMA_SET (ID, NAME, DATASPACE_ID) VALUES
- (2001, 'schema-set-1', 1001),
- (2002, 'schema-set-2', 1001),
- (2003, 'schema-set-3', 1001),
- (2004, 'schema-set-4', 1002);
-
-INSERT INTO YANG_RESOURCE (ID, FILE_NAME, CONTENT, CHECKSUM, MODULE_NAME, REVISION) VALUES
- (3001, 'module1@revA.yang', 'some-content', 'checksum1','module-name-1','revA'),
- (3002, 'module2@revA.yang', 'some-content', 'checksum2','module-name-2','revA'),
- (3003, 'module2@revB.yang', 'some-content', 'checksum3','module-name-2','revB'),
- (3004, 'module3@revA.yang', 'some-content', 'checksum4','module-name-3','revA');
-
-INSERT INTO SCHEMA_SET_YANG_RESOURCES (SCHEMA_SET_ID, YANG_RESOURCE_ID) VALUES
- (2001, 3001), --schema-set-1(anchor-1) has modules module1@revA, module2@revA
- (2001, 3002),
- (2002, 3001), --schema-set-2(anchor-2) has modules module1@revA, module2@revB
- (2002, 3003),
- (2003, 3002), --schema-set-3(anchor-3) has modules module2@revA, module2@revB
- (2003, 3003),
- (2004, 3001); --schema-set-4(anchor-4) has module module1@revA but in other dataspace
-
-INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES
- (6001, 'anchor-1', 1001, 2001),
- (6002, 'anchor-2', 1001, 2002),
- (6003, 'anchor-3', 1001, 2003),
- (6005, 'anchor-4', 1002, 2004);
diff --git a/cps-ri/src/test/resources/data/clear-all.sql b/cps-ri/src/test/resources/data/clear-all.sql
deleted file mode 100644
index 07c8a7aab5..0000000000
--- a/cps-ri/src/test/resources/data/clear-all.sql
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- ============LICENSE_START=======================================================
- Copyright (C) 2020-2021 Pantheon.tech
- Modifications Copyright (C) 2020,2022 Nordix Foundation.
- Modifications Copyright (C) 2020 Bell Canada.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- SPDX-License-Identifier: Apache-2.0
- ============LICENSE_END=========================================================
-*/
-
-DELETE FROM FRAGMENT;
-DELETE FROM ANCHOR;
-DELETE FROM DATASPACE;
-DELETE FROM YANG_RESOURCE
--- following tables are cleared by CASCADE constraint: SCHEMA_SET, SCHEMA_SET_YANG_RESOURCES
-
diff --git a/cps-ri/src/test/resources/data/fragment.sql b/cps-ri/src/test/resources/data/fragment.sql
deleted file mode 100755
index 4980073433..0000000000
--- a/cps-ri/src/test/resources/data/fragment.sql
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- ============LICENSE_START=======================================================
- Copyright (C) 2021-2023 Nordix Foundation.
- Modifications Copyright (C) 2021 Pantheon.tech
- Modifications Copyright (C) 2021-2022 Bell Canada.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- SPDX-License-Identifier: Apache-2.0
- ============LICENSE_END=========================================================
-*/
-
-INSERT INTO DATASPACE (ID, NAME) VALUES
- (1001, 'DATASPACE-001'),
- (1002, 'NCMP-Admin');
-
-INSERT INTO SCHEMA_SET (ID, NAME, DATASPACE_ID) VALUES
- (2001, 'SCHEMA-SET-001', 1001);
-
-INSERT INTO YANG_RESOURCE (ID, FILE_NAME, CONTENT, CHECKSUM, MODULE_NAME, REVISION) VALUES
- (4001, 'TEST','', 'SAMPLECHECKSUM','TESTMODULENAME', 'SAMPLEREVISION');
-
-UPDATE YANG_RESOURCE SET
-content = 'module stores {
- yang-version 1.1;
- namespace "org:onap:ccsdk:sample";
-
- prefix book-store;
-
- revision "2020-09-15" {
- description
- "Sample Model";
- }
- }
-'
-where ID = 4001;
-
-INSERT INTO SCHEMA_SET_YANG_RESOURCES (SCHEMA_SET_ID, YANG_RESOURCE_ID) VALUES
- (2001, 4001);
-
-INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES
- (3001, 'ANCHOR-001', 1001, 2001),
- (3003, 'ANCHOR-003', 1001, 2001),
- (3004, 'ncmp-dmi-registry', 1002, 2001),
- (3005, 'ANCHOR-005', 1001, 2001),
- (3006, 'ANCHOR-006', 1001, 2001);
-
-INSERT INTO FRAGMENT (ID, ANCHOR_ID, PARENT_ID, XPATH) VALUES
- (4001, 3001, null, '/parent-1'),
- (4002, 3001, null, '/parent-2'),
- (4003, 3001, null, '/parent-3'),
- (4004, 3001, 4001, '/parent-1/child-1'),
- (4005, 3001, 4002, '/parent-2/child-2'),
- (4006, 3001, 4004, '/parent-1/child-1/grandchild-1');
-
-INSERT INTO FRAGMENT (ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
- (5009, 3005, null, '/parent-207', '{"parent-leaf": "parent-leaf value"}'),
- (5010, 3005, 5009, '/parent-207/child-001', '{"first-child-leaf": "first-child-leaf value"}'),
- (5011, 3005, 5009, '/parent-207/child-002', '{"second-child-leaf": "second-child-leaf value"}'),
- (5012, 3005, 5011, '/parent-207/child-002/grand-child', '{"grand-child-leaf": "grand-child-leaf value"}');
-
-INSERT INTO FRAGMENT (ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
- (5013, 3006, null, '/parent-208', '{"parent-leaf-1": "parent-leaf value-1"}'),
- (5014, 3006, 5013, '/parent-208/child-001', '{"first-child-leaf": "first-child-leaf value"}'),
- (5015, 3006, 5013, '/parent-208/child-002', '{"second-child-leaf": "second-child-leaf value"}'),
- (5016, 3006, 5015, '/parent-208/child-002/grand-child', '{"grand-child-leaf": "grand-child-leaf value"}'),
- (5017, 3006, null, '/parent-209', '{"parent-leaf-2": "parent-leaf value-2"}'),
- (5018, 3006, 5017, '/parent-209/child-001', '{"first-child-leaf": "first-child-leaf value"}'),
- (5019, 3006, 5017, '/parent-209/child-002', '{"second-child-leaf": "second-child-leaf value"}'),
- (5020, 3006, 5019, '/parent-209/child-002/grand-child', '{"grand-child-leaf": "grand-child-leaf value"}');
-
-INSERT INTO FRAGMENT (ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
- (4201, 3003, null, '/parent-200', '{"leaf-value": "original"}'),
- (4202, 3003, 4201, '/parent-200/child-201', '{"leaf-value": "original"}'),
- (4203, 3003, 4202, '/parent-200/child-201/grand-child', '{"leaf-value": "original"}'),
- (4206, 3003, null, '/parent-201', '{"leaf-value": "original"}'),
- (4207, 3003, 4206, '/parent-201/child-203', '{}'),
- (4208, 3003, 4206, '/parent-201/child-204[@key=''A'']', '{"key": "A"}'),
- (4209, 3003, 4206, '/parent-201/child-204[@key=''B'']', '{"key": "B"}'),
- (4211, 3003, null, '/parent-202', '{"leaf-value": "original"}'),
- (4212, 3003, 4211, '/parent-202/child-205[@key=''A'' and @key2=''B'']', '{"key": "A", "key2": "B"}'),
- (4213, 3003, 4211, '/parent-202/child-206[@key=''A'']', '{"key": "A"}'),
- (4214, 3003, null, '/parent-203', '{"leaf-value": "original"}'),
- (4215, 3003, 4214, '/parent-203/child-203', '{}'),
- (4216, 3003, 4214, '/parent-203/child-204[@key=''A'']', '{"key": "A"}'),
- (4217, 3003, 4214, '/parent-203/child-204[@key=''B'']', '{"key": "B"}'),
- (4218, 3003, 4217, '/parent-203/child-204[@key=''B'']/grand-child-204[@key2=''Y'']', '{"key": "B", "key2": "Y"}'),
- (4226, 3003, null, '/parent-206', '{"leaf-value": "original"}'),
- (4227, 3003, 4226, '/parent-206/child-206', '{}'),
- (4228, 3003, 4227, '/parent-206/child-206/grand-child-206', '{}'),
- (4229, 3003, 4227, '/parent-206/child-206/grand-child-206[@key=''A'']', '{"key": "A"}'),
- (4230, 3003, 4227, '/parent-206/child-206/grand-child-206[@key=''X'']', '{"key": "X"}'),
- (4231, 3003, null, '/parent-206[@key=''A'']', '{"key": "A"}'),
- (4232, 3003, 4231, '/parent-206[@key=''A'']/child-206', '{}'),
- (4233, 3003, null, '/parent-206[@key=''B'']', '{"key": "B"}');
-
-INSERT INTO FRAGMENT (ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
- (5000, 3004, null, '/dmi-registry/cm-handles[@id=''PNFDemo'']', '{"id": "PNFDemo", "dmi-service-name": "http://172.21.235.14:8783", "dmi-data-service-name": "", "dmi-model-service-name": ""}'),
- (5001, 3004, null, '/dmi-registry/cm-handles[@id=''PNFDemo2'']', '{"id": "PNFDemo2", "dmi-service-name": "http://172.26.46.68:8783", "dmi-data-service-name": "", "dmi-model-service-name": ""}'),
- (5002, 3004, null, '/dmi-registry/cm-handles[@id=''PNFDemo3'']', '{"id": "PNFDemo3", "dmi-service-name": "http://172.26.46.68:8783", "dmi-data-service-name": "", "dmi-model-service-name": ""}'),
- (5003, 3004, null, '/dmi-registry/cm-handles[@id=''PNFDemo4'']', '{"id": "PNFDemo4", "dmi-service-name": "http://172.26.46.68:8783", "dmi-data-service-name": "", "dmi-model-service-name": ""}'),
- (5004, 3004, 5000, '/dmi-registry/cm-handles[@id=''PNFDemo'']/public-properties[@name=''Contact'']', '{"name": "Contact", "value": "newemailforstore@bookstore.com"}'),
- (5005, 3004, 5001, '/dmi-registry/cm-handles[@id=''PNFDemo2'']/public-properties[@name=''Contact'']', '{"name": "Contact", "value": "newemailforstore@bookstore.com"}'),
- (5006, 3004, 5002, '/dmi-registry/cm-handles[@id=''PNFDemo3'']/public-properties[@name=''Contact'']', '{"name": "Contact3", "value": "PNF3@bookstore.com"}'),
- (5007, 3004, 5003, '/dmi-registry/cm-handles[@id=''PNFDemo4'']/public-properties[@name=''Contact'']', '{"name": "Contact", "value": "newemailforstore@bookstore.com"}'),
- (5008, 3004, 5004, '/dmi-registry/cm-handles[@id=''PNFDemo4'']/public-properties[@name=''Contact2'']', '{"name": "Contact2", "value": "newemailforstore2@bookstore.com"}');
diff --git a/cps-ri/src/test/resources/data/perf-test.sql b/cps-ri/src/test/resources/data/perf-test.sql
deleted file mode 100644
index 48e8b1fbb5..0000000000
--- a/cps-ri/src/test/resources/data/perf-test.sql
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- ============LICENSE_START=======================================================
- Copyright (C) 2022-2023 Nordix Foundation.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- SPDX-License-Identifier: Apache-2.0
- ============LICENSE_END=========================================================
-*/
-
-INSERT INTO DATASPACE (ID, NAME) VALUES (9001, 'PERF-DATASPACE');
-
-INSERT INTO SCHEMA_SET (ID, NAME, DATASPACE_ID) VALUES (9002, 'PERF-SCHEMA-SET', 9001);
-
-INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES (9003, 'PERF-ANCHOR', 9001, 9002);
-
-INSERT INTO FRAGMENT (ID, ANCHOR_ID, PARENT_ID, XPATH) VALUES (0, 9003, null, '/perf-parent-1');
-
diff --git a/cps-ri/src/test/resources/data/schemaset.sql b/cps-ri/src/test/resources/data/schemaset.sql
deleted file mode 100644
index e5bf63b701..0000000000
--- a/cps-ri/src/test/resources/data/schemaset.sql
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- ============LICENSE_START=======================================================
- Copyright (C) 2020-2021 Pantheon.tech
- Modifications Copyright (C) 2020-2023 Nordix Foundation.
- Modifications Copyright (C) 2020-2021 Bell Canada.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- SPDX-License-Identifier: Apache-2.0
- ============LICENSE_END=========================================================
-*/
-
-INSERT INTO DATASPACE (ID, NAME) VALUES
- (1001, 'DATASPACE-001'), (1002, 'DATASPACE-002');
-
-INSERT INTO SCHEMA_SET (ID, NAME, DATASPACE_ID) VALUES
- (2001, 'SCHEMA-SET-001', 1001),
- (2002, 'SCHEMA-SET-002', 1001),
- (2100, 'SCHEMA-SET-100', 1001), -- for removal, not referenced by anchors
- (2101, 'SCHEMA-SET-101', 1001), -- for removal, having anchor and data associated
- (2003, 'SCHEMA-SET-003', 1002),
- (2004, 'SCHEMA-SET-004', 1002),
- (2005, 'SCHEMA-SET-005', 1001);
-
-INSERT INTO YANG_RESOURCE (ID, FILE_NAME, CONTENT, CHECKSUM, MODULE_NAME, REVISION) VALUES
- (3001, 'module1@2020-02-02.yang', 'CONTENT-001', 'e8bdda931099310de66532e08c3fafec391db29f55c81927b168f6aa8f81b73b',null,null),
- (3002, 'module2@2020-02-02.yang', 'CONTENT-002', '7e7d48afbe066ed0a890a09081859046d3dde52300dfcdb13be5b20780353a11','MODULE-NAME-002','REVISION-002'),
- (3003, 'module3@2020-02-02.yang', 'CONTENT-003', 'ca20c45fec8547633f05ff8905c48ffa7b02b94ec3ad4ed79922e6ba40779df3','MODULE-NAME-003','REVISION-002'),
- (3004, 'module4@2020-02-02.yang', 'CONTENT-004', 'f6ed09d343562e4d4ae5140f3c6a55df9c53f6da8e30dda8cbd9eaf9cd449be0','MODULE-NAME-004','REVISION-004'),
- (3100, 'orphan@2020-02-02.yang', 'ORPHAN', 'checksum',null,null), -- for auto-removal as orphan
- (3005, 'module5@2020-02-02.yang', 'CONTENT-005', 'checksum-005','MODULE-NAME-005','REVISION-002'),
- (3006, 'module6@2020-02-02.yang', 'CONTENT-006', 'checksum-006','MODULE-NAME-006','REVISION-006');
-
-INSERT INTO SCHEMA_SET_YANG_RESOURCES (SCHEMA_SET_ID, YANG_RESOURCE_ID) VALUES
- (2001, 3001), (2001, 3002),
- (2002, 3003), (2005, 3004),
- (2100, 3003), (2100, 3100), -- orphan removal case
- (2101, 3003), (2101, 3004),
- (2003, 3005), (2004, 3006);
-
-INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES -- anchors for removal
- (6001, 'ANCHOR1', 1001, 2101),
- (6002, 'ANCHOR2', 1001, 2101),
- (6003, 'ANCHOR3', 1001, 2005);
-
-INSERT INTO FRAGMENT (ID, XPATH, ANCHOR_ID) VALUES
- (7001, '/XPATH', 6001);
diff --git a/cps-ri/src/test/resources/hibernate.cfg.xml b/cps-ri/src/test/resources/hibernate.cfg.xml
deleted file mode 100644
index fae9275ddc..0000000000
--- a/cps-ri/src/test/resources/hibernate.cfg.xml
+++ /dev/null
@@ -1,16 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE hibernate-configuration PUBLIC
- "-//Hibernate/Hibernate Configuration DTD 3.0//EN"
- "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
-
-<hibernate-configuration>
- <session-factory>
- <property name="hibernate.connection.driver_class">org.postgresql.Driver</property>
- <property name="hibernate.connection.url">${DB_URL}</property>
- <property name="hibernate.connection.username">${DB_USERNAME}</property>
- <property name="hibernate.connection.password">${DB_PASSWORD}</property>
- <property name="hibernate.dialect">org.hibernate.dialect.PostgreSQL82Dialect</property>
- <property name="show_sql">true</property>
- <property name="hibernate.hbm2ddl.auto">none</property>
- </session-factory>
-</hibernate-configuration> \ No newline at end of file
diff --git a/cps-service/pom.xml b/cps-service/pom.xml
index 45c4d303e4..158eebfd6b 100644
--- a/cps-service/pom.xml
+++ b/cps-service/pom.xml
@@ -29,7 +29,7 @@
<parent>
<groupId>org.onap.cps</groupId>
<artifactId>cps-parent</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<relativePath>../cps-parent/pom.xml</relativePath>
</parent>
diff --git a/docs/api/swagger/ncmp/openapi.yaml b/docs/api/swagger/ncmp/openapi.yaml
index 5cc443b062..ea9786ef7f 100644
--- a/docs/api/swagger/ncmp/openapi.yaml
+++ b/docs/api/swagger/ncmp/openapi.yaml
@@ -625,7 +625,7 @@ paths:
dmi-response:
http-code: 400
body: Bad Request
- /v1/batch/data/ds/{datastore-name}:
+ /v1/data:
post:
tags:
- network-cm-proxy
@@ -635,51 +635,6 @@ paths:
to identify the relevant messages.
operationId: getResourceDataForCmHandleBatch
parameters:
- - name: datastore-name
- in: path
- description: The type of the requested data
- required: true
- schema:
- type: string
- example: ncmp-datastore:running
- - name: resourceIdentifier
- in: query
- description: The format of resource identifier depend on the associated DMI
- Plugin implementation. For ONAP DMI Plugin it will be RESTConf paths but
- it can really be anything.
- required: true
- allowReserved: true
- schema:
- type: string
- examples:
- sample 1:
- value:
- resourceIdentifier: \shops\bookstore
- sample 2:
- value:
- resourceIdentifier: "\\shops\\bookstore\\categories[@code=1]"
- sample 3:
- value:
- resourceIdentifier: "parent=shops,child=bookstore"
- - name: options
- in: query
- description: "options parameter in query, it is mandatory to wrap key(s)=value(s)\
- \ in parenthesis'()'. The format of options parameter depend on the associated\
- \ DMI Plugin implementation."
- required: false
- allowReserved: true
- schema:
- type: string
- examples:
- sample 1:
- value:
- options: (depth=3)
- sample 2:
- value:
- options: (fields=book)
- sample 3:
- value:
- options: "(depth=2,fields=book/authors)"
- name: topic
in: query
description: mandatory topic parameter in query.
@@ -691,18 +646,11 @@ paths:
sample 1:
value:
topic: my-topic-name
- - name: include-descendants
- in: query
- description: Determines if descendants are included in response
- required: false
- schema:
- type: boolean
- default: false
requestBody:
content:
application/json:
schema:
- type: object
+ $ref: '#/components/schemas/ResourceDataBatchRequest'
required: true
responses:
"200":
@@ -1485,6 +1433,43 @@ components:
example: Bad Gateway Error Message NCMP
dmi-response:
$ref: '#/components/schemas/DmiErrorMessage_dmiresponse'
+ ResourceDataBatchRequest:
+ title: get resource data for given array of operations
+ type: object
+ properties:
+ operations:
+ type: array
+ description: contains batch request details
+ items:
+ $ref: '#/components/schemas/BatchOperationDefinition'
+ BatchOperationDefinition:
+ required:
+ - datastore
+ - operation
+ - operationId
+ properties:
+ operation:
+ type: string
+ example: read
+ operationId:
+ type: string
+ example: "12"
+ datastore:
+ type: string
+ example: ncmp-datastore:passthrough-operational
+ options:
+ type: string
+ example: (fields=schemas/schema)
+ resourceIdentifier:
+ type: string
+ example: parent/child
+ targetIds:
+ type: array
+ example:
+ - da310eecdb8d44c2acc0ddaae01174b1
+ - c748c58f8e0b438f9fd1f28370b17d47
+ items:
+ type: string
RestModuleReference:
title: Module reference details
type: object
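
Put together, a request body satisfying the new ResourceDataBatchRequest schema would look like this (values taken from the schema examples above):

    {
      "operations": [
        {
          "operation": "read",
          "operationId": "12",
          "datastore": "ncmp-datastore:passthrough-operational",
          "options": "(fields=schemas/schema)",
          "resourceIdentifier": "parent/child",
          "targetIds": [
            "da310eecdb8d44c2acc0ddaae01174b1",
            "c748c58f8e0b438f9fd1f28370b17d47"
          ]
        }
      ]
    }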
diff --git a/docs/release-notes.rst b/docs/release-notes.rst
index 0b2d2a4612..60050c1a3c 100755
--- a/docs/release-notes.rst
+++ b/docs/release-notes.rst
@@ -16,6 +16,33 @@ CPS Release Notes
.. * * * MONTREAL * * *
.. ========================
+Version: 3.3.3
+==============
+
+Release Data
+------------
+
++--------------------------------------+--------------------------------------------------------+
+| **CPS Project** | |
+| | |
++--------------------------------------+--------------------------------------------------------+
+| **Docker images** | onap/cps-and-ncmp:3.3.3 |
+| | |
++--------------------------------------+--------------------------------------------------------+
+| **Release designation** | 3.3.3 Montreal |
+| | |
++--------------------------------------+--------------------------------------------------------+
+| **Release date** | Not yet released |
+| | |
++--------------------------------------+--------------------------------------------------------+
+
+Bug Fixes
+---------
+3.3.3
+
+Features
+--------
+
Version: 3.3.2
==============
@@ -32,18 +59,22 @@ Release Data
| **Release designation** | 3.3.2 Montreal |
| | |
+--------------------------------------+--------------------------------------------------------+
-| **Release date** | Not yet released |
+| **Release date** | 2023 June 15 |
| | |
+--------------------------------------+--------------------------------------------------------+
Bug Fixes
---------
3.3.2
- - None
+ - `CPS-1716 <https://jira.onap.org/browse/CPS-1716>`_ NCMP: Java Heap OutOfMemory errors and slow registration in case of 20k cmhandles
Features
--------
- `CPS-1006 <https://jira.onap.org/browse/CPS-1006>`_ Extend CPS PATCH API to allow update of leaves for multiple data nodes
+ - `CPS-1273 <https://jira.onap.org/browse/CPS-1273>`_ Add <,> operators support to cps-path
+ - `CPS-1664 <https://jira.onap.org/browse/CPS-1664>`_ Use recursive SQL to fetch descendants in CpsPath queries to improve query performance
+ - `CPS-1676 <https://jira.onap.org/browse/CPS-1676>`_ Entity ID types do not match types in database definition
+ - `CPS-1677 <https://jira.onap.org/browse/CPS-1677>`_ Remove dataspace_id column from Fragment table
Version: 3.3.1
==============
diff --git a/integration-test/pom.xml b/integration-test/pom.xml
index fa403a9789..5dd64cbb2f 100644
--- a/integration-test/pom.xml
+++ b/integration-test/pom.xml
@@ -23,7 +23,7 @@
<parent>
<groupId>org.onap.cps</groupId>
<artifactId>cps-parent</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<relativePath>../cps-parent/pom.xml</relativePath>
</parent>
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/base/CpsIntegrationSpecBase.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/base/CpsIntegrationSpecBase.groovy
index b942a43af2..a1e03529c3 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/base/CpsIntegrationSpecBase.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/base/CpsIntegrationSpecBase.groovy
@@ -25,10 +25,12 @@ import org.onap.cps.api.impl.CpsAdminServiceImpl
import org.onap.cps.api.impl.CpsDataServiceImpl
import org.onap.cps.api.impl.CpsModuleServiceImpl
import org.onap.cps.integration.DatabaseTestContainer
+import org.onap.cps.spi.config.CpsSessionFactory
import org.onap.cps.spi.exceptions.DataspaceNotFoundException
import org.onap.cps.spi.model.DataNode
import org.onap.cps.spi.repository.DataspaceRepository
import org.onap.cps.spi.impl.utils.CpsValidatorImpl
+import org.onap.cps.spi.utils.SessionManager
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.autoconfigure.EnableAutoConfiguration
import org.springframework.boot.autoconfigure.domain.EntityScan
@@ -42,12 +44,12 @@ import spock.lang.Specification
import java.time.OffsetDateTime
-@SpringBootTest(classes = [TestConfig, CpsAdminServiceImpl, CpsValidatorImpl])
+@SpringBootTest(classes = [TestConfig, CpsAdminServiceImpl, CpsValidatorImpl, SessionManager, CpsSessionFactory])
@Testcontainers
@EnableAutoConfiguration
@EnableJpaRepositories(basePackageClasses = [DataspaceRepository])
-@ComponentScan(basePackages = ["org.onap.cps.api", "org.onap.cps.spi.repository"])
-@EntityScan("org.onap.cps.spi.entities")
+@ComponentScan(basePackages = ['org.onap.cps.api', 'org.onap.cps.spi.repository'])
+@EntityScan('org.onap.cps.spi.entities')
class CpsIntegrationSpecBase extends Specification {
@Shared
@@ -69,6 +71,10 @@ class CpsIntegrationSpecBase extends Specification {
@Lazy
CpsQueryService cpsQueryService
+ @Autowired
+ @Lazy
+ SessionManager sessionManager
+
def static GENERAL_TEST_DATASPACE = 'generalTestDataspace'
def static BOOKSTORE_SCHEMA_SET = 'bookstoreSchemaSet'
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/base/TestConfig.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/base/TestConfig.groovy
index 18a2941615..e39e114405 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/base/TestConfig.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/base/TestConfig.groovy
@@ -34,6 +34,7 @@ import org.onap.cps.spi.repository.ModuleReferenceRepository
import org.onap.cps.spi.repository.SchemaSetRepository
import org.onap.cps.spi.repository.YangResourceRepository
import org.onap.cps.spi.utils.SessionManager
+import org.onap.cps.spi.utils.TimeLimiterProvider
import org.onap.cps.utils.JsonObjectMapper
import org.onap.cps.utils.TimedYangParser
import org.onap.cps.yang.TimedYangTextSchemaSourceSetBuilder
@@ -75,7 +76,7 @@ class TestConfig extends Specification{
@Autowired
@Lazy
- SessionManager stubbedSessionManager
+ SessionManager sessionManager
@Bean
CpsAdminPersistenceServiceImpl cpsAdminPersistenceService() {
@@ -84,7 +85,7 @@ class TestConfig extends Specification{
@Bean
CpsDataPersistenceService cpsDataPersistenceService() {
- return (CpsDataPersistenceService) new CpsDataPersistenceServiceImpl(dataspaceRepository, anchorRepository, fragmentRepository, jsonObjectMapper, stubbedSessionManager)
+ return (CpsDataPersistenceService) new CpsDataPersistenceServiceImpl(dataspaceRepository, anchorRepository, fragmentRepository, jsonObjectMapper, sessionManager)
}
@Bean
@@ -103,11 +104,6 @@ class TestConfig extends Specification{
}
@Bean
- SessionManager sessionManager() {
- return Stub(SessionManager)
- }
-
- @Bean
TimedYangParser timedYangParser() {
return new TimedYangParser()
}
@@ -117,4 +113,9 @@ class TestConfig extends Specification{
return new TimedYangTextSchemaSourceSetBuilder()
}
+ @Bean
+ TimeLimiterProvider timeLimiterProvider() {
+ return new TimeLimiterProvider()
+ }
+
}
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/SessionManagerIntegrationSpec.groovy
index ceb9dd4cf3..e0a2602b23 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerIntegrationSpec.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/SessionManagerIntegrationSpec.groovy
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2022 Nordix Foundation
+ * Copyright (C) 2022-2023 Nordix Foundation
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,28 +18,21 @@
* ============LICENSE_END=========================================================
*/
-package org.onap.cps.spi.utils
+package org.onap.cps.integration.functional
-import org.onap.cps.spi.config.CpsSessionFactory
+import org.onap.cps.integration.base.FunctionalSpecBase
import org.onap.cps.spi.exceptions.SessionManagerException
-import org.onap.cps.spi.impl.CpsPersistenceSpecBase
-import org.springframework.beans.factory.annotation.Autowired
-import org.springframework.test.context.jdbc.Sql
+import org.onap.cps.spi.utils.SessionManager
-class SessionManagerIntegrationSpec extends CpsPersistenceSpecBase{
+class SessionManagerIntegrationSpec extends FunctionalSpecBase {
- final static String SET_DATA = '/data/anchor.sql'
-
- @Autowired
SessionManager objectUnderTest
- @Autowired
- CpsSessionFactory cpsSessionFactory
-
- def sessionId
def shortTimeoutForTesting = 300L
+ def sessionId
- def setup(){
+ def setup() {
+ objectUnderTest = sessionManager
sessionId = objectUnderTest.startSession()
}
@@ -47,35 +40,32 @@ class SessionManagerIntegrationSpec extends CpsPersistenceSpecBase{
objectUnderTest.closeSession(sessionId, objectUnderTest.WITH_COMMIT)
}
- @Sql([CLEAR_DATA, SET_DATA])
def 'Lock anchor.'(){
when: 'session tries to acquire anchor lock by passing anchor entity details'
- objectUnderTest.lockAnchor(sessionId, DATASPACE_NAME, ANCHOR_NAME1, shortTimeoutForTesting)
+ objectUnderTest.lockAnchor(sessionId, FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, shortTimeoutForTesting)
then: 'no exception is thrown'
noExceptionThrown()
}
- @Sql([CLEAR_DATA, SET_DATA])
def 'Attempt to lock anchor when another session is holding the lock.'(){
given: 'another session that holds an anchor lock'
def otherSessionId = objectUnderTest.startSession()
- objectUnderTest.lockAnchor(otherSessionId,DATASPACE_NAME,ANCHOR_NAME1,shortTimeoutForTesting)
+ objectUnderTest.lockAnchor(otherSessionId, FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, shortTimeoutForTesting)
when: 'a session tries to acquire the same anchor lock'
- objectUnderTest.lockAnchor(sessionId,DATASPACE_NAME,ANCHOR_NAME1,shortTimeoutForTesting)
+ objectUnderTest.lockAnchor(sessionId, FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, shortTimeoutForTesting)
then: 'a session manager exception is thrown specifying operation reached timeout'
def thrown = thrown(SessionManagerException)
thrown.message.contains('Timeout')
then: 'when the other session holding the lock is closed, lock can finally be acquired'
objectUnderTest.closeSession(otherSessionId, objectUnderTest.WITH_COMMIT)
- objectUnderTest.lockAnchor(sessionId,DATASPACE_NAME,ANCHOR_NAME1,shortTimeoutForTesting)
+ objectUnderTest.lockAnchor(sessionId, FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, shortTimeoutForTesting)
}
- @Sql([CLEAR_DATA, SET_DATA])
def 'Lock anchor twice using the same session.'(){
given: 'session that already holds an anchor lock'
- objectUnderTest.lockAnchor(sessionId, DATASPACE_NAME, ANCHOR_NAME1, shortTimeoutForTesting)
+ objectUnderTest.lockAnchor(sessionId, FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, shortTimeoutForTesting)
when: 'same session tries to acquire same anchor lock'
- objectUnderTest.lockAnchor(sessionId, DATASPACE_NAME, ANCHOR_NAME1, shortTimeoutForTesting)
+ objectUnderTest.lockAnchor(sessionId, FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, shortTimeoutForTesting)
then: 'no exception is thrown'
noExceptionThrown()
}
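Editor's note: the SessionManager contract exercised by the moved spec boils down to the lifecycle below. This is a sketch inside a spec that autowires SessionManager (as CpsIntegrationSpecBase now does); the dataspace and anchor names are placeholders:

    // Lifecycle exercised by the spec above: start a session, lock an anchor
    // with a timeout, then close the session committing any work done under it.
    def sessionId = sessionManager.startSession()
    sessionManager.lockAnchor(sessionId, 'someDataspace', 'someAnchor', 300L)
    sessionManager.closeSession(sessionId, sessionManager.WITH_COMMIT)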
diff --git a/jacoco-report/pom.xml b/jacoco-report/pom.xml
index 375c7fcffb..6c8fdcf290 100644
--- a/jacoco-report/pom.xml
+++ b/jacoco-report/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>org.onap.cps</groupId>
<artifactId>cps-parent</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<relativePath>../cps-parent/pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/pom.xml b/pom.xml
index ae70b6cabd..9e4603d42c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -32,7 +32,7 @@
<groupId>org.onap.cps</groupId>
<artifactId>cps-aggregator</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<packaging>pom</packaging>
<name>cps</name>
diff --git a/releases/3.3.2-container.yaml b/releases/3.3.2-container.yaml
new file mode 100644
index 0000000000..a02cd39f60
--- /dev/null
+++ b/releases/3.3.2-container.yaml
@@ -0,0 +1,8 @@
+distribution_type: container
+container_release_tag: 3.3.2
+project: cps
+log_dir: cps-maven-docker-stage-master/921/
+ref: 5cec532ddc9079739d93ef10f1441f8c9fd75c22
+containers:
+ - name: 'cps-and-ncmp'
+ version: '3.3.2-20230615T111304Z'
diff --git a/releases/3.3.2.yaml b/releases/3.3.2.yaml
new file mode 100644
index 0000000000..b80be72223
--- /dev/null
+++ b/releases/3.3.2.yaml
@@ -0,0 +1,4 @@
+distribution_type: maven
+log_dir: cps-maven-stage-master/929/
+project: cps
+version: 3.3.2
\ No newline at end of file
diff --git a/spotbugs/pom.xml b/spotbugs/pom.xml
index 19d8b430cf..874aa04c68 100644
--- a/spotbugs/pom.xml
+++ b/spotbugs/pom.xml
@@ -25,7 +25,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.onap.cps</groupId>
<artifactId>spotbugs</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<properties>
<nexusproxy>https://nexus.onap.org</nexusproxy>
diff --git a/version.properties b/version.properties
index 20a0ce4b81..f6c96da8b9 100755
--- a/version.properties
+++ b/version.properties
@@ -22,7 +22,7 @@
major=3
minor=3
-patch=2
+patch=3
base_version=${major}.${minor}.${patch}