Diffstat (limited to 'cps-ncmp-service/src')
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy | 46
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/SerializationIntegrationSpec.groovy | 35
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy | 23
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy | 21
4 files changed, 70 insertions, 55 deletions
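
The diffs below all apply the same pattern: fixed TimeUnit sleeps are replaced by Spock's spock.util.concurrent.PollingConditions, and strict interaction counts on mocked collaborators are replaced by a boolean flag that a stub closure sets and the polling block asserts. A minimal standalone sketch of that pattern (the SomePublisher interface and all names here are hypothetical; the actual specs stub mockEventsPublisher and mockDmiRestClient):

    import spock.lang.Specification
    import spock.util.concurrent.PollingConditions

    class PollingInsteadOfSleepSpec extends Specification {

        // hypothetical collaborator standing in for the mocked publishers/clients in the specs below
        interface SomePublisher { void publish(Object event) }

        def somePublisher = Mock(SomePublisher)

        def 'observe an asynchronous call without a fixed sleep.'() {
            given: 'a flag toggled by the stubbed publish method'
                def publishCalled = false
                somePublisher.publish(*_) >> { publishCalled = true }
            when: 'the call happens on another thread'
                Thread.start { somePublisher.publish('some event') }
            then: 'the flag is observed within the polling window'
                new PollingConditions().within(1) {
                    assert publishCalled
                }
        }
    }
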
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy
index bb082fd7fd..fba1f953f4 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy
@@ -23,7 +23,6 @@ package org.onap.cps.ncmp.api.impl.async
import io.cloudevents.core.builder.CloudEventBuilder
import org.onap.cps.events.EventsPublisher
import org.onap.cps.ncmp.api.impl.config.kafka.KafkaConfig
-
import org.onap.cps.ncmp.api.kafka.ConsumerBaseSpec
import org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent
import org.spockframework.spring.SpringBean
@@ -32,6 +31,7 @@ import org.springframework.boot.autoconfigure.EnableAutoConfiguration
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.test.annotation.DirtiesContext
import org.testcontainers.spock.Testcontainers
+import spock.util.concurrent.PollingConditions
import java.util.concurrent.TimeUnit
@SpringBootTest(classes =[DataOperationEventConsumer, AsyncRestRequestResponseEventConsumer, RecordFilterStrategies, KafkaConfig])
@@ -50,28 +50,34 @@ class FilterStrategiesIntegrationSpec extends ConsumerBaseSpec {
def topic
def 'Legacy event consumer with cloud event.'() {
- given: 'a cloud event of type: #eventType'
+ given: 'a cloud event of type DataOperationEvent'
def cloudEvent = CloudEventBuilder.v1().withId('some id')
.withType('DataOperationEvent')
.withSource(URI.create('some-source'))
.build()
when: 'send the cloud event'
cloudEventKafkaTemplate.send(topic, cloudEvent)
- and: 'wait a little for async processing of message'
+ then: 'wait a little for async processing of message (a wait is needed here to avoid false positives)'
TimeUnit.MILLISECONDS.sleep(300)
- then: 'event is not consumed'
+ and: 'event is not consumed'
0 * mockEventsPublisher.publishEvent(*_)
}
def 'Legacy event consumer with valid legacy event.'() {
- given: 'a cloud event of type: #eventType'
+ given: 'a legacy event'
DmiAsyncRequestResponseEvent legacyEvent = new DmiAsyncRequestResponseEvent(eventId:'legacyEventId', eventTarget:'legacyEventTarget')
+ and: 'a flag to track the publish event call'
+ def publishEventMethodCalled = false
+ and: 'the (mocked) events publisher will use the flag to indicate if it is called'
+ mockEventsPublisher.publishEvent(*_) >> {
+ publishEventMethodCalled = true
+ }
when: 'send the cloud event'
legacyEventKafkaTemplate.send(topic, legacyEvent)
- and: 'wait a little for async processing of message'
- TimeUnit.MILLISECONDS.sleep(300)
then: 'the event is consumed by the (legacy) AsyncRestRequest consumer'
- 1 * mockEventsPublisher.publishEvent(*_)
+ new PollingConditions().within(1) {
+ assert publishEventMethodCalled == true
+ }
}
def 'Filtering Cloud Events on Type.'() {
@@ -80,17 +86,23 @@ class FilterStrategiesIntegrationSpec extends ConsumerBaseSpec {
.withType(eventType)
.withSource(URI.create('some-source'))
.build()
+ and: 'a flag to track the publish event call'
+ def publishEventMethodCalled = false
+ and: 'the (mocked) events publisher will use the flag to indicate if it is called'
+ mockEventsPublisher.publishCloudEvent(*_) >> {
+ publishEventMethodCalled = true
+ }
when: 'send the cloud event'
cloudEventKafkaTemplate.send(topic, cloudEvent)
- and: 'wait a little for async processing of message'
- TimeUnit.MILLISECONDS.sleep(300)
then: 'the event has only been forwarded for the correct type'
- expectedNUmberOfCallsToPublishForwardedEvent * mockEventsPublisher.publishCloudEvent(*_)
+ new PollingConditions(initialDelay: 0.3).within(1) {
+ assert publishEventMethodCalled == expectCallToPublishEventMethod
+ }
where: 'the following event types are used'
- eventType || expectedNUmberOfCallsToPublishForwardedEvent
- 'DataOperationEvent' || 1
- 'other type' || 0
- 'any type contain the word "DataOperationEvent"' || 1
+ eventType || expectCallToPublishEventMethod
+ 'DataOperationEvent' || true
+ 'other type' || false
+ 'any type containing the word "DataOperationEvent"' || true
}
//TODO Toine, add positive test with data to prove event is converted correctly (using correct factory)
@@ -98,9 +110,9 @@ class FilterStrategiesIntegrationSpec extends ConsumerBaseSpec {
def 'Non cloud events on same Topic.'() {
when: 'sending a non-cloud event on the same topic'
legacyEventKafkaTemplate.send(topic, 'simple string event')
- and: 'wait a little for async processing of message'
+ then: 'wait a little for async processing of message (a wait is needed here to avoid false positives)'
TimeUnit.MILLISECONDS.sleep(300)
- then: 'the event is not processed by this consumer'
+ and: 'the event is not processed by this consumer'
0 * mockEventsPublisher.publishCloudEvent(*_)
}
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/SerializationIntegrationSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/SerializationIntegrationSpec.groovy
index 212c6734e5..ee89333006 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/SerializationIntegrationSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/SerializationIntegrationSpec.groovy
@@ -24,7 +24,6 @@ import com.fasterxml.jackson.databind.ObjectMapper
import io.cloudevents.core.builder.CloudEventBuilder
import org.onap.cps.events.EventsPublisher
import org.onap.cps.ncmp.api.impl.config.kafka.KafkaConfig
-
import org.onap.cps.ncmp.api.kafka.ConsumerBaseSpec
import org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent
import org.onap.cps.ncmp.event.model.NcmpAsyncRequestResponseEvent
@@ -38,8 +37,7 @@ import org.springframework.boot.autoconfigure.EnableAutoConfiguration
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.test.annotation.DirtiesContext
import org.testcontainers.spock.Testcontainers
-
-import java.util.concurrent.TimeUnit
+import spock.util.concurrent.PollingConditions
@SpringBootTest(classes =[DataOperationEventConsumer, AsyncRestRequestResponseEventConsumer, RecordFilterStrategies, KafkaConfig])
@DirtiesContext
@@ -59,33 +57,38 @@ class SerializationIntegrationSpec extends ConsumerBaseSpec {
@Value('${app.ncmp.async-m2m.topic}')
def topic
- def capturedForwardedEvent
-
def 'Forwarding DataOperation Event Data.'() {
given: 'a data operation cloud event'
def cloudEvent = createCloudEvent()
+ and: 'a flag to track the publish cloud event call'
+ def publishCloudEventMethodCalled = false
+ and: 'the (mocked) events publisher will use the flag to indicate if it is called with the expected cloud event'
+ mockEventsPublisher.publishCloudEvent('some client topic', 'some-correlation-id', cloudEvent) >> {
+ publishCloudEventMethodCalled = true
+ }
when: 'send the event'
cloudEventKafkaTemplate.send(topic, cloudEvent)
- and: 'wait a little for async processing of message'
- TimeUnit.MILLISECONDS.sleep(300)
then: 'the event has been forwarded'
- 1 * mockEventsPublisher.publishCloudEvent('some client topic', 'some-correlation-id', _) >> { args -> { capturedForwardedEvent = args[2] } }
- and: 'the forwarded event is identical to the event that was sent'
- assert capturedForwardedEvent == cloudEvent
+ new PollingConditions().within(1) {
+ assert publishCloudEventMethodCalled == true
+ }
}
def 'Forwarding AsyncRestRequestResponse Event Data.'() {
given: 'async request response legacy event'
def dmiAsyncRequestResponseEvent = new DmiAsyncRequestResponseEvent(eventId: 'my-event-id',eventTarget: 'some client topic')
+ and: 'a flag to track the publish event call'
+ def publishEventMethodCalled = false
+ and: 'the (mocked) events publisher will use the flag to indicate if it is called'
+ mockEventsPublisher.publishEvent(*_) >> {
+ publishEventMethodCalled = true
+ }
when: 'send the event'
legacyEventKafkaTemplate.send(topic, dmiAsyncRequestResponseEvent)
- and: 'wait a little for async processing of message'
- TimeUnit.MILLISECONDS.sleep(300)
then: 'the event has been forwarded'
- 1 * mockEventsPublisher.publishEvent('some client topic', 'my-event-id', _) >> { args -> { capturedForwardedEvent = args[2] } }
- and: 'the captured id and target of the forwarded event is same as the one that was sent'
- assert capturedForwardedEvent.eventId == dmiAsyncRequestResponseEvent.eventId
- assert capturedForwardedEvent.eventTarget == dmiAsyncRequestResponseEvent.eventTarget
+ new PollingConditions().within(1) {
+ assert publishEventMethodCalled == true
+ }
}
def createCloudEvent() {
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy
index 501714a2be..3b1709d802 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START========================================================
- * Copyright (C) 2022-2023 Nordix Foundation
+ * Copyright (C) 2022-2024 Nordix Foundation
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -28,6 +28,7 @@ import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.test.context.ContextConfiguration
import spock.lang.Specification
+import spock.util.concurrent.PollingConditions
import java.util.concurrent.BlockingQueue
import java.util.concurrent.TimeUnit
@@ -121,8 +122,10 @@ class SynchronizationCacheConfigSpec extends Specification {
moduleSyncStartedOnCmHandles.put('testKeyModuleSync', 'toBeExpired' as Object, 1, TimeUnit.SECONDS)
then: 'the entry is present in the map'
assert moduleSyncStartedOnCmHandles.get('testKeyModuleSync') != null
- and: 'the entry expires in less then 2 seconds'
- waitMax2SecondsForKeyExpiration(moduleSyncStartedOnCmHandles, 'testKeyModuleSync')
+ and: 'the entry expires'
+ new PollingConditions().within(10) {
+ assert moduleSyncStartedOnCmHandles.get('testKeyModuleSync') == null
+ }
}
def 'Time to Live Verify for Data Sync Semaphore'() {
@@ -130,16 +133,10 @@ class SynchronizationCacheConfigSpec extends Specification {
dataSyncSemaphores.put('testKeyDataSync', Boolean.TRUE, 1, TimeUnit.SECONDS)
then: 'the entry is present in the map'
assert dataSyncSemaphores.get('testKeyDataSync') != null
- and: 'the entry expires in less then 2 seconds'
- waitMax2SecondsForKeyExpiration(dataSyncSemaphores, 'testKeyDataSync')
- }
-
- def waitMax2SecondsForKeyExpiration(map, key) {
- def count = 0
- while ( map.get(key)!=null && ++count <= 20 ) {
- sleep(100)
- }
- return count < 20 // Should have expired in less the 20 x 100ms = 2 seconds!
+ and: 'the entry expires'
+ new PollingConditions().within(10) {
+ assert dataSyncSemaphores.get('testKeyDataSync') == null
+ }
}
}
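
The same polling approach covers cache-expiry checks, replacing the hand-rolled waitMax2SecondsForKeyExpiration helper. A minimal sketch against a throwaway Hazelcast instance (import paths assume Hazelcast 5; the real spec autowires the configured moduleSyncStartedOnCmHandles and dataSyncSemaphores maps):

    import com.hazelcast.core.Hazelcast
    import com.hazelcast.map.IMap
    import spock.lang.Specification
    import spock.util.concurrent.PollingConditions
    import java.util.concurrent.TimeUnit

    class CacheExpiryPollingSpec extends Specification {

        def 'poll for time-to-live expiry instead of sleeping.'() {
            given: 'a throwaway hazelcast map (hypothetical; the real spec uses the configured caches)'
                IMap<String, Object> someCache = Hazelcast.newHazelcastInstance().getMap('someCache')
            when: 'an entry is stored with a one second time to live'
                someCache.put('someKey', 'someValue', 1, TimeUnit.SECONDS)
            then: 'the entry is initially present'
                assert someCache.get('someKey') != null
            and: 'polling observes the expiry well within ten seconds'
                new PollingConditions().within(10) {
                    assert someCache.get('someKey') == null
                }
            cleanup:
                Hazelcast.shutdownAll()
        }
    }
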
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
index 3f2148f801..2229b32b0c 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
@@ -21,8 +21,6 @@
package org.onap.cps.ncmp.api.impl.operations
-import org.onap.cps.events.EventsPublisher
-
import static org.onap.cps.ncmp.api.impl.events.mapper.CloudEventMapper.toTargetEvent
import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_OPERATIONAL
import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_RUNNING
@@ -33,8 +31,8 @@ import static org.onap.cps.ncmp.api.NcmpResponseStatus.UNABLE_TO_READ_RESOURCE_D
import static org.onap.cps.ncmp.api.NcmpResponseStatus.DMI_SERVICE_NOT_RESPONDING
import com.fasterxml.jackson.databind.ObjectMapper
+import org.onap.cps.events.EventsPublisher
import org.onap.cps.ncmp.api.impl.config.NcmpConfiguration
-
import org.onap.cps.ncmp.api.impl.exception.HttpClientRequestException
import org.onap.cps.ncmp.api.impl.utils.DmiServiceUrlBuilder
import org.onap.cps.ncmp.api.impl.utils.context.CpsApplicationContext
@@ -49,6 +47,7 @@ import org.springframework.http.ResponseEntity
import org.springframework.test.context.ContextConfiguration
import org.springframework.http.HttpStatus
import spock.lang.Shared
+import spock.util.concurrent.PollingConditions
import java.util.concurrent.TimeoutException
@SpringBootTest
@@ -101,20 +100,24 @@ class DmiDataOperationsSpec extends DmiOperationsBaseSpec {
def dataOperationBatchRequestJsonData = TestUtils.getResourceFileContent('dataOperationRequest.json')
def dataOperationRequest = spiedJsonObjectMapper.convertJsonString(dataOperationBatchRequestJsonData, DataOperationRequest.class)
dataOperationRequest.dataOperationDefinitions[0].cmHandleIds = [cmHandleId]
- def requestBodyAsJsonStringArg = null
and: 'a positive response from DMI service when it is called with valid request parameters'
def responseFromDmi = new ResponseEntity<Object>(HttpStatus.ACCEPTED)
def expectedDmiBatchResourceDataUrl = "ncmp/v1/data/topic=my-topic-name"
def expectedBatchRequestAsJson = '{"operations":[{"operation":"read","operationId":"operational-14","datastore":"ncmp-datastore:passthrough-operational","options":"some option","resourceIdentifier":"some resource identifier","cmHandles":[{"id":"some-cm-handle","cmHandleProperties":{"prop1":"val1"}}]}]}'
mockDmiRestClient.postOperationWithJsonData(expectedDmiBatchResourceDataUrl, _, READ.operationName) >> responseFromDmi
dmiServiceUrlBuilder.getDataOperationRequestUrl(_, _) >> expectedDmiBatchResourceDataUrl
+ and: 'a flag to track the post operation call'
+ def postOperationWithJsonDataMethodCalled = false
+ and: 'the (mocked) dmi rest client will use the flag to indicate it is called with the expected request body'
+ mockDmiRestClient.postOperationWithJsonData(expectedDmiBatchResourceDataUrl, expectedBatchRequestAsJson, READ) >> {
+ postOperationWithJsonDataMethodCalled = true
+ }
when: 'get resource data for group of cm handles are invoked'
objectUnderTest.requestResourceDataFromDmi('my-topic-name', dataOperationRequest, 'requestId')
- then: 'wait a little to allow execution of service method by task executor (on separate thread)'
- Thread.sleep(100)
- then: 'validate ncmp generated dmi request body json args'
- 1 * mockDmiRestClient.postOperationWithJsonData(expectedDmiBatchResourceDataUrl, _, READ) >> { args -> requestBodyAsJsonStringArg = args[1] }
- assert requestBodyAsJsonStringArg == expectedBatchRequestAsJson
+ then: 'validate the post operation was called with the ncmp generated dmi request body'
+ new PollingConditions().within(1) {
+ assert postOperationWithJsonDataMethodCalled == true
+ }
}
def 'Execute (async) data operation from DMI service for #scenario.'() {
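
A related variant, sketched here with hypothetical names, combines the flag with argument capture inside the stub closure so the forwarded payload can also be asserted in the polling block:

    import spock.lang.Specification
    import spock.util.concurrent.PollingConditions

    class CaptureWhilePollingSpec extends Specification {

        // hypothetical collaborator; the real spec mocks the DMI rest client
        interface SomeRestClient { def post(String url, String body) }

        def someRestClient = Mock(SomeRestClient)

        def 'capture the request body while polling for the call.'() {
            given: 'a flag and a holder for the captured body'
                def postCalled = false
                def capturedBody = null
            and: 'a stub that records both'
                someRestClient.post(*_) >> { args ->
                    capturedBody = args[1]
                    postCalled = true
                }
            when: 'the client is invoked on another thread'
                Thread.start { someRestClient.post('some/url', '{"some":"body"}') }
            then: 'the call and its body are observed within the polling window'
                new PollingConditions().within(1) {
                    assert postCalled
                    assert capturedBody == '{"some":"body"}'
                }
        }
    }
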