From cc1c83c740d3bde18654098013f2b76eb8fa0567 Mon Sep 17 00:00:00 2001 From: JvD_Ericsson Date: Thu, 1 Feb 2024 13:55:50 +0000 Subject: Replace sleep with PollingConditions Issue-ID: CPS-1950 Signed-off-by: JvD_Ericsson Change-Id: I0e23c697ef6b5a134295728f352f2dff7be05a62 --- .../NcmpDatastoreRequestHandlerSpec.groovy | 6 +-- .../rest/executor/CpsNcmpTaskExecutorSpec.groovy | 25 ++++++------ .../async/FilterStrategiesIntegrationSpec.groovy | 46 ++++++++++++++-------- .../impl/async/SerializationIntegrationSpec.groovy | 35 ++++++++-------- .../SynchronizationCacheConfigSpec.groovy | 23 +++++------ .../impl/operations/DmiDataOperationsSpec.groovy | 21 +++++----- 6 files changed, 86 insertions(+), 70 deletions(-) diff --git a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/handlers/NcmpDatastoreRequestHandlerSpec.groovy b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/handlers/NcmpDatastoreRequestHandlerSpec.groovy index f06af6c10a..4edbf3569f 100644 --- a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/handlers/NcmpDatastoreRequestHandlerSpec.groovy +++ b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/handlers/NcmpDatastoreRequestHandlerSpec.groovy @@ -44,7 +44,7 @@ class NcmpDatastoreRequestHandlerSpec extends Specification { def 'Attempt to execute async get request with #scenario.'() { given: 'notification feature is turned on/off' objectUnderTest.notificationFeatureEnabled = notificationFeatureEnabled - and: ' a flag to track the network service call' + and: 'a flag to track the network service call' def networkServiceMethodCalled = false and: 'the (mocked) service will use the flag to indicate if it is called' mockNetworkCmProxyDataService.getResourceDataForCmHandle('ds', 'ch1', 'resource1', 'options', _, _) >> { @@ -54,7 +54,7 @@ class NcmpDatastoreRequestHandlerSpec extends Specification { objectUnderTest.executeRequest('ds', 'ch1', 'resource1', 'options', topic, false) then: 'the task is executed in an async fashion or not' expectedCalls * spiedCpsNcmpTaskExecutor.executeTask(*_) - and: 'the service request is always invoked within 1 seconds' + and: 'the service request is invoked' new PollingConditions().within(1) { assert networkServiceMethodCalled == true } @@ -97,7 +97,7 @@ class NcmpDatastoreRequestHandlerSpec extends Specification { objectUnderTest.executeRequest('myTopic', dataOperationRequest) then: 'the task is executed in an async fashion' 1 * spiedCpsNcmpTaskExecutor.executeTask(*_) - and: 'the network service is invoked within 1 seconds' + and: 'the network service is invoked' new PollingConditions().within(1) { assert networkServiceMethodCalled == true } diff --git a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/executor/CpsNcmpTaskExecutorSpec.groovy b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/executor/CpsNcmpTaskExecutorSpec.groovy index 55590168d9..5f62aca684 100644 --- a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/executor/CpsNcmpTaskExecutorSpec.groovy +++ b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/executor/CpsNcmpTaskExecutorSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2023 Nordix Foundation + * Copyright (C) 2023-2024 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -28,6 +28,7 @@ import org.junit.jupiter.api.AfterEach import org.junit.jupiter.api.BeforeEach import org.slf4j.LoggerFactory import spock.lang.Specification +import spock.util.concurrent.PollingConditions class CpsNcmpTaskExecutorSpec extends Specification { @@ -37,23 +38,23 @@ class CpsNcmpTaskExecutorSpec extends Specification { @BeforeEach void setup() { - ((Logger) LoggerFactory.getLogger(CpsNcmpTaskExecutor.class)).addAppender(logger); - logger.start(); + ((Logger) LoggerFactory.getLogger(CpsNcmpTaskExecutor.class)).addAppender(logger) + logger.start() } @AfterEach void teardown() { - ((Logger) LoggerFactory.getLogger(CpsNcmpTaskExecutor.class)).detachAndStopAllAppenders(); + ((Logger) LoggerFactory.getLogger(CpsNcmpTaskExecutor.class)).detachAndStopAllAppenders() } def 'Execute successful task.'() { when: 'task is executed' objectUnderTest.executeTask(taskSupplier(), enoughTime) - and: 'wait a little for async execution completion' - Thread.sleep(10) then: 'an event is logged with level INFO' - def loggingEvent = getLoggingEvent() - assert loggingEvent.level == Level.INFO + new PollingConditions().within(1) { + def loggingEvent = getLoggingEvent() + assert loggingEvent.level == Level.INFO + } and: 'the log indicates the task completed successfully' assert loggingEvent.formattedMessage == 'Async task completed successfully.' } @@ -61,11 +62,11 @@ class CpsNcmpTaskExecutorSpec extends Specification { def 'Execute failing task.'() { when: 'task is executed' objectUnderTest.executeTask(taskSupplierForFailingTask(), enoughTime) - and: 'wait a little for async execution completion' - Thread.sleep(10) then: 'an event is logged with level ERROR' - def loggingEvent = getLoggingEvent() - assert loggingEvent.level == Level.ERROR + new PollingConditions().within(1) { + def loggingEvent = getLoggingEvent() + assert loggingEvent.level == Level.ERROR + } and: 'the original error message is logged' assert loggingEvent.formattedMessage.contains('original exception message') } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy index bb082fd7fd..fba1f953f4 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy @@ -23,7 +23,6 @@ package org.onap.cps.ncmp.api.impl.async import io.cloudevents.core.builder.CloudEventBuilder import org.onap.cps.events.EventsPublisher import org.onap.cps.ncmp.api.impl.config.kafka.KafkaConfig - import org.onap.cps.ncmp.api.kafka.ConsumerBaseSpec import org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent import org.spockframework.spring.SpringBean @@ -32,6 +31,7 @@ import org.springframework.boot.autoconfigure.EnableAutoConfiguration import org.springframework.boot.test.context.SpringBootTest import org.springframework.test.annotation.DirtiesContext import org.testcontainers.spock.Testcontainers +import spock.util.concurrent.PollingConditions import java.util.concurrent.TimeUnit @SpringBootTest(classes =[DataOperationEventConsumer, AsyncRestRequestResponseEventConsumer, RecordFilterStrategies, KafkaConfig]) @@ -50,28 +50,34 @@ class FilterStrategiesIntegrationSpec extends ConsumerBaseSpec { def topic def 'Legacy event consumer with cloud event.'() { - given: 'a cloud event of type: #eventType' + given: 'a data operation 
cloud event type' def cloudEvent = CloudEventBuilder.v1().withId('some id') .withType('DataOperationEvent') .withSource(URI.create('some-source')) .build() when: 'send the cloud event' cloudEventKafkaTemplate.send(topic, cloudEvent) - and: 'wait a little for async processing of message' + then: 'wait a little for async processing of message (must wait to try to avoid false positives)' TimeUnit.MILLISECONDS.sleep(300) - then: 'event is not consumed' + and: 'event is not consumed' 0 * mockEventsPublisher.publishEvent(*_) } def 'Legacy event consumer with valid legacy event.'() { - given: 'a cloud event of type: #eventType' + given: 'a legacy event' DmiAsyncRequestResponseEvent legacyEvent = new DmiAsyncRequestResponseEvent(eventId:'legacyEventId', eventTarget:'legacyEventTarget') + and: 'a flag to track the publish event call' + def publishEventMethodCalled = false + and: 'the (mocked) events publisher will use the flag to indicate if it is called' + mockEventsPublisher.publishEvent(*_) >> { + publishEventMethodCalled = true + } when: 'send the cloud event' legacyEventKafkaTemplate.send(topic, legacyEvent) - and: 'wait a little for async processing of message' - TimeUnit.MILLISECONDS.sleep(300) then: 'the event is consumed by the (legacy) AsynRestRequest consumer' - 1 * mockEventsPublisher.publishEvent(*_) + new PollingConditions().within(1) { + assert publishEventMethodCalled == true + } } def 'Filtering Cloud Events on Type.'() { @@ -80,17 +86,23 @@ class FilterStrategiesIntegrationSpec extends ConsumerBaseSpec { .withType(eventType) .withSource(URI.create('some-source')) .build() + and: 'a flag to track the publish event call' + def publishEventMethodCalled = false + and: 'the (mocked) events publisher will use the flag to indicate if it is called' + mockEventsPublisher.publishCloudEvent(*_) >> { + publishEventMethodCalled = true + } when: 'send the cloud event' cloudEventKafkaTemplate.send(topic, cloudEvent) - and: 'wait a little for async processing of message' - TimeUnit.MILLISECONDS.sleep(300) then: 'the event has only been forwarded for the correct type' - expectedNUmberOfCallsToPublishForwardedEvent * mockEventsPublisher.publishCloudEvent(*_) + new PollingConditions(initialDelay: 0.3).within(1) { + assert publishEventMethodCalled == expectCallToPublishEventMethod + } where: 'the following event types are used' - eventType || expectedNUmberOfCallsToPublishForwardedEvent - 'DataOperationEvent' || 1 - 'other type' || 0 - 'any type contain the word "DataOperationEvent"' || 1 + eventType || expectCallToPublishEventMethod + 'DataOperationEvent' || true + 'other type' || false + 'any type contain the word "DataOperationEvent"' || true } //TODO Toine, add positive test with data to prove event is converted correctly (using correct factory) @@ -98,9 +110,9 @@ class FilterStrategiesIntegrationSpec extends ConsumerBaseSpec { def 'Non cloud events on same Topic.'() { when: 'sending a non-cloud event on the same topic' legacyEventKafkaTemplate.send(topic, 'simple string event') - and: 'wait a little for async processing of message' + then: 'wait a little for async processing of message (must wait to try to avoid false positives)' TimeUnit.MILLISECONDS.sleep(300) - then: 'the event is not processed by this consumer' + and: 'the event is not processed by this consumer' 0 * mockEventsPublisher.publishCloudEvent(*_) } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/SerializationIntegrationSpec.groovy 
b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/SerializationIntegrationSpec.groovy index 212c6734e5..ee89333006 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/SerializationIntegrationSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/SerializationIntegrationSpec.groovy @@ -24,7 +24,6 @@ import com.fasterxml.jackson.databind.ObjectMapper import io.cloudevents.core.builder.CloudEventBuilder import org.onap.cps.events.EventsPublisher import org.onap.cps.ncmp.api.impl.config.kafka.KafkaConfig - import org.onap.cps.ncmp.api.kafka.ConsumerBaseSpec import org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent import org.onap.cps.ncmp.event.model.NcmpAsyncRequestResponseEvent @@ -38,8 +37,7 @@ import org.springframework.boot.autoconfigure.EnableAutoConfiguration import org.springframework.boot.test.context.SpringBootTest import org.springframework.test.annotation.DirtiesContext import org.testcontainers.spock.Testcontainers - -import java.util.concurrent.TimeUnit +import spock.util.concurrent.PollingConditions @SpringBootTest(classes =[DataOperationEventConsumer, AsyncRestRequestResponseEventConsumer, RecordFilterStrategies, KafkaConfig]) @DirtiesContext @@ -59,33 +57,38 @@ class SerializationIntegrationSpec extends ConsumerBaseSpec { @Value('${app.ncmp.async-m2m.topic}') def topic - def capturedForwardedEvent - def 'Forwarding DataOperation Event Data.'() { given: 'a data operation cloud event' def cloudEvent = createCloudEvent() + and: 'a flag to track the publish cloud event call' + def publishCloudEventMethodCalled = false + and: 'the (mocked) events publisher will use the flag to indicate if it is called and will capture the cloud event' + mockEventsPublisher.publishCloudEvent('some client topic', 'some-correlation-id', cloudEvent) >> { + publishCloudEventMethodCalled = true + } when: 'send the event' cloudEventKafkaTemplate.send(topic, cloudEvent) - and: 'wait a little for async processing of message' - TimeUnit.MILLISECONDS.sleep(300) then: 'the event has been forwarded' - 1 * mockEventsPublisher.publishCloudEvent('some client topic', 'some-correlation-id', _) >> { args -> { capturedForwardedEvent = args[2] } } - and: 'the forwarded event is identical to the event that was sent' - assert capturedForwardedEvent == cloudEvent + new PollingConditions().within(1) { + assert publishCloudEventMethodCalled == true + } } def 'Forwarding AsyncRestRequestResponse Event Data.'() { given: 'async request response legacy event' def dmiAsyncRequestResponseEvent = new DmiAsyncRequestResponseEvent(eventId: 'my-event-id',eventTarget: 'some client topic') + and: 'a flag to track the publish event call' + def publishEventMethodCalled = false + and: 'the (mocked) events publisher will use the flag to indicate if it is called and will capture the event' + mockEventsPublisher.publishEvent(*_) >> { + publishEventMethodCalled = true + } when: 'send the event' legacyEventKafkaTemplate.send(topic, dmiAsyncRequestResponseEvent) - and: 'wait a little for async processing of message' - TimeUnit.MILLISECONDS.sleep(300) then: 'the event has been forwarded' - 1 * mockEventsPublisher.publishEvent('some client topic', 'my-event-id', _) >> { args -> { capturedForwardedEvent = args[2] } } - and: 'the captured id and target of the forwarded event is same as the one that was sent' - assert capturedForwardedEvent.eventId == dmiAsyncRequestResponseEvent.eventId - assert capturedForwardedEvent.eventTarget == 
dmiAsyncRequestResponseEvent.eventTarget + new PollingConditions().within(1) { + assert publishEventMethodCalled == true + } } def createCloudEvent() { diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy index 501714a2be..3b1709d802 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================== - * Copyright (C) 2022-2023 Nordix Foundation + * Copyright (C) 2022-2024 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -28,6 +28,7 @@ import org.springframework.beans.factory.annotation.Autowired import org.springframework.boot.test.context.SpringBootTest import org.springframework.test.context.ContextConfiguration import spock.lang.Specification +import spock.util.concurrent.PollingConditions import java.util.concurrent.BlockingQueue import java.util.concurrent.TimeUnit @@ -121,8 +122,10 @@ class SynchronizationCacheConfigSpec extends Specification { moduleSyncStartedOnCmHandles.put('testKeyModuleSync', 'toBeExpired' as Object, 1, TimeUnit.SECONDS) then: 'the entry is present in the map' assert moduleSyncStartedOnCmHandles.get('testKeyModuleSync') != null - and: 'the entry expires in less then 2 seconds' - waitMax2SecondsForKeyExpiration(moduleSyncStartedOnCmHandles, 'testKeyModuleSync') + and: 'the entry expires' + new PollingConditions().within(10) { + assert moduleSyncStartedOnCmHandles.get('testKeyModuleSync') == null + } } def 'Time to Live Verify for Data Sync Semaphore'() { @@ -130,16 +133,10 @@ class SynchronizationCacheConfigSpec extends Specification { dataSyncSemaphores.put('testKeyDataSync', Boolean.TRUE, 1, TimeUnit.SECONDS) then: 'the entry is present in the map' assert dataSyncSemaphores.get('testKeyDataSync') != null - and: 'the entry expires in less then 2 seconds' - waitMax2SecondsForKeyExpiration(dataSyncSemaphores, 'testKeyDataSync') - } - - def waitMax2SecondsForKeyExpiration(map, key) { - def count = 0 - while ( map.get(key)!=null && ++count <= 20 ) { - sleep(100) - } - return count < 20 // Should have expired in less the 20 x 100ms = 2 seconds! 
+        and: 'the entry expires'
+            new PollingConditions().within(10) {
+                assert dataSyncSemaphores.get('testKeyDataSync') == null
+            }
     }
 }
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
index 3f2148f801..2229b32b0c 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
@@ -21,8 +21,6 @@
 
 package org.onap.cps.ncmp.api.impl.operations
 
-import org.onap.cps.events.EventsPublisher
-
 import static org.onap.cps.ncmp.api.impl.events.mapper.CloudEventMapper.toTargetEvent
 import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_OPERATIONAL
 import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_RUNNING
@@ -33,8 +31,8 @@ import static org.onap.cps.ncmp.api.NcmpResponseStatus.UNABLE_TO_READ_RESOURCE_D
 import static org.onap.cps.ncmp.api.NcmpResponseStatus.DMI_SERVICE_NOT_RESPONDING
 
 import com.fasterxml.jackson.databind.ObjectMapper
+import org.onap.cps.events.EventsPublisher
 import org.onap.cps.ncmp.api.impl.config.NcmpConfiguration
-
 import org.onap.cps.ncmp.api.impl.exception.HttpClientRequestException
 import org.onap.cps.ncmp.api.impl.utils.DmiServiceUrlBuilder
 import org.onap.cps.ncmp.api.impl.utils.context.CpsApplicationContext
@@ -49,6 +47,7 @@ import org.springframework.http.ResponseEntity
 import org.springframework.test.context.ContextConfiguration
 import org.springframework.http.HttpStatus
 import spock.lang.Shared
+import spock.util.concurrent.PollingConditions
 import java.util.concurrent.TimeoutException
 
 @SpringBootTest
@@ -101,20 +100,24 @@ class DmiDataOperationsSpec extends DmiOperationsBaseSpec {
             def dataOperationBatchRequestJsonData = TestUtils.getResourceFileContent('dataOperationRequest.json')
             def dataOperationRequest = spiedJsonObjectMapper.convertJsonString(dataOperationBatchRequestJsonData, DataOperationRequest.class)
             dataOperationRequest.dataOperationDefinitions[0].cmHandleIds = [cmHandleId]
-            def requestBodyAsJsonStringArg = null
         and: 'a positive response from DMI service when it is called with valid request parameters'
             def responseFromDmi = new ResponseEntity(HttpStatus.ACCEPTED)
             def expectedDmiBatchResourceDataUrl = "ncmp/v1/data/topic=my-topic-name"
             def expectedBatchRequestAsJson = '{"operations":[{"operation":"read","operationId":"operational-14","datastore":"ncmp-datastore:passthrough-operational","options":"some option","resourceIdentifier":"some resource identifier","cmHandles":[{"id":"some-cm-handle","cmHandleProperties":{"prop1":"val1"}}]}]}'
             mockDmiRestClient.postOperationWithJsonData(expectedDmiBatchResourceDataUrl, _, READ.operationName) >> responseFromDmi
             dmiServiceUrlBuilder.getDataOperationRequestUrl(_, _) >> expectedDmiBatchResourceDataUrl
+        and: 'a flag to track the post operation call'
+            def postOperationWithJsonDataMethodCalled = false
+        and: 'the (mocked) dmi rest client will use the flag to indicate it is called with the expected json request body'
+            mockDmiRestClient.postOperationWithJsonData(expectedDmiBatchResourceDataUrl, expectedBatchRequestAsJson, READ) >> {
+                postOperationWithJsonDataMethodCalled = true
+            }
         when: 'get resource data for group of cm handles are invoked'
             objectUnderTest.requestResourceDataFromDmi('my-topic-name', dataOperationRequest, 'requestId')
-        then: 'wait a little to allow 
execution of service method by task executor (on separate thread)'
-            Thread.sleep(100)
-        then: 'validate ncmp generated dmi request body json args'
-            1 * mockDmiRestClient.postOperationWithJsonData(expectedDmiBatchResourceDataUrl, _, READ) >> { args -> requestBodyAsJsonStringArg = args[1] }
-            assert requestBodyAsJsonStringArg == expectedBatchRequestAsJson
+        then: 'the post operation was called with the expected ncmp generated dmi request body'
+            new PollingConditions().within(1) {
+                assert postOperationWithJsonDataMethodCalled == true
+            }
     }
 
     def 'Execute (async) data operation from DMI service for #scenario.'() {
--
cgit 1.2.3-korg
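
For reference, the pattern adopted throughout this change can be reduced to the following self-contained sketch, assuming Spock 2.x with spock.util.concurrent.PollingConditions on the classpath; the spec, method and variable names below are illustrative only and do not come from the change itself. Instead of sleeping for a fixed interval and asserting once, the assertion closure is re-evaluated until it holds or the timeout expires.

// Illustrative sketch only (not part of the change above): polling for an async
// side effect instead of sleeping for a fixed time.
import spock.lang.Specification
import spock.util.concurrent.PollingConditions

import java.util.concurrent.CompletableFuture

class PollingInsteadOfSleepSpec extends Specification {

    def 'Assert an async side effect by polling instead of sleeping.'() {
        given: 'a flag that an async task will set on another thread'
            def taskCompleted = false
        when: 'the task is started'
            CompletableFuture.runAsync { taskCompleted = true }
        then: 'the flag is asserted repeatedly until it holds or 1 second passes'
            new PollingConditions().within(1) {
                assert taskCompleted == true
            }
    }
}

Note that polling only works for positive expectations; the negative cases in this change (asserting that an event is never consumed) still use a fixed TimeUnit.MILLISECONDS.sleep(300) wait, since a condition that is expected to stay false would otherwise pass immediately.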