path: root/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/data/async/FilterStrategiesIntegrationSpec.groovy
/*
 * ============LICENSE_START=======================================================
 * Copyright (c) 2023-2024 Nordix Foundation.
 *  ================================================================================
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an 'AS IS' BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 *  SPDX-License-Identifier: Apache-2.0
 *  ============LICENSE_END=========================================================
 */

package org.onap.cps.ncmp.impl.data.async

import io.cloudevents.core.builder.CloudEventBuilder
import org.onap.cps.events.EventsPublisher
import org.onap.cps.ncmp.api.impl.config.kafka.KafkaConfig
import org.onap.cps.ncmp.api.kafka.ConsumerBaseSpec
import org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent
import org.spockframework.spring.SpringBean
import org.springframework.beans.factory.annotation.Value
import org.springframework.boot.autoconfigure.EnableAutoConfiguration
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.test.annotation.DirtiesContext
import org.testcontainers.spock.Testcontainers
import spock.util.concurrent.PollingConditions

import java.util.concurrent.TimeUnit

@SpringBootTest(classes = [DataOperationEventConsumer, AsyncRestRequestResponseEventConsumer, RecordFilterStrategies, KafkaConfig])
@DirtiesContext
@Testcontainers
@EnableAutoConfiguration
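/**
 * Integration test for the record filter strategies on the shared async-m2m topic: cloud events
 * are only forwarded when their type contains 'DataOperationEvent', legacy
 * DmiAsyncRequestResponseEvents are consumed and forwarded by the legacy consumer, and unrelated
 * messages on the same topic are ignored.
 */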
class FilterStrategiesIntegrationSpec extends ConsumerBaseSpec {

    @SpringBean
    EventsPublisher mockEventsPublisher = Mock()

    @SpringBean
    NcmpAsyncRequestResponseEventMapper mapper = Stub()

    @Value('${app.ncmp.async-m2m.topic}')
    def topic

    def 'Legacy event consumer with cloud event.'() {
        given: 'a cloud event of type DataOperationEvent'
            def cloudEvent = CloudEventBuilder.v1().withId('some id')
                .withType('DataOperationEvent')
                .withSource(URI.create('some-source'))
                .build()
        when: 'send the cloud event'
            cloudEventKafkaTemplate.send(topic, cloudEvent)
        then: 'wait a little for async processing of the message (the wait is needed to avoid false positives)'
            TimeUnit.MILLISECONDS.sleep(300)
        and: 'the event is not consumed'
            0 * mockEventsPublisher.publishEvent(*_)
    }

    def 'Legacy event consumer with valid legacy event.'() {
        given: 'a legacy event'
            DmiAsyncRequestResponseEvent legacyEvent = new DmiAsyncRequestResponseEvent(eventId:'legacyEventId', eventTarget:'legacyEventTarget')
        and: 'a flag to track the publish event call'
            def publishEventMethodCalled = false
        and: 'the (mocked) events publisher will use the flag to indicate if it is called'
            mockEventsPublisher.publishEvent(*_) >> {
                publishEventMethodCalled = true
            }
        when: 'send the legacy event'
            legacyEventKafkaTemplate.send(topic, legacyEvent)
        then: 'the event is consumed by the (legacy) AsyncRestRequestResponseEvent consumer'
            new PollingConditions().within(1) {
                assert publishEventMethodCalled == true
            }
    }

    def 'Filtering Cloud Events on Type.'() {
        given: 'a cloud event of type: #eventType'
            def cloudEvent = CloudEventBuilder.v1().withId('some id')
                .withType(eventType)
                .withSource(URI.create('some-source'))
                .build()
        and: 'a flag to track the publish event call'
            def publishEventMethodCalled = false
        and: 'the (mocked) events publisher will use the flag to indicate if it is called'
            mockEventsPublisher.publishCloudEvent(*_) >> {
                publishEventMethodCalled = true
            }
        when: 'send the cloud event'
            cloudEventKafkaTemplate.send(topic, cloudEvent)
        then: 'the event has only been forwarded for the correct type'
            new PollingConditions(initialDelay: 0.3).within(1) {
                assert publishEventMethodCalled == expectCallToPublishEventMethod
            }
        where: 'the following event types are used'
            eventType                                            || expectCallToPublishEventMethod
            'DataOperationEvent'                                 || true
            'other type'                                         || false
            'any type containing the word "DataOperationEvent"'  || true
    }

    //TODO Toine, add positive test with data to prove event is converted correctly (using correct factory)
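    // A minimal sketch of the positive test suggested above. It is kept as a comment because it
    // rests on assumptions not verified here: that the mapper exposes a 'toNcmpAsyncEvent'
    // conversion method, that the converted type is NcmpAsyncRequestResponseEvent (from the same
    // event model package as DmiAsyncRequestResponseEvent), and that the consumer forwards the
    // converted event through publishEvent.
    //
    // def 'Legacy event consumer converts and forwards a legacy event (sketch).'() {
    //     given: 'a legacy event and the converted event the stubbed mapper will return'
    //         def legacyEvent = new DmiAsyncRequestResponseEvent(eventId: 'legacyEventId', eventTarget: 'legacyEventTarget')
    //         def convertedEvent = new NcmpAsyncRequestResponseEvent(eventId: 'legacyEventId')
    //         mapper.toNcmpAsyncEvent(_) >> convertedEvent
    //     and: 'the (mocked) events publisher captures the arguments it is called with'
    //         def publishedArguments = []
    //         mockEventsPublisher.publishEvent(*_) >> { args -> publishedArguments = args }
    //     when: 'the legacy event is sent'
    //         legacyEventKafkaTemplate.send(topic, legacyEvent)
    //     then: 'the converted event is eventually published'
    //         new PollingConditions().within(1) {
    //             assert publishedArguments.contains(convertedEvent)
    //         }
    // }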

    def 'Non cloud events on same Topic.'() {
        when: 'sending a non-cloud event on the same topic'
            legacyEventKafkaTemplate.send(topic, 'simple string event')
        then: 'wait a little for async processing of the message (the wait is needed to avoid false positives)'
            TimeUnit.MILLISECONDS.sleep(300)
        and: 'the event is not processed by this consumer'
            0 * mockEventsPublisher.publishCloudEvent(*_)
    }

}