author | kjaniak <kornel.janiak@nokia.com> | 2019-06-26 15:57:29 +0200
---|---|---
committer | kjaniak <kornel.janiak@nokia.com> | 2019-07-02 14:53:54 +0200
commit | 3e4f108d66fb31e5ea713fef1cf7bc99c0510c21 (patch) |
tree | 74ca728ac2c4b43d6bbd1d39ce4335deca4f6040 | /sources/hv-collector-kafka-consumer/src/test
parent | 31dffbef6a4aab8e19d2c5a9f5ac8531d931197a (diff) |
Reorganize kafka module
Change-Id: I2eb7a8a6e92c9d89586b877f4cae438497b62ae2
Issue-ID: DCAEGEN2-1635
Signed-off-by: kjaniak <kornel.janiak@nokia.com>
Diffstat (limited to 'sources/hv-collector-kafka-consumer/src/test')
3 files changed, 168 insertions, 10 deletions
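The new KafkaSourceTest added below drives a periodic offset-polling loop. As a reading aid, here is a minimal Kotlin sketch of the contract those tests assume: a KafkaSource built from a KafkaConsumer, a topic set and a coroutine dispatcher, whose start() returns a Job that keeps reporting each assigned partition's end offset. The sketch is inferred from the test expectations only, not taken from the production KafkaSource in this commit; the OffsetReporter interface and the loop body are illustrative assumptions.

```kotlin
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.Job
import kotlinx.coroutines.delay
import kotlinx.coroutines.isActive
import kotlinx.coroutines.launch
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.common.TopicPartition

// Simplified stand-in for the real OffsetConsumer class (its test appears further below).
interface OffsetReporter {
    fun update(topicPartition: TopicPartition, offset: Long)
}

// Hypothetical reconstruction of the KafkaSource contract exercised by KafkaSourceTest:
// start() launches a coroutine that periodically reads the end offset of every
// assigned partition and reports it, until the returned Job is cancelled.
class KafkaSource(private val kafkaConsumer: KafkaConsumer<ByteArray, ByteArray>,
                  private val topics: Set<String>,
                  private val dispatcher: CoroutineDispatcher = Dispatchers.IO) {

    fun start(offsetConsumer: OffsetReporter, updateInterval: Long): Job =
            GlobalScope.launch(dispatcher) {
                kafkaConsumer.subscribe(topics)
                while (isActive) {
                    // Report the current end offset of every assigned partition,
                    // then wait for the configured interval before the next round.
                    kafkaConsumer.endOffsets(kafkaConsumer.assignment())
                            .forEach { (partition, offset) -> offsetConsumer.update(partition, offset) }
                    delay(updateInterval)
                }
            }
}
```

With a test dispatcher, the first reporting round runs eagerly up to the first delay, which is why the tests can verify update() immediately after start() and again after advancing the virtual clock by the update interval.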
diff --git a/sources/hv-collector-kafka-consumer/src/test/kotlin/org/onap/dcae/collectors/veshv/kafkaconsumer/impl/KafkaSourceTest.kt b/sources/hv-collector-kafka-consumer/src/test/kotlin/org/onap/dcae/collectors/veshv/kafkaconsumer/impl/KafkaSourceTest.kt
new file mode 100644
index 00000000..b0eb7a52
--- /dev/null
+++ b/sources/hv-collector-kafka-consumer/src/test/kotlin/org/onap/dcae/collectors/veshv/kafkaconsumer/impl/KafkaSourceTest.kt
@@ -0,0 +1,154 @@
+/*
+ * ============LICENSE_START=======================================================
+ * dcaegen2-collectors-veshv
+ * ================================================================================
+ * Copyright (C) 2019 NOKIA
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.dcae.collectors.veshv.kafkaconsumer.impl
+
+import com.nhaarman.mockitokotlin2.argumentCaptor
+import com.nhaarman.mockitokotlin2.mock
+import com.nhaarman.mockitokotlin2.times
+import com.nhaarman.mockitokotlin2.verify
+import com.nhaarman.mockitokotlin2.verifyZeroInteractions
+import com.nhaarman.mockitokotlin2.whenever
+import kotlinx.coroutines.ExperimentalCoroutinesApi
+import kotlinx.coroutines.cancelAndJoin
+import kotlinx.coroutines.test.TestCoroutineDispatcher
+import kotlinx.coroutines.test.runBlockingTest
+import org.apache.kafka.clients.consumer.KafkaConsumer
+import org.apache.kafka.common.TopicPartition
+import org.assertj.core.api.Assertions.assertThat
+import org.jetbrains.spek.api.Spek
+import org.jetbrains.spek.api.dsl.given
+import org.jetbrains.spek.api.dsl.it
+import org.jetbrains.spek.api.dsl.on
+import org.onap.dcae.collectors.veshv.kafkaconsumer.state.OffsetConsumer
+
+@ExperimentalCoroutinesApi
+object KafkaSourceTest : Spek({
+    given("KafkaSource") {
+        val testDispatcher = TestCoroutineDispatcher()
+        val mockedKafkaConsumer = mock<KafkaConsumer<ByteArray, ByteArray>>()
+        afterEachTest {
+            testDispatcher.cleanupTestCoroutines()
+        }
+        given("single topicName and partition") {
+            val topics = setOf("topicName")
+            val kafkaSource = KafkaSource(mockedKafkaConsumer, topics, testDispatcher)
+            val mockedOffsetConsumer = mock<OffsetConsumer>()
+            on("started KafkaSource") {
+                val topicPartition = createTopicPartition("topicName")
+                val topicPartitions = setOf(topicPartition)
+                whenever(mockedKafkaConsumer.assignment()).thenReturn(topicPartitions)
+                whenever(mockedKafkaConsumer.endOffsets(topicPartitions))
+                        .thenReturn(mapOf<TopicPartition, Long>(topicPartition to newOffset))
+
+                runBlockingTest {
+                    val job = kafkaSource.start(mockedOffsetConsumer, updateIntervalInMs)
+                    job.cancelAndJoin()
+                }
+
+                it("should call update function on topicName") {
+                    verify(mockedOffsetConsumer).update(topicPartition, newOffset)
+                }
+            }
+        }
+
+        given("two topics with partition") {
+            val topics = setOf(topicName1, topicName2)
+            val kafkaSource = KafkaSource(mockedKafkaConsumer, topics, testDispatcher)
+            val mockedOffsetConsumer = mock<OffsetConsumer>()
+
+            on("started KafkaSource for two iteration of while loop") {
+                val topicPartitionArgumentCaptor = argumentCaptor<TopicPartition>()
+                val offsetArgumentCaptor = argumentCaptor<Long>()
+                val topicPartitionArgumentCaptorAfterInterval = argumentCaptor<TopicPartition>()
+                val offsetArgumentCaptorAfterInterval = argumentCaptor<Long>()
+                val topicPartition1 = createTopicPartition(topicName1)
+                val topicPartition2 = createTopicPartition(topicName2)
+                val topicPartitions = setOf(topicPartition1, topicPartition2)
+                whenever(mockedKafkaConsumer.assignment()).thenReturn(topicPartitions)
+                val partitionToOffset1 =
+                        mapOf(topicPartition1 to newOffset,
+                                topicPartition2 to anotherNewOffset)
+                val partitionToOffset2 =
+                        mapOf(topicPartition1 to anotherNewOffset,
+                                topicPartition2 to newOffset)
+                whenever(mockedKafkaConsumer.endOffsets(topicPartitions))
+                        .thenReturn(partitionToOffset1, partitionToOffset2)
+
+                runBlockingTest {
+                    val job = kafkaSource.start(mockedOffsetConsumer, updateIntervalInMs)
+                    verify(mockedOffsetConsumer, times(topicsAmount)).update(topicPartitionArgumentCaptor.capture(),
+                            offsetArgumentCaptor.capture())
+
+                    testDispatcher.advanceTimeBy(updateIntervalInMs)
+
+                    verify(mockedOffsetConsumer, times(topicsAmountAfterInterval))
+                            .update(topicPartitionArgumentCaptorAfterInterval.capture(), offsetArgumentCaptorAfterInterval.capture())
+
+                    it("should calls update function with proper arguments - before interval") {
+                        assertThat(topicPartitionArgumentCaptor.firstValue).isEqualTo(topicPartition1)
+                        assertThat(offsetArgumentCaptor.firstValue).isEqualTo(newOffset)
+                        assertThat(topicPartitionArgumentCaptor.secondValue).isEqualTo(topicPartition2)
+                        assertThat(offsetArgumentCaptor.secondValue).isEqualTo(anotherNewOffset)
+                    }
+                    it("should calls update function with proper arguments - after interval") {
+                        assertThat(topicPartitionArgumentCaptorAfterInterval.thirdValue).isEqualTo(topicPartition1)
+                        assertThat(offsetArgumentCaptorAfterInterval.thirdValue).isEqualTo(anotherNewOffset)
+                        assertThat(topicPartitionArgumentCaptorAfterInterval.lastValue).isEqualTo(topicPartition2)
+                        assertThat(offsetArgumentCaptorAfterInterval.lastValue).isEqualTo(newOffset)
+                    }
+                    job.cancelAndJoin()
+                }
+            }
+        }
+
+        given("empty topicName list") {
+            val emptyTopics = emptySet<String>()
+            val kafkaSource = KafkaSource(mockedKafkaConsumer, emptyTopics, testDispatcher)
+            val mockedOffsetConsumer = mock<OffsetConsumer>()
+
+            on("call of function start") {
+                val emptyTopicPartitions = setOf(null)
+                whenever(mockedKafkaConsumer.assignment()).thenReturn(emptyTopicPartitions)
+                whenever(mockedKafkaConsumer.endOffsets(emptyTopicPartitions))
+                        .thenReturn(emptyMap())
+
+                runBlockingTest {
+                    val job = kafkaSource.start(mockedOffsetConsumer, updateIntervalInMs)
+                    job.cancelAndJoin()
+                }
+
+                it("should not interact with OffsetConsumer") {
+                    verifyZeroInteractions(mockedOffsetConsumer)
+                }
+            }
+        }
+
+    }
+})
+
+private const val updateIntervalInMs = 10L
+private const val partitionNumber = 0
+private const val newOffset = 2L
+private const val anotherNewOffset = 10L
+private const val topicName1 = "topicName1"
+private const val topicName2 = "topicName2"
+private const val topicsAmount = 2
+private const val topicsAmountAfterInterval = 4
+fun createTopicPartition(topic: String) = TopicPartition(topic, partitionNumber)
\ No newline at end of file
diff --git a/sources/hv-collector-kafka-consumer/src/test/kotlin/org/onap/dcae/collectors/veshv/kafkaconsumer/metrics/MicrometerMetricsTest.kt b/sources/hv-collector-kafka-consumer/src/test/kotlin/org/onap/dcae/collectors/veshv/kafkaconsumer/metrics/MicrometerMetricsTest.kt
index 41587867..96ba588f 100644
--- a/sources/hv-collector-kafka-consumer/src/test/kotlin/org/onap/dcae/collectors/veshv/kafkaconsumer/metrics/MicrometerMetricsTest.kt
+++ b/sources/hv-collector-kafka-consumer/src/test/kotlin/org/onap/dcae/collectors/veshv/kafkaconsumer/metrics/MicrometerMetricsTest.kt
@@ -74,7 +74,7 @@ object MicrometerMetricsTest : Spek({
             val offset = 966L
 
             it("should update $gaugeName") {
-                cut.notifyOffsetChanged(offset)
+                cut.notifyOffsetChanged(offset, "sample_topic", 1)
 
                 registry.verifyGauge(gaugeName) {
                     assertThat(it.value()).isCloseTo(offset.toDouble(), doublePrecision)
diff --git a/sources/hv-collector-kafka-consumer/src/test/kotlin/org/onap/dcae/collectors/veshv/kafkaconsumer/state/OffsetConsumerTest.kt b/sources/hv-collector-kafka-consumer/src/test/kotlin/org/onap/dcae/collectors/veshv/kafkaconsumer/state/OffsetConsumerTest.kt
index 6fb42d81..242f27be 100644
--- a/sources/hv-collector-kafka-consumer/src/test/kotlin/org/onap/dcae/collectors/veshv/kafkaconsumer/state/OffsetConsumerTest.kt
+++ b/sources/hv-collector-kafka-consumer/src/test/kotlin/org/onap/dcae/collectors/veshv/kafkaconsumer/state/OffsetConsumerTest.kt
@@ -21,10 +21,10 @@ package org.onap.dcae.collectors.veshv.kafkaconsumer.state
 
 import com.nhaarman.mockitokotlin2.mock
 import com.nhaarman.mockitokotlin2.verify
-import com.nhaarman.mockitokotlin2.whenever
-import org.apache.kafka.clients.consumer.ConsumerRecord
+import org.apache.kafka.common.TopicPartition
 import org.jetbrains.spek.api.Spek
 import org.jetbrains.spek.api.dsl.describe
+import org.jetbrains.spek.api.dsl.given
 import org.jetbrains.spek.api.dsl.it
 import org.jetbrains.spek.api.dsl.on
 import org.onap.dcae.collectors.veshv.kafkaconsumer.metrics.Metrics
@@ -33,16 +33,20 @@ object OffsetConsumerTest : Spek({
     describe("OffsetConsumer with metrics") {
         val mockedMetrics = mock<Metrics>()
         val offsetConsumer = OffsetConsumer(mockedMetrics)
+        given("topicName with partition"){
+            val topicPartition = TopicPartition(topicName, partitionNumber)
 
-        on("new update method call") {
-            val consumerRecord = mock<ConsumerRecord<ByteArray, ByteArray>>()
-            whenever(consumerRecord.offset()).thenReturn(1)
+            on("new update method call") {
+                offsetConsumer.update(topicPartition, newOffset)
 
-            offsetConsumer.update(consumerRecord)
-
-            it("should notify message offset metric") {
-                verify(mockedMetrics).notifyOffsetChanged(1)
+                it("should notify message newOffset metric") {
+                    verify(mockedMetrics).notifyOffsetChanged(newOffset, topicName, partitionNumber)
+                }
             }
         }
     }
 })
+
+private const val partitionNumber = 1
+private const val newOffset: Long = 99
+private const val topicName = "sample-topicName"
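The OffsetConsumerTest and MicrometerMetricsTest changes above reflect the reworked reporting signature: update() now takes a TopicPartition and an offset instead of a ConsumerRecord, and notifyOffsetChanged() receives the offset together with the topic name and partition number. Below is a minimal sketch of that interaction, again inferred from the tests rather than copied from the production sources in this commit; the Metrics shape and the console-backed implementation are illustrative assumptions.

```kotlin
import org.apache.kafka.common.TopicPartition

// Assumed shape of the Metrics interface, based on the call verified in the tests above.
interface Metrics {
    fun notifyOffsetChanged(offset: Long, topic: String, partition: Int)
}

// Sketch of OffsetConsumer: it only forwards a (partition, offset) pair to the metrics.
class OffsetConsumer(private val metrics: Metrics) {
    fun update(topicPartition: TopicPartition, offset: Long) =
            metrics.notifyOffsetChanged(offset, topicPartition.topic(), topicPartition.partition())
}

fun main() {
    // Illustrative Metrics implementation that just prints the reported offset.
    val consoleMetrics = object : Metrics {
        override fun notifyOffsetChanged(offset: Long, topic: String, partition: Int) =
                println("offset=$offset topic=$topic partition=$partition")
    }
    OffsetConsumer(consoleMetrics).update(TopicPartition("sample-topicName", 1), 99L)
}
```

Carrying topic and partition through to the metrics layer allows the gauge checked in MicrometerMetricsTest to be reported per topic and partition rather than as a single global offset.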