/*
* ============LICENSE_START=======================================================
* Copyright (C) 2024 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
package org.onap.cps.integration;
import java.util.HashMap;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.testcontainers.containers.KafkaContainer;
import org.testcontainers.utility.DockerImageName;
/**
* The Apache Kafka test container wrapper.
 * Allows a specific image and version to be used, applying the Singleton design pattern.
 * This ensures only one Kafka container instance is shared across the integration tests,
 * avoiding unnecessary resource and time consumption.
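 * <p>Typical usage in a test (illustrative; the group id and value deserializer are example values):
 * <pre>{@code
 *     KafkaTestContainer.getInstance().start();
 *     final KafkaConsumer<String, String> consumer =
 *         KafkaTestContainer.getConsumer("some-group-id", StringDeserializer.class);
 * }</pre>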
*/
@Slf4j
public class KafkaTestContainer extends KafkaContainer {
private static final String IMAGE_NAME_AND_VERSION = "registry.nordix.org/onaptest/confluentinc/cp-kafka:6.2.1";
private static volatile KafkaTestContainer kafkaTestContainer;
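    // Private constructor: instances are created only via getInstance(), using the
    // Nordix-hosted cp-kafka image as a compatible substitute for confluentinc/cp-kafka.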
private KafkaTestContainer() {
super(DockerImageName.parse(IMAGE_NAME_AND_VERSION).asCompatibleSubstituteFor("confluentinc/cp-kafka"));
}
/**
 * Provides the single instance of the Kafka test container wrapper.
 * This allows Kafka messaging support to be initialized before any integration test runs.
*
* @return KafkaTestContainer the unique Kafka instance
*/
public static KafkaTestContainer getInstance() {
if (kafkaTestContainer == null) {
synchronized (KafkaTestContainer.class) {
if (kafkaTestContainer == null) {
kafkaTestContainer = new KafkaTestContainer();
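                    // Actual shutdown is deferred to JVM exit via this hook; see the no-op stop() override below.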
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
log.info("Shutting down KafkaTestContainer...");
kafkaTestContainer.stop();
}));
}
}
}
return kafkaTestContainer;
}
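    /**
     * Creates a Kafka consumer connected to this test container.
     *
     * @param consumerGroupId   consumer group id for the new consumer
     * @param valueDeserializer deserializer class used for record values (keys use StringDeserializer)
     * @return a new KafkaConsumer configured with the container's bootstrap servers
     */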
public static KafkaConsumer getConsumer(final String consumerGroupId, final Object valueDeserializer) {
return new KafkaConsumer<>(consumerProperties(consumerGroupId, valueDeserializer));
}
@Override
public void start() {
if (!isRunning()) {
super.start();
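            // Expose the broker address so the Spring test context connects to this container.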
System.setProperty("spring.kafka.properties.bootstrap.servers", getBootstrapServers());
log.info("KafkaTestContainer started at {}", getBootstrapServers());
}
}
@Override
public void stop() {
        // Method intentionally left blank: the container must stay up for the whole test
        // run, so it is only stopped by the JVM shutdown hook registered in getInstance().
}
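    // Builds consumer configuration pointing at the running container; 'auto.offset.reset=latest'
    // means tests only receive messages produced after the consumer has been assigned partitions.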
private static Map<String, Object> consumerProperties(final String consumerGroupId,
final Object valueDeserializer) {
final Map<String, Object> configProps = new HashMap<>();
configProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaTestContainer.getBootstrapServers());
configProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
configProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializer);
configProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
configProps.put(ConsumerConfig.GROUP_ID_CONFIG, consumerGroupId);
configProps.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, Integer.MAX_VALUE);
return configProps;
}
}