/*
 * ============LICENSE_START=======================================================
 * policy-endpoints
 * ================================================================================
 * Copyright (C) 2017-2021 AT&T Intellectual Property. All rights reserved.
 * Modifications Copyright (C) 2018 Samsung Electronics Co., Ltd.
 * Modifications Copyright (C) 2020,2023 Bell Canada. All rights reserved.
 * Modifications Copyright (C) 2022-2024 Nordix Foundation.
 * ================================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=========================================================
 */

package org.onap.policy.common.endpoints.event.comm.bus.internal;

import io.opentelemetry.instrumentation.kafkaclients.v2_6.TracingProducerInterceptor;
import java.util.Properties;
import java.util.UUID;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public interface BusPublisher {

    String NO_MESSAGE_PROVIDED = "No message provided";
    String LOG_CLOSE = "{}: CLOSE";
    String LOG_CLOSE_FAILED = "{}: CLOSE FAILED";

    /**
     * Sends a message.
     *
     * @param partitionId id
     * @param message the message
     * @return true if success, false otherwise
     * @throws IllegalArgumentException if no message provided
     */
    boolean send(String partitionId, String message);

    /**
     * Closes the publisher.
     */
    void close();

    /**
     * Kafka based library publisher.
     */
    class KafkaPublisherWrapper implements BusPublisher {

        private static final Logger logger = LoggerFactory.getLogger(KafkaPublisherWrapper.class);

        // Default serializer class, used for both the key and the value (both are Strings).
        private static final String KEY_SERIALIZER = "org.apache.kafka.common.serialization.StringSerializer";

        /**
         * Topic that all records produced by this wrapper are published to.
         */
        private final String topic;

        /**
         * Kafka publisher.
         */
        private final Producer<String, String> producer;

        /**
         * Effective producer configuration; exposed to subclasses for inspection.
         */
        protected Properties kafkaProps;

        /**
         * Kafka Publisher Wrapper.
         *
         * @param busTopicParams topic parameters
         * @throws IllegalArgumentException if no topic is provided
         */
        protected KafkaPublisherWrapper(BusTopicParams busTopicParams) {

            if (busTopicParams.isTopicInvalid()) {
                throw new IllegalArgumentException("No topic for Kafka");
            }

            this.topic = busTopicParams.getTopic();
            this.kafkaProps = buildProducerProperties(busTopicParams);
            this.producer = new KafkaProducer<>(kafkaProps);
        }

        /**
         * Builds the producer configuration from the topic parameters, applying default
         * serializers only where the additional properties have not already set them.
         *
         * @param busTopicParams topic parameters
         * @return the assembled producer configuration
         */
        private static Properties buildProducerProperties(BusTopicParams busTopicParams) {
            Properties props = new Properties();
            // NOTE(review): only the first configured server is used — TODO confirm intended
            props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, busTopicParams.getServers().get(0));

            if (busTopicParams.isAdditionalPropsValid()) {
                props.putAll(busTopicParams.getAdditionalProps());
            }

            // user-supplied additional properties take precedence over these defaults
            if (props.get(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG) == null) {
                props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KEY_SERIALIZER);
            }
            if (props.get(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG) == null) {
                props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KEY_SERIALIZER);
            }

            if (busTopicParams.isAllowTracing()) {
                props.setProperty(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG,
                        TracingProducerInterceptor.class.getName());
            }
            return props;
        }

        /**
         * Publishes the message to the configured topic and flushes the producer.
         *
         * <p>The {@code partitionId} argument is ignored; each record is keyed with a
         * random UUID, so Kafka chooses the partition by key hash.
         *
         * @param partitionId ignored by this implementation
         * @param message the message to publish
         * @return {@code true} if the record was handed to the producer, {@code false} on failure
         * @throws IllegalArgumentException if no message is provided
         */
        @Override
        public boolean send(String partitionId, String message) {
            if (message == null) {
                throw new IllegalArgumentException(NO_MESSAGE_PROVIDED);
            }

            try {
                // Create the record, keyed with a random UUID rather than partitionId
                ProducerRecord<String, String> producerRecord =
                        new ProducerRecord<>(topic, UUID.randomUUID().toString(), message);

                this.producer.send(producerRecord);
                producer.flush();
            } catch (Exception e) {
                logger.warn("{}: SEND of {} cannot be performed because of {}", this, message, e.getMessage(), e);
                return false;
            }
            return true;
        }

        /**
         * Closes the underlying Kafka producer; failures are logged, never propagated.
         */
        @Override
        public void close() {
            logger.info(LOG_CLOSE, this);

            try {
                this.producer.close();
            } catch (Exception e) {
                // use the shared constant; the exception argument carries the cause details
                logger.warn(LOG_CLOSE_FAILED, this, e);
            }
        }

        @Override
        public String toString() {
            return "KafkaPublisherWrapper [topic=" + topic + "]";
        }

    }
}