| field | value | date |
|---|---|---|
| author | Andreas Geissler <andreas-geissler@telekom.de> | 2024-05-22 14:12:49 +0000 |
| committer | Gerrit Code Review <gerrit@onap.org> | 2024-05-22 14:12:49 +0000 |
| commit | f3a138370598cb3eb841185592643ce04434ccec (patch) | |
| tree | cf1b4a53371b3686187acdef02f1efdd652ae9df /kubernetes/aai/components | |
| parent | e05d9d505e41d24065dcab5ab09d4da9aec5ae01 (diff) | |
| parent | bc2df7b0c60ff7aa256dc523336b952c41a0bf90 (diff) | |
Merge "[AAI] Helm changes for DMaaP deprecation"
Diffstat (limited to 'kubernetes/aai/components')
15 files changed, 265 insertions, 64 deletions
diff --git a/kubernetes/aai/components/aai-graphadmin/resources/config/application.properties b/kubernetes/aai/components/aai-graphadmin/resources/config/application.properties index b63cd83158..83689da093 100644 --- a/kubernetes/aai/components/aai-graphadmin/resources/config/application.properties +++ b/kubernetes/aai/components/aai-graphadmin/resources/config/application.properties @@ -55,8 +55,15 @@ server.ssl.enabled=false # JMS bind address host port jms.bind.address=tcp://localhost:61649 -dmaap.ribbon.listOfServers=message-router.{{ include "common.namespace" . }}:3904 -dmaap.ribbon.transportType=http + +# dmaap is deprecated now kafka is used +spring.kafka.producer.bootstrap-servers=${BOOTSTRAP_SERVERS} +spring.kafka.producer.properties.security.protocol=SASL_PLAINTEXT +spring.kafka.producer.properties.sasl.mechanism=SCRAM-SHA-512 +spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer +spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer +spring.kafka.producer.properties.sasl.jaas.config=${JAAS_CONFIG} +spring.kafka.producer.retries=3 # Schema related attributes for the oxm and edges # Any additional schema related attributes should start with prefix schema diff --git a/kubernetes/aai/components/aai-graphadmin/resources/config/logback.xml b/kubernetes/aai/components/aai-graphadmin/resources/config/logback.xml index f30fd2da60..5825a722ae 100644 --- a/kubernetes/aai/components/aai-graphadmin/resources/config/logback.xml +++ b/kubernetes/aai/components/aai-graphadmin/resources/config/logback.xml @@ -201,13 +201,13 @@ <includeCallerData>true</includeCallerData> <appender-ref ref="translog"/> </appender> - <appender name="dmaapAAIEventConsumer" class="ch.qos.logback.core.rolling.RollingFileAppender"> + <appender name="kafkaAAIEventConsumer" class="ch.qos.logback.core.rolling.RollingFileAppender"> <filter class="ch.qos.logback.classic.filter.ThresholdFilter"> <level>WARN</level> </filter> - <File>${logDirectory}/dmaapAAIEventConsumer/error.log</File> + <File>${logDirectory}/kafkaAAIEventConsumer/error.log</File> <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/error.log.%d{yyyy-MM-dd}</fileNamePattern> + <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/error.log.%d{yyyy-MM-dd}</fileNamePattern> <maxHistory>${maxHistory}</maxHistory> <totalSizeCap>${totalSizeCap}</totalSizeCap> </rollingPolicy> @@ -215,15 +215,15 @@ <pattern>${"errorPattern"}</pattern> </encoder> </appender> - <appender name="dmaapAAIEventConsumerInfo" class="ch.qos.logback.core.rolling.RollingFileAppender"> + <appender name="kafkaAAIEventConsumerInfo" class="ch.qos.logback.core.rolling.RollingFileAppender"> <filter class="ch.qos.logback.classic.filter.LevelFilter"> <level>INFO</level> <onMatch>ACCEPT</onMatch> <onMismatch>DENY</onMismatch> </filter> - <File>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log</File> + <File>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log</File> <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log.%d{yyyy-MM-dd} + <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log.%d{yyyy-MM-dd} </fileNamePattern> <maxHistory>${maxHistory}</maxHistory> <totalSizeCap>${totalSizeCap}</totalSizeCap> @@ -232,15 +232,15 @@ <pattern>${debugPattern}</pattern> </encoder> </appender> - <appender 
name="dmaapAAIEventConsumerDebug" class="ch.qos.logback.core.rolling.RollingFileAppender"> + <appender name="kafkaAAIEventConsumerDebug" class="ch.qos.logback.core.rolling.RollingFileAppender"> <filter class="ch.qos.logback.classic.filter.LevelFilter"> <level>DEBUG</level> <onMatch>ACCEPT</onMatch> <onMismatch>DENY</onMismatch> </filter> - <File>${logDirectory}/dmaapAAIEventConsumer/debug.log</File> + <File>${logDirectory}/kafkaAAIEventConsumer/debug.log</File> <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/debug.log.%d{yyyy-MM-dd}</fileNamePattern> + <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/debug.log.%d{yyyy-MM-dd}</fileNamePattern> <maxHistory>${maxHistory}</maxHistory> <totalSizeCap>${totalSizeCap}</totalSizeCap> </rollingPolicy> @@ -248,15 +248,15 @@ <pattern>${debugPattern}</pattern> </encoder> </appender> - <appender name="dmaapAAIEventConsumerMetric" class="ch.qos.logback.core.rolling.RollingFileAppender"> + <appender name="kafkaAAIEventConsumerMetric" class="ch.qos.logback.core.rolling.RollingFileAppender"> <filter class="ch.qos.logback.classic.filter.LevelFilter"> <level>INFO</level> <onMatch>ACCEPT</onMatch> <onMismatch>DENY</onMismatch> </filter> - <File>${logDirectory}/dmaapAAIEventConsumer/metrics.log</File> + <File>${logDirectory}/kafkaAAIEventConsumer/metrics.log</File> <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}</fileNamePattern> + <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}</fileNamePattern> <maxHistory>${maxHistory}</maxHistory> <totalSizeCap>${totalSizeCap}</totalSizeCap> </rollingPolicy> @@ -1035,12 +1035,12 @@ </if> <appender-ref ref="STDOUT"/> </logger> - <logger name="org.onap.aai.dmaap" level="DEBUG" additivity="false"> + <logger name="org.onap.aai.kafka" level="DEBUG" additivity="false"> <if condition='property("logToFileEnabled").contains("true")'> <then> - <appender-ref ref="dmaapAAIEventConsumer"/> - <appender-ref ref="dmaapAAIEventConsumerDebug"/> - <appender-ref ref="dmaapAAIEventConsumerMetric"/> + <appender-ref ref="kafkaAAIEventConsumer"/> + <appender-ref ref="kafkaAAIEventConsumerDebug"/> + <appender-ref ref="kafkaAAIEventConsumerMetric"/> </then> </if> <appender-ref ref="STDOUT"/> diff --git a/kubernetes/aai/components/aai-graphadmin/templates/aai-graph-kafka-user.yml b/kubernetes/aai/components/aai-graphadmin/templates/aai-graph-kafka-user.yml new file mode 100644 index 0000000000..b028df7807 --- /dev/null +++ b/kubernetes/aai/components/aai-graphadmin/templates/aai-graph-kafka-user.yml @@ -0,0 +1,31 @@ +{{/* +# Copyright © 2022-23 Nordix Foundation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +*/}} +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaUser +metadata: + name: {{ include "common.release" . }}-{{ .Values.global.aaiGraphKafkaUser }} + labels: + strimzi.io/cluster: {{ include "common.release" . 
}}-strimzi +spec: + authentication: + type: scram-sha-512 + authorization: + type: simple + acls: + - resource: + type: topic + name: AAI-EVENT + operation: All
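
The new KafkaUser manifest above asks the Strimzi user operator to create SCRAM-SHA-512 credentials scoped to the AAI-EVENT topic; the operator stores those credentials in a Secret named after the user, and the graphadmin deployment (next hunk) reads its `sasl.jaas.config` key. A rough sketch of that generated Secret, assuming a release named `onap` (values shown unencoded for readability; the exact key set depends on the Strimzi version):

```yaml
# Sketch only: approximate shape of the Secret the Strimzi user operator
# creates for the KafkaUser above. Release name "onap" and the credential
# values are assumptions; real Secrets carry base64-encoded data.
apiVersion: v1
kind: Secret
metadata:
  name: onap-aai-graph-kafka-user
  labels:
    strimzi.io/cluster: onap-strimzi
type: Opaque
stringData:
  password: "<generated-password>"
  sasl.jaas.config: >-
    org.apache.kafka.common.security.scram.ScramLoginModule required
    username="onap-aai-graph-kafka-user" password="<generated-password>";
```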
\ No newline at end of file diff --git a/kubernetes/aai/components/aai-graphadmin/templates/deployment.yaml b/kubernetes/aai/components/aai-graphadmin/templates/deployment.yaml index cad213ab9e..9a0ca764bf 100644 --- a/kubernetes/aai/components/aai-graphadmin/templates/deployment.yaml +++ b/kubernetes/aai/components/aai-graphadmin/templates/deployment.yaml @@ -118,6 +118,13 @@ spec: value: {{ .Values.service.internalPort2 | quote }} - name: INTERNAL_PORT_3 value: {{ .Values.service.internalPort3 | quote }} + - name: BOOTSTRAP_SERVERS + value: {{ include "common.release" . }}-strimzi-kafka-bootstrap:9092 + - name: JAAS_CONFIG + valueFrom: + secretKeyRef: + name: {{ include "common.release" . }}-{{ .Values.global.aaiGraphKafkaUser }} + key: sasl.jaas.config volumeMounts: - mountPath: /opt/app/aai-graphadmin/resources/etc/appprops/janusgraph-realtime.properties name: config diff --git a/kubernetes/aai/components/aai-graphadmin/values.yaml b/kubernetes/aai/components/aai-graphadmin/values.yaml index d3609a9110..89d27a82cc 100644 --- a/kubernetes/aai/components/aai-graphadmin/values.yaml +++ b/kubernetes/aai/components/aai-graphadmin/values.yaml @@ -25,6 +25,8 @@ # Declare variables to be passed into your templates. global: # global defaults nodePortPrefix: 302 + kafkaBootstrap: strimzi-kafka-bootstrap + aaiGraphKafkaUser: aai-graph-kafka-user cassandra: #This will instantiate AAI cassandra cluster, default:shared cassandra. localCluster: false @@ -123,9 +125,11 @@ config: # Specify the profiles for the graphadmin microservice profiles: - - active: dmaap - + active: kafka + kafkaBootstrap: strimzi-kafka-bootstrap + jaasConfExternalSecret: '{{ include "common.release" . }}-{{ .Values.global.aaiGraphKafkaUser }}' + someConfig: graphrandom + aaiTopic: AAI-EVENT # Specifies the timeout limit for the REST API requests timeout: enabled: true @@ -295,3 +299,20 @@ log: root: DEBUG base: DEBUG logConfigMapNamePrefix: '{{ include "common.fullname" . }}' +################################################################# +# Secrets metaconfig +################################################################# +secrets: + - uid: aai-graph-kafka-user + externalSecret: '{{ tpl (default "" .Values.config.jaasConfExternalSecret) . }}' + type: genericKV + envs: + - name: sasl.jaas.config + value: '{{ .Values.config.someConfig }}' + policy: generate +kafkaUser: + authenticationType: scram-sha-512 + acls: + - name: AAI-EVENT + type: topic + operations: [Read, Write]
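
The graphadmin values.yaml hunk above ties the pieces together: `config.jaasConfExternalSecret` defaults to the name of the Strimzi-managed user Secret, and the `secrets` metaconfig entry registers that Secret with the chart's secret handling, falling back to `policy: generate` only when the template resolves to an empty string (the usual OOM secrets convention). For an assumed release name `onap`, the templated defaults evaluate roughly as follows; this is an orientation sketch, not rendered chart output:

```yaml
# Assumed release name: "onap" (illustration only).
# {{ include "common.release" . }}-{{ .Values.global.aaiGraphKafkaUser }}
#   -> onap-aai-graph-kafka-user           # KafkaUser name and its Secret
config:
  jaasConfExternalSecret: onap-aai-graph-kafka-user
secrets:
  - uid: aai-graph-kafka-user
    externalSecret: onap-aai-graph-kafka-user   # existing Secret, so none is generated
# The deployment then reads key "sasl.jaas.config" from that same Secret and
# exposes it to graphadmin as the JAAS_CONFIG environment variable.
```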
\ No newline at end of file diff --git a/kubernetes/aai/components/aai-resources/resources/config/application.properties b/kubernetes/aai/components/aai-resources/resources/config/application.properties index 1b7bdf8ff6..5762460a02 100644 --- a/kubernetes/aai/components/aai-resources/resources/config/application.properties +++ b/kubernetes/aai/components/aai-resources/resources/config/application.properties @@ -58,8 +58,15 @@ server.ssl.enabled=false # JMS bind address host port jms.bind.address=tcp://localhost:61647 -dmaap.ribbon.listOfServers=message-router.{{ include "common.namespace" . }}:3904 -dmaap.ribbon.transportType=http + +# dmaap is deprecated now kafka is used +spring.kafka.producer.bootstrap-servers=${BOOTSTRAP_SERVERS} +spring.kafka.producer.properties.security.protocol=SASL_PLAINTEXT +spring.kafka.producer.properties.sasl.mechanism=SCRAM-SHA-512 +spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer +spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer +spring.kafka.producer.properties.sasl.jaas.config=${JAAS_CONFIG} +spring.kafka.producer.retries=3 # Schema related attributes for the oxm and edges # Any additional schema related attributes should start with prefix schema diff --git a/kubernetes/aai/components/aai-resources/resources/config/logback.xml b/kubernetes/aai/components/aai-resources/resources/config/logback.xml index 8d18640281..441539361f 100644 --- a/kubernetes/aai/components/aai-resources/resources/config/logback.xml +++ b/kubernetes/aai/components/aai-resources/resources/config/logback.xml @@ -196,14 +196,14 @@ <appender-ref ref="translog"/> </appender> - <appender name="dmaapAAIEventConsumer" + <appender name="kafkaAAIEventConsumer" class="ch.qos.logback.core.rolling.RollingFileAppender"> <filter class="ch.qos.logback.classic.filter.ThresholdFilter"> <level>WARN</level> </filter> - <File>${logDirectory}/dmaapAAIEventConsumer/error.log</File> + <File>${logDirectory}/kafkaAAIEventConsumer/error.log</File> <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/error.log.%d{yyyy-MM-dd}.zip + <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/error.log.%d{yyyy-MM-dd}.zip </fileNamePattern> <maxHistory>${maxHistory}</maxHistory> <totalSizeCap>${totalSizeCap}</totalSizeCap> @@ -214,16 +214,16 @@ </appender> - <appender name="dmaapAAIEventConsumerDebug" + <appender name="kafkaAAIEventConsumerDebug" class="ch.qos.logback.core.rolling.RollingFileAppender"> <filter class="ch.qos.logback.classic.filter.LevelFilter"> <level>DEBUG</level> <onMatch>ACCEPT</onMatch> <onMismatch>DENY</onMismatch> </filter> - <File>${logDirectory}/dmaapAAIEventConsumer/debug.log</File> + <File>${logDirectory}/kafkaAAIEventConsumer/debug.log</File> <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/debug.log.%d{yyyy-MM-dd}.zip + <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/debug.log.%d{yyyy-MM-dd}.zip </fileNamePattern> <maxHistory>${maxHistory}</maxHistory> <totalSizeCap>${totalSizeCap}</totalSizeCap> @@ -232,16 +232,16 @@ <pattern>${debugPattern}</pattern> </encoder> </appender> - <appender name="dmaapAAIEventConsumerInfo" + <appender name="kafkaAAIEventConsumerInfo" class="ch.qos.logback.core.rolling.RollingFileAppender"> <filter class="ch.qos.logback.classic.filter.LevelFilter"> <level>INFO</level> <onMatch>ACCEPT</onMatch> 
<onMismatch>DENY</onMismatch> </filter> - <File>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log</File> + <File>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log</File> <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log.%d{yyyy-MM-dd}.zip + <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log.%d{yyyy-MM-dd}.zip </fileNamePattern> <maxHistory>${maxHistory}</maxHistory> <totalSizeCap>${totalSizeCap}</totalSizeCap> @@ -250,16 +250,16 @@ <pattern>${auditPattern}</pattern> </encoder> </appender> - <appender name="dmaapAAIEventConsumerMetric" + <appender name="kafkaAAIEventConsumerMetric" class="ch.qos.logback.core.rolling.RollingFileAppender"> <filter class="ch.qos.logback.classic.filter.LevelFilter"> <level>INFO</level> <onMatch>ACCEPT</onMatch> <onMismatch>DENY</onMismatch> </filter> - <File>${logDirectory}/dmaapAAIEventConsumer/metrics.log</File> + <File>${logDirectory}/kafkaAAIEventConsumer/metrics.log</File> <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}.zip + <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}.zip </fileNamePattern> <maxHistory>${maxHistory}</maxHistory> <totalSizeCap>${totalSizeCap}</totalSizeCap> @@ -395,13 +395,13 @@ <appender-ref ref="asyncMETRIC"/> </logger> <logger name="org.onap.aai.aailog.logs.AaiDmaapMetricLog" level="INFO"> - <appender-ref ref="dmaapAAIEventConsumerMetric"/> + <appender-ref ref="kafkaAAIEventConsumerMetric"/> </logger> <logger name="org.onap.aai.logging.ErrorLogHelper" level="WARN"> <appender-ref ref="asyncERROR"/> </logger> <logger name="com.att.nsa.mr" level="INFO"> - <appender-ref ref="dmaapAAIEventConsumerInfo"/> + <appender-ref ref="kafkaAAIEventConsumerInfo"/> </logger> </then> </if> @@ -415,11 +415,11 @@ <appender-ref ref="STDOUT"/> </logger> - <logger name="org.onap.aai.dmaap" level="DEBUG" additivity="false"> + <logger name="org.onap.aai.kafka" level="DEBUG" additivity="false"> <if condition='property("logToFileEnabled").contains("true")'> <then> - <appender-ref ref="dmaapAAIEventConsumer"/> - <appender-ref ref="dmaapAAIEventConsumerDebug"/> + <appender-ref ref="kafkaAAIEventConsumer"/> + <appender-ref ref="kafkaAAIEventConsumerDebug"/> </then> </if> <appender-ref ref="STDOUT"/> diff --git a/kubernetes/aai/components/aai-resources/templates/aai-kafka-user.yml b/kubernetes/aai/components/aai-resources/templates/aai-kafka-user.yml new file mode 100644 index 0000000000..e4fa84a041 --- /dev/null +++ b/kubernetes/aai/components/aai-resources/templates/aai-kafka-user.yml @@ -0,0 +1,31 @@ +{{/* +# Copyright © 2022-23 Nordix Foundation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +*/}} +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaUser +metadata: + name: {{ include "common.release" . 
}}-{{ .Values.global.aaiKafkaUser }} + labels: + strimzi.io/cluster: {{ include "common.release" . }}-strimzi +spec: + authentication: + type: scram-sha-512 + authorization: + type: simple + acls: + - resource: + type: topic + name: AAI-EVENT + operation: All
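
The application.properties hunks in this change (identical for graphadmin, resources, and traversal) drop the `dmaap.ribbon.*` client settings and configure a standard Spring Kafka producer instead, driven by the `BOOTSTRAP_SERVERS` and `JAAS_CONFIG` variables injected by the deployments. For readers who think in Spring Boot YAML rather than `.properties`, the same settings look roughly like this; it is an equivalent-shape sketch, not a file shipped by the charts:

```yaml
# Equivalent Spring Boot YAML view of the spring.kafka.producer.* properties
# added above; the charts actually render the .properties form.
spring:
  kafka:
    producer:
      bootstrap-servers: ${BOOTSTRAP_SERVERS}   # e.g. <release>-strimzi-kafka-bootstrap:9092
      retries: 3
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      properties:
        security.protocol: SASL_PLAINTEXT
        sasl.mechanism: SCRAM-SHA-512
        sasl.jaas.config: ${JAAS_CONFIG}        # injected from the KafkaUser Secret
```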
\ No newline at end of file diff --git a/kubernetes/aai/components/aai-resources/templates/deployment.yaml b/kubernetes/aai/components/aai-resources/templates/deployment.yaml index 122e522bb1..7cccfb11a8 100644 --- a/kubernetes/aai/components/aai-resources/templates/deployment.yaml +++ b/kubernetes/aai/components/aai-resources/templates/deployment.yaml @@ -162,6 +162,13 @@ spec: value: {{ .Values.service.internalPort2 | quote }} - name: INTERNAL_PORT_3 value: {{ .Values.service.internalPort3 | quote }} + - name: BOOTSTRAP_SERVERS + value: {{ include "common.release" . }}-strimzi-kafka-bootstrap:9092 + - name: JAAS_CONFIG + valueFrom: + secretKeyRef: + name: {{ include "common.release" . }}-{{ .Values.global.aaiKafkaUser }} + key: sasl.jaas.config volumeMounts: - mountPath: /opt/app/aai-resources/resources/etc/appprops/janusgraph-realtime.properties name: {{ include "common.fullname" . }}-config diff --git a/kubernetes/aai/components/aai-resources/values.yaml b/kubernetes/aai/components/aai-resources/values.yaml index 3023d86f19..329c487ee7 100644 --- a/kubernetes/aai/components/aai-resources/values.yaml +++ b/kubernetes/aai/components/aai-resources/values.yaml @@ -20,6 +20,8 @@ # Declare variables to be passed into your templates. global: # global defaults nodePortPrefix: 302 + kafkaBootstrap: strimzi-kafka-bootstrap + aaiKafkaUser: aai-kafka-user cassandra: #Service Name of the cassandra cluster to connect to. #Override it to aai-cassandra if localCluster is enabled. @@ -50,7 +52,7 @@ global: # global defaults # Active spring profiles for the resources microservice profiles: - active: production,dmaap + active: production,kafka # Notification event specific properties notification: @@ -96,6 +98,10 @@ global: # global defaults # Specifies which clients should always default to realtime graph connection realtime: clients: SDNC,MSO,SO,robot-ete + kafkaBootstrap: strimzi-kafka-bootstrap + jaasConfExternalSecret: '{{ include "common.release" . }}-{{ .Values.global.aaiKafkaUser }}' + someConfig: random + aaiTopic: AAI-EVENT api_list: - 11 @@ -123,7 +129,7 @@ aai_enpoints: url: external-system # application image -image: onap/aai-resources:1.13.0 +image: onap/aai-resources:1.13.4 pullPolicy: Always restartPolicy: Always flavor: small @@ -377,3 +383,20 @@ accessLogback: logToFileEnabled: false maxHistory: 7 totalSizeCap: 1GB +################################################################# +# Secrets metaconfig +################################################################# +secrets: + - uid: aai-kafka-user + externalSecret: '{{ tpl (default "" .Values.config.jaasConfExternalSecret) . }}' + type: genericKV + envs: + - name: sasl.jaas.config + value: '{{ .Values.config.someConfig }}' + policy: generate +kafkaUser: + authenticationType: scram-sha-512 + acls: + - name: AAI-EVENT + type: topic + operations: [Read, Write]
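
All three deployment.yaml hunks wire those placeholders the same way: `BOOTSTRAP_SERVERS` is a fixed string pointing at the release's Strimzi bootstrap service, and `JAAS_CONFIG` comes from the KafkaUser Secret via a secretKeyRef. Rendered for an assumed release name `onap`, the aai-resources container env gains roughly these entries (a sketch of the rendered output, not copied from an actual `helm template` run):

```yaml
# Approximate rendered env entries for aai-resources, assuming release "onap".
- name: BOOTSTRAP_SERVERS
  value: onap-strimzi-kafka-bootstrap:9092
- name: JAAS_CONFIG
  valueFrom:
    secretKeyRef:
      name: onap-aai-kafka-user   # {{ include "common.release" . }}-{{ .Values.global.aaiKafkaUser }}
      key: sasl.jaas.config
```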
\ No newline at end of file diff --git a/kubernetes/aai/components/aai-traversal/resources/config/application.properties b/kubernetes/aai/components/aai-traversal/resources/config/application.properties index 276dbfe6d7..1b58ad6167 100644 --- a/kubernetes/aai/components/aai-traversal/resources/config/application.properties +++ b/kubernetes/aai/components/aai-traversal/resources/config/application.properties @@ -49,8 +49,15 @@ server.ssl.enabled=false # JMS bind address host port jms.bind.address=tcp://localhost:61647 -dmaap.ribbon.listOfServers=message-router.{{ include "common.namespace" . }}:3904 -dmaap.ribbon.transportType=http + +# dmaap is deprecated now kafka is used +spring.kafka.producer.bootstrap-servers=${BOOTSTRAP_SERVERS} +spring.kafka.producer.properties.security.protocol=SASL_PLAINTEXT +spring.kafka.producer.properties.sasl.mechanism=SCRAM-SHA-512 +spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer +spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer +spring.kafka.producer.properties.sasl.jaas.config=${JAAS_CONFIG} +spring.kafka.producer.retries=3 # Schema related attributes for the oxm and edges # Any additional schema related attributes should start with prefix schema diff --git a/kubernetes/aai/components/aai-traversal/resources/config/logback.xml b/kubernetes/aai/components/aai-traversal/resources/config/logback.xml index acdc893f55..21f48f7c93 100644 --- a/kubernetes/aai/components/aai-traversal/resources/config/logback.xml +++ b/kubernetes/aai/components/aai-traversal/resources/config/logback.xml @@ -208,13 +208,13 @@ <includeCallerData>true</includeCallerData> <appender-ref ref="translog" /> </appender> - <appender name="dmaapAAIEventConsumer" class="ch.qos.logback.core.rolling.RollingFileAppender"> + <appender name="kafkaAAIEventConsumer" class="ch.qos.logback.core.rolling.RollingFileAppender"> <filter class="ch.qos.logback.classic.filter.ThresholdFilter"> <level>WARN</level> </filter> - <File>${logDirectory}/dmaapAAIEventConsumer/error.log</File> + <File>${logDirectory}/kafkaAAIEventConsumer/error.log</File> <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/error.log.%d{yyyy-MM-dd} + <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/error.log.%d{yyyy-MM-dd} </fileNamePattern> <maxHistory>${maxHistory}</maxHistory> <totalSizeCap>${totalSizeCap}</totalSizeCap> @@ -223,15 +223,15 @@ <pattern>${errorPattern}</pattern> </encoder> </appender> - <appender name="dmaapAAIEventConsumerDebug" class="ch.qos.logback.core.rolling.RollingFileAppender"> + <appender name="kafkaAAIEventConsumerDebug" class="ch.qos.logback.core.rolling.RollingFileAppender"> <filter class="ch.qos.logback.classic.filter.LevelFilter"> <level>DEBUG</level> <onMatch>ACCEPT</onMatch> <onMismatch>DENY</onMismatch> </filter> - <File>${logDirectory}/dmaapAAIEventConsumer/debug.log</File> + <File>${logDirectory}/kafkaAAIEventConsumer/debug.log</File> <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/debug.log.%d{yyyy-MM-dd} + <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/debug.log.%d{yyyy-MM-dd} </fileNamePattern> <maxHistory>${maxHistory}</maxHistory> <totalSizeCap>${totalSizeCap}</totalSizeCap> @@ -240,15 +240,15 @@ <pattern>${debugPattern}</pattern> </encoder> </appender> - <appender name="dmaapAAIEventConsumerInfo" 
class="ch.qos.logback.core.rolling.RollingFileAppender"> + <appender name="kafkaAAIEventConsumerInfo" class="ch.qos.logback.core.rolling.RollingFileAppender"> <filter class="ch.qos.logback.classic.filter.LevelFilter"> <level>INFO</level> <onMatch>ACCEPT</onMatch> <onMismatch>DENY</onMismatch> </filter> - <File>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log</File> + <File>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log</File> <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log.%d{yyyy-MM-dd} + <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log.%d{yyyy-MM-dd} </fileNamePattern> <maxHistory>${maxHistory}</maxHistory> <totalSizeCap>${totalSizeCap}</totalSizeCap> @@ -257,15 +257,15 @@ <pattern>${auditPattern}</pattern> </encoder> </appender> - <appender name="dmaapAAIEventConsumerMetric" class="ch.qos.logback.core.rolling.RollingFileAppender"> + <appender name="kafkaAAIEventConsumerMetric" class="ch.qos.logback.core.rolling.RollingFileAppender"> <filter class="ch.qos.logback.classic.filter.LevelFilter"> <level>INFO</level> <onMatch>ACCEPT</onMatch> <onMismatch>DENY</onMismatch> </filter> - <File>${logDirectory}/dmaapAAIEventConsumer/metrics.log</File> + <File>${logDirectory}/kafkaAAIEventConsumer/metrics.log</File> <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> - <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/metrics.log.%d{yyyy-MM-dd} + <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/metrics.log.%d{yyyy-MM-dd} </fileNamePattern> <maxHistory>${maxHistory}</maxHistory> <totalSizeCap>${totalSizeCap}</totalSizeCap> @@ -381,7 +381,7 @@ <logger name="org.onap.aai.aailog.logs.AaiDmaapMetricLog" level="INFO" additivity="false"> <if condition='property("logToFileEnabled").contains("true")'> <then> - <appender-ref ref="dmaapAAIEventConsumerMetric" /> + <appender-ref ref="kafkaAAIEventConsumerMetric" /> </then> </if> <appender-ref ref="STDOUT" /> @@ -405,11 +405,11 @@ <appender-ref ref="STDOUT" /> </logger> - <logger name="org.onap.aai.dmaap" level="DEBUG" additivity="false"> + <logger name="org.onap.aai.kafka" level="DEBUG" additivity="false"> <if condition='property("logToFileEnabled").contains("true")'> <then> - <appender-ref ref="dmaapAAIEventConsumer" /> - <appender-ref ref="dmaapAAIEventConsumerDebug" /> + <appender-ref ref="kafkaAAIEventConsumer" /> + <appender-ref ref="kafkaAAIEventConsumerDebug" /> </then> </if> <appender-ref ref="STDOUT" /> @@ -418,7 +418,7 @@ <logger name="com.att.nsa.mr" level="INFO"> <if condition='property("logToFileEnabled").contains("true")'> <then> - <appender-ref ref="dmaapAAIEventConsumerInfo" /> + <appender-ref ref="kafkaAAIEventConsumerInfo" /> </then> </if> <appender-ref ref="STDOUT" /> diff --git a/kubernetes/aai/components/aai-traversal/templates/aai-trav-kafka-user.yml b/kubernetes/aai/components/aai-traversal/templates/aai-trav-kafka-user.yml new file mode 100644 index 0000000000..1754227d7f --- /dev/null +++ b/kubernetes/aai/components/aai-traversal/templates/aai-trav-kafka-user.yml @@ -0,0 +1,31 @@ +{{/* +# Copyright © 2022-23 Nordix Foundation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +*/}} +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaUser +metadata: + name: {{ include "common.release" . }}-{{ .Values.global.aaiTravKafkaUser }} + labels: + strimzi.io/cluster: {{ include "common.release" . }}-strimzi +spec: + authentication: + type: scram-sha-512 + authorization: + type: simple + acls: + - resource: + type: topic + name: AAI-EVENT + operation: All
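
All three KafkaUser templates grant `operation: All` on AAI-EVENT, while the accompanying `kafkaUser` values blocks describe the narrower intent `operations: [Read, Write]`. An operator who wants the rendered users to match that narrower intent can express it with the same v1beta2 ACL schema, one rule per operation. This is a tightening sketch, not what the templates in this change render, and the minimal set ultimately depends on how each client uses the topic:

```yaml
# Tightened ACL sketch using the same KafkaUser schema as the templates above;
# the charts in this change actually render "operation: All".
spec:
  authentication:
    type: scram-sha-512
  authorization:
    type: simple
    acls:
      - resource:
          type: topic
          name: AAI-EVENT
        operation: Read
      - resource:
          type: topic
          name: AAI-EVENT
        operation: Write
```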
\ No newline at end of file diff --git a/kubernetes/aai/components/aai-traversal/templates/deployment.yaml b/kubernetes/aai/components/aai-traversal/templates/deployment.yaml index 6627a801b3..67e1b996e3 100644 --- a/kubernetes/aai/components/aai-traversal/templates/deployment.yaml +++ b/kubernetes/aai/components/aai-traversal/templates/deployment.yaml @@ -183,6 +183,13 @@ spec: value: {{ .Values.service.internalPort2 | quote }} - name: INTERNAL_PORT_3 value: {{ .Values.service.internalPort3 | quote }} + - name: BOOTSTRAP_SERVERS + value: {{ include "common.release" . }}-strimzi-kafka-bootstrap:9092 + - name: JAAS_CONFIG + valueFrom: + secretKeyRef: + name: {{ include "common.release" . }}-{{ .Values.global.aaiTravKafkaUser }} + key: sasl.jaas.config volumeMounts: - mountPath: /opt/app/aai-traversal/resources/etc/appprops/janusgraph-realtime.properties name: {{ include "common.fullname" . }}-config diff --git a/kubernetes/aai/components/aai-traversal/values.yaml b/kubernetes/aai/components/aai-traversal/values.yaml index f6194b8ac0..dd06a28a95 100644 --- a/kubernetes/aai/components/aai-traversal/values.yaml +++ b/kubernetes/aai/components/aai-traversal/values.yaml @@ -20,7 +20,8 @@ # Declare variables to be passed into your templates. global: # global defaults nodePortPrefix: 302 - + kafkaBootstrap: strimzi-kafka-bootstrap + aaiTravKafkaUser: aai-trav-kafka-user cassandra: #Service Name of the cassandra cluster to connect to. #Override it to aai-cassandra if localCluster is enabled. @@ -59,7 +60,7 @@ global: # global defaults # Active spring profiles for the resources microservice profiles: - active: production,dmaap + active: production,kafka # Notification event specific properties notification: @@ -105,9 +106,13 @@ global: # global defaults # Specifies which clients should always default to realtime graph connection realtime: clients: SDNC,MSO,SO,robot-ete + kafkaBootstrap: strimzi-kafka-bootstrap + jaasConfExternalSecret: '{{ include "common.release" . }}-{{ .Values.global.aaiTravKafkaUser }}' + someConfig: random + aaiTopic: AAI-EVENT # application image -image: onap/aai-traversal:1.12.3 +image: onap/aai-traversal:1.13.4 pullPolicy: Always restartPolicy: Always flavor: small @@ -372,3 +377,20 @@ log: root: DEBUG base: DEBUG # base package (org.onap.aai) logConfigMapNamePrefix: '{{ include "common.fullname" . }}' +################################################################# +# Secrets metaconfig +################################################################# +secrets: + - uid: aai-trav-kafka-user + externalSecret: '{{ tpl (default "" .Values.config.jaasConfExternalSecret) . }}' + type: genericKV + envs: + - name: sasl.jaas.config + value: '{{ .Values.config.someConfig }}' + policy: generate +kafkaUser: + authenticationType: scram-sha-512 + acls: + - name: AAI-EVENT + type: topic + operations: [Read, Write]
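
This change only grants the AAI users access to AAI-EVENT; the topic itself still has to exist on the Strimzi cluster. Whether another ONAP chart declares it or broker auto-creation handles it is not visible in this diff, so the following is an assumption-laden sketch of a Strimzi-managed topic definition, with placeholder sizing, for deployments that need to declare it explicitly:

```yaml
# Sketch of a Strimzi-managed AAI-EVENT topic, assuming release "onap" and
# that no other chart already declares it (verify before applying).
apiVersion: kafka.strimzi.io/v1beta2
kind: KafkaTopic
metadata:
  name: aai-event
  labels:
    strimzi.io/cluster: onap-strimzi
spec:
  topicName: AAI-EVENT
  partitions: 3    # assumed values; tune to the target environment
  replicas: 3
```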
\ No newline at end of file