Diffstat (limited to 'kubernetes/aai/components/aai-graphadmin')
-rw-r--r--  kubernetes/aai/components/aai-graphadmin/resources/config/application.properties  11
-rw-r--r--  kubernetes/aai/components/aai-graphadmin/resources/config/logback.xml              32
-rw-r--r--  kubernetes/aai/components/aai-graphadmin/templates/aai-graph-kafka-user.yml        31
-rw-r--r--  kubernetes/aai/components/aai-graphadmin/templates/deployment.yaml                  7
-rw-r--r--  kubernetes/aai/components/aai-graphadmin/values.yaml                                27
5 files changed, 87 insertions(+), 21 deletions(-)
diff --git a/kubernetes/aai/components/aai-graphadmin/resources/config/application.properties b/kubernetes/aai/components/aai-graphadmin/resources/config/application.properties
index b63cd83158..83689da093 100644
--- a/kubernetes/aai/components/aai-graphadmin/resources/config/application.properties
+++ b/kubernetes/aai/components/aai-graphadmin/resources/config/application.properties
@@ -55,8 +55,15 @@ server.ssl.enabled=false
# JMS bind address host port
jms.bind.address=tcp://localhost:61649
-dmaap.ribbon.listOfServers=message-router.{{ include "common.namespace" . }}:3904
-dmaap.ribbon.transportType=http
+
+# DMaaP is deprecated; Kafka is now used for event publishing
+spring.kafka.producer.bootstrap-servers=${BOOTSTRAP_SERVERS}
+spring.kafka.producer.properties.security.protocol=SASL_PLAINTEXT
+spring.kafka.producer.properties.sasl.mechanism=SCRAM-SHA-512
+spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
+spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
+spring.kafka.producer.properties.sasl.jaas.config=${JAAS_CONFIG}
+spring.kafka.producer.retries=3
# Schema related attributes for the oxm and edges
# Any additional schema related attributes should start with prefix schema
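Note: BOOTSTRAP_SERVERS and JAAS_CONFIG in the properties above are plain Spring placeholders; they are filled from container environment variables that the deployment template further down injects. As a rough sketch only (the username and password are illustrative, not part of this change), the sasl.jaas.config string that a Strimzi-managed SCRAM-SHA-512 user typically carries looks like:

  org.apache.kafka.common.security.scram.ScramLoginModule required username="onap-aai-graph-kafka-user" password="<generated-by-strimzi>";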
diff --git a/kubernetes/aai/components/aai-graphadmin/resources/config/logback.xml b/kubernetes/aai/components/aai-graphadmin/resources/config/logback.xml
index fd79f7043a..875b1dee4a 100644
--- a/kubernetes/aai/components/aai-graphadmin/resources/config/logback.xml
+++ b/kubernetes/aai/components/aai-graphadmin/resources/config/logback.xml
@@ -201,13 +201,13 @@
<includeCallerData>true</includeCallerData>
<appender-ref ref="translog"/>
</appender>
- <appender name="dmaapAAIEventConsumer" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <appender name="kafkaAAIEventConsumer" class="ch.qos.logback.core.rolling.RollingFileAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>WARN</level>
</filter>
- <File>${logDirectory}/dmaapAAIEventConsumer/error.log</File>
+ <File>${logDirectory}/kafkaAAIEventConsumer/error.log</File>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/error.log.%d{yyyy-MM-dd}</fileNamePattern>
+ <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/error.log.%d{yyyy-MM-dd}</fileNamePattern>
<maxHistory>${maxHistory}</maxHistory>
<totalSizeCap>${totalSizeCap}</totalSizeCap>
</rollingPolicy>
@@ -215,15 +215,15 @@
<pattern>${"errorPattern"}</pattern>
</encoder>
</appender>
- <appender name="dmaapAAIEventConsumerInfo" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <appender name="kafkaAAIEventConsumerInfo" class="ch.qos.logback.core.rolling.RollingFileAppender">
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
- <File>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log</File>
+ <File>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log</File>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log.%d{yyyy-MM-dd}
+ <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log.%d{yyyy-MM-dd}
</fileNamePattern>
<maxHistory>${maxHistory}</maxHistory>
<totalSizeCap>${totalSizeCap}</totalSizeCap>
@@ -232,15 +232,15 @@
<pattern>${debugPattern}</pattern>
</encoder>
</appender>
- <appender name="dmaapAAIEventConsumerDebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <appender name="kafkaAAIEventConsumerDebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>DEBUG</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
- <File>${logDirectory}/dmaapAAIEventConsumer/debug.log</File>
+ <File>${logDirectory}/kafkaAAIEventConsumer/debug.log</File>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+ <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
<maxHistory>${maxHistory}</maxHistory>
<totalSizeCap>${totalSizeCap}</totalSizeCap>
</rollingPolicy>
@@ -248,15 +248,15 @@
<pattern>${debugPattern}</pattern>
</encoder>
</appender>
- <appender name="dmaapAAIEventConsumerMetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <appender name="kafkaAAIEventConsumerMetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
- <File>${logDirectory}/dmaapAAIEventConsumer/metrics.log</File>
+ <File>${logDirectory}/kafkaAAIEventConsumer/metrics.log</File>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
+ <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
<maxHistory>${maxHistory}</maxHistory>
<totalSizeCap>${totalSizeCap}</totalSizeCap>
</rollingPolicy>
@@ -1035,12 +1035,12 @@
</if>
<appender-ref ref="STDOUT"/>
</logger>
- <logger name="org.onap.aai.dmaap" level="DEBUG" additivity="false">
+ <logger name="org.onap.aai.kafka" level="DEBUG" additivity="false">
<if condition='property("logToFileEnabled").contains("true")'>
<then>
- <appender-ref ref="dmaapAAIEventConsumer"/>
- <appender-ref ref="dmaapAAIEventConsumerDebug"/>
- <appender-ref ref="dmaapAAIEventConsumerMetric"/>
+ <appender-ref ref="kafkaAAIEventConsumer"/>
+ <appender-ref ref="kafkaAAIEventConsumerDebug"/>
+ <appender-ref ref="kafkaAAIEventConsumerMetric"/>
</then>
</if>
<appender-ref ref="STDOUT"/>
diff --git a/kubernetes/aai/components/aai-graphadmin/templates/aai-graph-kafka-user.yml b/kubernetes/aai/components/aai-graphadmin/templates/aai-graph-kafka-user.yml
new file mode 100644
index 0000000000..b028df7807
--- /dev/null
+++ b/kubernetes/aai/components/aai-graphadmin/templates/aai-graph-kafka-user.yml
@@ -0,0 +1,31 @@
+{{/*
+# Copyright © 2022-23 Nordix Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+*/}}
+apiVersion: kafka.strimzi.io/v1beta2
+kind: KafkaUser
+metadata:
+  name: {{ include "common.release" . }}-{{ .Values.global.aaiGraphKafkaUser }}
+  labels:
+    strimzi.io/cluster: {{ include "common.release" . }}-strimzi
+spec:
+  authentication:
+    type: scram-sha-512
+  authorization:
+    type: simple
+    acls:
+      - resource:
+          type: topic
+          name: AAI-EVENT
+        operation: All
\ No newline at end of file
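For orientation, assuming a Helm release named onap (an illustrative name, not fixed by the chart), the template above renders to roughly the manifest below; the Strimzi User Operator then creates a Secret of the same name holding the SCRAM password and a ready-made sasl.jaas.config entry:

  apiVersion: kafka.strimzi.io/v1beta2
  kind: KafkaUser
  metadata:
    name: onap-aai-graph-kafka-user
    labels:
      strimzi.io/cluster: onap-strimzi
  spec:
    authentication:
      type: scram-sha-512
    authorization:
      type: simple
      acls:
        - resource:
            type: topic
            name: AAI-EVENT
          operation: All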
diff --git a/kubernetes/aai/components/aai-graphadmin/templates/deployment.yaml b/kubernetes/aai/components/aai-graphadmin/templates/deployment.yaml
index cad213ab9e..9a0ca764bf 100644
--- a/kubernetes/aai/components/aai-graphadmin/templates/deployment.yaml
+++ b/kubernetes/aai/components/aai-graphadmin/templates/deployment.yaml
@@ -118,6 +118,13 @@ spec:
          value: {{ .Values.service.internalPort2 | quote }}
        - name: INTERNAL_PORT_3
          value: {{ .Values.service.internalPort3 | quote }}
+        - name: BOOTSTRAP_SERVERS
+          value: {{ include "common.release" . }}-strimzi-kafka-bootstrap:9092
+        - name: JAAS_CONFIG
+          valueFrom:
+            secretKeyRef:
+              name: {{ include "common.release" . }}-{{ .Values.global.aaiGraphKafkaUser }}
+              key: sasl.jaas.config
        volumeMounts:
        - mountPath: /opt/app/aai-graphadmin/resources/etc/appprops/janusgraph-realtime.properties
          name: config
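With the same illustrative onap release name, the two added entries resolve to something like the following, where the referenced Secret is the one Strimzi generates for the KafkaUser defined above (a sketch, not output captured from a cluster):

  - name: BOOTSTRAP_SERVERS
    value: onap-strimzi-kafka-bootstrap:9092
  - name: JAAS_CONFIG
    valueFrom:
      secretKeyRef:
        name: onap-aai-graph-kafka-user
        key: sasl.jaas.config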
diff --git a/kubernetes/aai/components/aai-graphadmin/values.yaml b/kubernetes/aai/components/aai-graphadmin/values.yaml
index 8b6b5f906a..46e81c83b6 100644
--- a/kubernetes/aai/components/aai-graphadmin/values.yaml
+++ b/kubernetes/aai/components/aai-graphadmin/values.yaml
@@ -25,6 +25,8 @@
# Declare variables to be passed into your templates.
global: # global defaults
  nodePortPrefix: 302
+  kafkaBootstrap: strimzi-kafka-bootstrap
+  aaiGraphKafkaUser: aai-graph-kafka-user
  cassandra:
    #This will instantiate AAI cassandra cluster, default:shared cassandra.
    localCluster: false
@@ -123,9 +125,11 @@ config:
  # Specify the profiles for the graphadmin microservice
  profiles:
-
-    active: dmaap
-
+    active: kafka
+  kafkaBootstrap: strimzi-kafka-bootstrap
+  jaasConfExternalSecret: '{{ include "common.release" . }}-{{ .Values.global.aaiGraphKafkaUser }}'
+  someConfig: graphrandom
+  aaiTopic: AAI-EVENT
  # Specifies the timeout limit for the REST API requests
  timeout:
    enabled: true
@@ -292,3 +296,20 @@ serviceAccount:
log:
  path: /var/log/onap
  logConfigMapNamePrefix: '{{ include "common.fullname" . }}'
+#################################################################
+# Secrets metaconfig
+#################################################################
+secrets:
+  - uid: aai-graph-kafka-user
+    externalSecret: '{{ tpl (default "" .Values.config.jaasConfExternalSecret) . }}'
+    type: genericKV
+    envs:
+      - name: sasl.jaas.config
+        value: '{{ .Values.config.someConfig }}'
+        policy: generate
+kafkaUser:
+  authenticationType: scram-sha-512
+  acls:
+    - name: AAI-EVENT
+      type: topic
+      operations: [Read, Write]
\ No newline at end of file
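The secrets entry ties this together: because externalSecret resolves to the name of the Strimzi-managed KafkaUser secret, the common secret template treats the credential as externally provided and does not generate one from someConfig, which serves only as a fallback value. A deployer who provisions Kafka credentials outside Strimzi could point the chart at another secret through a small override file; the file name and secret name below are illustrative assumptions, not part of this change:

  # my-overrides.yaml (hypothetical)
  config:
    jaasConfExternalSecret: 'my-preprovisioned-kafka-user-secret'

applied with, for example: helm upgrade <release> <chart> -f my-overrides.yaml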