Diffstat (limited to 'components/datalake-handler')
-rw-r--r--  components/datalake-handler/des/Dockerfile | 47
-rw-r--r--  components/datalake-handler/des/pom.xml | 8
-rw-r--r--  components/datalake-handler/des/src/assembly/init_db/db_scripts/init_db.sql | 18
-rw-r--r--  components/datalake-handler/des/src/assembly/init_db/db_scripts/init_db_data.sql | 46
-rw-r--r--  components/datalake-handler/des/src/assembly/run.sh | 2
-rw-r--r--  components/datalake-handler/feeder/Dockerfile | 50
-rw-r--r--  components/datalake-handler/feeder/pom.xml | 10
-rw-r--r--  components/datalake-handler/feeder/src/assembly/init_db/db_scripts/init_db.sql | 258
-rw-r--r--  components/datalake-handler/feeder/src/assembly/init_db/db_scripts/init_db_data.sql | 117
-rw-r--r--  components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/10_check-db-exist | 31
-rw-r--r--  components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/11_create-database | 9
-rw-r--r--  components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/15_db-adduser | 34
-rw-r--r--  components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/20_db-initdb | 9
-rw-r--r--  components/datalake-handler/feeder/src/assembly/run.sh | 13
-rw-r--r--  components/datalake-handler/feeder/src/main/java/org/onap/datalake/feeder/Application.java | 16
-rw-r--r--  components/datalake-handler/feeder/src/main/resources/application.properties | 6
16 files changed, 318 insertions(+), 356 deletions(-)
diff --git a/components/datalake-handler/des/Dockerfile b/components/datalake-handler/des/Dockerfile
index 584ba9d6..46683a1d 100644
--- a/components/datalake-handler/des/Dockerfile
+++ b/components/datalake-handler/des/Dockerfile
@@ -1,6 +1,7 @@
# ============LICENSE_START===================================================
# Copyright (C) 2020 China Mobile.
# Copyright (C) 2021 Wipro Limited
+# Copyright (C) 2021 Samsung Electronics.
# ============================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,36 +17,34 @@
#
# SPDX-License-Identifier: Apache-2.0
# ============LICENSE_END=====================================================
-
FROM nexus3.onap.org:10001/onap/integration-java11:8.0.0
-MAINTAINER Kai Lu <lukai@chinamobile.com>
+LABEL maintainer="Kai Lu <lukai@chinamobile.com>"
+
+ARG user=datalake
+ARG group=datalake
EXPOSE 1681
USER root
-RUN addgroup datalake && adduser -G datalake -h /home/datalake -D datalake
-RUN mkdir /home/datalake/db_init
-
-WORKDIR /home/datalake
+WORKDIR /datalake
+RUN addgroup $group && adduser --system --shell /bin/false --disabled-password --no-create-home --ingroup $group $user && \
+ mkdir -p /datalake/db_init && \
+ chown -R $user:$group /datalake && \
+ chmod g+s /datalake
#add the fat jar
-COPY target/${JAR_FILE} .
-COPY src/assembly/run.sh .
-COPY src/assembly/init_db/db_scripts/init_db.sql .
-COPY src/assembly/init_db/db_scripts/init_db_data.sql .
-
-WORKDIR /home/datalake/db_init
-ADD src/assembly/init_db/scripts/db_init .
-
-WORKDIR /home/datalake
-
-RUN chmod -R 0755 ./* && \
- chown -R datalake:datalake /home/datalake
-
-RUN apk --update add postgresql-client curl
-
-USER datalake
-
-ENTRYPOINT /home/datalake/run.sh
+COPY --chown=$user:$group target/${JAR_FILE} .
+COPY --chown=$user:$group src/assembly/run.sh .
+COPY --chown=$user:$group src/assembly/init_db/db_scripts/init_db.sql .
+COPY --chown=$user:$group src/assembly/init_db/db_scripts/init_db_data.sql .
+COPY --chown=$user:$group src/assembly/init_db/scripts/db_init ./db_init/
+
+RUN chmod -R 0755 *.sql && \
+ chmod u+x run.sh && \
+ apk add --no-cache postgresql-client curl
+
+USER $user
+ENTRYPOINT ["/bin/sh"]
+CMD ["run.sh"]
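
The des image now takes the runtime user and group as build ARGs, but ${JAR_FILE} in the COPY step is only resolved if it too is supplied at build time. A build-invocation sketch, with an illustrative jar name and image tag (neither is taken from this change):

    docker build \
        --build-arg JAR_FILE=des-1.1.0-execute.jar \
        -t datalake-des .
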
diff --git a/components/datalake-handler/des/pom.xml b/components/datalake-handler/des/pom.xml
index e23c0c72..4beba3ce 100644
--- a/components/datalake-handler/des/pom.xml
+++ b/components/datalake-handler/des/pom.xml
@@ -313,6 +313,14 @@
</dependency>
</dependencies>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-deploy-plugin</artifactId>
+ <version>2.8</version>
+ <configuration>
+ <skip>true</skip>
+ </configuration>
+ </plugin>
</plugins>
</build>
</project>
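
The added maven-deploy-plugin block with <skip>true</skip> keeps this module out of the artifact deployment step. The same effect is available from the command line through the plugin's standard skip property:

    mvn deploy -Dmaven.deploy.skip=true
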
diff --git a/components/datalake-handler/des/src/assembly/init_db/db_scripts/init_db.sql b/components/datalake-handler/des/src/assembly/init_db/db_scripts/init_db.sql
index 9b391d0f..34e4ece8 100644
--- a/components/datalake-handler/des/src/assembly/init_db/db_scripts/init_db.sql
+++ b/components/datalake-handler/des/src/assembly/init_db/db_scripts/init_db.sql
@@ -32,7 +32,7 @@ CREATE TABLE db_type (
id varchar(255) NOT NULL,
default_port int DEFAULT NULL,
name varchar(255) NOT NULL,
- tool bit(1) NOT NULL,
+ tool boolean NOT NULL,
PRIMARY KEY (id)
);
CREATE SEQUENCE db_seq;
@@ -40,8 +40,8 @@ CREATE SEQUENCE db_seq;
CREATE TABLE db (
id int NOT NULL DEFAULT NEXTVAL ('db_seq'),
database_name varchar(255) DEFAULT NULL,
- enabled bit(1) NOT NULL,
- encrypt bit(1) DEFAULT NULL,
+ enabled boolean NOT NULL,
+ encrypt boolean DEFAULT NULL,
host varchar(255) DEFAULT NULL,
login varchar(255) DEFAULT NULL,
name varchar(255) DEFAULT NULL,
@@ -74,7 +74,7 @@ CREATE TABLE design (
body text DEFAULT NULL,
name varchar(255) DEFAULT NULL,
note varchar(255) DEFAULT NULL,
- submitted bit(1) DEFAULT NULL,
+ submitted boolean DEFAULT NULL,
design_type_id varchar(255) NOT NULL,
topic_name_id varchar(255) NOT NULL,
PRIMARY KEY (id),
@@ -90,14 +90,14 @@ CREATE TABLE kafka (
id int NOT NULL DEFAULT NEXTVAL ('kafka_seq'),
broker_list varchar(255) NOT NULL,
consumer_count int DEFAULT 3,
- enabled bit(1) NOT NULL,
+ enabled boolean NOT NULL,
excluded_topic varchar(1023) DEFAULT '__consumer_offsets,__transaction_state',
"group" varchar(255) DEFAULT 'datalake',
included_topic varchar(255) DEFAULT NULL,
login varchar(255) DEFAULT NULL,
name varchar(255) NOT NULL,
pass varchar(255) DEFAULT NULL,
- secure bit(1) DEFAULT b'0',
+ secure boolean DEFAULT FALSE,
security_protocol varchar(255) DEFAULT NULL,
timeout_sec int DEFAULT 10,
zk varchar(255) NOT NULL,
@@ -108,14 +108,14 @@ CREATE SEQUENCE topic_seq;
CREATE TABLE topic (
id int NOT NULL DEFAULT NEXTVAL ('topic_seq'),
aggregate_array_path varchar(255) DEFAULT NULL,
- correlate_cleared_message bit(1) NOT NULL DEFAULT b'0',
+ correlate_cleared_message boolean NOT NULL DEFAULT FALSE,
data_format varchar(255) DEFAULT NULL,
- enabled bit(1) NOT NULL,
+ enabled boolean NOT NULL,
flatten_array_path varchar(255) DEFAULT NULL,
login varchar(255) DEFAULT NULL,
message_id_path varchar(255) DEFAULT NULL,
pass varchar(255) DEFAULT NULL,
- save_raw bit(1) NOT NULL DEFAULT b'0',
+ save_raw boolean NOT NULL DEFAULT FALSE,
ttl_day int DEFAULT NULL,
topic_name_id varchar(255) NOT NULL,
PRIMARY KEY (id),
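
The hunks above replace MariaDB-style bit(1) flags with native PostgreSQL booleans. A small sketch of what the change buys (query is illustrative):

    -- MariaDB accepted bit literals:   enabled bit(1) NOT NULL DEFAULT b'0'
    -- PostgreSQL uses real booleans:   enabled boolean NOT NULL DEFAULT FALSE
    SELECT name FROM db WHERE enabled = TRUE;   -- no bit/boolean casting required
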
diff --git a/components/datalake-handler/des/src/assembly/init_db/db_scripts/init_db_data.sql b/components/datalake-handler/des/src/assembly/init_db/db_scripts/init_db_data.sql
index 0e80e088..7defefdc 100644
--- a/components/datalake-handler/des/src/assembly/init_db/db_scripts/init_db_data.sql
+++ b/components/datalake-handler/des/src/assembly/init_db/db_scripts/init_db_data.sql
@@ -38,59 +38,59 @@ INSERT INTO kafka(
1
,'main Kafka cluster' -- name - IN varchar (255)
,3 -- consumer_count - IN int(11)
- ,B'1' -- enabled - IN bit(1)
+ ,true -- enabled - IN bit(1)
,'dlgroup' -- group - IN varchar(255)
,'message-router-kafka:9092' -- host_port - IN varchar(255)
,'' -- included_topic - IN varchar(255)
,'admin' -- login - IN varchar(255)
,'admin-secret' -- pass - IN varchar(255)
- ,B'0' -- secure - IN bit(1)
+ ,false -- secure - IN bit(1)
,'SASL_PLAINTEXT' -- security_protocol - IN varchar(255)
,10 -- timeout_sec - IN int(11)
,'message-router-zookeeper:2181' -- zk - IN varchar(255)
);
-insert into db_type (id, name, tool) values ('CB', 'Couchbase', B'0');
-insert into db_type (id, name, tool) values ('ES', 'Elasticsearch', B'0');
-insert into db_type (id, name, tool,default_port) values ('MONGO', 'MongoDB', B'0', 27017);
-insert into db_type (id, name, tool) values ('DRUID', 'Druid', B'0');
-insert into db_type (id, name, tool) values ('HDFS', 'HDFS', B'0');
-insert into db_type (id, name, tool) values ('KIBANA', 'Kibana', B'1');
-insert into db_type (id, name, tool) values ('SUPERSET', 'Apache Superset', B'1');
-insert into db (id, db_type_id, enabled, encrypt, name,host,login,pass,database_name) values (1, 'CB', B'1', B'1', 'Couchbase 1','dl-couchbase','dl','dl1234','datalake');
-insert into db (id, db_type_id, enabled, encrypt, name,host) values (2, 'ES', B'1', B'1', 'Elasticsearch','dl-es');
-insert into db (id, db_type_id, enabled, encrypt, name,host,port,database_name,presto_catalog) values (3, 'MONGO', B'1', B'1', 'MongoDB 1','dl-mongodb',27017,'datalake','mongodb');
-insert into db (id, db_type_id, enabled, encrypt, name,host) values (4, 'DRUID', B'1',B'1', 'Druid','dl-druid');
-insert into db (id, db_type_id, enabled, encrypt, name,host,login) values (5, 'HDFS', B'1', B'1', 'Hadoop Cluster','dl-hdfs','dl');
-insert into db (id, db_type_id, enabled, encrypt, name,host) values (6, 'KIBANA', B'1', B'0', 'Kibana demo','dl-es');
-insert into db (id, db_type_id, enabled, encrypt, name,host) values (7, 'SUPERSET', B'1', B'0', 'Superset demo','dl-druid');
+insert into db_type (id, name, tool) values ('CB', 'Couchbase', false);
+insert into db_type (id, name, tool) values ('ES', 'Elasticsearch', false);
+insert into db_type (id, name, tool,default_port) values ('MONGO', 'MongoDB', false, 27017);
+insert into db_type (id, name, tool) values ('DRUID', 'Druid', false);
+insert into db_type (id, name, tool) values ('HDFS', 'HDFS', false);
+insert into db_type (id, name, tool) values ('KIBANA', 'Kibana', true);
+insert into db_type (id, name, tool) values ('SUPERSET', 'Apache Superset', true);
+insert into db (id, db_type_id, enabled, encrypt, name,host,login,pass,database_name) values (1, 'CB', true, true, 'Couchbase 1','dl-couchbase','dl','dl1234','datalake');
+insert into db (id, db_type_id, enabled, encrypt, name,host) values (2, 'ES', true, true, 'Elasticsearch','dl-es');
+insert into db (id, db_type_id, enabled, encrypt, name,host,port,database_name,presto_catalog) values (3, 'MONGO', true, true, 'MongoDB 1','dl-mongodb',27017,'datalake','mongodb');
+insert into db (id, db_type_id, enabled, encrypt, name,host) values (4, 'DRUID', true,true, 'Druid','dl-druid');
+insert into db (id, db_type_id, enabled, encrypt, name,host,login) values (5, 'HDFS', true, true, 'Hadoop Cluster','dl-hdfs','dl');
+insert into db (id, db_type_id, enabled, encrypt, name,host) values (6, 'KIBANA', true, false, 'Kibana demo','dl-es');
+insert into db (id, db_type_id, enabled, encrypt, name,host) values (7, 'SUPERSET', true, false, 'Superset demo','dl-druid');
insert into topic_name (id) values ('_DL_DEFAULT_');
insert into topic_name (id) values ('unauthenticated.SEC_FAULT_OUTPUT');
insert into topic_name (id) values ('unauthenticated.VES_MEASUREMENT_OUTPUT');
insert into topic_name (id) values ('EPC');
insert into topic_name (id) values ('HW');
-- in production, default enabled should be off
-insert into topic(id, topic_name_id,enabled,save_raw,ttl_day,data_format) values (1, '_DL_DEFAULT_',B'1',B'0',3650,'JSON');
+insert into topic(id, topic_name_id,enabled,save_raw,ttl_day,data_format) values (1, '_DL_DEFAULT_',true,false,3650,'JSON');
insert into topic(id, topic_name_id,correlate_cleared_message,enabled, message_id_path,data_format)
-values (2, 'unauthenticated.SEC_FAULT_OUTPUT',B'1',B'1','/event/commonEventHeader/eventName,/event/commonEventHeader/reportingEntityName,/event/faultFields/specificProblem,/event/commonEventHeader/eventId','JSON');
+values (2, 'unauthenticated.SEC_FAULT_OUTPUT',true,true,'/event/commonEventHeader/eventName,/event/commonEventHeader/reportingEntityName,/event/faultFields/specificProblem,/event/commonEventHeader/eventId','JSON');
insert into topic(id, topic_name_id,enabled, aggregate_array_path,flatten_array_path,data_format)
-values (3, 'unauthenticated.VES_MEASUREMENT_OUTPUT',B'1',
+values (3, 'unauthenticated.VES_MEASUREMENT_OUTPUT',true,
'/event/measurementsForVfScalingFields/memoryUsageArray,/event/measurementsForVfScalingFields/diskUsageArray,/event/measurementsForVfScalingFields/cpuUsageArray,/event/measurementsForVfScalingFields/vNicPerformanceArray',
'/event/measurementsForVfScalingFields/astriMeasurement/astriDPMeasurementArray/astriInterface',
'JSON');
insert into topic(id, topic_name_id,enabled, flatten_array_path,data_format)
-values (4, 'EPC',B'1', '/event/measurementsForVfScalingFields/astriMeasurement/astriDPMeasurementArray/astriInterface', 'JSON');
+values (4, 'EPC',true, '/event/measurementsForVfScalingFields/astriMeasurement/astriDPMeasurementArray/astriInterface', 'JSON');
insert into topic(id, topic_name_id,enabled, aggregate_array_path,data_format)
-values (5, 'HW',B'1',
+values (5, 'HW',true,
'/event/measurementsForVfScalingFields/memoryUsageArray,/event/measurementsForVfScalingFields/diskUsageArray,/event/measurementsForVfScalingFields/cpuUsageArray,/event/measurementsForVfScalingFields/vNicPerformanceArray',
'JSON');
-insert into map_db_topic(db_id,topic_id) select db.id, topic.id from db_type, db, topic where db.db_type_id=db_type.id and db_type.tool=B'0';
+insert into map_db_topic(db_id,topic_id) select db.id, topic.id from db_type, db, topic where db.db_type_id=db_type.id and db_type.tool=false;
insert into map_kafka_topic(kafka_id,topic_id) select kafka.id, topic.id from kafka, topic;
insert into design_type (id, name, db_type_id) values ('KIBANA_DB', 'Kibana Dashboard', 'KIBANA');
insert into design_type (id, name, db_type_id) values ('KIBANA_SEARCH', 'Kibana Search', 'KIBANA');
insert into design_type (id, name, db_type_id) values ('KIBANA_VISUAL', 'Kibana Visualization', 'KIBANA');
insert into design_type (id, name, db_type_id) values ('ES_MAPPING', 'Elasticsearch Field Mapping Template', 'ES');
insert into design_type (id, name, db_type_id) values ('DRUID_KAFKA_SPEC', 'Druid Kafka Indexing Service Supervisor Spec', 'DRUID');
-insert into design (id, name,topic_name_id, submitted,body, design_type_id) values (1, 'Kibana Dashboard on EPC test1', 'EPC', B'0', 'body here', 'KIBANA_DB');
+insert into design (id, name,topic_name_id, submitted,body, design_type_id) values (1, 'Kibana Dashboard on EPC test1', 'EPC',false, 'body here', 'KIBANA_DB');
insert into map_db_design (design_id,db_id ) values (1, 6);
insert into data_exposure(id,note,sql_template,db_id) values ('totalBandwidth','KPI bandwidth history','select from_unixtime(commonEventHeader.lastEpochMicrosec/1000) as timeStamp, sum(measurementFields.additionalFields."UPF.N3IncPkt._Dnn"+measurementFields.additionalFields."UPF.N3OgPkt._Dnn") as bandwidth from upf where commonEventHeader.sourceId = ''${id}'' and ( from_unixtime(commonEventHeader.lastEpochMicrosec/1000) between from_iso8601_timestamp( ''${timeStamp}'') - interval ''${hour}'' hour and from_iso8601_timestamp( ''${timeStamp}'') ) group by commonEventHeader.lastEpochMicrosec order by commonEventHeader.lastEpochMicrosec desc ',3);
insert into data_exposure(id,note,sql_template,db_id) values ('totalTraffic','KPI sum over history','select commonEventHeader.sourceId as id, sum(measurementFields.additionalFields."UPF.N3IncPkt._Dnn"+measurementFields.additionalFields."UPF.N3OgPkt._Dnn") as totalTraffic from upf where commonEventHeader.sourceId = ''${id}'' and from_unixtime(commonEventHeader.lastEpochMicrosec/1000) <= from_iso8601_timestamp( ''${timeStamp}'') ',3);
diff --git a/components/datalake-handler/des/src/assembly/run.sh b/components/datalake-handler/des/src/assembly/run.sh
index e6df159d..c8014d2a 100644
--- a/components/datalake-handler/des/src/assembly/run.sh
+++ b/components/datalake-handler/des/src/assembly/run.sh
@@ -28,7 +28,7 @@ sh db_init/20_db-initdb
echo "finish init db"
-cmd=`find . -name des*-execute.jar`
+cmd=`find . -name "des*-execute.jar"`
if [ -n "$cmd" ]; then
java -jar "$cmd"
else
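
Quoting the -name pattern is the whole fix here: unquoted, the shell globs des*-execute.jar against the current directory before find runs. Sketch (jar name illustrative):

    cmd=`find . -name "des*-execute.jar"`   # quoted: the pattern reaches find intact
    # unquoted, the shell may expand des*-execute.jar first; with more than one
    # matching jar in the working directory, find aborts with
    # "paths must precede expression"
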
diff --git a/components/datalake-handler/feeder/Dockerfile b/components/datalake-handler/feeder/Dockerfile
index 1a853986..769d21b8 100644
--- a/components/datalake-handler/feeder/Dockerfile
+++ b/components/datalake-handler/feeder/Dockerfile
@@ -1,33 +1,31 @@
-FROM openjdk:11-jre-slim
+FROM nexus3.onap.org:10001/onap/integration-java11:8.0.0
-MAINTAINER Guobiao Mo <guobiaomo@chinamobile.com>
+LABEL maintainer="Guobiao Mo <guobiaomo@chinamobile.com>"
-EXPOSE 1680
-
-RUN groupadd -r datalake && useradd -r -g datalake datalake
-RUN mkdir /home/datalake
+ARG user=datalake
+ARG group=datalake
-USER datalake
-
-WORKDIR /home/datalake
-
-#add the fat jar
-COPY target/${JAR_FILE} /home/datalake/
-COPY src/assembly/run.sh /home/datalake/
+EXPOSE 1680
-WORKDIR /home/datalake/db_init
-ADD src/assembly/init_db/scripts/db_init .
USER root
-RUN chmod 0755 ./*
-WORKDIR /home/datalake
-COPY src/assembly/init_db/db_scripts/init_db.sql .
-COPY src/assembly/init_db/db_scripts/init_db_data.sql .
-
-RUN apt update && \
- apt install -y mariadb-client && \
- apt install -y curl
-USER datalake
-
-CMD ["sh", "run.sh"]
+WORKDIR /datalake
+RUN addgroup $group && adduser --system --shell /bin/false --disabled-password --no-create-home --ingroup $group $user && \
+ chown -R $user:$group /datalake && \
+ chmod g+s /datalake && \
+ mkdir -p /datalake/db_init
+#add the fat jar
+COPY --chown=$user:$group target/${JAR_FILE} .
+COPY --chown=$user:$group src/assembly/run.sh .
+COPY --chown=$user:$group src/assembly/init_db/scripts/db_init ./db_init/
+COPY --chown=$user:$group src/assembly/init_db/db_scripts/init_db.sql .
+COPY --chown=$user:$group src/assembly/init_db/db_scripts/init_db_data.sql .
+
+RUN chmod -R 0755 *.sql && \
+ chmod u+x run.sh && \
+ apk add --no-cache postgresql-client curl
+
+USER $user
+ENTRYPOINT ["/bin/sh"]
+CMD ["run.sh"]
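
With USER $user the feeder now runs unprivileged, and only port 1680 is exposed. A run sketch with illustrative values for the environment the init scripts expect:

    docker run --rm -p 1680:1680 \
        -e PG_HOST=dl-postgres -e PG_PORT=5432 -e PG_DB=datalake \
        -e PG_USER=dl -e PG_PASSWORD=dl1234 \
        datalake-feeder
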
diff --git a/components/datalake-handler/feeder/pom.xml b/components/datalake-handler/feeder/pom.xml
index 2a6ad308..49ad6094 100644
--- a/components/datalake-handler/feeder/pom.xml
+++ b/components/datalake-handler/feeder/pom.xml
@@ -42,11 +42,11 @@
<version>${hadoop.version}</version>
</dependency>
- <dependency>
- <groupId>org.mariadb.jdbc</groupId>
- <artifactId>mariadb-java-client</artifactId>
- <version>2.4.1</version>
- </dependency>
+ <dependency>
+ <groupId>org.postgresql</groupId>
+ <artifactId>postgresql</artifactId>
+ <version>42.2.18</version>
+ </dependency>
<dependency>
<groupId>org.json</groupId>
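
The feeder swaps the MariaDB connector for the PostgreSQL JDBC driver. A standalone connectivity check against the new driver, with illustrative host and credentials (the URL shape matches the DataSource built in Application.java further down):

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class PgSmokeTest {
        public static void main(String[] args) throws Exception {
            // the driver auto-registers from the org.postgresql:postgresql jar
            String url = "jdbc:postgresql://dl-postgres:5432/datalake";
            try (Connection c = DriverManager.getConnection(url, "dl", "dl1234")) {
                System.out.println("connected: " + c.isValid(5));
            }
        }
    }
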
diff --git a/components/datalake-handler/feeder/src/assembly/init_db/db_scripts/init_db.sql b/components/datalake-handler/feeder/src/assembly/init_db/db_scripts/init_db.sql
index b363988d..30124fa1 100644
--- a/components/datalake-handler/feeder/src/assembly/init_db/db_scripts/init_db.sql
+++ b/components/datalake-handler/feeder/src/assembly/init_db/db_scripts/init_db.sql
@@ -3,6 +3,7 @@
* ONAP : DATALAKE
* ================================================================================
* Copyright 2019-2020 China Mobile
+* Copyright (C) 2021 Wipro Limited
*=================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,125 +19,138 @@
* ============LICENSE_END=========================================================
*/
-create database if not exists datalake;
-use datalake;
-
-CREATE TABLE `topic_name` (
- `id` varchar(255) NOT NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-CREATE TABLE `db_type` (
- `id` varchar(255) NOT NULL,
- `default_port` int(11) DEFAULT NULL,
- `name` varchar(255) NOT NULL,
- `tool` bit(1) NOT NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-CREATE TABLE `db` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `database_name` varchar(255) DEFAULT NULL,
- `enabled` bit(1) NOT NULL,
- `encrypt` bit(1) DEFAULT NULL,
- `host` varchar(255) DEFAULT NULL,
- `login` varchar(255) DEFAULT NULL,
- `name` varchar(255) DEFAULT NULL,
- `pass` varchar(255) DEFAULT NULL,
- `port` int(11) DEFAULT NULL,
- `property1` varchar(255) DEFAULT NULL,
- `property2` varchar(255) DEFAULT NULL,
- `property3` varchar(255) DEFAULT NULL,
- `db_type_id` varchar(255) NOT NULL,
- PRIMARY KEY (`id`),
- KEY `FK3njadtw43ieph7ftt4kxdhcko` (`db_type_id`),
- CONSTRAINT `FK3njadtw43ieph7ftt4kxdhcko` FOREIGN KEY (`db_type_id`) REFERENCES `db_type` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-CREATE TABLE `design_type` (
- `id` varchar(255) NOT NULL,
- `name` varchar(255) DEFAULT NULL,
- `note` varchar(255) DEFAULT NULL,
- `db_type_id` varchar(255) NOT NULL,
- PRIMARY KEY (`id`),
- KEY `FKm8rkv2qkq01gsmeq1c3y4w02x` (`db_type_id`),
- CONSTRAINT `FKm8rkv2qkq01gsmeq1c3y4w02x` FOREIGN KEY (`db_type_id`) REFERENCES `db_type` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-CREATE TABLE `design` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `body` text DEFAULT NULL,
- `name` varchar(255) DEFAULT NULL,
- `note` varchar(255) DEFAULT NULL,
- `submitted` bit(1) DEFAULT NULL,
- `design_type_id` varchar(255) NOT NULL,
- `topic_name_id` varchar(255) NOT NULL,
- PRIMARY KEY (`id`),
- KEY `FKo43yi6aputq6kwqqu8eqbspm5` (`design_type_id`),
- KEY `FKabb8e74230glxpaiai4aqsr34` (`topic_name_id`),
- CONSTRAINT `FKabb8e74230glxpaiai4aqsr34` FOREIGN KEY (`topic_name_id`) REFERENCES `topic_name` (`id`),
- CONSTRAINT `FKo43yi6aputq6kwqqu8eqbspm5` FOREIGN KEY (`design_type_id`) REFERENCES `design_type` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-CREATE TABLE `kafka` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `broker_list` varchar(255) NOT NULL,
- `consumer_count` int(11) DEFAULT 3,
- `enabled` bit(1) NOT NULL,
- `excluded_topic` varchar(1023) DEFAULT '__consumer_offsets,__transaction_state',
- `group` varchar(255) DEFAULT 'datalake',
- `included_topic` varchar(255) DEFAULT NULL,
- `login` varchar(255) DEFAULT NULL,
- `name` varchar(255) NOT NULL,
- `pass` varchar(255) DEFAULT NULL,
- `secure` bit(1) DEFAULT b'0',
- `security_protocol` varchar(255) DEFAULT NULL,
- `timeout_sec` int(11) DEFAULT 10,
- `zk` varchar(255) NOT NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-CREATE TABLE `topic` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `aggregate_array_path` varchar(255) DEFAULT NULL,
- `correlate_cleared_message` bit(1) NOT NULL DEFAULT b'0',
- `data_format` varchar(255) DEFAULT NULL,
- `enabled` bit(1) NOT NULL,
- `flatten_array_path` varchar(255) DEFAULT NULL,
- `login` varchar(255) DEFAULT NULL,
- `message_id_path` varchar(255) DEFAULT NULL,
- `pass` varchar(255) DEFAULT NULL,
- `save_raw` bit(1) NOT NULL DEFAULT b'0',
- `ttl_day` int(11) DEFAULT NULL,
- `topic_name_id` varchar(255) NOT NULL,
- PRIMARY KEY (`id`),
- KEY `FKj3pldlfaokdhqjfva8n3pkjca` (`topic_name_id`),
- CONSTRAINT `FKj3pldlfaokdhqjfva8n3pkjca` FOREIGN KEY (`topic_name_id`) REFERENCES `topic_name` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-CREATE TABLE `map_db_design` (
- `design_id` int(11) NOT NULL,
- `db_id` int(11) NOT NULL,
- PRIMARY KEY (`design_id`,`db_id`),
- KEY `FKhpn49r94k05mancjtn301m2p0` (`db_id`),
- CONSTRAINT `FKfli240v96cfjbnmjqc0fvvd57` FOREIGN KEY (`design_id`) REFERENCES `design` (`id`),
- CONSTRAINT `FKhpn49r94k05mancjtn301m2p0` FOREIGN KEY (`db_id`) REFERENCES `db` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-CREATE TABLE `map_db_topic` (
- `topic_id` int(11) NOT NULL,
- `db_id` int(11) NOT NULL,
- PRIMARY KEY (`db_id`,`topic_id`),
- KEY `FKq1jon185jnrr7dv1dd8214uw0` (`topic_id`),
- CONSTRAINT `FKirro29ojp7jmtqx9m1qxwixcc` FOREIGN KEY (`db_id`) REFERENCES `db` (`id`),
- CONSTRAINT `FKq1jon185jnrr7dv1dd8214uw0` FOREIGN KEY (`topic_id`) REFERENCES `topic` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-CREATE TABLE `map_kafka_topic` (
- `kafka_id` int(11) NOT NULL,
- `topic_id` int(11) NOT NULL,
- PRIMARY KEY (`topic_id`,`kafka_id`),
- KEY `FKtdrme4h7rxfh04u2i2wqu23g5` (`kafka_id`),
- CONSTRAINT `FK5q7jdxy54au5rcrhwa4a5igqi` FOREIGN KEY (`topic_id`) REFERENCES `topic` (`id`),
- CONSTRAINT `FKtdrme4h7rxfh04u2i2wqu23g5` FOREIGN KEY (`kafka_id`) REFERENCES `kafka` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE topic_name (
+ id varchar(255) NOT NULL,
+ PRIMARY KEY (id)
+);
+
+CREATE TABLE db_type (
+ id varchar(255) NOT NULL,
+ default_port int DEFAULT NULL,
+ name varchar(255) NOT NULL,
+ tool boolean NOT NULL,
+ PRIMARY KEY (id)
+);
+
+CREATE SEQUENCE db_seq;
+
+CREATE TABLE db (
+ id int NOT NULL DEFAULT NEXTVAL ('db_seq'),
+ database_name varchar(255) DEFAULT NULL,
+ enabled boolean NOT NULL,
+ encrypt boolean DEFAULT NULL,
+ host varchar(255) DEFAULT NULL,
+ login varchar(255) DEFAULT NULL,
+ name varchar(255) DEFAULT NULL,
+ pass varchar(255) DEFAULT NULL,
+ port int DEFAULT NULL,
+ property1 varchar(255) DEFAULT NULL,
+ property2 varchar(255) DEFAULT NULL,
+ property3 varchar(255) DEFAULT NULL,
+ db_type_id varchar(255) NOT NULL,
+ PRIMARY KEY (id),
+ CONSTRAINT FK3njadtw43ieph7ftt4kxdhcko FOREIGN KEY (db_type_id) REFERENCES db_type (id)
+);
+
+CREATE INDEX FK3njadtw43ieph7ftt4kxdhcko ON db (db_type_id);
+
+CREATE TABLE design_type (
+ id varchar(255) NOT NULL,
+ name varchar(255) DEFAULT NULL,
+ note varchar(255) DEFAULT NULL,
+ db_type_id varchar(255) NOT NULL,
+ PRIMARY KEY (id),
+ CONSTRAINT FKm8rkv2qkq01gsmeq1c3y4w02x FOREIGN KEY (db_type_id) REFERENCES db_type (id)
+);
+
+CREATE INDEX FKm8rkv2qkq01gsmeq1c3y4w02x ON design_type (db_type_id);
+
+CREATE SEQUENCE design_seq;
+
+CREATE TABLE design (
+ id int NOT NULL DEFAULT NEXTVAL ('design_seq'),
+ body text DEFAULT NULL,
+ name varchar(255) DEFAULT NULL,
+ note varchar(255) DEFAULT NULL,
+ submitted boolean DEFAULT NULL,
+ design_type_id varchar(255) NOT NULL,
+ topic_name_id varchar(255) NOT NULL,
+ PRIMARY KEY (id),
+ CONSTRAINT FKabb8e74230glxpaiai4aqsr34 FOREIGN KEY (topic_name_id) REFERENCES topic_name (id),
+ CONSTRAINT FKo43yi6aputq6kwqqu8eqbspm5 FOREIGN KEY (design_type_id) REFERENCES design_type (id)
+);
+
+CREATE INDEX FKo43yi6aputq6kwqqu8eqbspm5 ON design (design_type_id);
+CREATE INDEX FKabb8e74230glxpaiai4aqsr34 ON design (topic_name_id);
+
+CREATE SEQUENCE kafka_seq;
+
+CREATE TABLE kafka (
+ id int NOT NULL DEFAULT NEXTVAL ('kafka_seq'),
+ broker_list varchar(255) NOT NULL,
+ consumer_count int DEFAULT 3,
+ enabled boolean NOT NULL,
+ excluded_topic varchar(1023) DEFAULT '__consumer_offsets,__transaction_state',
+ "group" varchar(255) DEFAULT 'datalake',
+ included_topic varchar(255) DEFAULT NULL,
+ login varchar(255) DEFAULT NULL,
+ name varchar(255) NOT NULL,
+ pass varchar(255) DEFAULT NULL,
+ secure boolean DEFAULT FALSE,
+ security_protocol varchar(255) DEFAULT NULL,
+ timeout_sec int DEFAULT 10,
+ zk varchar(255) NOT NULL,
+ PRIMARY KEY (id)
+);
+
+CREATE SEQUENCE topic_seq;
+
+CREATE TABLE topic (
+ id int NOT NULL DEFAULT NEXTVAL ('topic_seq'),
+ aggregate_array_path varchar(255) DEFAULT NULL,
+ correlate_cleared_message boolean NOT NULL DEFAULT FALSE,
+ data_format varchar(255) DEFAULT NULL,
+ enabled boolean NOT NULL,
+ flatten_array_path varchar(255) DEFAULT NULL,
+ login varchar(255) DEFAULT NULL,
+ message_id_path varchar(255) DEFAULT NULL,
+ pass varchar(255) DEFAULT NULL,
+ save_raw boolean NOT NULL DEFAULT FALSE,
+ ttl_day int DEFAULT NULL,
+ topic_name_id varchar(255) NOT NULL,
+ PRIMARY KEY (id),
+ CONSTRAINT FKj3pldlfaokdhqjfva8n3pkjca FOREIGN KEY (topic_name_id) REFERENCES topic_name (id)
+);
+
+CREATE INDEX FKj3pldlfaokdhqjfva8n3pkjca ON topic (topic_name_id);
+
+CREATE TABLE map_db_design (
+ design_id int NOT NULL,
+ db_id int NOT NULL,
+ PRIMARY KEY (design_id,db_id),
+ CONSTRAINT FKfli240v96cfjbnmjqc0fvvd57 FOREIGN KEY (design_id) REFERENCES design (id),
+ CONSTRAINT FKhpn49r94k05mancjtn301m2p0 FOREIGN KEY (db_id) REFERENCES db (id)
+);
+
+CREATE INDEX FKhpn49r94k05mancjtn301m2p0 ON map_db_design (db_id);
+
+CREATE TABLE map_db_topic (
+ topic_id int NOT NULL,
+ db_id int NOT NULL,
+ PRIMARY KEY (db_id,topic_id),
+ CONSTRAINT FKirro29ojp7jmtqx9m1qxwixcc FOREIGN KEY (db_id) REFERENCES db (id),
+ CONSTRAINT FKq1jon185jnrr7dv1dd8214uw0 FOREIGN KEY (topic_id) REFERENCES topic (id)
+);
+
+CREATE INDEX FKq1jon185jnrr7dv1dd8214uw0 ON map_db_topic (topic_id);
+
+CREATE TABLE map_kafka_topic (
+ kafka_id int NOT NULL,
+ topic_id int NOT NULL,
+ PRIMARY KEY (topic_id,kafka_id),
+ CONSTRAINT FK5q7jdxy54au5rcrhwa4a5igqi FOREIGN KEY (topic_id) REFERENCES topic (id),
+ CONSTRAINT FKtdrme4h7rxfh04u2i2wqu23g5 FOREIGN KEY (kafka_id) REFERENCES kafka (id)
+);
+
+CREATE INDEX FKtdrme4h7rxfh04u2i2wqu23g5 ON map_kafka_topic (kafka_id);
+
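
The rewritten schema replaces AUTO_INCREMENT columns with explicit sequences consumed through NEXTVAL defaults. Usage sketch (topic name illustrative):

    INSERT INTO topic_name (id) VALUES ('DEMO');
    INSERT INTO topic (topic_name_id, enabled) VALUES ('DEMO', TRUE);
    SELECT currval('topic_seq');   -- the id that NEXTVAL('topic_seq') just assigned
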
diff --git a/components/datalake-handler/feeder/src/assembly/init_db/db_scripts/init_db_data.sql b/components/datalake-handler/feeder/src/assembly/init_db/db_scripts/init_db_data.sql
index bc377348..498230a9 100644
--- a/components/datalake-handler/feeder/src/assembly/init_db/db_scripts/init_db_data.sql
+++ b/components/datalake-handler/feeder/src/assembly/init_db/db_scripts/init_db_data.sql
@@ -3,6 +3,7 @@
* ONAP : DATALAKE
* ================================================================================
* Copyright 2019 China Mobile
+* Copyright (C) 2021 Wipro Limited
*=================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,94 +19,90 @@
* ============LICENSE_END=========================================================
*/
-use datalake;
-
INSERT INTO kafka(
- `id`
- ,`name`
- ,`consumer_count`
- ,`enabled`
- ,`group`
- ,`broker_list`
- ,`included_topic`
- ,`login`
- ,`pass`
- ,`secure`
- ,`security_protocol`
- ,`timeout_sec`
- ,`zk`
+ id
+ ,name
+ ,consumer_count
+ ,enabled
+ ,"group"
+ ,broker_list
+ ,included_topic
+ ,login
+ ,pass
+ ,secure
+ ,security_protocol
+ ,timeout_sec
+ ,zk
) VALUES (
1
- ,'main Kafka cluster' -- name - IN varchar(255)
+ ,'main Kafka cluster' -- name - IN varchar (255)
,3 -- consumer_count - IN int(11)
- ,1 -- enabled - IN bit(1)
+ ,true -- enabled - IN bit(1)
,'dlgroup' -- group - IN varchar(255)
,'message-router-kafka:9092' -- host_port - IN varchar(255)
,'' -- included_topic - IN varchar(255)
,'admin' -- login - IN varchar(255)
,'admin_secret' -- pass - IN varchar(255)
- ,0 -- secure - IN bit(1)
+ ,false -- secure - IN bit(1)
,'SASL_PLAINTEXT' -- security_protocol - IN varchar(255)
,10 -- timeout_sec - IN int(11)
,'message-router-zookeeper:2181' -- zk - IN varchar(255)
);
-use datalake;
-
-insert into db_type (`id`, `name`, tool) values ('CB', 'Couchbase', false);
-insert into db_type (`id`, `name`, tool) values ('ES', 'Elasticsearch', false);
-insert into db_type (`id`, `name`, tool,`default_port`) values ('MONGO', 'MongoDB', false, 27017);
-insert into db_type (`id`, `name`, tool) values ('DRUID', 'Druid', false);
-insert into db_type (`id`, `name`, tool) values ('HDFS', 'HDFS', false);
-insert into db_type (`id`, `name`, tool) values ('KIBANA', 'Kibana', true);
-insert into db_type (`id`, `name`, tool) values ('SUPERSET', 'Apache Superset', true);
-
-insert into db (`id`, `db_type_id`, `enabled`, `encrypt`, `name`,`host`,`login`,`pass`,`database_name`) values (1, 'CB', true, true, 'Couchbase 1','dl-couchbase','dl','dl1234','datalake');
-insert into db (`id`, `db_type_id`, `enabled`, `encrypt`, `name`,`host`) values (2, 'ES', true, true, 'Elasticsearch','dl-es');
-insert into db (`id`, `db_type_id`, `enabled`, `encrypt`, `name`,`host`,`port`,`database_name`) values (3, 'MONGO', true, true, 'MongoDB 1','dl-mongodb',27017,'datalake');
-insert into db (`id`, `db_type_id`, `enabled`, `encrypt`, `name`,`host`) values (4, 'DRUID', true, true, 'Druid','dl-druid');
-insert into db (`id`, `db_type_id`, `enabled`, `encrypt`, `name`,`host`,`login`) values (5, 'HDFS', true, true, 'Hadoop Cluster','dl-hdfs','dl');
-insert into db (`id`, `db_type_id`, `enabled`, `encrypt`, `name`,`host`) values (6, 'KIBANA', true, false, 'Kibana demo','dl-es');
-insert into db (`id`, `db_type_id`, `enabled`, `encrypt`, `name`,`host`) values (7, 'SUPERSET', true, false, 'Superset demo','dl-druid');
-
-insert into topic_name (`id`) values ('_DL_DEFAULT_');
-insert into topic_name (`id`) values ('unauthenticated.SEC_FAULT_OUTPUT');
-insert into topic_name (`id`) values ('unauthenticated.VES_MEASUREMENT_OUTPUT');
-insert into topic_name (`id`) values ('EPC');
-insert into topic_name (`id`) values ('HW');
+insert into db_type (id, name, tool) values ('CB', 'Couchbase', false);
+insert into db_type (id, name, tool) values ('ES', 'Elasticsearch', false);
+insert into db_type (id, name, tool,default_port) values ('MONGO', 'MongoDB', false, 27017);
+insert into db_type (id, name, tool) values ('DRUID', 'Druid', false);
+insert into db_type (id, name, tool) values ('HDFS', 'HDFS', false);
+insert into db_type (id, name, tool) values ('KIBANA', 'Kibana', true);
+insert into db_type (id, name, tool) values ('SUPERSET', 'Apache Superset', true);
+
+insert into db (id, db_type_id, enabled, encrypt, name,host,login,pass,database_name) values (1, 'CB', true, true, 'Couchbase 1','dl-couchbase','dl','dl1234','datalake');
+insert into db (id, db_type_id, enabled, encrypt, name,host) values (2, 'ES', true, true, 'Elasticsearch','dl-es');
+insert into db (id, db_type_id, enabled, encrypt, name,host,port,database_name) values (3, 'MONGO', true, true, 'MongoDB 1','dl-mongodb',27017,'datalake');
+insert into db (id, db_type_id, enabled, encrypt, name,host) values (4, 'DRUID', true, true, 'Druid','dl-druid');
+insert into db (id, db_type_id, enabled, encrypt, name,host,login) values (5, 'HDFS', true, true, 'Hadoop Cluster','dl-hdfs','dl');
+insert into db (id, db_type_id, enabled, encrypt, name,host) values (6, 'KIBANA', true, false, 'Kibana demo','dl-es');
+insert into db (id, db_type_id, enabled, encrypt, name,host) values (7, 'SUPERSET', true, false, 'Superset demo','dl-druid');
+
+insert into topic_name (id) values ('_DL_DEFAULT_');
+insert into topic_name (id) values ('unauthenticated.SEC_FAULT_OUTPUT');
+insert into topic_name (id) values ('unauthenticated.VES_MEASUREMENT_OUTPUT');
+insert into topic_name (id) values ('EPC');
+insert into topic_name (id) values ('HW');
-- in production, default enabled should be off
-insert into `topic`(`id`, `topic_name_id`,`enabled`,`save_raw`,`ttl_day`,`data_format`) values (1, '_DL_DEFAULT_',1,0,3650,'JSON');
+insert into topic(id, topic_name_id,enabled,save_raw,ttl_day,data_format) values (1, '_DL_DEFAULT_',true,false,3650,'JSON');
-insert into `topic`(`id`, `topic_name_id`,`correlate_cleared_message`,`enabled`, `message_id_path`,`data_format`)
-values (2, 'unauthenticated.SEC_FAULT_OUTPUT',1,1,'/event/commonEventHeader/eventName,/event/commonEventHeader/reportingEntityName,/event/faultFields/specificProblem,/event/commonEventHeader/eventId','JSON');
+insert into topic(id, topic_name_id,correlate_cleared_message,enabled, message_id_path,data_format)
+values (2, 'unauthenticated.SEC_FAULT_OUTPUT',true,true,'/event/commonEventHeader/eventName,/event/commonEventHeader/reportingEntityName,/event/faultFields/specificProblem,/event/commonEventHeader/eventId','JSON');
-insert into `topic`(id, `topic_name_id`,`enabled`, `aggregate_array_path`,`flatten_array_path`,`data_format`)
-values (3, 'unauthenticated.VES_MEASUREMENT_OUTPUT',1,
+insert into topic(id, topic_name_id,enabled, aggregate_array_path,flatten_array_path,data_format)
+values (3, 'unauthenticated.VES_MEASUREMENT_OUTPUT',true,
'/event/measurementsForVfScalingFields/memoryUsageArray,/event/measurementsForVfScalingFields/diskUsageArray,/event/measurementsForVfScalingFields/cpuUsageArray,/event/measurementsForVfScalingFields/vNicPerformanceArray',
'/event/measurementsForVfScalingFields/astriMeasurement/astriDPMeasurementArray/astriInterface',
'JSON');
-insert into `topic`(`id`, `topic_name_id`,`enabled`, `flatten_array_path`,`data_format`)
-values (4, 'EPC',1, '/event/measurementsForVfScalingFields/astriMeasurement/astriDPMeasurementArray/astriInterface', 'JSON');
+insert into topic(id, topic_name_id,enabled, flatten_array_path,data_format)
+values (4, 'EPC',true, '/event/measurementsForVfScalingFields/astriMeasurement/astriDPMeasurementArray/astriInterface', 'JSON');
-insert into `topic`(`id`, `topic_name_id`,`enabled`, `aggregate_array_path`,`data_format`)
-values (5, 'HW',1,
+insert into topic(id, topic_name_id,enabled, aggregate_array_path,data_format)
+values (5, 'HW',true,
'/event/measurementsForVfScalingFields/memoryUsageArray,/event/measurementsForVfScalingFields/diskUsageArray,/event/measurementsForVfScalingFields/cpuUsageArray,/event/measurementsForVfScalingFields/vNicPerformanceArray',
-'JSON');
+'JSON');
-insert into `map_db_topic`(`db_id`,`topic_id`) select db.id, topic.id from db_type, db, topic where db.db_type_id=db_type.id and db_type.tool=0;
-insert into `map_kafka_topic`(`kafka_id`,`topic_id`) select kafka.id, topic.id from kafka, topic;
+insert into map_db_topic(db_id,topic_id) select db.id, topic.id from db_type, db, topic where db.db_type_id=db_type.id and db_type.tool=false;
+insert into map_kafka_topic(kafka_id,topic_id) select kafka.id, topic.id from kafka, topic;
-insert into design_type (`id`, `name`, `db_type_id`) values ('KIBANA_DB', 'Kibana Dashboard', 'KIBANA');
-insert into design_type (`id`, `name`, `db_type_id`) values ('KIBANA_SEARCH', 'Kibana Search', 'KIBANA');
-insert into design_type (`id`, `name`, `db_type_id`) values ('KIBANA_VISUAL', 'Kibana Visualization', 'KIBANA');
-insert into design_type (`id`, `name`, `db_type_id`) values ('ES_MAPPING', 'Elasticsearch Field Mapping Template', 'ES');
-insert into design_type (`id`, `name`, `db_type_id`) values ('DRUID_KAFKA_SPEC', 'Druid Kafka Indexing Service Supervisor Spec', 'DRUID');
+insert into design_type (id, name, db_type_id) values ('KIBANA_DB', 'Kibana Dashboard', 'KIBANA');
+insert into design_type (id, name, db_type_id) values ('KIBANA_SEARCH', 'Kibana Search', 'KIBANA');
+insert into design_type (id, name, db_type_id) values ('KIBANA_VISUAL', 'Kibana Visualization', 'KIBANA');
+insert into design_type (id, name, db_type_id) values ('ES_MAPPING', 'Elasticsearch Field Mapping Template', 'ES');
+insert into design_type (id, name, db_type_id) values ('DRUID_KAFKA_SPEC', 'Druid Kafka Indexing Service Supervisor Spec', 'DRUID');
-insert into design (`id`, `name`,`topic_name_id`, `submitted`,`body`, `design_type_id`) values (1, 'Kibana Dashboard on EPC test1', 'EPC', 0, 'body here', 'KIBANA_DB');
+insert into design (id, name,topic_name_id, submitted,body, design_type_id) values (1, 'Kibana Dashboard on EPC test1', 'EPC', false, 'body here', 'KIBANA_DB');
-insert into map_db_design (`design_id`,`db_id` ) values (1, 6);
+insert into map_db_design (design_id,db_id ) values (1, 6);
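
One PostgreSQL detail worth noting in the rewrite above: group is a reserved word, so the kafka column keeps double quotes where MariaDB used backticks. Sketch:

    SELECT "group", broker_list FROM kafka;   -- the double quotes are mandatory
    -- SELECT group FROM kafka;               -- syntax error: GROUP is reserved
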
diff --git a/components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/10_check-db-exist b/components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/10_check-db-exist
deleted file mode 100644
index 4b2130a3..00000000
--- a/components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/10_check-db-exist
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/bash
-#
-# ============LICENSE_START==========================================
-# ===================================================================
-# Copyright © 2020 QCT Property. All rights reserved.
-# ===================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END============================================
-#
-
-result=`mysql -uroot -p$MYSQL_ROOT_PASSWORD -h $MYSQL_HOST -P $MYSQL_PORT -e "USE 'datalake'"`
-
-if [ $? == 0 ] ;
-then
- echo "DATABASE ALREADY EXISTS"
- touch /tmp/db_exist
- exit 1
-else
- echo "DATABASE DOES NOT EXIST"
- exit 0
-fi
diff --git a/components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/11_create-database b/components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/11_create-database
index a43b0cf2..f69f1ab1 100644
--- a/components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/11_create-database
+++ b/components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/11_create-database
@@ -2,7 +2,8 @@
#
# ============LICENSE_START==========================================
# ===================================================================
-# Copyright © 2020 QCT Property. All rights reserved.
+# Copyright © 2020 China Mobile. All rights reserved.
+# Copyright (C) 2021 Wipro Limited.
# ===================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -18,8 +19,4 @@
# ============LICENSE_END============================================
#
-if [ -f "/tmp/db_exist" ]; then
- exit 0
-fi
-
-cat /home/datalake/init_db.sql | mysql -uroot -p$MYSQL_ROOT_PASSWORD -h $MYSQL_HOST -P $MYSQL_PORT || exit 1
+psql -h $PG_HOST -U $PG_USER -d $PG_DB -f /home/datalake/init_db.sql
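
psql takes no password option; these init scripts rely on the PGPASSWORD environment variable, which the feeder's run.sh (later in this change) exports before calling them. Invocation sketch with illustrative values:

    export PGPASSWORD=dl1234
    psql -h dl-postgres -U dl -d datalake -f /home/datalake/init_db.sql
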
diff --git a/components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/15_db-adduser b/components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/15_db-adduser
deleted file mode 100644
index c691751c..00000000
--- a/components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/15_db-adduser
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash
-#
-# ============LICENSE_START==========================================
-# ===================================================================
-# Copyright © 2020 QCT Property. All rights reserved.
-# ===================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END============================================
-#
-
-if [ -f "/tmp/db_exist" ]; then
- exit 0
-fi
-
-echo "Creating datalake user" 1>/tmp/mariadb-datalake-user.log 2>&1
-
-mysql -uroot -p$MYSQL_ROOT_PASSWORD -p$MYSQL_ROOT_PASSWORD -h $MYSQL_HOST -P $MYSQL_PORT << 'EOF' || exit 1
-CREATE USER IF NOT EXISTS 'dl';
-GRANT USAGE ON *.* TO 'dl'@'%' IDENTIFIED BY 'dl1234';
-GRANT SELECT, INSERT, UPDATE, DELETE, EXECUTE, SHOW VIEW ON `datalake`.* TO 'dl'@'%';
-FLUSH PRIVILEGES;
-EOF
-
-echo "Created so user . . ." 1>>/tmp/mariadb-datalake-user.log 2>&1
diff --git a/components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/20_db-initdb b/components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/20_db-initdb
index ad8f6f89..32986cda 100644
--- a/components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/20_db-initdb
+++ b/components/datalake-handler/feeder/src/assembly/init_db/scripts/db_init/20_db-initdb
@@ -2,7 +2,8 @@
#
# ============LICENSE_START==========================================
# ===================================================================
-# Copyright © 2020 QCT Property. All rights reserved.
+# Copyright © 2020 China Mobile. All rights reserved.
+# Copyright (C) 2021 Wipro Limited.
# ===================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -18,8 +19,4 @@
# ============LICENSE_END============================================
#
-if [ -f "/tmp/db_exist" ]; then
- exit 0
-fi
-
-cat /home/datalake/init_db_data.sql | mysql -uroot -p$MYSQL_ROOT_PASSWORD -h $MYSQL_HOST -P $MYSQL_PORT || exit 1
+psql -h $PG_HOST -U $PG_USER -d $PG_DB -f /home/datalake/init_db_data.sql
diff --git a/components/datalake-handler/feeder/src/assembly/run.sh b/components/datalake-handler/feeder/src/assembly/run.sh
index 68e876e4..cb89a1de 100644
--- a/components/datalake-handler/feeder/src/assembly/run.sh
+++ b/components/datalake-handler/feeder/src/assembly/run.sh
@@ -2,14 +2,21 @@
echo "start init db ..."
-/bin/run-parts /home/datalake/db_init
+export PGPASSWORD=$PG_PASSWORD
+
+sh db_init/11_create-database
+sh db_init/20_db-initdb
echo "finish init db"
-cmd=`find . -regex '\./feeder-[0-9]+\.[0-9]+\.[0-9]+[-SNAPSHOT]+\-exec.jar'`
-cmd1=`find . -regex '\./feeder-[0-9]+\.[0-9]+\.[0-9]+\-exec.jar'`
+cmd=`find . -name "*.jar" | grep -E '.*(feeder)-([0-9]+\.[0-9]+\.[0-9]+)(-SNAPSHOT)(-exec\.jar)$'`
+cmd1=`find . -name "*.jar" | grep -E '.*(feeder)-([0-9]+\.[0-9]+\.[0-9]+)(-exec.jar)$'`
+
if [ -n "$cmd" ]; then
java -jar $cmd
elif [ -n "$cmd1" ]; then
java -jar $cmd1
+else
+ echo "STRING is empty"
+ sleep 10000
fi
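
The old find -regex pattern was subtly broken: [-SNAPSHOT]+ is a character class matching any run of the characters -, S, N, A, P, H, O, T, not the literal suffix, so a name like feeder-1.1.0-SNAPSHOTSNAP-exec.jar would also have matched. The grep -E replacement spells the suffix out as a group; an equivalent single-pattern sketch:

    find . -name "*.jar" | grep -E 'feeder-[0-9]+\.[0-9]+\.[0-9]+(-SNAPSHOT)?-exec\.jar$'
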
diff --git a/components/datalake-handler/feeder/src/main/java/org/onap/datalake/feeder/Application.java b/components/datalake-handler/feeder/src/main/java/org/onap/datalake/feeder/Application.java
index 83f56b1d..22d19192 100644
--- a/components/datalake-handler/feeder/src/main/java/org/onap/datalake/feeder/Application.java
+++ b/components/datalake-handler/feeder/src/main/java/org/onap/datalake/feeder/Application.java
@@ -3,6 +3,7 @@
* ONAP : DataLake
* ================================================================================
* Copyright 2019 China Mobile
+* Copyright (C) 2021 Wipro Limited.
*=================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,10 +21,13 @@
package org.onap.datalake.feeder;
+import javax.sql.DataSource;
+
import org.onap.datalake.feeder.service.PullService;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
@@ -41,9 +45,19 @@ public class Application {
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
-
+
@Bean
public CommandLineRunner commandLineRunner(PullService pullService) {
return args -> pullService.start();
}
+
+ @Bean
+ public DataSource dataSource() {
+
+ String url = "jdbc:postgresql://" + System.getenv("PG_HOST").trim() + ":" + System.getenv("PG_PORT").trim()
+ + "/datalake";
+ return DataSourceBuilder.create().url(url).username(System.getenv("PG_USER").trim())
+ .password(System.getenv("PG_PASSWORD").trim()).build();
+ }
}
+
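
The new dataSource() bean reads PG_HOST, PG_PORT, PG_USER and PG_PASSWORD straight from the environment; since getenv(...).trim() is called unconditionally, the feeder fails at startup with a NullPointerException if any of them is unset. Local run sketch with illustrative values:

    export PG_HOST=dl-postgres PG_PORT=5432 PG_USER=dl PG_PASSWORD=dl1234
    java -jar feeder-1.1.0-exec.jar
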
diff --git a/components/datalake-handler/feeder/src/main/resources/application.properties b/components/datalake-handler/feeder/src/main/resources/application.properties
index bc24cdb2..0e5a81fd 100644
--- a/components/datalake-handler/feeder/src/main/resources/application.properties
+++ b/components/datalake-handler/feeder/src/main/resources/application.properties
@@ -19,11 +19,7 @@ defaultTopicName=_DL_DEFAULT_
spring.jpa.hibernate.ddl-auto=none
spring.jpa.show-sql=false
-#spring.datasource.driver-class-name=com.mysql.jdbc.Driver
-spring.datasource.url=jdbc:mariadb://mariadb-galera:3306/datalake?autoReconnect=true&amp;useUnicode=true&amp;characterEncoding=UTF-8
-spring.datasource.username=dl
-spring.datasource.password=dl1234
-
+spring.datasource.driver-class-name=org.hibernate.dialect.PostgreSQL9Dialect
#####################DMaaP
dmaapZookeeperHostPort=message-router-zookeeper:2181
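
Note that the added line assigns a Hibernate dialect class to spring.datasource.driver-class-name. The PostgreSQL JDBC driver class is org.postgresql.Driver; a conventional Spring Boot pairing would look like the following sketch (not part of this change):

    spring.datasource.driver-class-name=org.postgresql.Driver
    spring.jpa.database-platform=org.hibernate.dialect.PostgreSQL9Dialect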