Diffstat (limited to 'components/datalake-handler/des/src/assembly')
 components/datalake-handler/des/src/assembly/init_db/scripts/init_db.sql      | 143
 components/datalake-handler/des/src/assembly/init_db/scripts/init_db_data.sql |  95
 components/datalake-handler/des/src/assembly/run.sh                           |  35
 3 files changed, 273 insertions, 0 deletions
diff --git a/components/datalake-handler/des/src/assembly/init_db/scripts/init_db.sql b/components/datalake-handler/des/src/assembly/init_db/scripts/init_db.sql
new file mode 100644
index 00000000..e71093aa
--- /dev/null
+++ b/components/datalake-handler/des/src/assembly/init_db/scripts/init_db.sql
@@ -0,0 +1,143 @@
+
+/*
+* ============LICENSE_START=======================================================
+* ONAP : DATALAKE
+* ================================================================================
+* Copyright 2020 China Mobile
+*=================================================================================
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* ============LICENSE_END=========================================================
+*/
+drop DATABASE datalake;
+create database datalake;
+use datalake;
+CREATE TABLE `topic_name` (
+  `id` varchar(255) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `db_type` (
+  `id` varchar(255) NOT NULL,
+  `default_port` int(11) DEFAULT NULL,
+  `name` varchar(255) NOT NULL,
+  `tool` bit(1) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `db` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `database_name` varchar(255) DEFAULT NULL,
+  `enabled` bit(1) NOT NULL,
+  `encrypt` bit(1) DEFAULT NULL,
+  `host` varchar(255) DEFAULT NULL,
+  `login` varchar(255) DEFAULT NULL,
+  `name` varchar(255) DEFAULT NULL,
+  `pass` varchar(255) DEFAULT NULL,
+  `port` int(11) DEFAULT NULL,
+  `property1` varchar(255) DEFAULT NULL,
+  `property2` varchar(255) DEFAULT NULL,
+  `property3` varchar(255) DEFAULT NULL,
+  `db_type_id` varchar(255) NOT NULL,
+  `presto_catalog` varchar(255) DEFAULT NULL,
+  PRIMARY KEY (`id`),
+  KEY `FK3njadtw43ieph7ftt4kxdhcko` (`db_type_id`),
+  CONSTRAINT `FK3njadtw43ieph7ftt4kxdhcko` FOREIGN KEY (`db_type_id`) REFERENCES `db_type` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `design_type` (
+  `id` varchar(255) NOT NULL,
+  `name` varchar(255) DEFAULT NULL,
+  `note` varchar(255) DEFAULT NULL,
+  `db_type_id` varchar(255) NOT NULL,
+  PRIMARY KEY (`id`),
+  KEY `FKm8rkv2qkq01gsmeq1c3y4w02x` (`db_type_id`),
+  CONSTRAINT `FKm8rkv2qkq01gsmeq1c3y4w02x` FOREIGN KEY (`db_type_id`) REFERENCES `db_type` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `design` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `body` text DEFAULT NULL,
+  `name` varchar(255) DEFAULT NULL,
+  `note` varchar(255) DEFAULT NULL,
+  `submitted` bit(1) DEFAULT NULL,
+  `design_type_id` varchar(255) NOT NULL,
+  `topic_name_id` varchar(255) NOT NULL,
+  PRIMARY KEY (`id`),
+  KEY `FKo43yi6aputq6kwqqu8eqbspm5` (`design_type_id`),
+  KEY `FKabb8e74230glxpaiai4aqsr34` (`topic_name_id`),
+  CONSTRAINT `FKabb8e74230glxpaiai4aqsr34` FOREIGN KEY (`topic_name_id`) REFERENCES `topic_name` (`id`),
+  CONSTRAINT `FKo43yi6aputq6kwqqu8eqbspm5` FOREIGN KEY (`design_type_id`) REFERENCES `design_type` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `kafka` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `broker_list` varchar(255) NOT NULL,
+  `consumer_count` int(11) DEFAULT 3,
+  `enabled` bit(1) NOT NULL,
+  `excluded_topic` varchar(1023) DEFAULT '__consumer_offsets,__transaction_state',
+  `group` varchar(255) DEFAULT 'datalake',
+  `included_topic` varchar(255) DEFAULT NULL,
+  `login` varchar(255) DEFAULT NULL,
+  `name` varchar(255) NOT NULL,
+  `pass` varchar(255) DEFAULT NULL,
+  `secure` bit(1) DEFAULT b'0',
+  `security_protocol` varchar(255) DEFAULT NULL,
+  `timeout_sec` int(11) DEFAULT 10,
+  `zk` varchar(255) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `topic` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `aggregate_array_path` varchar(255) DEFAULT NULL,
+  `correlate_cleared_message` bit(1) NOT NULL DEFAULT b'0',
+  `data_format` varchar(255) DEFAULT NULL,
+  `enabled` bit(1) NOT NULL,
+  `flatten_array_path` varchar(255) DEFAULT NULL,
+  `login` varchar(255) DEFAULT NULL,
+  `message_id_path` varchar(255) DEFAULT NULL,
+  `pass` varchar(255) DEFAULT NULL,
+  `save_raw` bit(1) NOT NULL DEFAULT b'0',
+  `ttl_day` int(11) DEFAULT NULL,
+  `topic_name_id` varchar(255) NOT NULL,
+  PRIMARY KEY (`id`),
+  KEY `FKj3pldlfaokdhqjfva8n3pkjca` (`topic_name_id`),
+  CONSTRAINT `FKj3pldlfaokdhqjfva8n3pkjca` FOREIGN KEY (`topic_name_id`) REFERENCES `topic_name` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `map_db_design` (
+  `design_id` int(11) NOT NULL,
+  `db_id` int(11) NOT NULL,
+  PRIMARY KEY (`design_id`,`db_id`),
+  KEY `FKhpn49r94k05mancjtn301m2p0` (`db_id`),
+  CONSTRAINT `FKfli240v96cfjbnmjqc0fvvd57` FOREIGN KEY (`design_id`) REFERENCES `design` (`id`),
+  CONSTRAINT `FKhpn49r94k05mancjtn301m2p0` FOREIGN KEY (`db_id`) REFERENCES `db` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `map_db_topic` (
+  `topic_id` int(11) NOT NULL,
+  `db_id` int(11) NOT NULL,
+  PRIMARY KEY (`db_id`,`topic_id`),
+  KEY `FKq1jon185jnrr7dv1dd8214uw0` (`topic_id`),
+  CONSTRAINT `FKirro29ojp7jmtqx9m1qxwixcc` FOREIGN KEY (`db_id`) REFERENCES `db` (`id`),
+  CONSTRAINT `FKq1jon185jnrr7dv1dd8214uw0` FOREIGN KEY (`topic_id`) REFERENCES `topic` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `map_kafka_topic` (
+  `kafka_id` int(11) NOT NULL,
+  `topic_id` int(11) NOT NULL,
+  PRIMARY KEY (`topic_id`,`kafka_id`),
+  KEY `FKtdrme4h7rxfh04u2i2wqu23g5` (`kafka_id`),
+  CONSTRAINT `FK5q7jdxy54au5rcrhwa4a5igqi` FOREIGN KEY (`topic_id`) REFERENCES `topic` (`id`),
+  CONSTRAINT `FKtdrme4h7rxfh04u2i2wqu23g5` FOREIGN KEY (`kafka_id`) REFERENCES `kafka` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+CREATE TABLE `data_exposure` (
+  `id` varchar(255) NOT NULL,
+  `note` varchar(255) DEFAULT NULL,
+  `sql_template` varchar(10000) NOT NULL,
+  `db_id` int(11) NOT NULL,
+  PRIMARY KEY (`id`),
+  KEY `FKf5ps4jxauwawk4ac86t5t6xev` (`db_id`),
+  CONSTRAINT `FKf5ps4jxauwawk4ac86t5t6xev` FOREIGN KEY (`db_id`) REFERENCES `db` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
diff --git a/components/datalake-handler/des/src/assembly/init_db/scripts/init_db_data.sql b/components/datalake-handler/des/src/assembly/init_db/scripts/init_db_data.sql
new file mode 100644
index 00000000..234351fb
--- /dev/null
+++ b/components/datalake-handler/des/src/assembly/init_db/scripts/init_db_data.sql
@@ -0,0 +1,95 @@
+
+/*
+* ============LICENSE_START=======================================================
+* ONAP : DATALAKE
+* ================================================================================
+* Copyright 2020 China Mobile
+*=================================================================================
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* ============LICENSE_END=========================================================
+*/
+INSERT INTO datalake.kafka(
+   id
+  ,name
+  ,consumer_count
+  ,enabled
+  ,`group`
+  ,broker_list
+  ,included_topic
+  ,login
+  ,pass
+  ,secure
+  ,security_protocol
+  ,timeout_sec
+  ,zk
+) VALUES (
+  1
+  ,'main Kafka cluster'            -- name - IN varchar(255)
+  ,3                               -- consumer_count - IN int(11)
+  ,1                               -- enabled - IN bit(1)
+  ,'dlgroup'                       -- group - IN varchar(255)
+  ,'message-router-kafka:9092'     -- broker_list - IN varchar(255)
+  ,''                              -- included_topic - IN varchar(255)
+  ,'admin'                         -- login - IN varchar(255)
+  ,'admin-secret'                  -- pass - IN varchar(255)
+  ,0                               -- secure - IN bit(1)
+  ,'SASL_PLAINTEXT'                -- security_protocol - IN varchar(255)
+  ,10                              -- timeout_sec - IN int(11)
+  ,'message-router-zookeeper:2181' -- zk - IN varchar(255)
+);
+insert into db_type (`id`, `name`, tool) values ('CB', 'Couchbase', false);
+insert into db_type (`id`, `name`, tool) values ('ES', 'Elasticsearch', false);
+insert into db_type (`id`, `name`, tool,`default_port`) values ('MONGO', 'MongoDB', false, 27017);
+insert into db_type (`id`, `name`, tool) values ('DRUID', 'Druid', false);
+insert into db_type (`id`, `name`, tool) values ('HDFS', 'HDFS', false);
+insert into db_type (`id`, `name`, tool) values ('KIBANA', 'Kibana', true);
+insert into db_type (`id`, `name`, tool) values ('SUPERSET', 'Apache Superset', true);
+insert into db (id, db_type_id, enabled, encrypt, `name`,`host`,`login`,`pass`,`database_name`) values (1, 'CB', true, true, 'Couchbase 1','dl-couchbase','dl','dl1234','datalake');
+insert into db (id, db_type_id, enabled, encrypt, `name`,`host`) values (2, 'ES', true, true, 'Elasticsearch','dl-es');
+insert into db (id, db_type_id, enabled, encrypt, `name`,`host`,`port`,`database_name`,`presto_catalog`) values (3, 'MONGO', true, true, 'MongoDB 1','dl-mongodb',27017,'datalake','mongodb');
+insert into db (id, db_type_id, enabled, encrypt, `name`,`host`) values (4, 'DRUID', true, true, 'Druid','dl-druid');
+insert into db (id, db_type_id, enabled, encrypt, `name`,`host`,`login`) values (5, 'HDFS', true, true, 'Hadoop Cluster','dl-hdfs','dl');
+insert into db (id, db_type_id, enabled, encrypt, `name`,`host`) values (6, 'KIBANA', true, false, 'Kibana demo','dl-es');
+insert into db (id, db_type_id, enabled, encrypt, `name`,`host`) values (7, 'SUPERSET', true, false, 'Superset demo','dl-druid');
+insert into topic_name (id) values ('_DL_DEFAULT_');
+insert into topic_name (id) values ('unauthenticated.SEC_FAULT_OUTPUT');
+insert into topic_name (id) values ('unauthenticated.VES_MEASUREMENT_OUTPUT');
+insert into topic_name (id) values ('EPC');
+insert into topic_name (id) values ('HW');
+-- in production, default enabled should be off
+insert into `topic`(id, `topic_name_id`,`enabled`,`save_raw`,`ttl_day`,`data_format`) values (1, '_DL_DEFAULT_',1,0,3650,'JSON');
+insert into `topic`(id, `topic_name_id`,correlate_cleared_message,`enabled`, message_id_path,`data_format`)
+values (2, 'unauthenticated.SEC_FAULT_OUTPUT',1,1,'/event/commonEventHeader/eventName,/event/commonEventHeader/reportingEntityName,/event/faultFields/specificProblem,/event/commonEventHeader/eventId','JSON');
+insert into `topic`(id, `topic_name_id`,`enabled`, aggregate_array_path,flatten_array_path,`data_format`)
+values (3, 'unauthenticated.VES_MEASUREMENT_OUTPUT',1,
+'/event/measurementsForVfScalingFields/memoryUsageArray,/event/measurementsForVfScalingFields/diskUsageArray,/event/measurementsForVfScalingFields/cpuUsageArray,/event/measurementsForVfScalingFields/vNicPerformanceArray',
+'/event/measurementsForVfScalingFields/astriMeasurement/astriDPMeasurementArray/astriInterface',
+'JSON');
+insert into `topic`(id, `topic_name_id`,`enabled`, flatten_array_path,`data_format`)
+values (4, 'EPC',1, '/event/measurementsForVfScalingFields/astriMeasurement/astriDPMeasurementArray/astriInterface', 'JSON');
+insert into `topic`(id, `topic_name_id`,`enabled`, aggregate_array_path,`data_format`)
+values (5, 'HW',1,
+'/event/measurementsForVfScalingFields/memoryUsageArray,/event/measurementsForVfScalingFields/diskUsageArray,/event/measurementsForVfScalingFields/cpuUsageArray,/event/measurementsForVfScalingFields/vNicPerformanceArray',
+'JSON');
+insert into `map_db_topic`(`db_id`,`topic_id`) select db.id, topic.id from db_type, db, topic where db.db_type_id=db_type.id and db_type.tool=0;
+insert into `map_kafka_topic`(`kafka_id`,`topic_id`) select kafka.id, topic.id from kafka, topic;
+insert into design_type (id, `name`, `db_type_id`) values ('KIBANA_DB', 'Kibana Dashboard', 'KIBANA');
+insert into design_type (id, `name`, `db_type_id`) values ('KIBANA_SEARCH', 'Kibana Search', 'KIBANA');
+insert into design_type (id, `name`, `db_type_id`) values ('KIBANA_VISUAL', 'Kibana Visualization', 'KIBANA');
+insert into design_type (id, `name`, `db_type_id`) values ('ES_MAPPING', 'Elasticsearch Field Mapping Template', 'ES');
+insert into design_type (id, `name`, `db_type_id`) values ('DRUID_KAFKA_SPEC', 'Druid Kafka Indexing Service Supervisor Spec', 'DRUID');
+insert into design (id, `name`,topic_name_id, `submitted`,`body`, design_type_id) values (1, 'Kibana Dashboard on EPC test1', 'EPC', 0, 'body here', 'KIBANA_DB');
+insert into map_db_design (`design_id`,`db_id` ) values (1, 6);
+insert into `data_exposure`(`id`,`note`,`sql_template`,`db_id`) values ('totalBandwidth','KPI bandwidth history','select from_unixtime(commonEventHeader.lastEpochMicrosec/1000) as timeStamp, sum(measurementFields.additionalFields."UPF.N3IncPkt._Dnn"+measurementFields.additionalFields."UPF.N3OgPkt._Dnn") as bandwidth from upf where commonEventHeader.sourceId = ''${id}'' and ( from_unixtime(commonEventHeader.lastEpochMicrosec/1000) between from_iso8601_timestamp( ''${timeStamp}'') - interval ''${hour}'' hour and from_iso8601_timestamp( ''${timeStamp}'') ) group by commonEventHeader.lastEpochMicrosec order by commonEventHeader.lastEpochMicrosec desc ',3);
+insert into `data_exposure`(`id`,`note`,`sql_template`,`db_id`) values ('totalTraffic','KPI sum over history','select commonEventHeader.sourceId as id, sum(measurementFields.additionalFields."UPF.N3IncPkt._Dnn"+measurementFields.additionalFields."UPF.N3OgPkt._Dnn") as totalTraffic from upf where commonEventHeader.sourceId = ''${id}'' and from_unixtime(commonEventHeader.lastEpochMicrosec/1000) <= from_iso8601_timestamp( ''${timeStamp}'') ',3);
+insert into `data_exposure`(`id`,`note`,`sql_template`,`db_id`) values ('userNumber','KPI',' select from_unixtime(commonEventHeader.lastEpochMicrosec/1000) as timeStamp, sum(measurementFields.additionalFields."AMF.RegSub._NS") as userNumber from amf where commonEventHeader.sourceId = ''${id}'' and ( from_unixtime(commonEventHeader.lastEpochMicrosec/1000) between from_iso8601_timestamp( ''${timeStamp}'') - interval ''${hour}'' hour and from_iso8601_timestamp( ''${timeStamp}'') ) group by commonEventHeader.lastEpochMicrosec, commonEventHeader.sourceId order by commonEventHeader.lastEpochMicrosec desc ',3);
diff --git a/components/datalake-handler/des/src/assembly/run.sh b/components/datalake-handler/des/src/assembly/run.sh
new file mode 100644
index 00000000..363daf6e
--- /dev/null
+++ b/components/datalake-handler/des/src/assembly/run.sh
@@ -0,0 +1,35 @@
+#!/bin/sh
+# ============LICENSE_START===================================================
+# Copyright (C) 2020 China Mobile.
+# ============================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=====================================================
+
+echo "start init db ..."
+
+/bin/run-parts /home/datalake/db_init
+
+echo "finish init db"
+
+cmd=`find . -regex '\./feeder-[0-9]+\.[0-9]+\.[0-9]+[-SNAPSHOT]+\.jar'`
+cmd1=`find . -regex '\./feeder-[0-9]+\.[0-9]+\.[0-9]+\.jar'`
+if [ -n "$cmd" ]; then
+    java -jar $cmd
+elif [ -n "$cmd1" ]; then
+    java -jar $cmd1
+else
+    echo "STRING is empty"
+    sleep 10000
+fi