summaryrefslogtreecommitdiffstats
path: root/components/datalake-handler/feeder/src/main/java/org
diff options
context:
space:
mode:
authorRama-Huawei <rama.subba.reddy.s@huawei.com>2019-04-16 09:59:26 +0530
committerRama-Huawei <rama.subba.reddy.s@huawei.com>2019-04-16 10:02:23 +0530
commitbca6895e81ceb77a467d3ceeec45b5536c1712df (patch)
treebe411ff8a7a36b35519f8986c7e26dfd63835c24 /components/datalake-handler/feeder/src/main/java/org
parenta36b7ed7cc57c5dae4c5bd1a21f1143acb0332d6 (diff)
Unit test code for datalake seed code
Fixed the folder-related path for generating and reading files from the correct folder. Issue-ID: DCAEGEN2-1309 Change-Id: I1ec3aae549f484e667eb4048c1ab59b6a60aaf87 Signed-off-by: Rama-Huawei <rama.subba.reddy.s@huawei.com>
Diffstat (limited to 'components/datalake-handler/feeder/src/main/java/org')
-rw-r--r--components/datalake-handler/feeder/src/main/java/org/onap/datalake/feeder/util/DruidSupervisorGenerator.java208
1 file changed, 102 insertions, 106 deletions
diff --git a/components/datalake-handler/feeder/src/main/java/org/onap/datalake/feeder/util/DruidSupervisorGenerator.java b/components/datalake-handler/feeder/src/main/java/org/onap/datalake/feeder/util/DruidSupervisorGenerator.java
index 31f46362..8a177cc7 100644
--- a/components/datalake-handler/feeder/src/main/java/org/onap/datalake/feeder/util/DruidSupervisorGenerator.java
+++ b/components/datalake-handler/feeder/src/main/java/org/onap/datalake/feeder/util/DruidSupervisorGenerator.java
@@ -1,24 +1,34 @@
/*
-* ============LICENSE_START=======================================================
-* ONAP : DataLake
-* ================================================================================
-* Copyright 2019 China Mobile
-*=================================================================================
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-* ============LICENSE_END=========================================================
-*/
+ * ============LICENSE_START=======================================================
+ * ONAP : DataLake
+ * ================================================================================
+ * Copyright 2019 China Mobile
+ *=================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
package org.onap.datalake.feeder.util;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.JsonNodeType;
+import lombok.Getter;
+import org.apache.velocity.Template;
+import org.apache.velocity.VelocityContext;
+import org.apache.velocity.app.Velocity;
+import org.apache.velocity.runtime.RuntimeConstants;
+import org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader;
+
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
@@ -28,29 +38,15 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map.Entry;
-import org.apache.velocity.Template;
-import org.apache.velocity.VelocityContext;
-import org.apache.velocity.app.Velocity;
-import org.apache.velocity.runtime.RuntimeConstants;
-import org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader;
-import org.onap.datalake.feeder.enumeration.DataFormat;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.JsonNodeType;
-
-import lombok.Getter;
-import lombok.Setter;
-
/*
* read sample json and output supervisor to resources\druid\generated
* need manual edit to be production ready, final versions are in resources\druid
- *
+ *
* http://druid.io/docs/latest/tutorials/tutorial-ingestion-spec.html
* http://druid.io/docs/latest/ingestion/flatten-json
- *
- *
+ *
+ *
* todo:
* reduce the manual editing
* path hard coded
@@ -63,67 +59,67 @@ import lombok.Setter;
@Getter
public class DruidSupervisorGenerator {
- Template template = null;
- VelocityContext context;
+ Template template = null;
+ VelocityContext context;
- List<String[]> dimensions;
+ List<String[]> dimensions;
- public DruidSupervisorGenerator() {
- dimensions = new ArrayList<>();
+ public DruidSupervisorGenerator() {
+ dimensions = new ArrayList<>();
- Velocity.setProperty(RuntimeConstants.RESOURCE_LOADER, "classpath");
- Velocity.setProperty("classpath.resource.loader.class", ClasspathResourceLoader.class.getName());
+ Velocity.setProperty(RuntimeConstants.RESOURCE_LOADER, "classpath");
+ Velocity.setProperty("classpath.resource.loader.class", ClasspathResourceLoader.class.getName());
- Velocity.init();
+ Velocity.init();
- context = new VelocityContext();
+ context = new VelocityContext();
- context.put("host", "message-router-kafka:9092");//TODO get from config
+ context.put("host", "message-router-kafka:9092");//TODO get from config
- template = Velocity.getTemplate("druid/kafka-supervisor-template.vm");
- }
+ template = Velocity.getTemplate("druid/kafka-supervisor-template.vm");
+ }
- private void printNode(String prefix, JsonNode node) {
+ private void printNode(String prefix, JsonNode node) {
- // lets see what type the node is
- // System.out.println("NodeType=" + node.getNodeType() + ", isContainerNode=" + node.isContainerNode() + ", " + node); // prints OBJECT
+ // lets see what type the node is
+ // System.out.println("NodeType=" + node.getNodeType() + ", isContainerNode=" + node.isContainerNode() + ", " + node); // prints OBJECT
- if (node.isContainerNode()) {
+ if (node.isContainerNode()) {
- Iterator<Entry<String, JsonNode>> fields = node.fields();
+ Iterator<Entry<String, JsonNode>> fields = node.fields();
- while (fields.hasNext()) {
- Entry<String, JsonNode> field = fields.next();
- // System.out.println("--------"+field.getKey()+"--------");
- printNode(prefix + "." + field.getKey(), field.getValue());
- }
+ while (fields.hasNext()) {
+ Entry<String, JsonNode> field = fields.next();
+ // System.out.println("--------"+field.getKey()+"--------");
+ printNode(prefix + "." + field.getKey(), field.getValue());
+ }
- if (node.isArray()) {
- Iterator<JsonNode> elements = node.elements();
- int i = 0;
- while (elements.hasNext()) {
- JsonNode element = elements.next();
- printNode(prefix + "[" + i + "]", element);
- i++;
- }
- }
+ if (node.isArray()) {
+ Iterator<JsonNode> elements = node.elements();
+ int i = 0;
+ while (elements.hasNext()) {
+ JsonNode element = elements.next();
+ printNode(prefix + "[" + i + "]", element);
+ i++;
+ }
+ }
- } else {
- printFlattenSpec(node.getNodeType(), prefix);
- }
+ } else {
+ printFlattenSpec(node.getNodeType(), prefix);
+ }
- }
+ }
- private void printFlattenSpec(JsonNodeType type, String path) {
- String name = path.substring(2).replace('.', ':');
- // lets see what type the node is
- System.out.println("{");
- System.out.println("\"type\": \"path\",");
- System.out.println("\"name\": \"" + name + "\",");
- System.out.println("\"expr\": \"" + path + "\"");
- System.out.println("},");
+ private void printFlattenSpec(JsonNodeType type, String path) {
+ String name = path.substring(2).replace('.', ':');
+ // lets see what type the node is
+ System.out.println("{");
+ System.out.println("\"type\": \"path\",");
+ System.out.println("\"name\": \"" + name + "\",");
+ System.out.println("\"expr\": \"" + path + "\"");
+ System.out.println("},");
- dimensions.add(new String[] { name, path });
+ dimensions.add(new String[]{name, path});
/*
//for dimensionsSpec
if (JsonNodeType.NUMBER.equals(type)) {
@@ -136,41 +132,41 @@ public class DruidSupervisorGenerator {
}
*/
- }
+ }
- public void doTopic(String topic) throws IOException {
- dimensions.clear();
+ public void doTopic(String topic) throws IOException {
+ dimensions.clear();
- String sampleFileName = "C:\\git\\onap\\datalake\\olap\\src\\main\\resources\\druid\\" + topic + "-sample-format.json";//FIXME hard coded path
- String outputFileName = "C:\\git\\onap\\datalake\\olap\\src\\main\\resources\\druid\\generated\\" + topic + "-kafka-supervisor.json";
+ String sampleFileName = "src/main/resources/druid/" + topic + "-sample-format.json";//FIXME hard coded path
+ String outputFileName = "src/main/resources/druid/generated/" + topic + "-kafka-supervisor.json";
- // Get the contents of json as a string using commons IO IOUTils class.
- String sampleJson = Util.getTextFromFile(sampleFileName);
+ // Get the contents of json as a string using commons IO IOUTils class.
+ String sampleJson = Util.getTextFromFile(sampleFileName);
- // create an ObjectMapper instance.
- ObjectMapper mapper = new ObjectMapper();
- // use the ObjectMapper to read the json string and create a tree
- JsonNode root = mapper.readTree(sampleJson);
- printNode("$", root);
+ // create an ObjectMapper instance.
+ ObjectMapper mapper = new ObjectMapper();
+ // use the ObjectMapper to read the json string and create a tree
+ JsonNode root = mapper.readTree(sampleJson);
+ printNode("$", root);
- context.put("topic", topic);
- context.put("timestamp", "event-header:timestamp");//FIXME hard coded, should be topic based
- context.put("timestampFormat", "yyyyMMdd-HH:mm:ss:SSS");//FIXME hard coded, should be topic based
- context.put("dimensions", dimensions);
+ context.put("topic", topic);
+ context.put("timestamp", "event-header:timestamp");//FIXME hard coded, should be topic based
+ context.put("timestampFormat", "yyyyMMdd-HH:mm:ss:SSS");//FIXME hard coded, should be topic based
+ context.put("dimensions", dimensions);
- BufferedWriter out = new BufferedWriter(new FileWriter(outputFileName));
+ BufferedWriter out = new BufferedWriter(new FileWriter(outputFileName));
- template.merge(context, out);
- out.close();
- }
+ template.merge(context, out);
+ out.close();
+ }
- public static void main(String[] args) throws MalformedURLException, IOException {
- String[] topics = new String[] { "AAI-EVENT", "msgrtr.apinode.metrics.dmaap", "unauthenticated.DCAE_CL_OUTPUT", "unauthenticated.SEC_FAULT_OUTPUT" };//FIXME hard coded
+ public static void main(String[] args) throws MalformedURLException, IOException {
+ String[] topics = new String[]{"AAI-EVENT", "msgrtr.apinode.metrics.dmaap", "unauthenticated.DCAE_CL_OUTPUT", "unauthenticated.SEC_FAULT_OUTPUT"};//FIXME hard coded
- DruidSupervisorGenerator p = new DruidSupervisorGenerator();
+ DruidSupervisorGenerator p = new DruidSupervisorGenerator();
- for (String topic : topics) {
- p.doTopic(topic);
- }
- }
+ for (String topic : topics) {
+ p.doTopic(topic);
+ }
+ }
}