path: root/pnda-ztt-app/src/main/scala/com/cisco/ztt/App.scala
/*
 * Copyright (c) 2018 Cisco Systems. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.cisco.ztt

import org.apache.log4j.BasicConfigurator
import org.apache.log4j.Logger
import org.apache.spark.streaming.StreamingContext

import com.cisco.pnda.StatReporter
import com.cisco.ztt.meta.YamlReader

object App {

    private[this] val logger = Logger.getLogger(getClass().getName())

    def main(args: Array[String]): Unit = {

        BasicConfigurator.configure()

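        // Read runtime configuration: metrics endpoint, application name, checkpoint path and batch size.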
        val props = AppConfig.loadProperties()
        val loggerUrl = props.getProperty("environment.metric_logger_url")
        val appName = props.getProperty("component.application")
        val checkpointDirectory = props.getProperty("app.checkpoint_path")
        val batchSizeSeconds = Integer.parseInt(props.getProperty("app.batch_size_seconds"))

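        // Load the processing-unit metadata from the bundled YAML definitions; refuse to start without it.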
        val metadata = YamlReader.load()
        if (metadata.units.isEmpty) {
            logger.error("Cannot run application: no unit metadata found")
            System.exit(1)
        }
        val pipeline = new ZttPipeline(metadata)

        // Create the streaming context, or restore a saved one from the checkpoint directory.
        // The null check guards against a missing app.checkpoint_path property.
        val ssc = if (checkpointDirectory != null && checkpointDirectory.length() > 0) {
            StreamingContext.getOrCreate(checkpointDirectory, pipeline.create)
        } else {
            pipeline.create()
        }

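        // On JVM shutdown, stop the streaming context gracefully so in-flight batches complete.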
        sys.ShutdownHookThread {
            logger.info("Gracefully stopping Spark Streaming Application")
            ssc.stop(stopSparkContext = true, stopGracefully = true)
            logger.info("Application stopped")
        }

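        // If a metric logger URL is configured, attach a listener that reports batch statistics to it.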
        if (loggerUrl != null) {
            logger.info("Reporting stats to url: " + loggerUrl)
            ssc.addStreamingListener(new StatReporter(appName, loggerUrl))
        }
        logger.info("Starting spark streaming execution")
        ssc.start()
        ssc.awaitTermination()
    }
}
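
// Illustrative properties for this application (keys taken from the reads in main;
// the values below are placeholders, not defaults shipped with the app):
//
//   environment.metric_logger_url=http://metrics.example:3001
//   component.application=pnda-ztt-app
//   app.checkpoint_path=hdfs:///user/pnda/checkpoints/ztt
//   app.batch_size_seconds=2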