# Default values for sample-spark-app.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.


#===========================KUBERNETES POD RELATED CONFIGs========================
image: spark-tf-keras-horo:latest
imagePullPolicy: Never
restartPolicy: Never
volumesName: test-volume
hostpath: /tmp
hostpathType: Directory
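# How these pod-level values are consumed: the chart's SparkApplication
# template is expected to map them roughly as sketched below (field names
# follow the spark-on-k8s-operator CRD; this is an assumption, not the
# chart's verbatim template):
#
#   spec:
#     image: "{{ .Values.image }}"
#     imagePullPolicy: {{ .Values.imagePullPolicy }}
#     restartPolicy: {{ .Values.restartPolicy }}
#     volumes:
#       - name: {{ .Values.volumesName }}
#         hostPath:
#           path: {{ .Values.hostpath }}
#           type: {{ .Values.hostpathType }}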



#============================SPARK APP RELATED CONFIGs=============================

nameOfTheSparkApp: spark-apache-logs2
# Python or Scala supported.
programmingLanguageType: Scala
modeOfSparkApp: cluster
mainClassOfTheSparkApp: ApacheLogAnalysis
# Can be an HTTP(S) URL, an S3 path, or a MinIO path.
mainApplicationFileOfTheSparkApp: https://github.com/mohanraj1311/ApacheLogAnalysisJar/raw/master/analysisofapachelogs_2.11-0.1.jar
argumentsOfTheSparkProgram:
    - hdfs://hdfs-1-namenode-1.hdfs-1-namenode.hdfs1.svc.cluster.local:8020/data/apache-logs
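# Sketch of how the application-level values above would surface in the
# rendered SparkApplication resource (assumed spark-operator field names;
# the chart's actual template may differ):
#
#   metadata:
#     name: {{ .Values.nameOfTheSparkApp }}
#   spec:
#     type: {{ .Values.programmingLanguageType }}
#     mode: {{ .Values.modeOfSparkApp }}
#     mainClass: {{ .Values.mainClassOfTheSparkApp }}
#     mainApplicationFile: "{{ .Values.mainApplicationFileOfTheSparkApp }}"
#     arguments:
#     {{- range .Values.argumentsOfTheSparkProgram }}
#       - {{ . }}
#     {{- end }}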



#============================SPARK DRIVER RELATED CONFIGs=========================
driverCores: 0.1
driverCoreLimit: 200m
driverMemory: 1024m
driverVolumeMountsName: test-volume
driverVolumeMountPath: /tmp
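# Driver sketch (assumed field names). Note the unit conventions:
# driverCoreLimit is a Kubernetes CPU quantity ("200m" = 0.2 CPU), while
# driverMemory uses Spark's memory notation ("1024m" = 1024 MiB).
#
#   spec:
#     driver:
#       cores: {{ .Values.driverCores }}
#       coreLimit: "{{ .Values.driverCoreLimit }}"
#       memory: "{{ .Values.driverMemory }}"
#       volumeMounts:
#         - name: {{ .Values.driverVolumeMountsName }}
#           mountPath: {{ .Values.driverVolumeMountPath }}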



#============================SPARK EXECUTOR RELATED CONFIGs=======================
executorCores: 1
executorInstances: 1
executorMemory: 512m
executorVolumeMountsName: test-volume
executorVolumeMountPath: /tmp
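# Executor sketch, mirroring the driver section above (assumed field names;
# executorInstances controls the number of executor pods):
#
#   spec:
#     executor:
#       cores: {{ .Values.executorCores }}
#       instances: {{ .Values.executorInstances }}
#       memory: "{{ .Values.executorMemory }}"
#       volumeMounts:
#         - name: {{ .Values.executorVolumeMountsName }}
#           mountPath: {{ .Values.executorVolumeMountPath }}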



#===========================HADOOP RELATED CONFIGs===============================
# Name of the ConfigMap that provides the HDFS (Hadoop) client configuration.
hadoopConfigMap: hdfs-1-config
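# The spark-on-k8s-operator mounts the named ConfigMap (typically holding
# core-site.xml and hdfs-site.xml) into the driver and executor pods and
# points HADOOP_CONF_DIR at it. Assumed mapping in the template:
#
#   spec:
#     hadoopConfigMap: {{ .Values.hadoopConfigMap }}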


###################################################################################