build.sbt
val sparkVersion = "2.4.5"
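// Spark is "provided": these jars are kept out of the assembled fat JAR
// because the Spark runtime on the cluster already ships them.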
val sparkLibs = Seq(
  "org.apache.spark" %% "spark-core" % sparkVersion % "provided",
  "org.apache.spark" %% "spark-sql" % sparkVersion % "provided"
)
lazy val commonSettings = Seq(
  organization := "xyz.graphiq",
  scalaVersion := "2.12.10",
  version := "0.1",
  libraryDependencies ++= sparkLibs
)
// Naming prefix shared by the fat JAR, the Docker image and the Helm chart
val domain = "graphiq"
// For building the fat JAR; the Scala library is excluded because the Spark runtime already ships it
lazy val assemblySettings = Seq(
  assembly / assemblyOption := (assembly / assemblyOption).value.copy(includeScala = false),
  assembly / assemblyOutputPath := baseDirectory.value / "output" / s"$domain-${name.value}.jar"
)
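// /opt/spark/jars is assumed to be where the spark-runner base image keeps Spark's own
// jars, so copying the artifact there puts it on the driver/executor classpath.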
val targetDockerJarPath = "/opt/spark/jars"
val baseRegistry = sys.props.getOrElse("baseRegistry", default = "localhost:5000")
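// The registry can be overridden per invocation via a system property, e.g.:
//   sbt -DbaseRegistry=registry.example.com:5000 docker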
// For building the Docker image
lazy val dockerSettings = Seq(
  docker / imageNames := Seq(
    ImageName(s"$domain/${name.value}:latest"),
    ImageName(s"$domain/${name.value}:${version.value}")
  ),
  docker / buildOptions := BuildOptions(
    cache = false,
    removeIntermediateContainers = BuildOptions.Remove.Always,
    pullBaseImage = BuildOptions.Pull.Always
  ),
  docker / dockerfile := {
    // The assembly task generates the fat JAR file
    val artifact: File = assembly.value
    val artifactTargetPath = s"$targetDockerJarPath/$domain-${name.value}.jar"
    new Dockerfile {
      from(s"$baseRegistry/spark-runner:0.1")
      add(artifact, artifactTargetPath)
    }
  }
)
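// Build and tag the image with `sbt docker`; sbt-docker also provides
// `dockerPush` / `dockerBuildAndPush` for publishing to the registry.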
// Include "provided" dependencies back to default run task
lazy val runLocalSettings = Seq(
  // https://stackoverflow.com/questions/18838944/how-to-add-provided-dependencies-back-to-run-test-tasks-classpath/21803413#21803413
  Compile / run := Defaults
    .runTask(
      Compile / fullClasspath,
      Compile / run / mainClass,
      Compile / run / runner
    )
    .evaluated
)
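// With this in place, `sbt run` launches the job locally even though the
// Spark dependencies are marked "provided".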
lazy val root = (project in file("."))
  .enablePlugins(sbtdocker.DockerPlugin)
  .enablePlugins(AshScriptPlugin)
  .settings(
    commonSettings,
    assemblySettings,
    dockerSettings,
    runLocalSettings,
    name := "transform-movie-ratings",
    Compile / mainClass := Some("xyz.graphiq.BasicSparkJob"),
    Compile / resourceGenerators += createImporterHelmChart.taskValue
  )
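// AshScriptPlugin (from sbt-native-packager) swaps the default bash launch
// script for an ash-compatible one, which matters on Alpine-based images.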
// Task that generates the Helm chart (Chart.yaml and values.yaml) from build metadata
lazy val createImporterHelmChart: Def.Initialize[Task[Seq[File]]] = Def.task {
  val chartFile = baseDirectory.value / "helm" / "Chart.yaml"
  val valuesFile = baseDirectory.value / "helm" / "values.yaml"
  val chartContents =
    s"""# Generated by build.sbt. Please don't manually update
       |apiVersion: v1
       |name: $domain-${name.value}
       |version: ${version.value}
       |appVersion: ${version.value}
       |description: Sample ETL Job for Medium Post
       |home: http://bit.ly/spark-k8s
       |sources:
       |  - https://github.com/TomLous/medium-spark-k8s
       |maintainers:
       |  - name: Tom Lous
       |    email: tomlous@gmail.com
       |    url: https://lous.info
       |""".stripMargin
  val valuesContents =
    s"""# Generated by build.sbt. Please don't manually update
       |version: ${version.value}
       |sparkVersion: $sparkVersion
       |image: $domain/${name.value}:${version.value}
       |jar: local://$targetDockerJarPath/$domain-${name.value}.jar
       |mainClass: ${(Compile / run / mainClass).value.getOrElse("__MAIN_CLASS__")}
       |fileDependencies: []
       |""".stripMargin
  IO.write(chartFile, chartContents)
  IO.write(valuesFile, valuesContents)
  Seq(chartFile, valuesFile)
}
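// Because the task is registered under Compile / resourceGenerators, the chart files are
// rewritten on every compile/package. Note they are written to baseDirectory/helm rather
// than resourceManaged, so they land in the source tree. For this build, the generated
// helm/values.yaml should look roughly like:
//   version: 0.1
//   sparkVersion: 2.4.5
//   image: graphiq/transform-movie-ratings:0.1
//   jar: local:///opt/spark/jars/graphiq-transform-movie-ratings.jar
//   mainClass: xyz.graphiq.BasicSparkJob
//   fileDependencies: []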
lazy val showVersion = taskKey[Unit]("Show version")
showVersion := {
  println(version.value)
}
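// Print the build version from the command line (handy in CI scripts), e.g.:
//   sbt showVersion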