
Commit fa4bc8e

dhruve authored and Marcelo Vanzin committed
[SPARK-14279][BUILD] Pick the spark version from pom
## What changes were proposed in this pull request?

Change the way Spark picks up version information, and embed the build information to better identify the Spark version that is running. More context can be found here: apache#12152

## How was this patch tested?

Ran the mvn and sbt builds and verified that the version information is displayed correctly when executing `spark-submit --version`.

![image](https://cloud.githubusercontent.com/assets/7732317/15197251/f7c673a2-1795-11e6-8b2f-88f2a70cf1c1.png)

Author: Dhruve Ashar <[email protected]>

Closes apache#13061 from dhruve/impr/SPARK-14279.
1 parent 00ad4f0 commit fa4bc8e


6 files changed: +150 -8 lines changed


build/spark-build-info

+38
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This script generates the build info for spark and places it into the spark-version-info.properties file.
+# Arguments:
+#   build_tgt_directory - The target directory where properties file would be created. [./core/target/extra-resources]
+#   spark_version - The current version of spark
+
+RESOURCE_DIR="$1"
+mkdir -p "$RESOURCE_DIR"
+SPARK_BUILD_INFO="${RESOURCE_DIR}"/spark-version-info.properties
+
+echo_build_properties() {
+  echo version=$1
+  echo user=$USER
+  echo revision=$(git rev-parse HEAD)
+  echo branch=$(git rev-parse --abbrev-ref HEAD)
+  echo date=$(date -u +%Y-%m-%dT%H:%M:%SZ)
+  echo url=$(git config --get remote.origin.url)
+}
+
+echo_build_properties $2 > "$SPARK_BUILD_INFO"
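The script is plain bash, so it can be exercised by hand; a minimal sketch (the scratch directory and the property values shown are illustrative, not taken from this commit):

```bash
# From a Spark git checkout: write the properties into a scratch directory.
./build/spark-build-info /tmp/extra-resources 2.0.0-SNAPSHOT

cat /tmp/extra-resources/spark-version-info.properties
# Expected keys (values depend on the local checkout):
#   version=2.0.0-SNAPSHOT
#   user=<local user>
#   revision=<output of git rev-parse HEAD>
#   branch=<current branch>
#   date=<UTC timestamp>
#   url=<remote.origin.url>
```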

core/pom.xml

+31
@@ -337,7 +337,38 @@
   <build>
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
     <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
+    <resources>
+      <resource>
+        <directory>${project.basedir}/src/main/resources</directory>
+      </resource>
+      <resource>
+        <!-- Include the properties file to provide the build information. -->
+        <directory>${project.build.directory}/extra-resources</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
     <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>generate-resources</phase>
+            <configuration>
+              <!-- Execute the shell script to generate the spark build information. -->
+              <tasks>
+                <exec executable="${project.basedir}/../build/spark-build-info">
+                  <arg value="${project.build.directory}/extra-resources"/>
+                  <arg value="${pom.version}"/>
+                </exec>
+              </tasks>
+            </configuration>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-dependency-plugin</artifactId>
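With this wiring, the properties file is produced under `target/extra-resources` during the normal Maven build and copied (with filtering) into the core module's resources. One way to verify it locally, assuming a checkout root and the bundled Maven wrapper:

```bash
# Build core and the modules it depends on; the antrun execution above runs
# build/spark-build-info during the generate-resources phase.
./build/mvn -DskipTests -pl core -am package
cat core/target/extra-resources/spark-version-info.properties
```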

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

+6 -1

@@ -40,7 +40,8 @@ import org.apache.ivy.plugins.matcher.GlobPatternMatcher
 import org.apache.ivy.plugins.repository.file.FileRepository
 import org.apache.ivy.plugins.resolver.{ChainResolver, FileSystemResolver, IBiblioResolver}
 
-import org.apache.spark.{SPARK_VERSION, SparkException, SparkUserAppException}
+import org.apache.spark.{SPARK_REVISION, SPARK_VERSION, SparkException, SparkUserAppException}
+import org.apache.spark.{SPARK_BRANCH, SPARK_BUILD_DATE, SPARK_BUILD_USER, SPARK_REPO_URL}
 import org.apache.spark.api.r.RUtils
 import org.apache.spark.deploy.rest._
 import org.apache.spark.launcher.SparkLauncher
@@ -103,6 +104,10 @@ object SparkSubmit {
      /___/ .__/\_,_/_/ /_/\_\   version %s
         /_/
                        """.format(SPARK_VERSION))
+    printStream.println("Branch %s".format(SPARK_BRANCH))
+    printStream.println("Compiled by user %s on %s".format(SPARK_BUILD_USER, SPARK_BUILD_DATE))
+    printStream.println("Revision %s".format(SPARK_REVISION))
+    printStream.println("Url %s".format(SPARK_REPO_URL))
     printStream.println("Type --help for more information.")
     exitFn(0)
   }
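With these extra println calls, `spark-submit --version` reports the build metadata after the existing version banner. An illustrative transcript (the banner is the existing ASCII art; all values here are placeholders):

```bash
$ ./bin/spark-submit --version
# ...ASCII-art banner ending in "version 2.0.0-SNAPSHOT"...
# Branch <branch name>
# Compiled by user <build user> on <build date>
# Revision <git revision>
# Url <repository url>
# Type --help for more information.
```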

core/src/main/scala/org/apache/spark/package.scala

+53 -2

@@ -41,7 +41,58 @@ package org.apache
  * level interfaces. These are subject to changes or removal in minor releases.
  */
 
+import java.util.Properties
+
 package object spark {
-  // For package docs only
-  val SPARK_VERSION = "2.0.0-SNAPSHOT"
+
+  private object SparkBuildInfo {
+
+    val (
+        spark_version: String,
+        spark_branch: String,
+        spark_revision: String,
+        spark_build_user: String,
+        spark_repo_url: String,
+        spark_build_date: String) = {
+
+      val resourceStream = Thread.currentThread().getContextClassLoader.
+        getResourceAsStream("spark-version-info.properties")
+
+      try {
+        val unknownProp = "<unknown>"
+        val props = new Properties()
+        props.load(resourceStream)
+        (
+          props.getProperty("version", unknownProp),
+          props.getProperty("branch", unknownProp),
+          props.getProperty("revision", unknownProp),
+          props.getProperty("user", unknownProp),
+          props.getProperty("url", unknownProp),
+          props.getProperty("date", unknownProp)
+        )
+      } catch {
+        case npe: NullPointerException =>
+          throw new SparkException("Error while locating file spark-version-info.properties", npe)
+        case e: Exception =>
+          throw new SparkException("Error loading properties from spark-version-info.properties", e)
+      } finally {
+        if (resourceStream != null) {
+          try {
+            resourceStream.close()
+          } catch {
+            case e: Exception =>
+              throw new SparkException("Error closing spark build info resource stream", e)
+          }
+        }
+      }
+    }
+  }
+
+  val SPARK_VERSION = SparkBuildInfo.spark_version
+  val SPARK_BRANCH = SparkBuildInfo.spark_branch
+  val SPARK_REVISION = SparkBuildInfo.spark_revision
+  val SPARK_BUILD_USER = SparkBuildInfo.spark_build_user
+  val SPARK_REPO_URL = SparkBuildInfo.spark_repo_url
+  val SPARK_BUILD_DATE = SparkBuildInfo.spark_build_date
 }
+

pom.xml

+4 -2

@@ -180,6 +180,8 @@
     <antlr4.version>4.5.3</antlr4.version>
     <jpam.version>1.1</jpam.version>
     <selenium.version>2.52.0</selenium.version>
+    <paranamer.version>2.8</paranamer.version>
+    <maven-antrun.version>1.8</maven-antrun.version>
 
     <test.java.home>${java.home}</test.java.home>
     <test.exclude.tags></test.exclude.tags>
@@ -2061,7 +2063,7 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-antrun-plugin</artifactId>
-          <version>1.8</version>
+          <version>${maven-antrun.version}</version>
         </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
@@ -2184,7 +2186,7 @@
                 <pluginExecutionFilter>
                   <groupId>org.apache.maven.plugins</groupId>
                   <artifactId>maven-antrun-plugin</artifactId>
-                  <versionRange>[1.8,)</versionRange>
+                  <versionRange>[${maven-antrun.version},)</versionRange>
                   <goals>
                     <goal>run</goal>
                   </goals>

project/SparkBuild.scala

+18 -3

@@ -360,6 +360,9 @@ object SparkBuild extends PomBuild {
     enable(MimaBuild.mimaSettings(sparkHome, x))(x)
   }
 
+  /* Generate and pick the spark build info from extra-resources */
+  enable(Core.settings)(core)
+
   /* Unsafe settings */
   enable(Unsafe.settings)(unsafe)
 
@@ -448,7 +451,19 @@
       else x.settings(Seq[Setting[_]](): _*)
     } ++ Seq[Project](OldDeps.project)
   }
+}
 
+object Core {
+  lazy val settings = Seq(
+    resourceGenerators in Compile += Def.task {
+      val buildScript = baseDirectory.value + "/../build/spark-build-info"
+      val targetDir = baseDirectory.value + "/target/extra-resources/"
+      val command = buildScript + " " + targetDir + " " + version.value
+      Process(command).!!
+      val propsFile = baseDirectory.value / "target" / "extra-resources" / "spark-version-info.properties"
+      Seq(propsFile)
+    }.taskValue
+  )
 }
 
 object Unsafe {
@@ -480,9 +495,9 @@ object DependencyOverrides {
 }
 
 /**
-  This excludes library dependencies in sbt, which are specified in maven but are
-  not needed by sbt build.
-  */
+ * This excludes library dependencies in sbt, which are specified in maven but are
+ * not needed by sbt build.
+ */
 object ExcludedDependencies {
   lazy val settings = Seq(
     libraryDependencies ~= { libs => libs.filterNot(_.name == "groovy-all") }
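The sbt side can be checked the same way; a sketch of one invocation (assuming the `core` project id that the `enable(Core.settings)(core)` call above refers to):

```bash
# Packaging core runs the resource generator registered in Core.settings,
# which shells out to build/spark-build-info.
./build/sbt core/package
cat core/target/extra-resources/spark-version-info.properties
```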
