Add Scala 2.13 support
alexarchambault committed Jun 9, 2022
1 parent 31b4bc5 commit b7e1a3e
Showing 12 changed files with 87 additions and 53 deletions.
.github/scripts/test.sh (10 changes: 5 additions & 5 deletions)
@@ -3,15 +3,15 @@ set -e
 
 case "${MASTER:-"local"}" in
   local)
-    ./sbt publishLocal test mimaReportBinaryIssues ;;
+    ./sbt +publishLocal +test +mimaReportBinaryIssues ;;
   local-distrib)
-    ./with-spark-home.sh ./sbt publishLocal local-spark-distrib-tests/test ;;
+    ./with-spark-home.sh ./sbt +publishLocal +local-spark-distrib-tests/test ;;
   standalone)
-    ./with-spark-home.sh ./sbt-with-standalone-cluster.sh publishLocal standalone-tests/test ;;
+    ./with-spark-home.sh ./sbt-with-standalone-cluster.sh +publishLocal +standalone-tests/test ;;
   yarn)
-    ./sbt-in-docker-with-yarn-cluster.sh -batch publishLocal yarn-tests/test ;;
+    ./sbt-in-docker-with-yarn-cluster.sh -batch +publishLocal +yarn-tests/test ;;
   yarn-distrib)
-    ./with-spark-home.sh ./sbt-in-docker-with-yarn-cluster.sh -batch publishLocal yarn-spark-distrib-tests/test ;;
+    ./with-spark-home.sh ./sbt-in-docker-with-yarn-cluster.sh -batch +publishLocal +yarn-spark-distrib-tests/test ;;
   *)
     echo "Unrecognized master type $MASTER"
     exit 1
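
Note: the only change to this script is the + prefix on each sbt command. Prefixing a task with + makes sbt run it once per entry in the project's crossScalaVersions, so CI now exercises every cross-built Scala version instead of only the default 2.12. A minimal sketch of the setting that drives this, with the version numbers taken from the project/Deps.scala change below:

    // Sketch: sbt cross-building. With this setting, `sbt +test` runs the
    // test task once on 2.12.11 and once on 2.13.8; a plain `sbt test`
    // only uses the current scalaVersion.
    crossScalaVersions := Seq("2.12.11", "2.13.8")
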
build.sbt (8 changes: 6 additions & 2 deletions)
@@ -20,7 +20,7 @@ lazy val `spark-stubs_24` = project
   .underModules
   .settings(
     shared,
-    libraryDependencies += Deps.sparkSql % Provided
+    libraryDependencies += Deps.sparkSql.value % Provided
   )
 
 lazy val `spark-stubs_30` = project
@@ -36,19 +36,21 @@ lazy val `spark-stubs_32` = project
   .underModules
   .settings(
     shared,
+    crossScalaVersions += Deps.Scala.scala213,
     libraryDependencies += Deps.sparkSql32 % Provided
   )
 
 lazy val core = project
   .in(file("modules/core"))
   .settings(
     shared,
+    crossScalaVersions += Deps.Scala.scala213,
     name := "ammonite-spark",
     Mima.settings,
     generatePropertyFile("org/apache/spark/sql/ammonitesparkinternals/ammonite-spark.properties"),
     libraryDependencies ++= Seq(
       Deps.ammoniteReplApi % Provided,
-      Deps.sparkSql % Provided,
+      Deps.sparkSql.value % Provided,
       Deps.jettyServer
     )
   )
@@ -58,6 +60,7 @@ lazy val tests = project
   .underModules
   .settings(
     shared,
+    crossScalaVersions += Deps.Scala.scala213,
     skip.in(publish) := true,
     generatePropertyFile("ammonite/ammonite-spark.properties"),
     generateDependenciesFile,
@@ -95,6 +98,7 @@ lazy val `yarn-tests` = project
   .underModules
   .settings(
     shared,
+    crossScalaVersions += Deps.Scala.scala213,
     skip.in(publish) := true,
     testSettings
   )
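
Note: only the modules that can build against a 2.13-capable Spark opt into Scala 2.13, each via crossScalaVersions += Deps.Scala.scala213. The spark-stubs_24 and spark-stubs_30 modules are left untouched and keep the 2.12-only default from the shared settings, since Spark 2.4 and 3.0 were never published for Scala 2.13. The switch from Deps.sparkSql to Deps.sparkSql.value follows from sparkSql becoming an sbt setting in project/Deps.scala (see that file below). A sketch of the resulting opt-in pattern:

    // Sketch: per-project opt-in on top of the shared 2.12-only default.
    crossScalaVersions := Seq(Deps.Scala.scala212)  // from `shared` (Settings.scala)
    crossScalaVersions += Deps.Scala.scala213       // added per 2.13-ready project
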
modules/tests/src/main/scala/ammonite/spark/SparkReplTests.scala (91 changes: 51 additions & 40 deletions)
@@ -3,12 +3,16 @@ package ammonite.spark
 import ammonite.spark.fromammonite.TestRepl
 import utest._
 
+import scala.util.Properties.versionNumberString
+
 class SparkReplTests(
   val sparkVersion: String,
   val master: String,
   val conf: (String, String)*
 ) extends TestSuite {
 
+  private def is212 = versionNumberString.startsWith("2.12.")
+
   // Most of the tests here were adapted from https://github.com/apache/spark/blob/ab18b02e66fd04bc8f1a4fb7b6a7f2773902a494/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala
 
   Init.setupLog4j()
@@ -181,13 +185,15 @@ class SparkReplTests(
   }
 
   "SPARK-1199 two instances of same class don't type check" - {
+    val expFieldNamePart = if (is212) "" else "exp = "
+    val exp2FieldNamePart = if (is212) "" else "exp2 = "
     sparkSession(
-      """
+      s"""
         @ case class Sum(exp: String, exp2: String)
        defined class Sum
 
         @ val a = Sum("A", "B")
-        a: Sum = Sum("A", "B")
+        a: Sum = Sum(${expFieldNamePart}"A", ${exp2FieldNamePart}"B")
 
         @ def b(a: Sum): String = a match { case Sum(_, _) => "OK" }
         defined function b
@@ -212,9 +218,10 @@ class SparkReplTests(
   }
 
   "SPARK-2576 importing implicits" - {
+    val fieldNamePart = if (is212) "" else "value = "
     // FIXME The addOuterScope should be automatically added. (Tweak CodeClassWrapper for that?)
     sparkSession(
-      """
+      s"""
         @ import spark.implicits._
         import spark.implicits._
 
@@ -225,7 +232,7 @@ class SparkReplTests(
         res: Array[Row] = Array([1], [2], [3], [4], [5], [6], [7], [8], [9], [10])
 
         @ val foo = Seq(TestCaseClass(1)).toDS().collect()
-        foo: Array[TestCaseClass] = Array(TestCaseClass(1))
+        foo: Array[TestCaseClass] = Array(TestCaseClass(${fieldNamePart}1))
       """
     )
   }
@@ -267,8 +274,9 @@ class SparkReplTests(
   }
 
   "SPARK-2632 importing a method from non serializable class and not using it" - {
+    val fieldNamePart = if (is212) "" else "value = "
     sparkSession(
-      """
+      s"""
        @ class TestClass() { def testMethod = 3; override def toString = "TestClass" }
        defined class TestClass
 
@@ -283,55 +291,57 @@ class SparkReplTests(
 
         @ val res = sc.parallelize(1 to 10).map(x => TestCaseClass(x)).collect()
         res: Array[TestCaseClass] = Array(
-          TestCaseClass(1),
-          TestCaseClass(2),
-          TestCaseClass(3),
-          TestCaseClass(4),
-          TestCaseClass(5),
-          TestCaseClass(6),
-          TestCaseClass(7),
-          TestCaseClass(8),
-          TestCaseClass(9),
-          TestCaseClass(10)
+          TestCaseClass(${fieldNamePart}1),
+          TestCaseClass(${fieldNamePart}2),
+          TestCaseClass(${fieldNamePart}3),
+          TestCaseClass(${fieldNamePart}4),
+          TestCaseClass(${fieldNamePart}5),
+          TestCaseClass(${fieldNamePart}6),
+          TestCaseClass(${fieldNamePart}7),
+          TestCaseClass(${fieldNamePart}8),
+          TestCaseClass(${fieldNamePart}9),
+          TestCaseClass(${fieldNamePart}10)
         )
       """
     )
   }
 
   "collecting objects of class defined in repl" - {
+    val fieldNamePart = if (is212) "" else "i = "
     sparkSession(
-      """
+      s"""
         @ case class Foo(i: Int)
         defined class Foo
 
         @ val res = sc.parallelize((1 to 100).map(Foo), 10).collect()
         res: Array[Foo] = Array(
-          Foo(1),
-          Foo(2),
-          Foo(3),
-          Foo(4),
-          Foo(5),
-          Foo(6),
-          Foo(7),
-          Foo(8),
-          Foo(9),
-          Foo(10),
+          Foo(${fieldNamePart}1),
+          Foo(${fieldNamePart}2),
+          Foo(${fieldNamePart}3),
+          Foo(${fieldNamePart}4),
+          Foo(${fieldNamePart}5),
+          Foo(${fieldNamePart}6),
+          Foo(${fieldNamePart}7),
+          Foo(${fieldNamePart}8),
+          Foo(${fieldNamePart}9),
+          Foo(${fieldNamePart}10),
           ...
       """
     )
   }
 
   "collecting objects of class defined in repl - shuffling" - {
+    val fieldNamePart = if (is212) "" else "i = "
     sparkSession(
-      """
+      s"""
         @ case class Foo(i: Int)
         defined class Foo
 
         @ val list = List((1, Foo(1)), (1, Foo(2)))
-        list: List[(Int, Foo)] = List((1, Foo(1)), (1, Foo(2)))
+        list: List[(Int, Foo)] = List((1, Foo(${fieldNamePart}1)), (1, Foo(${fieldNamePart}2)))
 
         @ val res = sc.parallelize(list).groupByKey().collect().map { case (k, v) => k -> v.toList }
-        res: Array[(Int, List[Foo])] = Array((1, List(Foo(1), Foo(2))))
+        res: Array[(Int, List[Foo])] = Array((1, List(Foo(${fieldNamePart}1), Foo(${fieldNamePart}2))))
       """
     )
   }
@@ -420,8 +430,9 @@ class SparkReplTests(
 
   // Adapted from https://github.com/apache/spark/blob/3d5c61e5fd24f07302e39b5d61294da79aa0c2f9/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala#L193-L208
   "line wrapper only initialized once when used as encoder outer scope" - {
+    val fieldNamePart = if (is212) "" else "value = "
     sparkSession(
-      """
+      s"""
         @ val fileName = "repl-test-" + java.util.UUID.randomUUID()
 
         @ val tmpDir = System.getProperty("java.io.tmpdir")
@@ -444,16 +455,16 @@ class SparkReplTests(
 
         @ val res = sc.parallelize(1 to 10).map(x => TestCaseClass(x)).collect()
         res: Array[TestCaseClass] = Array(
-          TestCaseClass(1),
-          TestCaseClass(2),
-          TestCaseClass(3),
-          TestCaseClass(4),
-          TestCaseClass(5),
-          TestCaseClass(6),
-          TestCaseClass(7),
-          TestCaseClass(8),
-          TestCaseClass(9),
-          TestCaseClass(10)
+          TestCaseClass(${fieldNamePart}1),
+          TestCaseClass(${fieldNamePart}2),
+          TestCaseClass(${fieldNamePart}3),
+          TestCaseClass(${fieldNamePart}4),
+          TestCaseClass(${fieldNamePart}5),
+          TestCaseClass(${fieldNamePart}6),
+          TestCaseClass(${fieldNamePart}7),
+          TestCaseClass(${fieldNamePart}8),
+          TestCaseClass(${fieldNamePart}9),
+          TestCaseClass(${fieldNamePart}10)
        )
 
         @ val exists2 = file.exists()
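
Note: the expected REPL output now depends on the Scala version, hence the is212 helper, the fieldNamePart values, and the plain """ literals becoming s""" interpolators. The likely cause (an assumption, not stated in the diff): Ammonite's pretty-printer shows case-class field names on 2.13, where Product.productElementName is available, but cannot on 2.12. A sketch of the difference under that assumption:

    // Sketch, assuming the pretty-printer relies on productElementName:
    case class Foo(i: Int)
    // rendered on Scala 2.12:  Foo(1)
    // rendered on Scala 2.13:  Foo(i = 1)
    Foo(1).productElementName(0)  // "i"; this API exists only on 2.13
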
project/Deps.scala (13 changes: 12 additions & 1 deletion)
@@ -5,6 +5,11 @@ import sbt.Keys._
 
 object Deps {
 
+  object Scala {
+    def scala212 = "2.12.11"
+    def scala213 = "2.13.8"
+  }
+
   private def ammoniteVersion = "2.5.4-8-30448e49"
   def ammoniteCompiler = ("com.lihaoyi" % "ammonite-compiler" % ammoniteVersion).cross(CrossVersion.full)
   def ammoniteReplApi = ("com.lihaoyi" % "ammonite-repl-api" % ammoniteVersion).cross(CrossVersion.full)
@@ -13,7 +18,13 @@ object Deps {
   def jettyServer = "org.eclipse.jetty" % "jetty-server" % "9.4.46.v20220331"
   def utest = "com.lihaoyi" %% "utest" % "0.7.11"
 
-  def sparkSql = "org.apache.spark" %% "spark-sql" % "2.4.0"
+  def sparkSql = setting {
+    val sv = scalaVersion.value
+    val ver =
+      if (sv.startsWith("2.12.")) "2.4.0"
+      else "3.2.0"
+    "org.apache.spark" %% "spark-sql" % ver
+  }
   def sparkSql3 = "org.apache.spark" %% "spark-sql" % "3.0.0"
   def sparkSql32 = "org.apache.spark" %% "spark-sql" % "3.2.0"
 
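
Note: sparkSql is no longer a plain ModuleID but a value built with sbt's Def.setting (the bare setting { ... } here presumably comes from an import like "import sbt.Def.setting", which is outside this hunk), so it can inspect scalaVersion.value and pick Spark 2.4.0 for the 2.12 builds and 3.2.0, the first Spark release published for Scala 2.13, otherwise. The result is a Def.Initialize[ModuleID] rather than a ModuleID, which is why the build.sbt call sites change to Deps.sparkSql.value. A self-contained sketch of the pattern:

    // Sketch: a Scala-version-dependent dependency as an sbt setting.
    import sbt._
    import sbt.Keys._

    def sparkSql: Def.Initialize[ModuleID] = Def.setting {
      val ver = if (scalaVersion.value.startsWith("2.12.")) "2.4.0" else "3.2.0"
      "org.apache.spark" %% "spark-sql" % ver
    }
    // usage inside .settings(...):  libraryDependencies += sparkSql.value % Provided
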
project/Mima.scala (12 changes: 11 additions & 1 deletion)
@@ -19,7 +19,17 @@
 
   def settings = Def.settings(
     MimaPlugin.autoImport.mimaPreviousArtifacts := {
-      binaryCompatibilityVersions
+      val sv = scalaVersion.value
+      val binaryCompatibilityVersions0 =
+        if (sv.startsWith("2.12.")) binaryCompatibilityVersions
+        else
+          binaryCompatibilityVersions.filter { v =>
+            !v.startsWith("0.9.") &&
+              !v.startsWith("0.10.") &&
+              !v.startsWith("0.11.") &&
+              !v.startsWith("0.12.")
+          }
+      binaryCompatibilityVersions0
         .map { ver =>
           (organization.value % moduleName.value % ver)
             .cross(crossVersion.value)
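
Note: on Scala 2.13, MiMa's previous-version list drops the 0.9.x through 0.12.x releases; those predate 2.13 support, so no 2.13 artifacts exist for MiMa to resolve, while 2.12 keeps checking the full list. A sketch of the filter, using a hypothetical version list since binaryCompatibilityVersions is defined outside this hunk:

    // Hypothetical version list; the real one lives elsewhere in project/Mima.scala.
    val binaryCompatibilityVersions = Set("0.9.1", "0.10.0", "0.12.0", "0.13.0")
    // On 2.13, keep only releases that could have been published for 2.13:
    val forScala213 = binaryCompatibilityVersions.filter { v =>
      !v.startsWith("0.9.") && !v.startsWith("0.10.") &&
      !v.startsWith("0.11.") && !v.startsWith("0.12.")
    }
    // forScala213 == Set("0.13.0")
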
project/Settings.scala (6 changes: 2 additions & 4 deletions)
@@ -15,11 +15,9 @@ object Settings {
     }
   }
 
-  private val scala212 = "2.12.11"
-
   lazy val shared = Seq(
-    scalaVersion := scala212,
-    crossScalaVersions := Seq(scala212),
+    scalaVersion := Deps.Scala.scala212,
+    crossScalaVersions := Seq(Deps.Scala.scala212),
     scalacOptions ++= Seq(
       "-deprecation",
       "-feature",
