Merge pull request #53 from alexarchambault/ammonite-nightly
Switch to Ammonite 1.6.9-15-6720d42
alexarchambault committed Jul 17, 2019
2 parents f3a44d9 + e1e9c74 commit 7acd4f3
Showing 24 changed files with 79 additions and 212 deletions.
2 changes: 1 addition & 1 deletion .travis.sh
@@ -7,7 +7,7 @@ case "${MASTER:-"local"}" in
local-distrib)
./with-spark-home.sh sbt ++$TRAVIS_SCALA_VERSION'!' publishLocal local-spark-distrib-tests/test ;;
standalone)
./sbt-with-standalone-cluster.sh ++$TRAVIS_SCALA_VERSION'!' publishLocal standalone-tests/test ;;
./with-spark-home.sh ./sbt-with-standalone-cluster.sh ++$TRAVIS_SCALA_VERSION'!' publishLocal standalone-tests/test ;;
yarn)
./sbt-in-docker-with-yarn-cluster.sh -batch ++$TRAVIS_SCALA_VERSION'!' publishLocal yarn-tests/test ;;
yarn-distrib)
18 changes: 7 additions & 11 deletions .travis.yml
@@ -1,11 +1,16 @@
language: scala
language: java
jdk: openjdk8
before_install:
- mkdir .bin
- curl -Lo .bin/sbt https://raw.githubusercontent.com/coursier/sbt-extras/e20d91edd7c2e9f4a17629dfae7d7d6bf9ba72a0/sbt
- chmod +x .bin/sbt
- export PATH="$(pwd)/.bin:$PATH"
- export TRAVIS_SCALA_VERSION=2.12.8
script: ./.travis.sh
sudo: required
cache:
directories:
- $HOME/.cache
- $HOME/.ivy2/cache
- $HOME/.sbt
- $HOME/standalone-stuff
- $HOME/yarn-stuff
@@ -18,19 +23,10 @@ stages:
jobs:
include:
- env: MASTER=local-distrib
scala: 2.11.12
- env: MASTER=local
scala: 2.11.12
- env: MASTER=local
scala: 2.12.8
- env: MASTER=standalone STANDALONE_CACHE=$HOME/standalone-stuff
scala: 2.11.12
- env: MASTER=yarn-distrib YARN_CACHE=$HOME/yarn-stuff STANDALONE_CACHE=$HOME/yarn-cache
scala: 2.11.12
- env: MASTER=yarn YARN_CACHE=$HOME/yarn-stuff
scala: 2.11.12
- env: MASTER=yarn YARN_CACHE=$HOME/yarn-stuff
scala: 2.12.8
- stage: release
script: sbt ci-release
branches:
30 changes: 4 additions & 26 deletions build.sbt
@@ -15,32 +15,11 @@ inThisBuild(List(
)
))

lazy val `spark-stubs_20` = project
.underModules
.settings(
shared,
baseDirectory := {
val baseDir = baseDirectory.value

if (Settings.isAtLeast212.value)
baseDir / "target" / "dummy"
else
baseDir
},
libraryDependencies ++= {
if (Settings.isAtLeast212.value)
Nil
else
Seq(Deps.sparkSql20 % "provided")
},
publishArtifact := !Settings.isAtLeast212.value
)

lazy val `spark-stubs_24` = project
.underModules
.settings(
shared,
libraryDependencies += Deps.sparkSql24 % "provided"
libraryDependencies += Deps.sparkSql % "provided"
)

lazy val core = project
@@ -50,8 +29,8 @@ lazy val core = project
name := "ammonite-spark",
generatePropertyFile("org/apache/spark/sql/ammonitesparkinternals/ammonite-spark.properties"),
libraryDependencies ++= Seq(
Deps.ammoniteReplApi.value % "provided",
Deps.sparkSql.value % "provided",
Deps.ammoniteReplApi % "provided",
Deps.sparkSql % "provided",
Deps.jettyServer
)
)
@@ -65,7 +44,7 @@ lazy val tests = project
generateDependenciesFile,
testSettings,
libraryDependencies ++= Seq(
Deps.ammoniteRepl.value,
Deps.ammoniteRepl,
Deps.utest
)
)
@@ -110,7 +89,6 @@ lazy val `ammonite-spark` = project
.in(file("."))
.aggregate(
core,
`spark-stubs_20`,
`spark-stubs_24`,
tests
)

This file was deleted.

10 changes: 0 additions & 10 deletions modules/core/src/main/scala-2.11/ammonite/repl/api/package.scala

This file was deleted.

This file was deleted.

This file was deleted.

@@ -11,8 +11,6 @@ import org.eclipse.jetty.server.handler.AbstractHandler

final class AmmoniteClassServer(host: String, bindTo: String, port: Int, frames: => List[Frame]) {

import Compatibility._

private val socketAddress = InetSocketAddress.createUnresolved(bindTo, port)

private val handler = new AbstractHandler {

@@ -7,6 +7,7 @@ import java.nio.file.{Files, Paths}

import ammonite.repl.api.ReplAPI
import ammonite.interp.api.InterpAPI
import coursierapi.Dependency
import org.apache.spark.SparkContext
import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
import org.apache.spark.sql.SparkSession
@@ -180,7 +181,7 @@ class AmmoniteSparkSessionBuilder

private def loadExtraDependencies(): Unit = {

var deps = List.empty[(String, coursier.Dependency)]
var deps = List.empty[(String, Dependency)]

if (hiveSupport() && !SparkDependencies.sparkHiveFound())
deps = ("spark-hive", SparkDependencies.sparkHiveDependency) :: deps
@@ -236,9 +237,9 @@ class AmmoniteSparkSessionBuilder
// Loose attempt at using the scala JARs already loaded in Ammonite,
// rather than ones from the spark distribution.
val fromBaseCp = jars.filter { f =>
f.toASCIIString.contains("/scala-library-") ||
f.toASCIIString.contains("/scala-reflect-") ||
f.toASCIIString.contains("/scala-compiler-")
f.toASCIIString.contains("/scala-library") ||
f.toASCIIString.contains("/scala-reflect") ||
f.toASCIIString.contains("/scala-compiler")
}
val fromSparkDistrib = Files.list(Paths.get(sparkHome).resolve("jars"))
.iterator()
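The loosened substring match above (dropping the trailing dash after the artifact name) presumably also catches JAR names without a version suffix, e.g. a bare scala-library.jar. A minimal sketch of the difference, using hypothetical paths:

  import java.net.URI

  // Hypothetical JAR locations; only the second matches the old
  // "/scala-library-" pattern, while both match the new "/scala-library" one.
  val jars = Seq(
    URI.create("file:///opt/spark/jars/scala-library.jar"),
    URI.create("file:///home/user/.ivy2/local/scala-library-2.12.8.jar")
  )
  jars.filter(_.toASCIIString.contains("/scala-library-"))  // 1 match: versioned JAR only
  jars.filter(_.toASCIIString.contains("/scala-library"))   // 2 matches: both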
@@ -2,13 +2,11 @@ package org.apache.spark.sql.ammonitesparkinternals

import java.net.URI

import coursier.util.Task
import coursier._
import coursier.params.ResolutionParams
import coursierapi.{Dependency, Fetch, Module, Repository, ResolutionParams}

import scala.annotation.tailrec
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.concurrent.ExecutionContext
import scala.util.Properties.{versionNumberString => scalaVersion}
import scala.util.Try

@@ -100,50 +98,50 @@ object SparkDependencies {
case _ =>
"24"
}
coursier.Dependency(
coursier.Module(org"sh.almond", ModuleName(s"spark-stubs_${suffix}_$sbv")), Properties.version
Dependency.of(
"sh.almond", s"spark-stubs_${suffix}_$sbv", Properties.version
)
}

def sparkYarnDependency =
coursier.Dependency(
coursier.Module(org"org.apache.spark", ModuleName(s"spark-yarn_$sbv")), org.apache.spark.SPARK_VERSION
Dependency.of(
"org.apache.spark", s"spark-yarn_$sbv", org.apache.spark.SPARK_VERSION
)

def sparkHiveDependency =
coursier.Dependency(
coursier.Module(org"org.apache.spark", ModuleName(s"spark-hive_$sbv")), org.apache.spark.SPARK_VERSION
Dependency.of(
"org.apache.spark", s"spark-hive_$sbv", org.apache.spark.SPARK_VERSION
)

private def sparkBaseDependencies() =
Seq(
Dependency(Module(org"org.scala-lang", name"scala-library"), scalaVersion),
Dependency(Module(org"org.scala-lang", name"scala-reflect"), scalaVersion),
Dependency(Module(org"org.scala-lang", name"scala-compiler"), scalaVersion),
Dependency.of("org.scala-lang", "scala-library", scalaVersion),
Dependency.of("org.scala-lang", "scala-reflect", scalaVersion),
Dependency.of("org.scala-lang", "scala-compiler", scalaVersion),
stubsDependency // for ExecutorClassLoader
) ++
sparkModules().map { m =>
Dependency(Module(org"org.apache.spark", ModuleName(s"spark-${m}_$sbv")), org.apache.spark.SPARK_VERSION)
Dependency.of("org.apache.spark", s"spark-${m}_$sbv", org.apache.spark.SPARK_VERSION)
}


def sparkJars(
repositories: Seq[Repository],
profiles: Seq[String]
): Seq[URI] =
Fetch()
Fetch.create()
.addDependencies(sparkBaseDependencies(): _*)
.withRepositories(repositories)
.withRepositories(repositories: _*)
.withResolutionParams(
ResolutionParams()
.addForceVersion(
mod"org.scala-lang:scala-library" -> scalaVersion,
mod"org.scala-lang:scala-reflect" -> scalaVersion,
mod"org.scala-lang:scala-compiler" -> scalaVersion
)
.withProfiles(profiles.toSet)
ResolutionParams.create()
.forceVersion(Module.of("org.scala-lang", "scala-library"), scalaVersion)
.forceVersion(Module.of("org.scala-lang", "scala-reflect"), scalaVersion)
.forceVersion(Module.of("org.scala-lang", "scala-compiler"), scalaVersion)
.withProfiles(profiles.toSet.asJava)
)
.run()
.fetch()
.asScala
.toVector
.map(_.getAbsoluteFile.toURI)

}
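The hunk above migrates SparkDependencies from coursier's Scala API (the Dependency/Module case classes, mod"…" interpolators, and Fetch().run()) to the Java-friendly coursierapi interface (Dependency.of, Fetch.create(), .fetch()). A stand-alone sketch of the new call pattern, assuming coursier-interface is on the classpath; the coordinates are illustrative, not the exact call made here:

  import coursierapi.{Dependency, Fetch, Module, ResolutionParams}
  import scala.collection.JavaConverters._

  // Fetch spark-sql and its transitive dependencies, pinning the Scala
  // artifacts to a fixed version, as sparkJars does above.
  val files = Fetch.create()
    .addDependencies(Dependency.of("org.apache.spark", "spark-sql_2.12", "2.4.0"))
    .withResolutionParams(
      ResolutionParams.create()
        .forceVersion(Module.of("org.scala-lang", "scala-library"), "2.12.8")
    )
    .fetch()  // returns a java.util.List[java.io.File]
    .asScala
    .map(_.getAbsoluteFile.toURI)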

This file was deleted.

2 changes: 1 addition & 1 deletion modules/tests/src/main/scala/ammonite/spark/Init.scala
@@ -45,7 +45,7 @@ object Init {
@ .iterator()
@ .asScala
@ .toVector
@ .filter(f => !f.getFileName.toString.startsWith("scala-compiler") && !f.getFileName.toString.startsWith("scala-reflect") && !f.getFileName.toString.startsWith("scala-library"))
@ .filter(f => !f.getFileName.toString.startsWith("scala-compiler") && !f.getFileName.toString.startsWith("scala-reflect") && !f.getFileName.toString.startsWith("scala-library") && !f.getFileName.toString.startsWith("spark-repl_"))
@ .sortBy(_.getFileName.toString)
@ .map(ammonite.ops.Path(_))
@ }

@@ -519,11 +519,10 @@ class SparkReplTests(
// tests below are custom ones

"algebird" - {
if (scala.util.Properties.versionNumberString.startsWith("2.11."))
// no algebird-spark in scala 2.12 yet
if (scala.util.Properties.versionNumberString.startsWith("2.12."))
sparkSession(
"""
@ import $ivy.`com.twitter::algebird-spark:0.13.0`
@ import $ivy.`com.twitter::algebird-spark:0.13.5`
@ AmmoniteSparkSession.sync()

This file was deleted.

This file was deleted.

This file was deleted.

This file was deleted.

This file was deleted.

This file was deleted.

33 changes: 4 additions & 29 deletions project/Deps.scala
@@ -5,38 +5,13 @@ import sbt.Keys._

object Deps {

private val ammoniteVersion = setting {
val sv = scalaVersion.value
if (sv.startsWith("2.11."))
"1.6.7"
else
"1.6.9-8-2a27ffe"
}
private def ammoniteVersion = "1.6.9-15-6720d42"
def ammoniteReplApi = ("com.lihaoyi" % "ammonite-repl-api" % ammoniteVersion).cross(CrossVersion.full)
def ammoniteRepl = ("com.lihaoyi" % "ammonite-repl" % ammoniteVersion).cross(CrossVersion.full)

def ammoniteReplApi = setting {
val sv = scalaVersion.value
val mod =
if (sv.startsWith("2.11."))
"com.lihaoyi" % "ammonite-repl"
else
"com.lihaoyi" % "ammonite-repl-api"
val ver = ammoniteVersion.value
(mod % ver).cross(CrossVersion.full)
}
def ammoniteRepl = setting {
val ver = ammoniteVersion.value
("com.lihaoyi" % "ammonite-repl" % ver).cross(CrossVersion.full)
}
def jettyServer = "org.eclipse.jetty" % "jetty-server" % "9.4.19.v20190610"
def utest = "com.lihaoyi" %% "utest" % "0.6.7"

def sparkSql20 = "org.apache.spark" %% "spark-sql" % "2.0.2" // no need to bump that version much, to ensure we don't rely on too new stuff
def sparkSql24 = "org.apache.spark" %% "spark-sql" % "2.4.0" // that version's required for scala 2.12
def sparkSql = setting {
if (Settings.isAtLeast212.value)
sparkSql24
else
sparkSql20
}
def sparkSql = "org.apache.spark" %% "spark-sql" % "2.4.0"

}
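With 2.11 support dropped, the Ammonite modules no longer need the setting-based indirection, but they still use CrossVersion.full, since Ammonite is cross-published per full Scala version. Roughly, in a consuming build (a sketch; the version is the one pinned above):

  // With scalaVersion 2.12.8, CrossVersion.full resolves this to the
  // module ammonite-repl_2.12.8 rather than ammonite-repl_2.12.
  libraryDependencies += ("com.lihaoyi" % "ammonite-repl" % "1.6.9-15-6720d42")
    .cross(CrossVersion.full)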