
Commit

Update libs + fix Apache Spark mirror
umayrh committed Dec 29, 2019
1 parent 3a09bc4 commit 1f42b57
Showing 4 changed files with 13 additions and 14 deletions.
requirements.txt: 8 changes (4 additions, 4 deletions)
@@ -1,6 +1,6 @@
-opentuner==0.8.0
-humanfriendly==4.17
-chainmap==1.0.2
+opentuner==0.8.2
+humanfriendly==4.18
+chainmap==1.0.3
 psutil==5.4.8
-requests==2.21.0
+requests==2.22.0
 requests-mock==1.5.2
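
To sanity-check the new pins locally, one option (not part of this commit, and assuming a clean virtualenv for the project) is to reinstall and grep the frozen versions:

    # Hypothetical verification sketch
    pip install -r requirements.txt
    pip freeze | grep -E 'opentuner|humanfriendly|chainmap|requests'
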
scripts/travis-setup.sh: 16 changes (8 additions, 8 deletions)
@@ -5,8 +5,9 @@ set -ex
 OS=$(uname -s)
 
 ## Service versions
-SPARK_VERSION=${SPARK_VERSION:-"2.4.0"}
+SPARK_VERSION=${SPARK_VERSION:-"2.4.4"}
 HADOOP_VERSION=${HADOOP_VERSION:-"2.7"}
+SPARK_MIRROR="http://ftp.wayne.edu/apache/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz"
 
 ## OS-specific package installation
 bootstrap() {
@@ -34,16 +35,15 @@ createGlobalEnvFile() {
 ## Installs a specific version of Spark
 setupSpark() {
     local SPARK_DIR_NAME=spark-${SPARK_VERSION}
-    SPARK_DIST_NAME=${SPARK_DIR_NAME}-bin-hadoop${HADOOP_VERSION}
-    if [[ ! -d "$HOME/.cache/${SPARK_DIST_NAME}" ]]; then
+    if [[ ! -d "$HOME/.cache/${SPARK_DIR_NAME}" ]]; then
         cd $HOME/.cache
+        SPARK_DIST_NAME=${SPARK_DIR_NAME}-bin-hadoop${HADOOP_VERSION}
         rm -fr ./${SPARK_DIST_NAME}.tgz*
-        # Use axel again when https://github.com/axel-download-accelerator/axel/issues/192
-        # has been fixed.
-        # axel --quiet http://www-us.apache.org/dist/spark/${SPARK_DIR_NAME}/${SPARK_DIST_NAME}.tgz
-        wget --quiet http://www-us.apache.org/dist/spark/${SPARK_DIR_NAME}/${SPARK_DIST_NAME}.tgz
-        ls -alh ${SPARK_DIST_NAME}.tgz
+        axel --quiet ${SPARK_MIRROR}
         tar -xf ./${SPARK_DIST_NAME}.tgz
         export SPARK_HOME=`pwd`/${SPARK_DIST_NAME}
         # TODO: need a more systematic method for setting up Spark properties
         echo "spark.yarn.jars=${SPARK_HOME}/jars/*.jar" > ${SPARK_HOME}/conf/spark-defaults.conf
         cd ..
     fi
     export SPARK_HOME="${HOME}/.cache/${SPARK_DIST_NAME}"
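
For context, the updated setupSpark() boils down to fetching the distribution from the new mirror and unpacking it into the cache. A minimal standalone sketch of that flow with this commit's defaults, assuming axel is installed and the ftp.wayne.edu mirror still serves this release:

    # Sketch only; replays the download/unpack step outside Travis
    SPARK_VERSION="2.4.4"
    HADOOP_VERSION="2.7"
    SPARK_DIST_NAME="spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}"
    SPARK_MIRROR="http://ftp.wayne.edu/apache/spark/spark-${SPARK_VERSION}/${SPARK_DIST_NAME}.tgz"
    mkdir -p "${HOME}/.cache" && cd "${HOME}/.cache"
    axel --quiet "${SPARK_MIRROR}"
    tar -xf "./${SPARK_DIST_NAME}.tgz"
    export SPARK_HOME="${HOME}/.cache/${SPARK_DIST_NAME}"
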
setup.py: 2 changes (1 addition, 1 deletion)
@@ -152,7 +152,7 @@ def load_pinned_deps(self):
     description='Package for automating Spark application tuning',
     long_description=open('README.txt').read(),
     install_requires=[
-        "opentuner==0.8.0",
+        "opentuner==0.8.2",
         "humanfriendly==4.17",
         "chainmap==1.0.2",
         "psutil==5.4.8",
src/sparktuner.egg-info/SOURCES.txt: 1 change (0 additions, 1 deletion)
@@ -19,7 +19,6 @@ gradle/wrapper/gradle-wrapper.jar
 gradle/wrapper/gradle-wrapper.properties
 scripts/get_requirements.py
 scripts/pivy-importer-0.12.8-SNAPSHOT-all.jar
-scripts/pivy-importer-0.9.9-all.jar
 scripts/travis-setup.sh
 src/sparktuner/__init__.py
 src/sparktuner/args.py
