# .travis.yml
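# Travis CI configuration: builds the Scala sources with sbt (style check, tests with
# coverage) and runs the Python test suite against a locally downloaded Spark 2.2.3.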
language: scala
sudo: required
dist: precise
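# Cache resolved ivy2 jars, the downloaded Spark tarball, pip downloads, and sbt
# launchers between builds to cut CI time.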
cache:
directories:
- $HOME/.ivy2
- $HOME/spark
- $HOME/.cache/pip
- $HOME/.pip-cache
- $HOME/.sbt/launchers
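# Toolchain for the sbt build: Oracle JDK 8 and Scala 2.10.4.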
jdk:
- oraclejdk8
scala:
- 2.10.4
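# axel is a parallel download accelerator, used below to fetch the Spark distribution.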
addons:
apt:
packages:
- axel
before_install:
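# Install user-level Python tooling (coverage upload + test runner) before the conda env exists.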
- export PATH=$HOME/.local/bin:$PATH
- pip install --user codecov coverage unittest2
install:
# Download Spark 2.2.3 from the Apache archive unless the cached tarball is already present.
- "[ -f spark/spark-2.2.3-bin-hadoop2.7.tgz ] || (mkdir -p spark && cd spark && axel https://archive.apache.org/dist/spark/spark-2.2.3/spark-2.2.3-bin-hadoop2.7.tgz)"
- "tar -xf ./spark/spark-2.2.3-bin-hadoop2.7.tgz"
- "export SPARK_HOME=`pwd`/spark-2.2.3-bin-hadoop2.7"
- echo "spark.yarn.jars=$SPARK_HOME/jars/*.jar" > $SPARK_HOME/conf/spark-defaults.conf
# Install Python deps.
# The conda installation steps here are based on http://conda.pydata.org/docs/travis.html
- wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh
- bash miniconda.sh -b -p $HOME/miniconda
- export PATH="$HOME/miniconda/bin:$PATH"
- hash -r
- conda config --set always_yes yes --set changeps1 no
- conda update -q conda
# Useful for debugging any issues with conda
- conda info -a
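# Create an isolated Python 2.7 environment containing the build and test dependencies.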
- deps='pip requests nose sphinx pycodestyle coverage'
- conda create -p $HOME/py --yes $deps "python=2.7"
- export PATH=$HOME/py/bin:$PATH
- pip install --upgrade unittest2 codecov
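# Pipeline: Scala style check, Scala tests with coverage, then Python tests; each
# coverage report is uploaded to Codecov under its own flag.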
script:
- ./sbt/sbt scalastyle
- SPARK_CONF_DIR=./log4j/ ./sbt/sbt clean coverage test coverageReport
- bash <(curl -s https://codecov.io/bash) -cF scala
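# Give the downloaded Spark distribution the test log4j configuration before running the Python tests.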
- cp ./log4j/log4j.properties $SPARK_HOME/conf/
- ./python/run-tests
- bash <(curl -s https://codecov.io/bash) -cF python
- "pycodestyle --ignore=E402 ./python"
notifications:
email:
recipients: