.travis.yml (forked from thunder-project/thunder)
language: python
python:
- "2.7"
jdk:
  - openjdk7
before_install:
  # install miniconda
  - sudo apt-get update
  - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
      wget http://repo.continuum.io/miniconda/Miniconda-3.7.3-Linux-x86_64.sh -O miniconda.sh;
    else
      wget http://repo.continuum.io/miniconda/Miniconda3-3.7.3-Linux-x86_64.sh -O miniconda.sh;
    fi
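  # run the installer in batch mode, add conda to the PATH, and configure conda non-interactively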
  - bash miniconda.sh -b -p $HOME/miniconda
  - export PATH="$HOME/miniconda/bin:$PATH"
  - hash -r
  - conda config --set always_yes yes --set changeps1 no
  - conda update -q conda
  - conda info -a
install:
  # create the conda test environment with the core scientific Python dependencies
  - conda create --yes -q -n test-environment python=$TRAVIS_PYTHON_VERSION nose numpy scipy scikit-learn scikit-image matplotlib
  - source activate test-environment
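  # install Thunder's remaining Python requirements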
  - cd python
  - pip install -r requirements.txt
  - cd ..
  # Install Spark
  - wget http://d3kbcqa49mib13.cloudfront.net/spark-1.3.0-bin-hadoop1.tgz
  - tar -xzf spark-1.3.0-bin-hadoop1.tgz
  # Workaround for Travis issue with POSIX semaphores; see
  # https://github.com/travis-ci/travis-cookbooks/issues/155
  - "sudo rm -rf /dev/shm && sudo ln -s /run/shm /dev/shm"
script:
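  # point SPARK_HOME at the local Spark build and run the Python test suite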
  - export SPARK_HOME=`pwd`/spark-1.3.0-bin-hadoop1
  - cd python/test
  - ./run_tests.sh