#!/bin/bash
set -ex

# Travis boxes provide 1.5 CPUs
export NPY_NUM_BUILD_JOBS=2

# setup env
if [ -r /usr/lib/libeatmydata/libeatmydata.so ]; then
  # much faster package installation
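  # (it LD_PRELOADs a library that turns fsync()/sync() into no-ops)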
  export LD_PRELOAD=/usr/lib/libeatmydata/libeatmydata.so
fi


setup_base()
{
  # We used to use 'setup.py install' here, but that has the terrible
  # behaviour that if a copy of the package is already installed in
  # the install location, then the new copy just gets dropped on top
  # of it. Travis typically has a stable numpy release pre-installed,
  # and if we don't remove it, then we can accidentally end up
  # e.g. running old test modules that were in the stable release but
  # have been removed from master. (See gh-2765, gh-2768.)  Using 'pip
  # install' also has the advantage that it tests that numpy is 'pip
  # install' compatible, see e.g. gh-2766...
  if [ -z "$USE_DEBUG" ]; then
    $PIP install .
  else
    sysflags="$($PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_var('CFLAGS'))")"
    # MSVC only accepts C89-style code (declarations before statements),
    # so enforce the same restriction here to catch such problems early
    CFLAGS="$sysflags -Werror=declaration-after-statement -Werror=nonnull" $PYTHON setup.py build_ext --inplace
  fi
}

setup_chroot()
{
  # this can all be replaced with:
  # apt-get install libpython2.7-dev:i386
  # CC="gcc -m32" LDSHARED="gcc -m32 -shared" LDFLAGS="-m32 -shared" linux32 python setup.py build
  # when travis updates to ubuntu 14.04
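  # Bootstrap an $ARCH $DIST userland under $DIR; the main script later
  # re-executes itself inside it via 'linux32 chroot' (see the USE_CHROOT
  # branch below).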
  DIR=$1
  # a tmpfs speeds up the bootstrap, since eatmydata is not usable during debootstrap
  sudo mkdir -p $DIR
  sudo mount -t tmpfs -o size=4G tmpfs $DIR
  set -u
  sudo apt-get update
  sudo apt-get -qq -y --force-yes install debootstrap eatmydata
  sudo debootstrap --variant=buildd --include=fakeroot,build-essential --arch=$ARCH --foreign $DIST $DIR
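  # --foreign only downloads and unpacks; the second stage below finishes the
  # installation from inside the chroot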
  sudo chroot $DIR ./debootstrap/debootstrap --second-stage
  sudo rsync -a $TRAVIS_BUILD_DIR $DIR/
  echo deb http://archive.ubuntu.com/ubuntu/ $DIST main restricted universe multiverse | sudo tee -a $DIR/etc/apt/sources.list
  echo deb http://archive.ubuntu.com/ubuntu/ $DIST-updates main restricted universe multiverse | sudo tee -a $DIR/etc/apt/sources.list
  echo deb http://security.ubuntu.com/ubuntu $DIST-security  main restricted universe multiverse | sudo tee -a $DIR/etc/apt/sources.list
  sudo chroot $DIR bash -c "apt-get update"
  sudo chroot $DIR bash -c "apt-get install -qq -y --force-yes eatmydata"
  echo /usr/lib/libeatmydata/libeatmydata.so | sudo tee -a $DIR/etc/ld.so.preload
  sudo chroot $DIR bash -c "apt-get install -qq -y --force-yes libatlas-dev libatlas-base-dev gfortran python3-dev python3-nose python3-pip cython3 cython"
}

setup_bento()
{
  export CI_ROOT=$PWD
  cd ..

  # Waf
  wget https://raw.githubusercontent.com/numpy/numpy-vendor/master/waf-1.7.16.tar.bz2
  tar xjvf waf-1.7.16.tar.bz2
  cd waf-1.7.16
  python waf-light
  export WAFDIR=$PWD
  cd ..

  # Bento
  wget https://github.com/cournape/Bento/archive/master.zip
  unzip master.zip
  cd Bento-master
  python bootstrap.py
  export BENTO_ROOT=$PWD
  cd ..

  cd $CI_ROOT

  # In-place numpy build
  $BENTO_ROOT/bentomaker build -v -i -j

  # Prepend to PYTHONPATH so tests can be run
  export PYTHONPATH=$PWD:$PYTHONPATH
}

run_test()
{
  if [ -n "$USE_DEBUG" ]; then
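    # debug builds are built in place (see setup_base), so the source tree
    # itself has to be importable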
    export PYTHONPATH=$PWD
  fi

  # We change directories to make sure that python won't find the copy
  # of numpy in the source directory.
  mkdir -p empty
  cd empty
  INSTALLDIR=$($PYTHON -c "import os; import numpy; print(os.path.dirname(numpy.__file__))")
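  # show warnings that Python hides by default (e.g. DeprecationWarning)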
  export PYTHONWARNINGS=default
  $PYTHON ../tools/test-installed-numpy.py # --mode=full
  # - coverage run --source=$INSTALLDIR --rcfile=../.coveragerc $(which $PYTHON) ../tools/test-installed-numpy.py
  # - coverage report --rcfile=../.coveragerc --show-missing
}

# The Travis virtualenv builds set PYTHON/PIP themselves; default to the system tools otherwise.
PYTHON=${PYTHON:-python}
PIP=${PIP:-pip}

if [ -n "$USE_DEBUG" ]; then
  sudo apt-get update
  sudo apt-get install -qq -y --force-yes python3-dbg python3-dev python3-nose
  PYTHON=python3-dbg
fi

if [ -n "$PYTHON_OO" ]; then
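  # -OO strips docstrings and assert statements; catches code that relies on them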
  PYTHON="$PYTHON -OO"
fi

export PYTHON
export PIP
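
# Main dispatch: pick a build/test strategy based on the USE_* flags set by
# the CI configuration, then run the test suite (possibly by re-invoking this
# script with a "test" argument).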
if [ -n "$USE_WHEEL" ] && [ $# -eq 0 ]; then
  # Build wheel
  $PIP install wheel
  $PYTHON setup.py bdist_wheel
  # Make another virtualenv to install into
  virtualenv --python=python venv-for-wheel
  . venv-for-wheel/bin/activate
  # Move out of source directory to avoid finding local numpy
  pushd dist
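  # --find-links . lets pip pick up the freshly built wheel from dist/
  # instead of only looking at PyPI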
  $PIP install --pre --upgrade --find-links . numpy
  $PIP install nose
  popd
  run_test
elif [ "$USE_CHROOT" != "1" ] && [ "$USE_BENTO" != "1" ]; then
  setup_base
  run_test
elif [ -n "$USE_CHROOT" ] && [ $# -eq 0 ]; then
  DIR=/chroot
  setup_chroot $DIR
  # re-run this script inside the chroot, this time running the tests
  sudo linux32 chroot $DIR bash -c "cd numpy && PYTHON=python3 PIP=pip3 $0 test"
elif [ -n "$USE_BENTO" ] && [ $# -eq 0 ]; then
  setup_bento
  # re-run this script, this time running the tests
  $0 test
else
  run_test
fi