author     Paul Jesse Hellemn <jesse.hellemn@gmail.com>   2018-03-14 12:24:37 -0700
committer  GitHub <noreply@github.com>                    2018-03-14 12:24:37 -0700
commit     74f0b270ea8e3fffd4585a7df22b1c3da6fd15bf (patch)
tree       e2503513c972a5331a4d03f7560f9e90b7eca0b5
parent     8a9925f03f8871abcfec14e12c2e2ee83029e6df (diff)
Fixing conda (#2123)
* Fixing conda
* Adding hypothesis and onnx to conda builds
* Updates but still not working
* Adding required changes to conda_full
* Updates
* Moving to more general build_anaconda script
* Adding check for gcc version
* Adding general ways to add/remove packages from meta.yaml?
* Changes for specific packages to build on gcc 5.4
* Fix with glog spec
* Requiring >numpy 1.12 for python 3 to satisfy opencv dependency
* Adding pydot to required testing packages
* Adding script to read conda versions for gcc ABI
* Trying to fix segfault by installing in env instead
* conda activate -> source activate
* Trying adding back leveldb
* Setting locale for ONNX + conda-search changed its format
* read_conda_versions handles libprotobuf
* Conda script updates
* Adding a protobuf-working test
* Removing changes to proto defs b/c they will require internal changes in a separate diff
-rwxr-xr-x  .jenkins/build.sh                                     17
-rwxr-xr-x  .jenkins/test.sh                                      11
-rw-r--r--  caffe2/python/predictor/predictor_exporter_test.py   19
-rwxr-xr-x  conda/cuda/build.sh                                    2
-rw-r--r--  conda/cuda/meta.yaml                                   4
-rwxr-xr-x  conda/cuda_full/build.sh                              14
-rw-r--r--  conda/cuda_full/meta.yaml                              7
-rwxr-xr-x  conda/no_cuda/build.sh                                 3
-rw-r--r--  conda/no_cuda/conda_build_config.yaml                  2
-rw-r--r--  conda/no_cuda/meta.yaml                               14
-rwxr-xr-x  scripts/build_anaconda.sh                            113
-rwxr-xr-x  scripts/read_conda_versions.sh                       181
12 files changed, 338 insertions, 49 deletions
diff --git a/.jenkins/build.sh b/.jenkins/build.sh
index d0923624d9..46ddb66b8c 100755
--- a/.jenkins/build.sh
+++ b/.jenkins/build.sh
@@ -51,9 +51,26 @@ if [[ "${BUILD_ENVIRONMENT}" == *-android* ]]; then
exit 0
fi
if [[ "${BUILD_ENVIRONMENT}" == conda* ]]; then
+
+ # click (required by onnx) wants these set
+ export LANG=C.UTF-8
+ export LC_ALL=C.UTF-8
+
+ # SKIP_CONDA_TESTS refers to only the 'test' section of the meta.yaml
export SKIP_CONDA_TESTS=1
export CONDA_INSTALL_LOCALLY=1
"${ROOT_DIR}/scripts/build_anaconda.sh" "$@"
+
+ # The tests all need hypothesis, tabulate, and pydot, which aren't included
+ # in the conda packages
+ conda install -y hypothesis tabulate pydot
+
+ # This build will be tested against the onnx tests, which need onnx installed.
+ # Onnx should be built against the same protobuf that Caffe2 uses, which is
+ # only installed in the conda environment when Caffe2 is.
+ # This path comes from install_anaconda.sh which installs Anaconda into the
+ # docker image
+ PROTOBUF_INCDIR=/opt/conda/include pip install "${ROOT_DIR}/third_party/onnx"
exit 0
fi
diff --git a/.jenkins/test.sh b/.jenkins/test.sh
index debe682684..df473dcb83 100755
--- a/.jenkins/test.sh
+++ b/.jenkins/test.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-set -e
+set -ex
# Figure out which Python to use
PYTHON="python"
@@ -17,12 +17,15 @@ INSTALL_PREFIX="/usr/local/caffe2"
# Anaconda builds have a special install prefix and python
if [[ "$BUILD_ENVIRONMENT" == conda* ]]; then
+ # This path comes from install_anaconda.sh which installs Anaconda into the
+ # docker image
PYTHON="/opt/conda/bin/python"
- INSTALL_PREFIX="/opt/conda"
+ INSTALL_PREFIX="/opt/conda/"
fi
# Add the site-packages in the caffe2 install prefix to the PYTHONPATH
SITE_DIR=$($PYTHON -c "from distutils import sysconfig; print(sysconfig.get_python_lib(prefix=''))")
+INSTALL_SITE_DIR="${INSTALL_PREFIX}/${SITE_DIR}"
LOCAL_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
ROOT_DIR=$(cd "$LOCAL_DIR"/.. && pwd)
@@ -36,7 +39,7 @@ fi
# Set PYTHONPATH and LD_LIBRARY_PATH so that python can find the installed
# Caffe2. This shouldn't be done on Anaconda, as Anaconda should handle this.
if [[ "$BUILD_ENVIRONMENT" != conda* ]]; then
- export PYTHONPATH="${PYTHONPATH}:${INSTALL_PREFIX}/${SITE_DIR}"
+ export PYTHONPATH="${PYTHONPATH}:$INSTALL_SITE_DIR"
export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:${INSTALL_PREFIX}/lib"
fi
@@ -91,7 +94,7 @@ for test in ./test/*; do
done
# Get the relative path to where the caffe2 python module was installed
-CAFFE2_PYPATH="$SITE_DIR/caffe2"
+CAFFE2_PYPATH="$INSTALL_SITE_DIR/caffe2"
# Collect additional tests to run (outside caffe2/python)
EXTRA_TESTS=()
diff --git a/caffe2/python/predictor/predictor_exporter_test.py b/caffe2/python/predictor/predictor_exporter_test.py
index c75f95511a..41b70e1305 100644
--- a/caffe2/python/predictor/predictor_exporter_test.py
+++ b/caffe2/python/predictor/predictor_exporter_test.py
@@ -27,7 +27,24 @@ from future.utils import viewitems
from caffe2.python.predictor_constants import predictor_constants as pc
import caffe2.python.predictor.predictor_exporter as pe
import caffe2.python.predictor.predictor_py_utils as pred_utils
-from caffe2.proto import caffe2_pb2
+from caffe2.proto import caffe2_pb2, metanet_pb2
+
+
+class MetaNetDefTest(unittest.TestCase):
+ def test_minimal(self):
+ '''
+ Tests that a NetsMap message can be created with a NetDef message
+ '''
+ # This calls the constructor for a metanet_pb2.NetsMap
+ metanet_pb2.NetsMap(key="test_key", value=caffe2_pb2.NetDef())
+
+ def test_adding_net(self):
+ '''
+ Tests that NetDefs can be added to MetaNetDefs
+ '''
+ meta_net_def = metanet_pb2.MetaNetDef()
+ net_def = caffe2_pb2.NetDef()
+ meta_net_def.nets.add(key="test_key", value=net_def)
class PredictorExporterTest(unittest.TestCase):
def _create_model(self):
diff --git a/conda/cuda/build.sh b/conda/cuda/build.sh
index 70833c38e0..6e717c0cd6 100755
--- a/conda/cuda/build.sh
+++ b/conda/cuda/build.sh
@@ -24,7 +24,7 @@ PYTHON_ARGS="$(python ./scripts/get_python_cmake_flags.py)"
CMAKE_ARGS=()
# Build with minimal required libraries
-CMAKE_ARGS+=("-DUSE_LEVELDB=OFF")
+# Add CMAKE flags here
CMAKE_ARGS+=("-DUSE_MPI=OFF")
# Build with CUDA
diff --git a/conda/cuda/meta.yaml b/conda/cuda/meta.yaml
index a1115058ca..e8b17fe54e 100644
--- a/conda/cuda/meta.yaml
+++ b/conda/cuda/meta.yaml
@@ -11,7 +11,7 @@ build:
number: 0
skip: True # [win]
script_env:
- - CONDA_CMAKE_ARGS
+ - CONDA_CMAKE_BUILD_ARGS
requirements:
build:
@@ -19,6 +19,7 @@ requirements:
- future
- glog
- gflags
+ - leveldb
- lmdb
- numpy
- opencv
@@ -29,6 +30,7 @@ requirements:
- future
- glog
- gflags
+ - leveldb
- lmdb
- numpy
- opencv
diff --git a/conda/cuda_full/build.sh b/conda/cuda_full/build.sh
index a475d0b79a..1a5f93aad4 100755
--- a/conda/cuda_full/build.sh
+++ b/conda/cuda_full/build.sh
@@ -34,13 +34,18 @@ CMAKE_ARGS+=("-DUSE_LMDB=ON")
CMAKE_ARGS+=("-DUSE_NCCL=ON")
CMAKE_ARGS+=("-DUSE_OPENCV=ON")
-# cudnn comes from a module location
-#-DCUDNN_ROOT_DIR=/public/apps/cudnn/v7.0/cuda/ \
+# cuDNN and NCCL come from module locations
+CMAKE_ARGS+=("-DCUDNN_ROOT_DIR=/public/apps/cudnn/v7.0/cuda/")
+CMAKE_ARGS+=("-DNCCL_ROOT_DIR=$NCCL_ROOT_DIR")
# openmpi is needed but can't be included from conda, b/c it's only available
# in conda-forge, which uses gcc 4.8.5
CMAKE_ARGS+=("-DUSE_MPI=ON")
+# Use MKL and hack around a broken eigen op
+CMAKE_ARGS+=("-DBLAS=MKL")
+rm -rf ./caffe2/operators/conv_op_eigen.cc
+
# Explicitly turn unused packages off to prevent cmake from trying to find
# system libraries. If conda packages are built with any system libraries then
# they will not be relocatable.
@@ -52,10 +57,7 @@ CMAKE_ARGS+=("-DUSE_ROCKSDB=OFF")
CMAKE_ARGS+=("-DCMAKE_INSTALL_PREFIX=$PREFIX")
CMAKE_ARGS+=("-DCMAKE_PREFIX_PATH=$PREFIX")
-# No rpaths will work for anaconda?
-# -DCMAKE_SKIP_RPATH=ON \
-
-
+# Build
mkdir -p build
cd build
cmake "${CMAKE_ARGS[@]}" $CONDA_CMAKE_ARGS $PYTHON_ARGS ..
diff --git a/conda/cuda_full/meta.yaml b/conda/cuda_full/meta.yaml
index a1115058ca..f7cb542967 100644
--- a/conda/cuda_full/meta.yaml
+++ b/conda/cuda_full/meta.yaml
@@ -11,7 +11,8 @@ build:
number: 0
skip: True # [win]
script_env:
- - CONDA_CMAKE_ARGS
+ - CONDA_CMAKE_BUILD_ARGS
+ - NCCL_ROOT_DIR
requirements:
build:
@@ -20,6 +21,8 @@ requirements:
- glog
- gflags
- lmdb
+ - mkl
+ - mkl-include
- numpy
- opencv
- python
@@ -30,6 +33,8 @@ requirements:
- glog
- gflags
- lmdb
+ - mkl
+ - mkl-include
- numpy
- opencv
- protobuf
diff --git a/conda/no_cuda/build.sh b/conda/no_cuda/build.sh
index 75ac37065f..e650adfb9b 100755
--- a/conda/no_cuda/build.sh
+++ b/conda/no_cuda/build.sh
@@ -32,7 +32,6 @@ CMAKE_ARGS+=("-DBLAS=MKL")
# Minimal packages
CMAKE_ARGS+=("-DUSE_CUDA=OFF")
-CMAKE_ARGS+=("-DUSE_LEVELDB=OFF")
CMAKE_ARGS+=("-DUSE_MPI=OFF")
CMAKE_ARGS+=("-DUSE_NCCL=OFF")
@@ -42,7 +41,7 @@ CMAKE_ARGS+=("-DCMAKE_PREFIX_PATH=$PREFIX")
mkdir -p build
cd build
-cmake "${CMAKE_ARGS[@]}" $CONDA_CMAKE_ARGS $PYTHON_ARGS ..
+cmake "${CMAKE_ARGS[@]}" $CONDA_CMAKE_BUILD_ARGS $PYTHON_ARGS ..
if [ "$(uname)" == 'Darwin' ]; then
make "-j$(sysctl -n hw.ncpu)"
else
diff --git a/conda/no_cuda/conda_build_config.yaml b/conda/no_cuda/conda_build_config.yaml
index 2081dd9fda..f79998b880 100644
--- a/conda/no_cuda/conda_build_config.yaml
+++ b/conda/no_cuda/conda_build_config.yaml
@@ -1,5 +1,3 @@
-protobuf:
- - 3.4.1
pin_run_as_build:
protobuf:
min_pin: x.x
diff --git a/conda/no_cuda/meta.yaml b/conda/no_cuda/meta.yaml
index 0fa0b79b74..af9677b974 100644
--- a/conda/no_cuda/meta.yaml
+++ b/conda/no_cuda/meta.yaml
@@ -11,26 +11,29 @@ build:
number: 0
skip: True # [win]
script_env:
- - CONDA_CMAKE_ARGS
+ - CONDA_CMAKE_BUILD_ARGS
requirements:
build:
- cmake
- future
- - glog
- gflags
+ - glog
+ - leveldb
- lmdb
- - mkl-include
- mkl
+ - mkl-include
- numpy
- opencv
+ - protobuf
- python
- - protobuf {{ protobuf }}
- six
+ # other packages here
run:
- future
- - glog
- gflags
+ - glog
+ - leveldb
- lmdb
- mkl
- mkl-include
@@ -39,6 +42,7 @@ requirements:
- protobuf
- python
- six
+ # other packages here
test:
imports:
diff --git a/scripts/build_anaconda.sh b/scripts/build_anaconda.sh
index 27bb62bf1a..ec1d8b80bf 100755
--- a/scripts/build_anaconda.sh
+++ b/scripts/build_anaconda.sh
@@ -1,28 +1,59 @@
#!/bin/bash
-#
+
+# NOTE: All parameters to this script are forwarded directly to conda-build
+# and so will never be seen by the build.sh
set -ex
+# portable_sed: A wrapper around sed that works on both mac and linux, used to
+# alter conda-build files such as the meta.yaml
+portable_sed () {
+ if [ "$(uname)" == 'Darwin' ]; then
+ sed -i '' "$1" "$2"
+ else
+ sed -i "$1" "$2"
+ fi
+}
+
+remove_package () {
+ portable_sed "/$1/d" "${META_YAML}"
+}
+
+# add_package: Takes a package name and a version and finagles the
+# meta.yaml to ask for that version specifically.
+# NOTE: this assumes that $META_YAML has already been set
+add_package () {
+ remove_package $1
+ # This magic string _M_STR is in the requirements sections of the meta.yaml
+ # The \\"$'\n' is a properly escaped new line
+ # Those 4 spaces are there to properly indent the comment
+ local _M_STR='# other packages here'
+ portable_sed "s/$_M_STR/- ${1} ${2}\\"$'\n'" $_M_STR/" "${META_YAML}"
+}
+
CAFFE2_ROOT="$( cd "$(dirname "$0")"/.. ; pwd -P)"
CONDA_BUILD_ARGS=()
+CMAKE_BUILD_ARGS=()
-# Build for Python 3.6
+# Read gcc and Python versions
+# Find which ABI to build for
+if [ "$(uname)" != 'Darwin' -a -z "${GCC_USE_C11}" ]; then
+ GCC_VERSION="$(gcc --version | grep --only-matching '[0-9]\.[0-9]\.[0-9]*' | head -1)"
+ if [[ "$GCC_VERSION" == 4* ]]; then
+ GCC_USE_C11=0
+ else
+ GCC_USE_C11=1
+ fi
+fi
# Specifically 3.6 because the latest Anaconda version is 3.6, and so its site
# packages have 3.6 in the name
-PYTHON_FULL_VERSION="$(python --version 2>&1)"
-if [[ "$PYTHON_FULL_VERSION" == *3.6* ]]; then
+PYTHON_VERSION="$(python --version 2>&1 | grep --only-matching '[0-9]\.[0-9]\.[0-9]*')"
+if [[ "$PYTHON_VERSION" == 3.6* ]]; then
+ # This is needed or else conda tries to move packages to python3/site-packages
+ # instead of python3.6/site-packages
CONDA_BUILD_ARGS+=(" --python 3.6")
fi
-# openmpi is only available in conda-forge (for linux), so conda-forge has to
-# be added as a channel for this 'full' build. This causes the default opencv
-# to be pulled from conda-forge, which will break with a "can't find
-# libopencv_highgui.so", so we also pin opencv version to 3.3.0 to avoid that
-# issue
-#if [[ "${BUILD_ENVIRONMENT}" == *full* ]]; then
-# CONDA_BUILD_ARGS+=(" -c conda-forge")
-#fi
-
# Reinitialize submodules
git submodule update --init
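
For context on the helpers in the hunk above: remove_package and add_package edit the recipe's meta.yaml in place around the '# other packages here' marker that this commit adds to the requirements sections. A minimal sketch of their effect, assuming the functions have been sourced and META_YAML points at a recipe carrying the marker (the target file and pins below are illustrative, not part of this change):

# Sketch only; assumes portable_sed/remove_package/add_package are sourced and
# META_YAML points at a meta.yaml whose requirements sections contain the
# '# other packages here' marker.
export META_YAML="conda/no_cuda/meta.yaml"   # hypothetical target

# remove_package deletes every meta.yaml line that mentions the package:
remove_package 'leveldb'

# add_package first removes any existing line for the package, then rewrites
#     # other packages here
# into
#     - gflags ==2.2.1
#     # other packages here
# in each requirements section that carries the marker:
add_package 'gflags' '==2.2.1'
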
@@ -35,6 +66,7 @@ elif [[ "${BUILD_ENVIRONMENT}" == *cuda* ]]; then
else
CAFFE2_CONDA_BUILD_DIR="${CAFFE2_CONDA_BUILD_DIR}/no_cuda"
fi
+META_YAML="${CAFFE2_CONDA_BUILD_DIR}/meta.yaml"
# Change the package name for CUDA builds to have the specific CUDA and cuDNN
# version in them
@@ -52,21 +84,15 @@ if [[ "${BUILD_ENVIRONMENT}" == *cuda* ]]; then
# take the CUDA and cuDNN versions that it finds in the build environment,
# and manually set the package name ourself.
# WARNING: This does not work on mac.
- sed -i "s/caffe2-cuda\$/${CAFFE2_PACKAGE_NAME}/" "${CAFFE2_CONDA_BUILD_DIR}/meta.yaml"
+ sed -i "s/caffe2-cuda\$/${CAFFE2_PACKAGE_NAME}/" "${META_YAML}"
fi
-# If skipping tests, remove the test related lines from the meta.yaml
+# If skipping tests, remove the test related lines from the meta.yaml and don't
+# upload to Anaconda.org
if [ -n "$SKIP_CONDA_TESTS" ]; then
-
- if [ "$(uname)" == 'Darwin' ]; then
- sed -i '' '/test:/d' "${CAFFE2_CONDA_BUILD_DIR}/meta.yaml"
- sed -i '' '/imports:/d' "${CAFFE2_CONDA_BUILD_DIR}/meta.yaml"
- sed -i '' '/caffe2.python.core/d' "${CAFFE2_CONDA_BUILD_DIR}/meta.yaml"
- else
- sed -i '/test:/d' "${CAFFE2_CONDA_BUILD_DIR}/meta.yaml"
- sed -i '/imports:/d' "${CAFFE2_CONDA_BUILD_DIR}/meta.yaml"
- sed -i '/caffe2.python.core/d' "${CAFFE2_CONDA_BUILD_DIR}/meta.yaml"
- fi
+ portable_sed '/test:/d' "${META_YAML}"
+ portable_sed '/imports:/d' "${META_YAML}"
+ portable_sed '/caffe2.python.core/d' "${META_YAML}"
elif [ -n "$UPLOAD_TO_CONDA" ]; then
# Upload to Anaconda.org if needed. This is only allowed if testing is
@@ -75,10 +101,45 @@ elif [ -n "$UPLOAD_TO_CONDA" ]; then
CONDA_BUILD_ARGS+=(" --token ${CAFFE2_ANACONDA_ORG_ACCESS_TOKEN}")
fi
+# Change flags based on target gcc ABI
+if [[ "$(uname)" != 'Darwin' ]]; then
+ if [ "$GCC_USE_C11" -eq 0 ]; then
+ CMAKE_BUILD_ARGS+=("-DCMAKE_CXX_FLAGS=-D_GLIBCXX_USE_CXX11_ABI=0")
+ # Default conda channels use gcc 7.2 (for recent packages), conda-forge uses
+ # gcc 4.8.5
+ CONDA_BUILD_ARGS+=(" -c conda-forge")
+
+ else
+ # gflags 2.2.1 is built against the new ABI but gflags 2.2.0 is not
+ add_package 'gflags' '==2.2.1'
+
+ # opencv 3.3.1 requires protobuf 3.2.0 explicitly, so we use opencv 3.1.0
+ # since protobuf 3.2.0 is not in conda
+ add_package 'opencv' '==3.1.0'
+ if [[ "$PYTHON_VERSION" == 3.* ]]; then
+ # opencv 3.1.0 for python 3 requires numpy 1.12
+ add_package 'numpy' '>1.11'
+ fi
+
+ # These calls won't work since
+ # - these package requirements can't be put in meta.yaml (no support yet)
+ # - if they're put here then they won't be installed at test or install
+ # time
+ # glog 0.3.5=0 is built against old ABI, but 0.3.5=hf484d3e_1 is not
+ #remove_package 'glog'
+ #conda install -y 'glog=0.3.5=hf484d3e_1'
+
+ # leveldb=1.20 is built against old ABI, but 1.20=hf484d3e_1 is built
+ # against the new one
+ #remove_package 'leveldb'
+ #conda install -y 'leveldb=1.20=hf484d3e_1'
+ fi
+fi
+
# Build Caffe2 with conda-build
# If --user and --token are set, then this will also upload the built package
# to Anaconda.org, provided there were no failures and all the tests passed
-conda build "${CAFFE2_CONDA_BUILD_DIR}" ${CONDA_BUILD_ARGS[@]} "$@"
+CONDA_CMAKE_BUILD_ARGS="$CMAKE_BUILD_ARGS" conda build "${CAFFE2_CONDA_BUILD_DIR}" ${CONDA_BUILD_ARGS[@]} "$@"
# Install Caffe2 from the built package into the local conda environment
if [ -n "$CONDA_INSTALL_LOCALLY" ]; then
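
To trace the renamed variable end to end: build_anaconda.sh now hands its CMAKE_BUILD_ARGS to conda-build as CONDA_CMAKE_BUILD_ARGS, the recipes whitelist that name under build/script_env, and conda/no_cuda/build.sh expands it on the cmake command line. A rough manual equivalent, with an illustrative old-ABI flag value (not a required setting):

# Illustrative only: reproduces the env-var plumbing above by hand.
export CONDA_CMAKE_BUILD_ARGS="-DCMAKE_CXX_FLAGS=-D_GLIBCXX_USE_CXX11_ABI=0"

# conda-build forwards CONDA_CMAKE_BUILD_ARGS into the recipe's build
# environment because it is listed under build/script_env in meta.yaml;
# conda/no_cuda/build.sh then appends it to cmake:
#   cmake "${CMAKE_ARGS[@]}" $CONDA_CMAKE_BUILD_ARGS $PYTHON_ARGS ..
conda build conda/no_cuda --python 3.6
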
diff --git a/scripts/read_conda_versions.sh b/scripts/read_conda_versions.sh
new file mode 100755
index 0000000000..7662459f71
--- /dev/null
+++ b/scripts/read_conda_versions.sh
@@ -0,0 +1,181 @@
+# Simple script used to search all available conda builds of a package for
+# their dependency requirements
+
+if [ -z "$CONDA_ROOT" ]; then
+ echo "Please set CONDA_ROOT so that I know where to search for conda libraries"
+ echo "I expect CONDA_ROOT to be the path to the current conda environment."
+ echo "Also FYI I will probably mess up the current conda environment."
+ exit 1
+fi
+
+if [ -z "$1" ]; then
+ echo "Please give me a package name to search for"
+ exit 1
+fi
+PKG_NAME="$1"
+
+if [ -n "$2" ]; then
+ echo "Searching in channel $2"
+ CONDA_CHANNEL="$2"
+fi
+
+# These are the packages of interest to search the dependencies for
+# TODO use this
+PACKAGES_OF_INTEREST=( libgcc-ng libprotobuf numpy )
+
+# We will run `conda install` and `conda uninstall` a lot, but we don't want
+# this very noisy output to clutter the user experience
+VERBOSE_LOG='read_conda_versions.log'
+echo "Conda install/uninstall log for $PKG_NAME" > $VERBOSE_LOG
+
+
+
+#
+# Build up the name of the installed library to call `nm` on
+#
+PKG_INSTALLED_LIB="$PKG_NAME"
+
+# opencv installs a bunch of libraries. We'll just check libopencv_core
+if [[ $PKG_NAME == opencv ]]; then
+ PKG_INSTALLED_LIB="${PKG_INSTALLED_LIB}_core"
+fi
+
+# Most packages prepend a 'lib' to the package name, but libprotobuf is an
+# exception
+if [[ $PKG_NAME != lib* ]]; then
+ PKG_INSTALLED_LIB="lib${PKG_INSTALLED_LIB}"
+fi
+
+# The shared library suffix differs on macOS and Linux
+if [[ "$(uname)" == Darwin ]]; then
+ PKG_INSTALLED_LIB="${PKG_INSTALLED_LIB}.dylib"
+else
+ PKG_INSTALLED_LIB="${PKG_INSTALLED_LIB}.so"
+fi
+echo "Determined the library name of $PKG_NAME to be $PKG_INSTALLED_LIB"
+echo "Determined the library name of $PKG_NAME to be $PKG_INSTALLED_LIB" >> $VERBOSE_LOG
+
+
+
+#
+# Get all available packages with conda-search
+#
+
+# Split the output from conda search into an array, one line per package (plus
+# the header)
+conda_search_packages=()
+while read -r line; do conda_search_packages+=("$line"); done <<< "$(conda search $PKG_NAME $CONDA_CHANNEL)"
+
+### Typical `conda search` output looks like
+### Loading channels: done
+### Name Version Build Channel
+### protobuf 2.6.1 py27_0 defaults
+### 2.6.1 py27_1 defaults
+### 3.2.0 py27_0 defaults
+### 3.2.0 py35_0 defaults
+### 3.2.0 py36_0 defaults
+### 3.4.1 py27h66c1d77_0 defaults
+### 3.4.1 py35h9d33684_0 defaults
+### 3.4.1 py36h314970b_0 defaults
+### 3.5.1 py27h0a44026_0 defaults
+### 3.5.1 py35h0a44026_0 defaults
+### 3.5.1 py36h0a44026_0 defaults
+##
+### Typical `conda info` output looks like
+### protobuf 3.5.1 py36h0a44026_0
+### -----------------------------
+### file name : protobuf-3.5.1-py36h0a44026_0.tar.bz2
+### name : protobuf
+### version : 3.5.1
+### build string: py36h0a44026_0
+### build number: 0
+### channel : https://repo.continuum.io/pkgs/main/osx-64
+### size : 589 KB
+### arch : None
+### constrains : ()
+### license : New BSD License
+### license_family: BSD
+### md5 : 7dbdb06612e21c42fbb8a62354e13e10
+### platform : None
+### subdir : osx-64
+### timestamp : 1519951502766
+### url : https://repo.continuum.io/pkgs/main/osx-64/protobuf-3.5.1-py36h0a44026_0.tar.bz2
+### dependencies:
+### libcxx >=4.0.1
+### libprotobuf >=3.5.1,<3.6.0a0
+### python >=3.6,<3.7.0a0
+### six
+
+# Echo what packages we'll look through.
+echo "Processing these packages:"
+for pkg in "${conda_search_packages[@]:2}"; do
+ echo " $pkg"
+done
+
+
+
+#
+# Look up each package in conda info, then install it and search the exported
+# symbols for signs of cxx11
+#
+for pkg in "${conda_search_packages[@]:2}"; do
+ echo "Processing $pkg" >> $VERBOSE_LOG
+
+ # Split each line into an array and build the package specification
+ # <package_name (1st line only)> maj.min.patch build_string channel_name
+ line_parts=( $pkg )
+ if [[ ${line_parts[0]} == $PKG_NAME ]]; then
+ # First line of output
+ PKG_VERSION="${line_parts[1]}"
+ PKG_BUILD_STR="${line_parts[2]}"
+ else
+ PKG_VERSION="${line_parts[0]}"
+ PKG_BUILD_STR="${line_parts[1]}"
+ fi
+ PKG_SPEC="$PKG_NAME=$PKG_VERSION=$PKG_BUILD_STR"
+
+ # Output current pkg spec
+ echo
+ echo "${PKG_SPEC}:"
+ echo "Determined that the package spec is $PKG_SPEC" >> $VERBOSE_LOG
+
+ # Split the output of conda_info into an array of lines
+ pkg_dependencies=()
+ while read -r line; do pkg_dependencies+=("$line"); done <<< "$(conda info "$PKG_SPEC" $CONDA_CHANNEL)"
+
+ # List all the listed dependencies in `conda info`
+ if [ "${#pkg_dependencies[@]}" -gt 19 ]; then
+ echo " Listed dependencies:"
+ echo " Listed dependencies:" >> $VERBOSE_LOG
+ for pkg_dependency in "${pkg_dependencies[@]:20}"; do
+ echo " $pkg_dependency"
+ echo " $pkg_dependency" >> $VERBOSE_LOG
+ done
+ else
+ echo " No listed dependencies in conda-info" >> $VERBOSE_LOG
+ fi
+
+ # But sometimes (a lot of the time) the gcc that a package was built against
+ # is not listed in its dependencies, so we try to figure it out manually:
+ # we install this exact package and then grep the exported symbols for signs
+ # of cxx11
+ echo "Calling conda-uninstall on $PKG_NAME" >> $VERBOSE_LOG
+ echo "conda uninstall -y $PKG_NAME --quiet" >> $VERBOSE_LOG
+ conda uninstall -y "$PKG_NAME" --quiet >> $VERBOSE_LOG 2>&1
+
+ echo "Calling conda-install on $PKG_SPEC" >> $VERBOSE_LOG
+ echo "conda install -y $PKG_SPEC --quiet --no-deps $CONDA_CHANNEL" >> $VERBOSE_LOG
+ conda install -y "$PKG_SPEC" --quiet --no-deps $CONDA_CHANNEL >> $VERBOSE_LOG 2>&1
+ if [ $? -eq 0 ]; then
+ # Only grep the exported symbols if the library was installed correctly
+
+ MENTIONS_CXX11="$(nm "$CONDA_ROOT/lib/$PKG_INSTALLED_LIB" | grep cxx11 | wc -l)"
+ if [ $MENTIONS_CXX11 -gt 0 ]; then
+ echo " This package is built against the recent gcc ABI ($MENTIONS_CXX11 mentions of cxx11)"
+ echo "$CONDA_ROOT/lib/$PKG_INSTALLED_LIB mentions cxx11 $MENTIONS_CXX11 times" >> $VERBOSE_LOG
+ fi
+ else
+ echo "Error installing $PKG_SPEC , continuing"
+ echo "Error installing $PKG_SPEC , continuing" >> $VERBOSE_LOG
+ fi
+done
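
A usage sketch for the new helper script (the conda prefix, package names, and channel argument below are assumptions for illustration; the script prints each build's listed dependencies and whether the installed library exports cxx11 symbols):

# Point the script at the active conda environment; note that it installs and
# uninstalls packages in that environment while probing them.
export CONDA_ROOT=/opt/conda
bash scripts/read_conda_versions.sh glog                      # default channels
bash scripts/read_conda_versions.sh opencv '-c conda-forge'   # optional channel args

# The ABI probe at the core of the script can also be run by hand against any
# installed library; a non-zero count suggests the new (cxx11) gcc ABI:
nm /opt/conda/lib/libglog.so | grep cxx11 | wc -l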