# Dockerfile.test.gpu (forked from horovod/horovod)
ARG CUDA_DOCKER_VERSION=10.0-devel-ubuntu16.04
FROM nvidia/cuda:${CUDA_DOCKER_VERSION}
# Arguments for the build. CUDA_DOCKER_VERSION needs to be repeated because
# the first usage only applies to the FROM tag.
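# (Docker scopes build args per stage: an ARG declared before FROM is visible
# only to the FROM line itself, so it must be re-declared to be usable in
# later instructions.)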
ARG CUDA_DOCKER_VERSION=10.0-devel-ubuntu16.04
ARG CUDNN_VERSION=7.6.0.64-1+cuda10.0
ARG NCCL_VERSION_OVERRIDE=2.4.7-1+cuda10.0
ARG MPI_KIND=OpenMPI
ARG PYTHON_VERSION=3.6
ARG TENSORFLOW_PACKAGE=tensorflow-gpu==1.15.0
ARG KERAS_PACKAGE=keras==2.2.4
ARG PYTORCH_PACKAGE=torch==1.2.0
ARG TORCHVISION_PACKAGE=torchvision==0.4.0
ARG MXNET_PACKAGE=mxnet-cu100==1.5.0
ARG PYSPARK_PACKAGE=pyspark==2.4.7
# if SPARK_PACKAGE is set, installs Spark into /spark from the tgz archive
# if SPARK_PACKAGE is a preview version, installs PySpark from the tgz archive
# see https://archive.apache.org/dist/spark/ for available packages, version must match PYSPARK_PACKAGE
ARG SPARK_PACKAGE=spark-2.4.7/spark-2.4.7-bin-hadoop2.7.tgz
ARG HOROVOD_BUILD_FLAGS="HOROVOD_GPU_OPERATIONS=NCCL"
ARG HOROVOD_MIXED_INSTALL=0
# Set default shell to /bin/bash
SHELL ["/bin/bash", "-cu"]
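# (-c executes each RUN string; -u treats unset variables as errors, so a
# mistyped build arg fails the build instead of expanding to an empty string.)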
# Install essential packages.
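# (CUDNN_MAJOR is the first dot-separated field of CUDNN_VERSION, e.g.
# 7.6.0.64-1+cuda10.0 -> 7, which selects the matching libcudnn7 package.)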
RUN CUDNN_MAJOR=$(cut -d '.' -f 1 <<< "${CUDNN_VERSION}"); \
    apt-get update -qq && apt-get install -y --allow-downgrades --allow-change-held-packages --no-install-recommends \
        wget \
        ca-certificates \
        cmake \
        openssh-client \
        openssh-server \
        git \
        build-essential \
        g++-4.8 \
        moreutils \
        libcudnn${CUDNN_MAJOR}=${CUDNN_VERSION} \
        libnccl2=${NCCL_VERSION_OVERRIDE} \
        libnccl-dev=${NCCL_VERSION_OVERRIDE}
# setup ssh service
RUN ssh-keygen -f /root/.ssh/id_rsa -q -N ''
RUN cp -v /root/.ssh/id_rsa.pub /root/.ssh/authorized_keys
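# (The container's own public key is authorized for root, presumably so
# mpirun can ssh between ranks/containers without a password prompt.)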
# install g++-7
RUN apt-get install -y --no-install-recommends software-properties-common
RUN add-apt-repository ppa:ubuntu-toolchain-r/test
RUN apt-get update -qq && apt-get install -y --no-install-recommends g++-7
# Install Python.
RUN apt-get install -y python${PYTHON_VERSION} python${PYTHON_VERSION}-dev python${PYTHON_VERSION}-distutils
RUN ln -s -f /usr/bin/python${PYTHON_VERSION} /usr/bin/python
RUN wget --progress=dot:mega https://bootstrap.pypa.io/get-pip.py && python get-pip.py && rm get-pip.py
RUN pip install -U --force pip setuptools requests pytest mock pytest-forked parameterized
RUN echo pytest -v --capture=no --continue-on-collection-errors --junit-xml=/artifacts/junit.\$1.\${HOROVOD_RANK:-\${OMPI_COMM_WORLD_RANK:-\${PMI_RANK}}}.\$2.xml \${@:2} > /pytest.sh
RUN echo pytest -v --capture=no --continue-on-collection-errors --junit-xml=/artifacts/junit.\$1.standalone.\$2.xml \${@:2} > /pytest_standalone.sh
RUN chmod a+x /pytest.sh
RUN chmod a+x /pytest_standalone.sh
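# For illustration: the generated /pytest.sh runs
#   pytest -v --capture=no --continue-on-collection-errors \
#       --junit-xml=/artifacts/junit.$1.$RANK.$2.xml "${@:2}"
# where $RANK falls back from HOROVOD_RANK to OMPI_COMM_WORLD_RANK to
# PMI_RANK, so each MPI rank writes its own JUnit report;
# /pytest_standalone.sh is the same with the literal label "standalone"
# in place of the rank.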
# Install Spark stand-alone cluster.
RUN if [[ -n ${SPARK_PACKAGE} ]]; then \
        wget --progress=dot:giga https://archive.apache.org/dist/spark/${SPARK_PACKAGE} -O - | tar -xzC /tmp; \
        archive=$(basename "${SPARK_PACKAGE}") bash -c "mv -v /tmp/\${archive/%.tgz/} /spark"; \
    fi
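# (`${archive/%.tgz/}` is bash suffix substitution: it strips a trailing
# .tgz, so spark-2.4.7-bin-hadoop2.7.tgz unpacks to
# /tmp/spark-2.4.7-bin-hadoop2.7, which is then moved to /spark.)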
# Install PySpark.
RUN apt-get update -qq && apt-get install -y openjdk-8-jdk-headless
RUN if [[ ${SPARK_PACKAGE} != *"-preview"* ]]; then \
        pip install ${PYSPARK_PACKAGE}; \
    else \
        apt-get install -y pandoc; \
        pip install pypandoc; \
        (cd /spark/python && python setup.py sdist && pip install dist/pyspark-*.tar.gz && rm dist/pyspark-*); \
    fi
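# (Preview releases have no matching PySpark wheel on PyPI, so PySpark is
# built from the /spark sources unpacked above; pandoc/pypandoc are
# presumably needed by its setup.py to render the package description.)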
# Install MPI.
RUN if [[ ${MPI_KIND} == "OpenMPI" ]]; then \
        wget --progress=dot:mega -O /tmp/openmpi-3.0.0-bin.tar.gz https://github.com/horovod/horovod/files/1596799/openmpi-3.0.0-bin.tar.gz && \
        cd /usr/local && tar -zxf /tmp/openmpi-3.0.0-bin.tar.gz && ldconfig && \
        echo "mpirun -allow-run-as-root -np 2 -H localhost:2 -bind-to none -map-by slot -mca mpi_abort_print_stack 1" > /mpirun_command; \
    elif [[ ${MPI_KIND} == "MPICH" ]]; then \
        apt-get install -y mpich && \
        echo "mpirun -np 2" > /mpirun_command; \
    fi
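# (The Open MPI flags: -allow-run-as-root is required because docker builds
# run as root, -np 2 -H localhost:2 launches two ranks on this host, and
# -bind-to none -map-by slot disables core pinning inside the container.)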
# Set default NCCL parameters
RUN echo NCCL_DEBUG=INFO >> /etc/nccl.conf
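# (NCCL_DEBUG=INFO makes NCCL log its topology and communicator setup, which
# helps diagnose hangs in the GPU collective tests.)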
# Install mpi4py.
RUN if [[ ${MPI_KIND} != "None" ]]; then \
        pip install mpi4py; \
    fi
### END OF CACHE ###
COPY . /horovod
# Install TensorFlow.
RUN pip install ${TENSORFLOW_PACKAGE}
# Install Keras.
# Pin h5py: https://github.com/h5py/h5py/issues/1732
# Pin scipy<1.4.0: https://github.com/scipy/scipy/issues/11237
RUN pip install ${KERAS_PACKAGE} "h5py<3" "scipy<1.4.0" "pandas<1.1.0"
RUN mkdir -p ~/.keras
RUN ldconfig /usr/local/cuda/targets/x86_64-linux/lib/stubs && \
    python -c "from keras.datasets import mnist; mnist.load_data()" && \
    ldconfig
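# (Running ldconfig against the CUDA stub libraries lets the GPU-linked
# framework load at build time, when no real driver is mounted; the trailing
# ldconfig restores the default linker cache afterwards.)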
# Install PyTorch.
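# (PYTORCH_CUDA turns the base image tag into PyTorch's wheel suffix, e.g.
# 10.0-devel-ubuntu16.04 -> 10.0 -> 100, used below as cu100.)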
RUN PYTORCH_CUDA=$(echo ${CUDA_DOCKER_VERSION} | awk -F- '{print $1}' | sed 's/\.//'); \
    if [[ ${PYTORCH_PACKAGE} == "torch-nightly" ]]; then \
        pip install --pre torch ${TORCHVISION_PACKAGE} -v -f https://download.pytorch.org/whl/nightly/cu${PYTORCH_CUDA}/torch_nightly.html; \
    else \
        pip install ${PYTORCH_PACKAGE} ${TORCHVISION_PACKAGE} -f https://download.pytorch.org/whl/cu${PYTORCH_CUDA}/torch_stable.html; \
    fi
# Pin Pillow<7.0: https://github.com/pytorch/vision/issues/1718
RUN pip install "Pillow<7.0" --no-deps
# Install MXNet.
RUN if [[ ${MXNET_PACKAGE} == "mxnet-nightly" ]]; then \
        pip install --pre mxnet-cu101==2.0.0b20201213 -f https://dist.mxnet.io/python/all; \
    elif [[ ${MXNET_PACKAGE} == "mxnet-nightly-cu110" ]]; then \
        pip install --pre mxnet-cu110==2.0.0b20201213 -f https://dist.mxnet.io/python/cu110; \
    else \
        pip install ${MXNET_PACKAGE}; \
    fi
# Install Horovod.
RUN cd /horovod && python setup.py sdist
RUN ldconfig /usr/local/cuda/targets/x86_64-linux/lib/stubs && \
    bash -c "${HOROVOD_BUILD_FLAGS} HOROVOD_WITH_TENSORFLOW=1 HOROVOD_WITH_PYTORCH=1 HOROVOD_WITH_MXNET=1 pip install -v $(ls /horovod/dist/horovod-*.tar.gz)[spark,ray]" && \
    ldconfig
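# (The HOROVOD_WITH_*=1 flags make the build fail loudly if a framework
# plugin cannot be compiled instead of silently skipping it; [spark,ray]
# pulls in the extra dependencies for the Spark and Ray integrations.)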
# Hack for compatibility of MNIST example with TensorFlow 1.1.0.
RUN if [[ ${TENSORFLOW_PACKAGE} == "tensorflow-gpu==1.1.0" ]]; then \
        sed -i "s/from tensorflow import keras/from tensorflow.contrib import keras/" /horovod/examples/tensorflow/tensorflow_mnist.py; \
    fi
# Prefetch Spark MNIST dataset.
RUN mkdir -p /work
RUN mkdir -p /data && wget --progress=dot:mega https://horovod-datasets.s3.amazonaws.com/mnist.bz2 -O /data/mnist.bz2
# Hack TensorFlow MNIST example to be smaller.
RUN sed -i "s/last_step=20000/last_step=100/" /horovod/examples/tensorflow/tensorflow_mnist.py
# Hack TensorFlow Eager MNIST example to be smaller.
RUN sed -i "s/dataset.take(20000/dataset.take(100/" /horovod/examples/tensorflow/tensorflow_mnist_eager.py
# Hack TensorFlow 2.0 example to be smaller.
RUN sed -i "s/dataset.take(10000/dataset.take(100/" /horovod/examples/tensorflow2/tensorflow2_mnist.py
# Hack Keras MNIST advanced example to be smaller.
RUN sed -i "s/'--epochs', type=int, default=24,/'--epochs', type=int, default=9,/" /horovod/examples/keras/keras_mnist_advanced.py
# Hack TensorFlow 2.0 Keras MNIST advanced example to be smaller.
RUN sed -i "s/epochs = .*/epochs = 9/" /horovod/examples/tensorflow2/tensorflow2_keras_mnist.py
# Hack PyTorch MNIST example to be smaller.
RUN sed -i "s/'--epochs', type=int, default=10,/'--epochs', type=int, default=2,/" /horovod/examples/pytorch/pytorch_mnist.py
# Prefetch Spark Rossmann dataset.
RUN mkdir -p /work
RUN mkdir -p /data && wget --progress=dot:mega https://horovod-datasets.s3.amazonaws.com/rossmann.tgz -O - | tar -xzC /data
# Hack Keras Spark Rossmann Run example to be smaller.
RUN sed -i "s/x = Dense(1000,/x = Dense(100,/g" /horovod/examples/spark/keras/keras_spark_rossmann_run.py
RUN sed -i "s/x = Dense(500,/x = Dense(50,/g" /horovod/examples/spark/keras/keras_spark_rossmann_run.py
# Hack Keras Spark Rossmann Estimator example to be smaller.
RUN sed -i "s/x = Dense(1000,/x = Dense(100,/g" /horovod/examples/spark/keras/keras_spark_rossmann_estimator.py
RUN sed -i "s/x = Dense(500,/x = Dense(50,/g" /horovod/examples/spark/keras/keras_spark_rossmann_estimator.py
# Export HOROVOD_MIXED_INSTALL
ENV HOROVOD_MIXED_INSTALL=${HOROVOD_MIXED_INSTALL}