
Commit 2093048

KellenSunderland authored and marcoabreu committed
Dockerfile to create Jetson TX1 and TX2 compatible builds (apache#9837)
Builds are ARM64 with CUDA 8 support. Dependencies based on Jetpack 3.1.
1 parent 6d2621a commit 2093048
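
For anyone building directly from this Dockerfile rather than through the repo's tool.py script, a minimal host-side invocation might look like the sketch below. The Dockerfile name, image tag, and container name are placeholders, and the build context is assumed to contain the mxnet source tree and the arm.crosscompile.mk file that the ADD instructions reference.

# Hypothetical names; point -f at wherever this Dockerfile lives in the repo.
docker build -f Dockerfile.jetson -t mxnet-jetson-build .
# The wheel and libmxnet.so end up in /work/build inside the image.
docker create --name jetson-artifacts mxnet-jetson-build
docker cp jetson-artifacts:/work/build ./jetson
docker rm jetson-artifacts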

File tree: 1 file changed (+92, -0 lines)
@@ -0,0 +1,92 @@
# -*- mode: dockerfile -*-
# dockerfile to build libmxnet.so, and a python wheel for the Jetson TX1/TX2

FROM nvidia/cuda:8.0-cudnn5-devel as cudabuilder

FROM dockcross/linux-arm64

ENV ARCH aarch64
ENV NVCCFLAGS "-m64"
ENV CUDA_ARCH "-gencode arch=compute_53,code=sm_53 -gencode arch=compute_62,code=sm_62"
ENV BUILD_OPTS "USE_OPENCV=0 USE_BLAS=openblas USE_SSE=0 USE_CUDA=1 USE_CUDNN=1 ENABLE_CUDA_RTC=0 USE_NCCL=0 USE_CUDA_PATH=/usr/local/cuda/"
ENV CC /usr/bin/aarch64-linux-gnu-gcc
ENV CXX /usr/bin/aarch64-linux-gnu-g++
ENV FC /usr/bin/aarch64-linux-gnu-gfortran-4.9
ENV HOSTCC gcc
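# Note: compute_53/sm_53 above targets the Jetson TX1 (Tegra X1) GPU and
# compute_62/sm_62 the Jetson TX2 (Tegra X2). The aarch64-linux-gnu-* compilers
# are expected to come from the dockcross/linux-arm64 cross-compilation base image.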

WORKDIR /work

# Build OpenBLAS
ADD https://api.github.com/repos/xianyi/OpenBLAS/git/refs/heads/master /tmp/openblas_version.json
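# The ADD of the GitHub ref above acts as a cache-buster: its content changes
# whenever OpenBLAS master moves, forcing Docker to re-run the clone and build below.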
RUN git clone https://github.com/xianyi/OpenBLAS.git && \
    cd OpenBLAS && \
    make -j$(nproc) TARGET=ARMV8 && \
    make install && \
    ln -s /opt/OpenBLAS/lib/libopenblas.so /usr/lib/libopenblas.so && \
    ln -s /opt/OpenBLAS/lib/libopenblas.a /usr/lib/libopenblas.a && \
    ln -s /opt/OpenBLAS/lib/libopenblas.a /usr/lib/liblapack.a

ENV LD_LIBRARY_PATH $LD_LIBRARY_PATH:/opt/OpenBLAS/lib
ENV CPLUS_INCLUDE_PATH /opt/OpenBLAS/include

# Setup CUDA build env (including configuring and copying nvcc)
COPY --from=cudabuilder /usr/local/cuda /usr/local/cuda
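# The COPY above pulls the x86_64 CUDA 8 toolkit (including nvcc) out of the
# cudabuilder stage so device code can be compiled on the build host; the
# aarch64 target libraries and headers are layered in from the Jetpack packages below.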
ENV PATH $PATH:/usr/local/cuda/bin
ENV TARGET_ARCH aarch64
ENV TARGET_OS linux

# Install ARM dependencies based on Jetpack 3.1
RUN wget http://developer.download.nvidia.com/devzone/devcenter/mobile/jetpack_l4t/013/linux-x64/cuda-repo-l4t-8-0-local_8.0.84-1_arm64.deb && \
    wget http://developer.download.nvidia.com/devzone/devcenter/mobile/jetpack_l4t/013/linux-x64/libcudnn6_6.0.21-1+cuda8.0_arm64.deb && \
    dpkg -i cuda-repo-l4t-8-0-local_8.0.84-1_arm64.deb && \
    dpkg -i libcudnn6_6.0.21-1+cuda8.0_arm64.deb && \
    apt update -y && \
    apt install cuda-cudart-cross-aarch64-8-0 cuda-cublas-cross-aarch64-8-0 \
        cuda-nvml-cross-aarch64-8-0 cuda-nvrtc-cross-aarch64-8-0 cuda-cufft-cross-aarch64-8-0 \
        cuda-curand-cross-aarch64-8-0 cuda-cusolver-cross-aarch64-8-0 cuda-cusparse-cross-aarch64-8-0 \
        cuda-misc-headers-cross-aarch64-8-0 cuda-npp-cross-aarch64-8-0 libcudnn6 -y && \
    cp /usr/local/cuda-8.0/targets/aarch64-linux/lib/*.so /usr/local/cuda/lib64/ && \
    cp /usr/local/cuda-8.0/targets/aarch64-linux/lib/stubs/*.so /usr/local/cuda/lib64/stubs/ && \
    cp -r /usr/local/cuda-8.0/targets/aarch64-linux/include/ /usr/local/cuda/include/ && \
    rm cuda-repo-l4t-8-0-local_8.0.84-1_arm64.deb && rm libcudnn6_6.0.21-1+cuda8.0_arm64.deb
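# The cp commands above merge the aarch64 CUDA libraries, stubs and headers from the
# Jetpack cross-compilation packages into the /usr/local/cuda tree copied from
# cudabuilder, so the cross linker can resolve them at build time.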

# Build MXNet
ADD mxnet mxnet

WORKDIR /work/mxnet

# Add ARM specific settings
ADD arm.crosscompile.mk make/config.mk

# Build and link
RUN make -j$(nproc) $BUILD_OPTS
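# BUILD_OPTS (defined above) enables CUDA, cuDNN and OpenBLAS and disables OpenCV and
# SSE; the cross-compiler paths come from CC/CXX/FC and the settings ADDed into make/config.mk.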

# Create a binary wheel for easy installation.
# When using tool.py, output will be in the jetson folder.
# Scp the .whl file to your target device, and install via
# pip install
WORKDIR /work/mxnet/python
RUN python setup.py bdist_wheel --universal
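# Note: --universal tags the wheel as py2.py3-none-any even though it bundles the
# platform-specific libmxnet.so built above, so it should only be installed on a matching device.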

# Copy build artifacts to output folder for tool.py script
RUN mkdir -p /work/build && cp dist/*.whl /work/build && cp ../lib/* /work/build

# Fix pathing issues in the wheel. We need to move libmxnet.so from the data folder to the root
# of the wheel, then repackage the wheel.
# Create a temp dir to do the work.
WORKDIR /work/build
RUN apt-get install -y unzip zip && \
    mkdir temp && \
    cp *.whl temp

# Extract the wheel, move the libmxnet.so file, repackage the wheel.
WORKDIR /work/build/temp
RUN unzip *.whl && \
    rm *.whl && \
    mv *.data/data/mxnet/libmxnet.so mxnet && \
    zip -r ../temp.zip *

# Replace the existing wheel with our fixed version.
WORKDIR /work/build
RUN rm -rf temp && \
    for f in *.whl; do rm "$f" && mv temp.zip "$f"; done
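
As the comments in the Dockerfile suggest, the resulting wheel is meant to be copied to the Jetson and installed with pip. A rough sketch, assuming the artifacts were extracted to ./jetson on the host and that the device already has the JetPack 3.1 CUDA/cuDNN runtime plus an OpenBLAS library for libmxnet.so to load against (hostname and paths are examples only):

scp ./jetson/mxnet-*.whl nvidia@tx2:/home/nvidia/
# Then, on the Jetson TX1/TX2:
pip install /home/nvidia/mxnet-*.whl
python -c "import mxnet; print(mxnet.__version__)"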
