# xref: /aosp_15_r20/external/pytorch/.ci/docker/ubuntu-cuda/Dockerfile (revision da0073e96a02ea20f0ac840b70461e3646d07c45)
ARG UBUNTU_VERSION
ARG CUDA_VERSION
ARG IMAGE_NAME

FROM ${IMAGE_NAME}

# ARGs declared before FROM are only visible in FROM lines; redeclare them
# inside the stage so later instructions can read them.
ARG UBUNTU_VERSION
ARG CUDA_VERSION

# NOTE(review): baking DEBIAN_FRONTEND into the runtime env is normally
# discouraged, but this is a CI image whose scripts rely on it; kept as-is.
ENV DEBIAN_FRONTEND=noninteractive

# Install common dependencies (so that this step can be cached separately)
COPY ./common/install_base.sh install_base.sh
RUN bash ./install_base.sh && rm install_base.sh

# Install user (presumably creates the `jenkins` user used at the end — verify in install_user.sh)
COPY ./common/install_user.sh install_user.sh
RUN bash ./install_user.sh && rm install_user.sh

# Install katex
ARG KATEX
COPY ./common/install_docs_reqs.sh install_docs_reqs.sh
RUN bash ./install_docs_reqs.sh && rm install_docs_reqs.sh

# Install conda and other packages (e.g., numpy, pytest)
ARG ANACONDA_PYTHON_VERSION
ENV ANACONDA_PYTHON_VERSION=$ANACONDA_PYTHON_VERSION
# Put the per-version conda env first so its python/pip win over the base env.
ENV PATH=/opt/conda/envs/py_$ANACONDA_PYTHON_VERSION/bin:/opt/conda/bin:$PATH
ARG CONDA_CMAKE
COPY requirements-ci.txt /opt/conda/requirements-ci.txt
COPY ./common/install_conda.sh install_conda.sh
COPY ./common/common_utils.sh common_utils.sh
RUN bash ./install_conda.sh && rm install_conda.sh common_utils.sh /opt/conda/requirements-ci.txt

# Install gcc
ARG GCC_VERSION
COPY ./common/install_gcc.sh install_gcc.sh
RUN bash ./install_gcc.sh && rm install_gcc.sh

# Install clang
ARG CLANG_VERSION
COPY ./common/install_clang.sh install_clang.sh
RUN bash ./install_clang.sh && rm install_clang.sh

# (optional) Install protobuf for ONNX
ARG PROTOBUF
COPY ./common/install_protobuf.sh install_protobuf.sh
# Run the optional install and delete the script in the SAME layer; a
# separate `RUN rm` would leave the file baked into the previous layer.
RUN if [ -n "${PROTOBUF}" ]; then bash ./install_protobuf.sh; fi && rm install_protobuf.sh
ENV INSTALLED_PROTOBUF=${PROTOBUF}

# (optional) Install database packages like LMDB and LevelDB
ARG DB
COPY ./common/install_db.sh install_db.sh
# Install + cleanup in one layer so the script doesn't persist in the image.
RUN if [ -n "${DB}" ]; then bash ./install_db.sh; fi && rm install_db.sh
ENV INSTALLED_DB=${DB}

# (optional) Install vision packages like OpenCV
ARG VISION
COPY ./common/install_vision.sh ./common/cache_vision_models.sh ./common/common_utils.sh ./
# Install + cleanup in one layer so the scripts don't persist in the image.
RUN if [ -n "${VISION}" ]; then bash ./install_vision.sh; fi \
    && rm install_vision.sh cache_vision_models.sh common_utils.sh
ENV INSTALLED_VISION=${VISION}

# (optional) Install UCC
ARG UCX_COMMIT
ARG UCC_COMMIT
ENV UCX_COMMIT=$UCX_COMMIT
ENV UCC_COMMIT=$UCC_COMMIT
ENV UCX_HOME=/usr
ENV UCC_HOME=/usr
# COPY, not ADD: ADD's extra behaviors (tar auto-extraction, URL fetch)
# are not needed for a plain local file (hadolint DL3020).
COPY ./common/install_ucc.sh install_ucc.sh
# Install + cleanup in one layer so the script doesn't persist in the image.
RUN if [ -n "${UCX_COMMIT}" ] && [ -n "${UCC_COMMIT}" ]; then bash ./install_ucc.sh; fi \
    && rm install_ucc.sh

COPY ./common/install_openssl.sh install_openssl.sh
# OPENSSL_ROOT_DIR is set before the install in case the script reads it.
ENV OPENSSL_ROOT_DIR=/opt/openssl
# Remove the install script in the same layer (it was previously left in the image).
RUN bash ./install_openssl.sh && rm install_openssl.sh
ENV OPENSSL_DIR=/opt/openssl

# (optional) Install inductor benchmark dependencies (pinned huggingface/timm)
ARG INDUCTOR_BENCHMARKS
COPY ./common/install_inductor_benchmark_deps.sh install_inductor_benchmark_deps.sh
COPY ./common/common_utils.sh common_utils.sh
COPY ci_commit_pins/huggingface.txt huggingface.txt
COPY ci_commit_pins/timm.txt timm.txt
# Install + cleanup in one layer so the scripts/pins don't persist in the image.
RUN if [ -n "${INDUCTOR_BENCHMARKS}" ]; then bash ./install_inductor_benchmark_deps.sh; fi \
    && rm install_inductor_benchmark_deps.sh common_utils.sh timm.txt huggingface.txt

# (optional) Install non-default CMake version
ARG CMAKE_VERSION
COPY ./common/install_cmake.sh install_cmake.sh
# Install + cleanup in one layer so the script doesn't persist in the image.
RUN if [ -n "${CMAKE_VERSION}" ]; then bash ./install_cmake.sh; fi && rm install_cmake.sh

ARG TRITON
# Install triton, this needs to be done before sccache because the latter will
# try to reach out to S3, which docker build runners don't have access
COPY ./common/install_triton.sh install_triton.sh
COPY ./common/common_utils.sh common_utils.sh
COPY ci_commit_pins/triton.txt triton.txt
COPY triton_version.txt triton_version.txt
# Install + cleanup in one layer so the scripts/pins don't persist in the image.
RUN if [ -n "${TRITON}" ]; then bash ./install_triton.sh; fi \
    && rm install_triton.sh common_utils.sh triton.txt triton_version.txt

ARG HALIDE
# Build and install halide (pinned by ci_commit_pins/halide.txt)
COPY ./common/install_halide.sh install_halide.sh
COPY ./common/common_utils.sh common_utils.sh
COPY ci_commit_pins/halide.txt halide.txt
# Install + cleanup in one layer so the scripts/pins don't persist in the image.
RUN if [ -n "${HALIDE}" ]; then bash ./install_halide.sh; fi \
    && rm install_halide.sh common_utils.sh halide.txt

# Install ccache/sccache (do this last, so we get priority in PATH)
COPY ./common/install_cache.sh install_cache.sh
ENV PATH=/opt/cache/bin:$PATH
# See https://github.com/pytorch/pytorch/issues/82174
# TODO([email protected]):
# check if this is needed after full off Xenial migration
ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
RUN bash ./install_cache.sh && rm install_cache.sh
# Route nvcc invocations through sccache for CUDA build caching.
ENV CMAKE_CUDA_COMPILER_LAUNCHER=/opt/cache/bin/sccache

# Add jni.h for java host build
COPY ./common/install_jni.sh install_jni.sh
COPY ./java/jni.h jni.h
RUN bash ./install_jni.sh && rm install_jni.sh

# Install Open MPI for CUDA
COPY ./common/install_openmpi.sh install_openmpi.sh
# Install + cleanup in one layer so the script doesn't persist in the image.
RUN if [ -n "${CUDA_VERSION}" ]; then bash install_openmpi.sh; fi && rm install_openmpi.sh

# Include BUILD_ENVIRONMENT environment variable in image
ARG BUILD_ENVIRONMENT
ENV BUILD_ENVIRONMENT=${BUILD_ENVIRONMENT}

# AWS specific CUDA build guidance
ENV TORCH_CUDA_ARCH_LIST=Maxwell
ENV TORCH_NVCC_FLAGS="-Xfatbin -compress-all"
ENV CUDA_PATH=/usr/local/cuda

# Install LLVM dev version (Defined in the pytorch/builder github repository)
COPY --from=pytorch/llvm:9.0.1 /opt/llvm /opt/llvm

# Install CUDNN
ARG CUDNN_VERSION
# (CUDA_VERSION is already declared for this stage near the top of the file.)
COPY ./common/install_cudnn.sh install_cudnn.sh
# Install + cleanup in one layer so the script doesn't persist in the image.
RUN if [ -n "${CUDNN_VERSION}" ]; then bash install_cudnn.sh; fi && rm install_cudnn.sh

# Install CUSPARSELT
# (CUDA_VERSION is already declared for this stage near the top of the file.)
COPY ./common/install_cusparselt.sh install_cusparselt.sh
# Install + cleanup in one layer so the script doesn't persist in the image.
RUN bash install_cusparselt.sh && rm install_cusparselt.sh

# Install CUDSS
# (CUDA_VERSION is already declared for this stage near the top of the file.)
COPY ./common/install_cudss.sh install_cudss.sh
# Install + cleanup in one layer so the script doesn't persist in the image.
RUN bash install_cudss.sh && rm install_cudss.sh

# Delete self-referential /usr/local/cuda-X.Y/cuda-X.Y symlinks; loop over the
# known versions in a single layer instead of four separate RUN layers.
RUN for v in 11.6 11.7 12.1 12.4; do \
      if [ -h "/usr/local/cuda-${v}/cuda-${v}" ]; then rm "/usr/local/cuda-${v}/cuda-${v}"; fi; \
    done

# Drop root: run CI jobs as the jenkins user (presumably created by
# install_user.sh above — verify there). Exec-form CMD keeps bash as PID 1.
USER jenkins
CMD ["bash"]
173