#!/bin/bash

set -ex

source "$(dirname "${BASH_SOURCE[0]}")/common_utils.sh"

retry () {
  "$@" || (sleep 10 && "$@") || (sleep 20 && "$@") || (sleep 40 && "$@")
}

# A bunch of custom pip dependencies for ONNX
pip_install \
  beartype==0.15.0 \
  filelock==3.9.0 \
  flatbuffers==2.0 \
  mock==5.0.1 \
  ninja==1.10.2 \
  networkx==2.5 \
  numpy==1.24.2

# ONNXRuntime should be installed before installing
# onnx-weekly. Otherwise, onnx-weekly could be
# overwritten by onnx.
pip_install \
  parameterized==0.8.1 \
  pytest-cov==4.0.0 \
  pytest-subtests==0.10.0 \
  tabulate==0.9.0 \
  transformers==4.36.2

pip_install coloredlogs packaging

pip_install onnxruntime==1.18.1
pip_install onnx==1.16.2
pip_install onnxscript==0.1.0.dev20240831 --no-deps
# required by onnxscript
pip_install ml_dtypes

# Cache the transformers model to be used later by ONNX tests. We need to run the transformers
# package to download the model. By default, the model is cached at ~/.cache/huggingface/hub/
IMPORT_SCRIPT_FILENAME="/tmp/onnx_import_script.py"
as_jenkins echo 'import transformers; transformers.AutoModel.from_pretrained("sshleifer/tiny-gpt2"); transformers.AutoTokenizer.from_pretrained("sshleifer/tiny-gpt2"); transformers.AutoModelForSpeechSeq2Seq.from_pretrained("openai/whisper-large-v3");' > "${IMPORT_SCRIPT_FILENAME}"

# Need a PyTorch version for transformers to work
pip_install --pre torch --index-url https://download.pytorch.org/whl/nightly/cpu
# Very weird quoting behavior here https://github.com/conda/conda/issues/10972,
# so echo the command to a file and run the file instead
conda_run python "${IMPORT_SCRIPT_FILENAME}"

# Cleaning up
conda_run pip uninstall -y torch
rm "${IMPORT_SCRIPT_FILENAME}" || true
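
# Optional sanity check: confirm the ONNX Python stack installed above imports
# cleanly. The module names here are assumed to match the packages pinned
# earlier in this script; uncomment to enable.
# conda_run python -c "import onnx, onnxruntime, onnxscript, ml_dtypes"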