#!/usr/bin/env bash
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# External `common.sh`

# Keep in sync with tensorflow_estimator and configure.py.
# LINT.IfChange
LATEST_BAZEL_VERSION=5.1.1
# LINT.ThenChange(
#   //tensorflow_estimator/google/kokoro/common.sh,
#   //tensorflow/tools/ci_build/install/install_bazel.sh,
#   //tensorflow/tools/ci_build/install/install_bazel_from_source.sh)

# Run flaky functions with retries.
# run_with_retry cmd
function run_with_retry {
  eval "$1"
  # If the command fails, retry once after 60 seconds.
  if [[ $? -ne 0 ]]; then
    sleep 60
    eval "$1"
  fi
}
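# As a usage sketch (the URL below is only an illustration), a flaky download
# can be wrapped so it gets one automatic retry after a 60 second pause:
#   run_with_retry "wget --no-verbose https://example.com/some_artifact.tar.gz"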

function die() {
  echo "$@" 1>&2 ; exit 1;
}

# A small utility to run the command and only print logs if the command fails.
# On success, all logs are hidden.
function readable_run {
  # Disable debug mode to avoid printing of variables here.
  set +x
  result=$("$@" 2>&1) || die "$result"
  echo "$@"
  echo "Command completed successfully at $(date)"
  set -x
}
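# As a usage sketch (the target is illustrative), a noisy build step can be
# wrapped so its output only appears when the step fails:
#   readable_run bazel build //tensorflow/tools/pip_package:build_pip_package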

# LINT.IfChange
# Redirect bazel output dir b/73748835
function set_bazel_outdir {
  mkdir -p /tmpfs/bazel_output
  export TEST_TMPDIR=/tmpfs/bazel_output
}

# Downloads bazelisk to ~/bin as `bazel`.
function install_bazelisk {
  date
  case "$(uname -s)" in
    Darwin) local name=bazelisk-darwin-amd64 ;;
    Linux)
      case "$(uname -m)" in
       x86_64) local name=bazelisk-linux-amd64 ;;
       aarch64) local name=bazelisk-linux-arm64 ;;
       *) die "Unknown machine type: $(uname -m)" ;;
      esac ;;
    *) die "Unknown OS: $(uname -s)" ;;
  esac
  mkdir -p "$HOME/bin"
  wget --no-verbose -O "$HOME/bin/bazel" \
      "https://github.com/bazelbuild/bazelisk/releases/download/v1.11.0/$name"
  chmod u+x "$HOME/bin/bazel"
  if [[ ! ":$PATH:" =~ :"$HOME"/bin/?: ]]; then
    PATH="$HOME/bin:$PATH"
  fi
  set_bazel_outdir
  which bazel
  bazel version
  date
}

# Install the given Bazel version on Linux; defaults to LATEST_BAZEL_VERSION.
function update_bazel_linux {
  if [[ -z "$1" ]]; then
    BAZEL_VERSION=${LATEST_BAZEL_VERSION}
  else
    BAZEL_VERSION=$1
  fi
  rm -rf ~/bazel
  mkdir ~/bazel

  pushd ~/bazel
  readable_run wget https://github.com/bazelbuild/bazel/releases/download/"${BAZEL_VERSION}"/bazel-"${BAZEL_VERSION}"-installer-linux-x86_64.sh
  chmod +x bazel-*.sh
  ./bazel-"${BAZEL_VERSION}"-installer-linux-x86_64.sh --user
  rm bazel-"${BAZEL_VERSION}"-installer-linux-x86_64.sh
  popd

  PATH="/home/kbuilder/bin:$PATH"
  set_bazel_outdir
  which bazel
  bazel version
}
# LINT.ThenChange(
#   //tensorflow_estimator/google/kokoro/common.sh)
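# As a usage sketch: calling `update_bazel_linux` with no argument installs
# LATEST_BAZEL_VERSION, while an explicit version can be passed instead:
#   update_bazel_linux 5.1.1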

function install_ubuntu_16_pip_deps {
  PIP_CMD="pip"

  while true; do
    if [[ -z "${1}" ]]; then
      break
    fi
    if [[ "$1" == "pip"* ]]; then
      PIP_CMD="$1"
    fi
    shift
  done

  # First, upgrade pypi wheels
  "${PIP_CMD}" install --user --upgrade 'setuptools' pip wheel

  # LINT.IfChange(linux_pip_installations_orig)
  # Remove any historical keras package if it is installed.
  "${PIP_CMD}" list
  "${PIP_CMD}" uninstall -y keras
  "${PIP_CMD}" install --user -r tensorflow/tools/ci_build/release/requirements_ubuntu.txt
  # LINT.ThenChange(:mac_pip_installations)
}

# This function gradually replaces install_ubuntu_16_pip_deps.
# TODO(lpak): delete install_ubuntu_16_pip_deps when completely replaced.
function install_ubuntu_16_python_pip_deps {
  PIP_CMD="pip"

  while true; do
    if [[ -z "${1}" ]]; then
      break
    fi
    if [[ "$1" == "pip"* ]]; then
      PIP_CMD="$1"
    fi
    if [[ "$1" == "python"* ]]; then
      PIP_CMD="${1} -m pip"
    fi
    shift
  done

  # First, upgrade pypi wheels
  ${PIP_CMD} install --user --upgrade 'setuptools' pip wheel

  # LINT.IfChange(linux_pip_installations)
  # Remove any historical keras package if it is installed.
  ${PIP_CMD} list
  ${PIP_CMD} uninstall -y keras
  ${PIP_CMD} install --user -r tensorflow/tools/ci_build/release/requirements_ubuntu.txt
  # LINT.ThenChange(:mac_pip_installations)
}

function install_ubuntu_pip_deps {
  # Install requirements in the python environment
  which python
  which pip
  PIP_CMD="python -m pip"
  ${PIP_CMD} list
  # auditwheel>=4 supports manylinux_2 and changes the output wheel filename.
  # When upgrading auditwheel, modify upload_wheel_cpu_ubuntu and
  # upload_wheel_gpu_ubuntu to match the generated filename.
  ${PIP_CMD} install --upgrade pip wheel auditwheel~=3.3.1
  ${PIP_CMD} install -r tensorflow/tools/ci_build/release/${REQUIREMENTS_FNAME}
  ${PIP_CMD} list
}

function setup_venv_ubuntu () {
  # Create a virtual environment and install dependencies.
  # First argument needs to be the python executable.
  ${1} -m venv ~/.venv/tf
  source ~/.venv/tf/bin/activate
  REQUIREMENTS_FNAME="requirements_ubuntu.txt"
  install_ubuntu_pip_deps
}
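# As a usage sketch (the interpreter name is just an example; pass whichever
# python binary the build should use):
#   setup_venv_ubuntu python3.9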

function remove_venv_ubuntu () {
  # Deactivate virtual environment and clean up
  deactivate
  rm -rf ~/.venv/tf
}

function install_ubuntu_pip_deps_novenv () {
  # Install into the default Python environment (no virtualenv for pip packages).
  PIP_CMD="${1} -m pip"
  REQUIREMENTS_FNAME="requirements_ubuntu.txt"
  ${PIP_CMD} install --user --upgrade 'setuptools' pip wheel pyparsing auditwheel~=3.3.1
  ${PIP_CMD} install --user -r tensorflow/tools/ci_build/release/${REQUIREMENTS_FNAME}
  ${PIP_CMD} list
}

function upload_wheel_cpu_ubuntu() {
  # Upload the built packages to pypi.
  for WHL_PATH in $(ls pip_pkg/tf_nightly_cpu-*dev*.whl); do

    WHL_DIR=$(dirname "${WHL_PATH}")
    WHL_BASE_NAME=$(basename "${WHL_PATH}")
    AUDITED_WHL_NAME="${WHL_DIR}"/$(echo "${WHL_BASE_NAME//linux/manylinux2010}")
    auditwheel repair --plat manylinux2010_x86_64 -w "${WHL_DIR}" "${WHL_PATH}"

    # test the whl pip package
    chmod +x tensorflow/tools/ci_build/builds/nightly_release_smoke_test.sh
    ./tensorflow/tools/ci_build/builds/nightly_release_smoke_test.sh ${AUDITED_WHL_NAME}
    RETVAL=$?

    # Upload the PIP package if whl test passes.
    if [ ${RETVAL} -eq 0 ]; then
      echo "Basic PIP test PASSED, Uploading package: ${AUDITED_WHL_NAME}"
      python -m pip install twine
      python -m twine upload -r pypi-warehouse "${AUDITED_WHL_NAME}"
    else
      echo "Basic PIP test FAILED, will not upload ${AUDITED_WHL_NAME} package"
      return 1
    fi
  done
}

function upload_wheel_gpu_ubuntu() {
  # Upload the built packages to pypi.
  for WHL_PATH in $(ls pip_pkg/tf_nightly*dev*.whl); do

    WHL_DIR=$(dirname "${WHL_PATH}")
    WHL_BASE_NAME=$(basename "${WHL_PATH}")
    AUDITED_WHL_NAME="${WHL_DIR}"/$(echo "${WHL_BASE_NAME//linux/manylinux2010}")

    # Copy and rename to the manylinux2010 name for the GPU wheel, since we do
    # not want auditwheel to bundle libcudart.so.
    WHL_PATH=${AUDITED_WHL_NAME}
    cp "${WHL_DIR}"/"${WHL_BASE_NAME}" "${WHL_PATH}"
    echo "Copied manylinux2010 wheel file at: ${WHL_PATH}"

    # test the whl pip package
    chmod +x tensorflow/tools/ci_build/builds/nightly_release_smoke_test.sh
    ./tensorflow/tools/ci_build/builds/nightly_release_smoke_test.sh ${AUDITED_WHL_NAME}
    RETVAL=$?

    # Upload the PIP package if whl test passes.
    if [ ${RETVAL} -eq 0 ]; then
      echo "Basic PIP test PASSED, Uploading package: ${AUDITED_WHL_NAME}"
      python -m pip install twine
      python -m twine upload -r pypi-warehouse "${AUDITED_WHL_NAME}"
    else
      echo "Basic PIP test FAILED, will not upload ${AUDITED_WHL_NAME} package"
      return 1
    fi
  done
}

function install_macos_pip_deps {

  PIP_CMD="python -m pip"

  # First, upgrade pypi wheels
  ${PIP_CMD} install --upgrade 'setuptools' pip wheel

  # LINT.IfChange(mac_pip_installations)
  # Remove any historical keras package if it is installed.
  ${PIP_CMD} list
  ${PIP_CMD} uninstall -y keras
  ${PIP_CMD} install -r tensorflow/tools/ci_build/release/requirements_mac.txt
  # LINT.ThenChange(
  #   :linux_pip_installations_orig,
  #   :install_macos_pip_deps_no_venv,
  #   :linux_pip_installations)
}

# This hack is unfortunately necessary for macOS builds that use pip_new.sh:
# you cannot deactivate a virtualenv from a subshell.
function install_macos_pip_deps_no_venv {

  PIP_CMD="${1} -m pip"

  # First, upgrade pypi wheels
  ${PIP_CMD} install --user --upgrade 'setuptools' pip wheel

  # LINT.IfChange(mac_pip_installations)
  # Remove any historical keras package if it is installed.
  ${PIP_CMD} list
  ${PIP_CMD} uninstall -y keras
  ${PIP_CMD} install --user -r tensorflow/tools/ci_build/release/requirements_mac.txt
  # LINT.ThenChange(:install_macos_pip_deps)
}

function setup_venv_macos () {
  # First argument needs to be the python executable.
  ${1} -m pip install virtualenv
  ${1} -m virtualenv tf_build_env
  source tf_build_env/bin/activate
  install_macos_pip_deps
}

function activate_venv_macos () {
  source tf_build_env/bin/activate
}

function setup_python_from_pyenv_macos {
  if [[ -z "${1}" ]]; then
    PY_VERSION=3.9.1
  else
    PY_VERSION=$1
  fi

  git clone --branch v2.2.2 https://github.com/pyenv/pyenv.git

  PYENV_ROOT="$(pwd)/pyenv"
  export PYENV_ROOT
  export PATH="$PYENV_ROOT/bin:$PYENV_ROOT/shims:$PATH"

  eval "$(pyenv init -)"

  pyenv install -s "${PY_VERSION}"
  pyenv local "${PY_VERSION}"
  python --version
}
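# As a usage sketch: with no argument this installs the default Python 3.9.1
# via pyenv; otherwise pass the desired version string:
#   setup_python_from_pyenv_macos 3.9.1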

function maybe_skip_v1 {
  # If we are building with v2 by default, skip tests with v1only tag.
  if grep -q "build --config=v2" ".bazelrc"; then
    echo ",-v1only"
  else
    echo ""
  fi
}
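# As a usage sketch (the existing tag list is illustrative), the emitted suffix
# is meant to be appended to a --test_tag_filters value:
#   bazel test --test_tag_filters="-no_oss$(maybe_skip_v1)" //tensorflow/...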

# Copy and rename a wheel to a new project name.
# Usage: copy_to_new_project_name <whl_path> <new_project_name> <python_cmd>,
# for example
# copy_to_new_project_name test_dir/tf_nightly-1.15.0.dev20190813-cp35-cp35m-manylinux2010_x86_64.whl tf_nightly_cpu python
# will create a wheel with the same tags, but new project name under the same
# directory at
# test_dir/tf_nightly_cpu-1.15.0.dev20190813-cp35-cp35m-manylinux2010_x86_64.whl
function copy_to_new_project_name {
  WHL_PATH="$1"
  NEW_PROJECT_NAME="$2"
  PYTHON_CMD="$3"

  ORIGINAL_WHL_NAME=$(basename "${WHL_PATH}")
  ORIGINAL_WHL_DIR=$(realpath "$(dirname "${WHL_PATH}")")
  ORIGINAL_PROJECT_NAME="$(echo "${ORIGINAL_WHL_NAME}" | cut -d '-' -f 1)"
  FULL_TAG="$(echo "${ORIGINAL_WHL_NAME}" | cut -d '-' -f 2-)"
  NEW_WHL_NAME="${NEW_PROJECT_NAME}-${FULL_TAG}"
  VERSION="$(echo "${FULL_TAG}" | cut -d '-' -f 1)"

  ORIGINAL_WHL_DIR_PREFIX="${ORIGINAL_PROJECT_NAME}-${VERSION}"
  NEW_WHL_DIR_PREFIX="${NEW_PROJECT_NAME}-${VERSION}"

  TMP_DIR="$(mktemp -d)"
  ${PYTHON_CMD} -m wheel unpack "${WHL_PATH}"
  mv "${ORIGINAL_WHL_DIR_PREFIX}" "${TMP_DIR}"
  pushd "${TMP_DIR}/${ORIGINAL_WHL_DIR_PREFIX}"

  mv "${ORIGINAL_WHL_DIR_PREFIX}.dist-info" "${NEW_WHL_DIR_PREFIX}.dist-info"
  if [[ -d "${ORIGINAL_WHL_DIR_PREFIX}.data" ]]; then
    mv "${ORIGINAL_WHL_DIR_PREFIX}.data" "${NEW_WHL_DIR_PREFIX}.data"
  fi

  ORIGINAL_PROJECT_NAME_DASH="${ORIGINAL_PROJECT_NAME//_/-}"
  NEW_PROJECT_NAME_DASH="${NEW_PROJECT_NAME//_/-}"

  # We need to change the name in the METADATA file, but we need to ensure that
  # all other occurrences of the name stay the same, otherwise things such as
  # URLs and dependencies might be broken (for example, replacing without care
  # might transform a `tensorflow_estimator` dependency into
  # `tensorflow_gpu_estimator`, which of course does not exist -- except by
  # manual upload of a manually altered `tensorflow_estimator` package)
  sed -i.bak "s/Name: ${ORIGINAL_PROJECT_NAME_DASH}/Name: ${NEW_PROJECT_NAME_DASH}/g" "${NEW_WHL_DIR_PREFIX}.dist-info/METADATA"

  ${PYTHON_CMD} -m wheel pack .
  mv *.whl "${ORIGINAL_WHL_DIR}"

  popd
  rm -rf "${TMP_DIR}"
}

# Create minimalist test XML for web view. It includes the pass/fail status
# of each target, without including errors or stacktraces.
# Remember to "set +e" before calling bazel or we'll only generate the XML for
# passing runs.
function test_xml_summary {
  set +x
  set +e
  mkdir -p "${KOKORO_ARTIFACTS_DIR}/${KOKORO_JOB_NAME}/summary"
  # First build the repeated inner XML blocks, since the header block needs to
  # report the number of test cases / failures / errors.
  # TODO(rsopher): handle build breakages
  # TODO(rsopher): extract per-test times as well
  TESTCASE_XML="$(sed -n '/INFO:\ Build\ completed/,/INFO:\ Build\ completed/p' \
    /tmpfs/kokoro_build.log \
    | grep -E '(PASSED|FAILED|TIMEOUT)\ in' \
    | while read -r line; \
      do echo '<testcase name="'"$(echo "${line}" | tr -s ' ' | cut -d ' ' -f 1)"\
          '" status="run" classname="" time="0">'"$( \
        case "$(echo "${line}" | tr -s ' ' | cut -d ' ' -f 2)" in \
          FAILED) echo '<failure message="" type=""/>' ;; \
          TIMEOUT) echo '<failure message="timeout" type=""/>' ;; \
        esac; \
      )"'</testcase>'; done; \
  )"
  NUMBER_OF_TESTS="$(echo "${TESTCASE_XML}" | wc -l)"
  NUMBER_OF_FAILURES="$(echo "${TESTCASE_XML}" | grep -c '<failure')"
  echo '<?xml version="1.0" encoding="UTF-8"?>'\
  '<testsuites name="1"  tests="1" failures="0" errors="0" time="0">'\
  '<testsuite name="Kokoro Summary" tests="'"${NUMBER_OF_TESTS}"\
  '" failures="'"${NUMBER_OF_FAILURES}"'" errors="0" time="0">'\
  "${TESTCASE_XML}"'</testsuite></testsuites>'\
  > "${KOKORO_ARTIFACTS_DIR}/${KOKORO_JOB_NAME}/summary/sponge_log.xml"
}

# Create minimalist test XML for web view, then exit.
# Exits the script with the status of the previous command; meant to be called
# immediately after bazel as the last call in the build script.
function test_xml_summary_exit {
  RETVAL=$?
  test_xml_summary
  exit "${RETVAL}"
}

# Note: The Docker-based Ubuntu TF-nightly jobs do not use this list. They use
# https://github.com/tensorflow/build/blob/master/tf_sig_build_dockerfiles/devel.usertools/wheel_verification.bats
# instead. See go/tf-devinfra/docker.
# CPU size
MAC_CPU_MAX_WHL_SIZE=240M
WIN_CPU_MAX_WHL_SIZE=170M
# GPU size
WIN_GPU_MAX_WHL_SIZE=360M

function test_tf_whl_size() {
  WHL_PATH=${1}
  # First, list all wheels with their sizes:
  echo "Found these wheels: "
  find $WHL_PATH -type f -exec ls -lh {} \;
  echo "===================="
  # Check CPU whl size.
  if [[ "$WHL_PATH" == *"_cpu"* ]]; then
    # Check MAC CPU whl size.
    if [[ "$WHL_PATH" == *"-macos"* ]] && [[ $(find $WHL_PATH -type f -size +${MAC_CPU_MAX_WHL_SIZE}) ]]; then
        echo "Mac CPU whl size has exceeded ${MAC_CPU_MAX_WHL_SIZE}. To keep
within pypi's CDN distribution limit, we must not exceed that threshold."
      return 1
    fi
    # Check Windows CPU whl size.
    if [[ "$WHL_PATH" == *"-win"* ]] && [[ $(find $WHL_PATH -type f -size +${WIN_CPU_MAX_WHL_SIZE}) ]]; then
        echo "Windows CPU whl size has exceeded ${WIN_CPU_MAX_WHL_SIZE}. To keep
within pypi's CDN distribution limit, we must not exceed that threshold."
      return 1
    fi
  elif [[ "$WHL_PATH" == *"_gpu"* ]]; then
    # Check Windows GPU whl size.
    if [[ "$WHL_PATH" == *"-win"* ]] && [[ $(find $WHL_PATH -type f -size +${WIN_GPU_MAX_WHL_SIZE}) ]]; then
        echo "Windows GPU whl size has exceeded ${WIN_GPU_MAX_WHL_SIZE}. To keep
within pypi's CDN distribution limit, we must not exceed that threshold."
      return 1
    fi
  fi
}
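# As a usage sketch (the wheel filename below is made up for illustration),
# pass the path of a freshly built wheel so its size is checked against the
# platform limit implied by its name:
#   test_tf_whl_size pip_pkg/tf_nightly_cpu-2.10.0.dev20220503-cp39-cp39-macosx_10_14_x86_64.whl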