# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Contains common helpers for GN action()s."""

import atexit
import collections
import contextlib
import filecmp
import fnmatch
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tempfile
import textwrap
import zipfile

sys.path.append(os.path.join(os.path.dirname(__file__),
                             os.pardir, os.pardir, os.pardir))
import gn_helpers

# Use relative paths to improve the hermetic properties of build scripts.
DIR_SOURCE_ROOT = os.path.relpath(
    os.environ.get(
        'CHECKOUT_SOURCE_ROOT',
        os.path.join(
            os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
            os.pardir)))
JAVA_HOME = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current')
JAVA_PATH = os.path.join(JAVA_HOME, 'bin', 'java')
JAVA_PATH_FOR_INPUTS = f'{JAVA_PATH}.chromium'
JAVAC_PATH = os.path.join(JAVA_HOME, 'bin', 'javac')
JAVAP_PATH = os.path.join(JAVA_HOME, 'bin', 'javap')
KOTLIN_HOME = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'kotlinc', 'current')
KOTLINC_PATH = os.path.join(KOTLIN_HOME, 'bin', 'kotlinc')


def JavaCmd(xmx='1G'):
  ret = [JAVA_PATH]
  # Limit the heap size to avoid Java failing to GC when it should, which
  # causes bots to OOM when many Java commands run at the same time.
  # https://crbug.com/1098333
  ret += ['-Xmx' + xmx]
  # JDK17 bug.
  # See: https://chromium-review.googlesource.com/c/chromium/src/+/4705883/3
  # https://github.com/iBotPeaches/Apktool/issues/3174
  ret += ['-Djdk.util.zip.disableZip64ExtraFieldValidation=true']
  return ret
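
# Example usage (an illustrative sketch only; 'tool.jar' is a made-up path).
# This shows how JavaCmd() composes with CheckOutput() defined below:
#
#   cmd = JavaCmd(xmx='2G') + ['-jar', 'tool.jar', '--help']
#   stdout = CheckOutput(cmd)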


@contextlib.contextmanager
def TempDir(**kwargs):
  dirname = tempfile.mkdtemp(**kwargs)
  try:
    yield dirname
  finally:
    shutil.rmtree(dirname)
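
# Example usage (illustrative; the prefix is arbitrary). The directory and its
# contents are deleted when the block exits, even on exceptions:
#
#   with TempDir(prefix='gen-') as tmp_dir:
#     out_path = os.path.join(tmp_dir, 'result.json')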


def MakeDirectory(dir_path):
  try:
    os.makedirs(dir_path)
  except OSError:
    pass


def DeleteDirectory(dir_path):
  if os.path.exists(dir_path):
    shutil.rmtree(dir_path)


def Touch(path, fail_if_missing=False):
  if fail_if_missing and not os.path.exists(path):
    raise Exception(path + ' doesn\'t exist.')

  MakeDirectory(os.path.dirname(path))
  with open(path, 'a'):
    os.utime(path, None)


def FindInDirectory(directory, filename_filter='*'):
  files = []
  for root, _dirnames, filenames in os.walk(directory):
    matched_files = fnmatch.filter(filenames, filename_filter)
    files.extend((os.path.join(root, f) for f in matched_files))
  return files
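
# Example usage (illustrative; the directory and pattern are made up):
#
#   java_files = FindInDirectory('src/org/chromium', filename_filter='*.java')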


def CheckOptions(options, parser, required=None):
  if not required:
    return
  for option_name in required:
    if getattr(options, option_name) is None:
      parser.error('--%s is required' % option_name.replace('_', '-'))


def WriteJson(obj, path, only_if_changed=False):
  old_dump = None
  if os.path.exists(path):
    with open(path, 'r') as oldfile:
      old_dump = oldfile.read()

  new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))

  if not only_if_changed or old_dump != new_dump:
    with open(path, 'w') as outfile:
      outfile.write(new_dump)


@contextlib.contextmanager
def _AtomicOutput(path, only_if_changed=True, mode='w+b'):
  # Create in same directory to ensure same filesystem when moving.
  dirname = os.path.dirname(path)
  if not os.path.exists(dirname):
    MakeDirectory(dirname)
  with tempfile.NamedTemporaryFile(
      mode, suffix=os.path.basename(path), dir=dirname, delete=False) as f:
    try:
      yield f

      # file should be closed before comparison/move.
      f.close()
      if not (only_if_changed and os.path.exists(path) and
              filecmp.cmp(f.name, path)):
        shutil.move(f.name, path)
    finally:
      if os.path.exists(f.name):
        os.unlink(f.name)
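
# Example usage (illustrative; a private helper, shown here for clarity). The
# temp file replaces the target only if the contents changed:
#
#   with _AtomicOutput('out/gen/config.json') as f:
#     f.write(b'{"key": "value"}')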


class CalledProcessError(Exception):
  """This exception is raised when the process run by CheckOutput
  exits with a non-zero exit code."""

  def __init__(self, cwd, args, output):
    super().__init__()
    self.cwd = cwd
    self.args = args
    self.output = output

  def __str__(self):
    # A user should be able to simply copy and paste the command that failed
    # into their shell (unless it is more than 200 chars).
    # Users can set PRINT_FULL_COMMAND=1 to always print the full command.
    print_full = os.environ.get('PRINT_FULL_COMMAND', '0') != '0'
    full_cmd = shlex.join(self.args)
    short_cmd = textwrap.shorten(full_cmd, width=200)
    printed_cmd = full_cmd if print_full else short_cmd
    copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd),
                                              printed_cmd)
    return 'Command failed: {}\n{}'.format(copyable_command, self.output)


def FilterLines(output, filter_string):
  """Output filter from build_utils.CheckOutput.

  Args:
    output: Executable output as from build_utils.CheckOutput.
    filter_string: An RE string that will filter (remove) matching
        lines from |output|.

  Returns:
    The filtered output, as a single string.
  """
  re_filter = re.compile(filter_string)
  return '\n'.join(
      line for line in output.split('\n') if not re_filter.search(line))
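
# Example usage (illustrative; the pattern is made up):
#
#   stderr = FilterLines(stderr, r'warning: \[deprecation\]')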


def FilterReflectiveAccessJavaWarnings(output):
  """Filters out warnings about illegal reflective access operations.

  These warnings were introduced in Java 9, and generally mean that
  dependencies need to be updated.
  """
  #  WARNING: An illegal reflective access operation has occurred
  #  WARNING: Illegal reflective access by ...
  #  WARNING: Please consider reporting this to the maintainers of ...
  #  WARNING: Use --illegal-access=warn to enable warnings of further ...
  #  WARNING: All illegal access operations will be denied in a future release
  return FilterLines(
      output, r'WARNING: ('
      'An illegal reflective|'
      'Illegal reflective access|'
      'Please consider reporting this to|'
      'Use --illegal-access=warn|'
      'All illegal access operations)')


# This filter applies globally to all CheckOutput calls. We use this to prevent
# messages from failing the build, without actually removing them.
def _FailureFilter(output):
  # This is a message that comes from the JDK, can't be disabled, and, as far
  # as we can tell, doesn't cause any real issues. It only happens
  # occasionally on the bots. See crbug.com/1441023 for details.
  jdk_filter = (r'.*warning.*Cannot use file \S+ because'
                r' it is locked by another process')
  output = FilterLines(output, jdk_filter)
  return output


# This can be used in most cases like subprocess.check_output(). Its output,
# particularly when the command fails, better highlights what went wrong.
# If the command fails, raises a build_utils.CalledProcessError.
def CheckOutput(args,
                cwd=None,
                env=None,
                print_stdout=False,
                print_stderr=True,
                stdout_filter=None,
                stderr_filter=None,
                fail_on_output=True,
                before_join_callback=None,
                fail_func=lambda returncode, stderr: returncode != 0):
  if not cwd:
    cwd = os.getcwd()

  logging.info('CheckOutput: %s', ' '.join(args))
  child = subprocess.Popen(args,
      stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env)

  if before_join_callback:
    before_join_callback()
  stdout, stderr = child.communicate()

  # communicate() returns bytes since the pipes are opened in binary mode.
  if isinstance(stdout, bytes):
    stdout = stdout.decode('utf-8')
    stderr = stderr.decode('utf-8')

  if stdout_filter is not None:
    stdout = stdout_filter(stdout)

  if stderr_filter is not None:
    stderr = stderr_filter(stderr)

  if fail_func and fail_func(child.returncode, stderr):
    raise CalledProcessError(cwd, args, stdout + stderr)

  if print_stdout:
    sys.stdout.write(stdout)
  if print_stderr:
    sys.stderr.write(stderr)

  has_stdout = print_stdout and stdout
  has_stderr = print_stderr and stderr
  if has_stdout or has_stderr:
    if has_stdout and has_stderr:
      stream_name = 'stdout and stderr'
    elif has_stdout:
      stream_name = 'stdout'
    else:
      stream_name = 'stderr'

    if fail_on_output and _FailureFilter(stdout + stderr):
      MSG = """
Command failed because it wrote to {}.
You can often set treat_warnings_as_errors=false to not treat output as \
failure (useful when developing locally).
"""
      raise CalledProcessError(cwd, args, MSG.format(stream_name))

    short_cmd = textwrap.shorten(shlex.join(args), width=200)
    sys.stderr.write(
        f'\nThe above {stream_name} output was from: {short_cmd}\n')

  return stdout
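
# Example usage (illustrative; the jar path and options are made up). This
# runs a Java tool, drops known-benign reflective-access warnings, and does
# not fail on remaining output:
#
#   stdout = CheckOutput(
#       JavaCmd() + ['-jar', 'lint.jar', '--check'],
#       stderr_filter=FilterReflectiveAccessJavaWarnings,
#       fail_on_output=False)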


def GetModifiedTime(path):
  # For a symlink, the modified time should be the greater of the link's
  # modified time and the modified time of the target.
  return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)


def IsTimeStale(output, inputs):
  if not os.path.exists(output):
    return True

  output_time = GetModifiedTime(output)
  for i in inputs:
    if GetModifiedTime(i) > output_time:
      return True
  return False
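
# Example usage (illustrative paths):
#
#   if IsTimeStale('out/gen/foo.zip', ['foo/a.txt', 'foo/b.txt']):
#     pass  # Rebuild foo.zip here.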


def _CheckZipPath(name):
  if os.path.normpath(name) != name:
    raise Exception('Non-canonical zip path: %s' % name)
  if os.path.isabs(name):
    raise Exception('Absolute zip path: %s' % name)


def _IsSymlink(zip_file, name):
  zi = zip_file.getinfo(name)

  # The two high-order bytes of ZipInfo.external_attr represent
  # UNIX permissions and file type bits.
  return stat.S_ISLNK(zi.external_attr >> 16)


def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None,
               predicate=None):
  if path is None:
    path = os.getcwd()
  elif not os.path.exists(path):
    MakeDirectory(path)

  if not zipfile.is_zipfile(zip_path):
    raise Exception('Invalid zip file: %s' % zip_path)

  extracted = []
  with zipfile.ZipFile(zip_path) as z:
    for name in z.namelist():
      if name.endswith('/'):
        MakeDirectory(os.path.join(path, name))
        continue
      if pattern is not None:
        if not fnmatch.fnmatch(name, pattern):
          continue
      if predicate and not predicate(name):
        continue
      _CheckZipPath(name)
      if no_clobber:
        output_path = os.path.join(path, name)
        if os.path.exists(output_path):
          raise Exception(
              'Path already exists from zip: %s %s %s'
              % (zip_path, name, output_path))
      if _IsSymlink(z, name):
        dest = os.path.join(path, name)
        MakeDirectory(os.path.dirname(dest))
        os.symlink(z.read(name), dest)
        extracted.append(dest)
      else:
        z.extract(name, path)
        extracted.append(os.path.join(path, name))

  return extracted
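
# Example usage (illustrative; the archive and pattern are made up):
#
#   extracted = ExtractAll('deps.zip', path='out/deps', pattern='*.jar')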


def MatchesGlob(path, filters):
  """Returns whether the given path matches any of the given glob patterns."""
  return filters and any(fnmatch.fnmatch(path, f) for f in filters)


def MergeZips(output, input_zips, path_transform=None, compress=None):
  """Combines all files from |input_zips| into |output|.

  Args:
    output: Path, fileobj, or ZipFile instance to add files to.
    input_zips: Iterable of paths to zip files to merge.
    path_transform: Called for each entry path. Returns a new path, or None to
        skip the file.
    compress: Overrides compression setting from origin zip entries.
  """
  path_transform = path_transform or (lambda p: p)

  out_zip = output
  if not isinstance(output, zipfile.ZipFile):
    out_zip = zipfile.ZipFile(output, 'w')

  # Include paths in the existing zip here to avoid adding duplicate files.
  added_names = set(out_zip.namelist())

  try:
    for in_file in input_zips:
      with zipfile.ZipFile(in_file, 'r') as in_zip:
        for info in in_zip.infolist():
          # Ignore directories.
          if info.filename[-1] == '/':
            continue
          dst_name = path_transform(info.filename)
          if not dst_name:
            continue
          already_added = dst_name in added_names
          if not already_added:
            if compress is not None:
              compress_entry = compress
            else:
              compress_entry = info.compress_type != zipfile.ZIP_STORED
            AddToZipHermetic(
                out_zip,
                dst_name,
                data=in_zip.read(info),
                compress=compress_entry)
            added_names.add(dst_name)
  finally:
    if output is not out_zip:
      out_zip.close()
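
# Example usage (illustrative paths; AddToZipHermetic is assumed to be defined
# elsewhere in this file). This merges two zips, skipping .md entries and
# forcing compression on:
#
#   MergeZips('out/all.zip', ['a.zip', 'b.zip'],
#             path_transform=lambda p: None if p.endswith('.md') else p,
#             compress=True)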


def GetSortedTransitiveDependencies(top, deps_func):
  """Gets the list of all transitive dependencies in sorted order.

  There should be no cycles in the dependency graph (crashes if cycles exist).

  Args:
    top: A list of the top-level nodes.
    deps_func: A function that takes a node and returns a list of its direct
        dependencies.
  Returns:
    A list of all transitive dependencies of nodes in top, in order (a node
    will appear in the list at a higher index than all of its dependencies).
  """
  # Find all deps depth-first, maintaining original order in the case of ties.
  deps_map = collections.OrderedDict()
  def discover(nodes):
    for node in nodes:
      if node in deps_map:
        continue
      deps = deps_func(node)
      discover(deps)
      deps_map[node] = deps

  discover(top)
  return list(deps_map)
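
# Example usage (illustrative graph). Dependencies come before dependents:
#
#   deps = {'a': ['b'], 'b': ['c'], 'c': []}
#   GetSortedTransitiveDependencies(['a'], lambda n: deps[n])
#   # => ['c', 'b', 'a']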


def InitLogging(enabling_env):
  logging.basicConfig(
      level=logging.DEBUG if os.environ.get(enabling_env) else logging.WARNING,
      format='%(levelname).1s %(process)d %(relativeCreated)6d %(message)s')
  script_name = os.path.basename(sys.argv[0])
  logging.info('Started (%s)', script_name)

  my_pid = os.getpid()

  def log_exit():
    # Do not log for fork'ed processes.
    if os.getpid() == my_pid:
      logging.info("Job's done (%s)", script_name)

  atexit.register(log_exit)
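
# Example usage (illustrative; the environment variable name is made up):
#
#   InitLogging('FOO_SCRIPT_DEBUG')  # DEBUG when FOO_SCRIPT_DEBUG is set.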


def ExpandFileArgs(args):
  """Replaces file-arg placeholders in args.

  These placeholders have the form:
    @FileArg(filename:key1:key2:...:keyn)

  The value of such a placeholder is calculated by reading 'filename' as JSON
  and then extracting the value at [key1][key2]...[keyn]. If a key has a '[]'
  suffix, the (intermediate) value will be interpreted as a single-item list,
  and the single item will be returned or used for further traversal.

  Note: This intentionally does not return the list of files that appear in
  such placeholders. An action that uses file-args *must* know the paths of
  those files prior to the parsing of the arguments (typically by explicitly
  listing them in the action's inputs in build files).
  """
  new_args = list(args)
  file_jsons = dict()
  r = re.compile(r'@FileArg\((.*?)\)')
  for i, arg in enumerate(args):
    match = r.search(arg)
    if not match:
      continue

    def get_key(key):
      if key.endswith('[]'):
        return key[:-2], True
      return key, False

    lookup_path = match.group(1).split(':')
    file_path, _ = get_key(lookup_path[0])
    if file_path not in file_jsons:
      with open(file_path) as f:
        file_jsons[file_path] = json.load(f)

    expansion = file_jsons
    for k in lookup_path:
      k, flatten = get_key(k)
      expansion = expansion[k]
      if flatten:
        if not isinstance(expansion, list) or not len(expansion) == 1:
          raise Exception('Expected single item list but got %s' % expansion)
        expansion = expansion[0]

    # This should match parse_gn_list. The output is either a GN-formatted list
    # or a literal (with no quotes).
    if isinstance(expansion, list):
      new_args[i] = (arg[:match.start()] + gn_helpers.ToGNString(expansion) +
                     arg[match.end():])
    else:
      new_args[i] = arg[:match.start()] + str(expansion) + arg[match.end():]

  return new_args
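
# Example usage (illustrative; 'build_config.json' is a made-up file
# containing {"deps_info": {"jar_path": "out/foo.jar"}}):
#
#   ExpandFileArgs(['--jar=@FileArg(build_config.json:deps_info:jar_path)'])
#   # => ['--jar=out/foo.jar']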


def ReadSourcesList(sources_list_file_name):
  """Reads a GN-written file containing a list of file names.

  Note that this function should not be used to parse response files.
  """
  with open(sources_list_file_name) as f:
    return [file_name.strip() for file_name in f]