#!/usr/bin/env python3
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.


"""Top-level presubmit script for Skia.

See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""

import difflib
import os
import re
import subprocess
import sys


RELEASE_NOTES_DIR = 'relnotes'
RELEASE_NOTES_FILE_NAME = 'RELEASE_NOTES.md'
RELEASE_NOTES_README = '//relnotes/README.md'

GOLD_TRYBOT_URL = 'https://gold.skia.org/search?issue='

SERVICE_ACCOUNT_SUFFIX = [
    '@%s.iam.gserviceaccount.com' % project for project in [
        'skia-buildbots.google.com', 'skia-swarming-bots', 'skia-public',
        'skia-corp.google.com', 'chops-service-accounts']]

USE_PYTHON3 = True


def _CheckChangeHasEol(input_api, output_api, source_file_filter=None):
  """Checks that files end with at least one \n (LF)."""
  eof_files = []
  for f in input_api.AffectedSourceFiles(source_file_filter):
    contents = input_api.ReadFile(f, 'rb')
    # Check that the file ends in at least one newline character.
    if len(contents) > 1 and contents[-1:] != '\n':
      eof_files.append(f.LocalPath())

  if eof_files:
    return [output_api.PresubmitPromptWarning(
        'These files should end in a newline character:',
        items=eof_files)]
  return []


def _JsonChecks(input_api, output_api):
  """Run checks on any modified json files."""
  failing_files = []
  for affected_file in input_api.AffectedFiles(None):
    affected_file_path = affected_file.LocalPath()
    is_json = affected_file_path.endswith('.json')
    is_metadata = (affected_file_path.startswith('site/') and
                   affected_file_path.endswith('/METADATA'))
    if is_json or is_metadata:
      try:
        input_api.json.load(open(affected_file_path, 'r'))
      except ValueError as ve:
        failing_files.append(f'{affected_file_path}\t\t{ve}')

  results = []
  if failing_files:
    results.append(
        output_api.PresubmitError(
            'The following files contain invalid json:\n%s\n' %
            '\n'.join(failing_files)))
  return results


def _IfDefChecks(input_api, output_api):
  """Ensures if/ifdef are not before includes. See skbug/3362 for details."""
  comment_block_start_pattern = re.compile(r'^\s*\/\*.*$')
  comment_block_middle_pattern = re.compile(r'^\s+\*.*')
  comment_block_end_pattern = re.compile(r'^\s+\*\/.*$')
  single_line_comment_pattern = re.compile(r'^\s*//.*$')
  def is_comment(line):
    return (comment_block_start_pattern.match(line) or
            comment_block_middle_pattern.match(line) or
            comment_block_end_pattern.match(line) or
            single_line_comment_pattern.match(line))

  empty_line_pattern = re.compile(r'^\s*$')
  def is_empty_line(line):
    return empty_line_pattern.match(line)

  failing_files = []
  for affected_file in input_api.AffectedSourceFiles(None):
    affected_file_path = affected_file.LocalPath()
    if affected_file_path.endswith('.cpp') or affected_file_path.endswith('.h'):
      f = open(affected_file_path)
      for line in f:
        if is_comment(line) or is_empty_line(line):
          continue
        # The below will be the first real line after comments and newlines.
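        # An '#if 0 ...' block is treated like commented-out code, so it is allowed
        # ahead of the includes; any other #if/#ifdef before an #include is flagged.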
        if line.startswith('#if 0 '):
          pass
        elif line.startswith('#if ') or line.startswith('#ifdef '):
          failing_files.append(affected_file_path)
        break

  results = []
  if failing_files:
    results.append(
        output_api.PresubmitError(
            'The following files have #if or #ifdef before includes:\n%s\n\n'
            'See https://bug.skia.org/3362 for why this should be fixed.' %
            '\n'.join(failing_files)))
  return results


def _CopyrightChecks(input_api, output_api, source_file_filter=None):
  results = []
  year_pattern = r'\d{4}'
  year_range_pattern = r'%s(-%s)?' % (year_pattern, year_pattern)
  years_pattern = r'%s(,%s)*,?' % (year_range_pattern, year_range_pattern)
  copyright_pattern = (
      r'Copyright (\([cC]\) )?%s \w+' % years_pattern)

  for affected_file in input_api.AffectedSourceFiles(source_file_filter):
    if ('third_party/' in affected_file.LocalPath() or
        'tests/sksl/' in affected_file.LocalPath() or
        'bazel/rbe/' in affected_file.LocalPath() or
        'bazel/external/' in affected_file.LocalPath() or
        'bazel/exporter/interfaces/mocks/' in affected_file.LocalPath() or
        affected_file.LocalPath().endswith('gen.go')):
      continue
    contents = input_api.ReadFile(affected_file, 'rb')
    if not re.search(copyright_pattern, contents):
      results.append(output_api.PresubmitError(
          '%s is missing a correct copyright header.' % affected_file))
  return results


def _InfraTests(input_api, output_api):
  """Run the infra tests."""
  results = []
  if not any(f.LocalPath().startswith('infra')
             for f in input_api.AffectedFiles()):
    return results

  cmd = ['python3', os.path.join('infra', 'bots', 'infra_tests.py')]
  try:
    subprocess.check_output(cmd)
  except subprocess.CalledProcessError as e:
    results.append(output_api.PresubmitError(
        '`%s` failed:\n%s' % (' '.join(cmd), e.output)))
  return results


def _CheckGNFormatted(input_api, output_api):
  """Make sure any .gn files we're changing have been formatted."""
  files = []
  for f in input_api.AffectedFiles(include_deletes=False):
    if (f.LocalPath().endswith('.gn') or
        f.LocalPath().endswith('.gni')):
      files.append(f)
  if not files:
    return []

  cmd = ['python3', os.path.join('bin', 'fetch-gn')]
  try:
    subprocess.check_output(cmd)
  except subprocess.CalledProcessError as e:
    return [output_api.PresubmitError(
        '`%s` failed:\n%s' % (' '.join(cmd), e.output))]

  results = []
  for f in files:
    gn = 'gn.exe' if 'win32' in sys.platform else 'gn'
    gn = os.path.join(input_api.PresubmitLocalPath(), 'bin', gn)
    cmd = [gn, 'format', '--dry-run', f.LocalPath()]
    try:
      subprocess.check_output(cmd)
    except subprocess.CalledProcessError:
      fix = 'bin/gn format ' + f.LocalPath()
      results.append(output_api.PresubmitError(
          '`%s` failed, try\n\t%s' % (' '.join(cmd), fix)))
  return results


def _CheckGitConflictMarkers(input_api, output_api):
  pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
  results = []
  for f in input_api.AffectedFiles():
    for line_num, line in f.ChangedContents():
      if f.LocalPath().endswith('.md'):
        # First-level headers in markdown look a lot like version control
        # conflict markers. http://daringfireball.net/projects/markdown/basics
        continue
      if pattern.match(line):
        results.append(
            output_api.PresubmitError(
                'Git conflict markers found in %s:%d %s' % (
                    f.LocalPath(), line_num, line)))
  return results


def _CheckIncludesFormatted(input_api, output_api):
  """Make sure #includes in files we're changing have been formatted."""
  files = [str(f) for f in input_api.AffectedFiles() if f.Action() != 'D']
  cmd = ['python3',
         'tools/rewrite_includes.py',
         '--dry-run'] + files
  if 0 != subprocess.call(cmd):
    return [output_api.PresubmitError('`%s` failed' % ' '.join(cmd))]
  return []


class _WarningsAsErrors():
  def __init__(self, output_api):
    self.output_api = output_api
    self.old_warning = None
  def __enter__(self):
    self.old_warning = self.output_api.PresubmitPromptWarning
    self.output_api.PresubmitPromptWarning = self.output_api.PresubmitError
    return self.output_api
  def __exit__(self, ex_type, ex_value, ex_traceback):
    self.output_api.PresubmitPromptWarning = self.old_warning


def _RegenerateAllExamplesCPP(input_api, output_api):
  """Regenerates all_examples.cpp if an example was added or deleted."""
  if not any(f.LocalPath().startswith('docs/examples/')
             for f in input_api.AffectedFiles()):
    return []
  command_str = 'tools/fiddle/make_all_examples_cpp.py'
  cmd = ['python3', command_str, '--print-diff']
  proc = subprocess.run(cmd, capture_output=True)
  if proc.returncode != 0:
    return [output_api.PresubmitError('`%s` failed' % ' '.join(cmd))]

  results = []
  diff_output = proc.stdout.decode('utf-8').strip()
  if diff_output:
    results += [output_api.PresubmitError(
        'Diffs found after running "%s":\n\n%s\n'
        'Please commit or discard the above changes.' % (
            command_str,
            diff_output,
        )
    )]
  return results


def _CheckIncludeForOutsideDeps(input_api, output_api):
  """The include directory should consist of only public APIs.

  This check makes sure we don't have anything in the include directory
  depend on outside folders. If we had include/core/SkDonut.h depend on
  src/core/SkPastry.h, then clients would have transitive access to the
  private SkPastry class and any symbols in there, even if they don't
  directly include src/core/SkPastry.h (which can be detected/blocked
  with build systems like GN or Bazel). By keeping include/ self-contained,
  we keep a tighter grip on our public API and make Skia easier to distribute
  (one can ship a .a/.so and a single directory of .h files).
  """
  banned_includes = [
      input_api.re.compile(r'#\s*include\s+("src/.*)'),
      input_api.re.compile(r'#\s*include\s+("tools/.*)'),
  ]
  file_filter = lambda x: (x.LocalPath().startswith('include/'))
  errors = []
  for affected_file in input_api.AffectedSourceFiles(file_filter):
    affected_filepath = affected_file.LocalPath()
    for (line_num, line) in affected_file.ChangedContents():
      for re in banned_includes:
        match = re.search(line)
        if match:
          errors.append(('%s:%s: include/* should only depend on other things in include/*. ' +
                         'Please remove #include of %s, perhaps making it a forward-declare.') % (
                             affected_filepath, line_num, match.group(1)))

  if errors:
    return [output_api.PresubmitError('\n'.join(errors))]
  return []


def _CheckExamplesForPrivateAPIs(input_api, output_api):
  """We only want our checked-in examples (aka fiddles) to show public API."""
  banned_includes = [
      input_api.re.compile(r'#\s*include\s+("src/.*)'),
      input_api.re.compile(r'#\s*include\s+("include/private/.*)'),
  ]
  file_filter = lambda x: (x.LocalPath().startswith('docs/examples/'))
  errors = []
  for affected_file in input_api.AffectedSourceFiles(file_filter):
    affected_filepath = affected_file.LocalPath()
    for (line_num, line) in affected_file.ChangedContents():
      for re in banned_includes:
        match = re.search(line)
        if match:
          errors.append('%s:%s: Fiddles should not use private/internal API like %s.' % (
              affected_filepath, line_num, match.group(1)))

  if errors:
    return [output_api.PresubmitError('\n'.join(errors))]
  return []


def _CheckGeneratedBazelBUILDFiles(input_api, output_api):
  if 'win32' in sys.platform:
    # TODO(crbug.com/skia/12541): Remove when Bazel builds work on Windows.
    # Note: `make` is not installed on Windows by default.
    return []
  if 'darwin' in sys.platform:
    # This takes too long on Mac with default settings. Probably due to sandboxing.
    return []
  files = []
  for affected_file in input_api.AffectedFiles(include_deletes=True):
    affected_file_path = affected_file.LocalPath()
    if (affected_file_path.endswith('.go') or
        affected_file_path.endswith('BUILD.bazel')):
      files.append(affected_file)
  if not files:
    return []
  return _RunCommandAndCheckDiff(
      output_api, ['make', '-C', 'bazel', 'generate_go'], files
  )


def _CheckBazelBUILDFiles(input_api, output_api):
  """Makes sure our BUILD.bazel files are compatible with G3."""
  results = []
  for affected_file in input_api.AffectedFiles(include_deletes=False):
    affected_file_path = affected_file.LocalPath()
    is_bazel = affected_file_path.endswith('BUILD.bazel')
    # This list lines up with the one in autoroller_lib.py (see G3).
    excluded_paths = ["infra/", "bazel/rbe/", "bazel/external/", "bazel/common_config_settings/",
                      "modules/canvaskit/go/", "experimental/", "bazel/platform", "third_party/",
                      "tests/", "resources/", "bazel/deps_parser/", "bazel/exporter_tool/",
                      "tools/gpu/gl/interface/", "bazel/utils/", "include/config/",
                      "bench/", "example/external_client/"]
    is_excluded = any(affected_file_path.startswith(n) for n in excluded_paths)
    if is_bazel and not is_excluded:
      with open(affected_file_path, 'r') as file:
        contents = file.read()
        if 'licenses(["notice"])' not in contents:
          results.append(output_api.PresubmitError(
              ('%s needs to have\nlicenses(["notice"])\nimmediately after ' +
               'the load() calls to comply with G3 policies.') % affected_file_path
          ))
        if 'cc_library(' in contents and '"skia_cc_library"' not in contents:
          results.append(output_api.PresubmitError(
              ('%s needs to load skia_cc_library from macros.bzl instead of using the ' +
               'native one. This allows us to build differently for G3.\n' +
               'Add "skia_cc_library" to load("//bazel:macros.bzl", ...)')
              % affected_file_path
          ))
        if 'default_applicable_licenses' not in contents:
          # See https://opensource.google/documentation/reference/thirdparty/new_license_rules
          results.append(output_api.PresubmitError(
              ('%s needs to have\npackage(default_applicable_licenses = ["//:license"])\n' +
               'to comply with G3 policies') % affected_file_path
          ))
  return results


def _RunCommandAndCheckDiff(output_api, command, files_to_check):
  """Run an arbitrary command. Fail if it produces any diffs on the given files."""
  prev_contents = {}
  for file in files_to_check:
    # NewContents just reads the file.
    prev_contents[file] = file.NewContents()

  command_str = ' '.join(command)
  results = []

  try:
    subprocess.check_output(
        command,
        stderr=subprocess.STDOUT, encoding='utf-8')
  except subprocess.CalledProcessError as e:
    results += [output_api.PresubmitError(
        'Command "%s" returned non-zero exit code %d. Output: \n\n%s' % (
            command_str,
            e.returncode,
            e.output,
        )
    )]

  # Compare the new content to the previous content.
  diffs = []
  for file, prev_content in prev_contents.items():
    new_content = file.NewContents(flush_cache=True)
    if new_content != prev_content:
      path = file.LocalPath()
      diff = difflib.unified_diff(prev_content, new_content, path, path, lineterm='')
      diffs.append('\n'.join(diff))

  if diffs:
    results += [output_api.PresubmitError(
        'Diffs found after running "%s":\n\n%s\n\n'
        'Please commit or discard the above changes.' % (
            command_str,
            '\n'.join(diffs),
        )
    )]

  return results


def _CheckGNIGenerated(input_api, output_api):
  """Ensures that the generated *.gni files are current.

  The Bazel project files are authoritative and some *.gni files are
  generated from them using the exporter_tool. This check ensures they
  are still current.
  """
  if 'win32' in sys.platform:
    # TODO(crbug.com/skia/12541): Remove when Bazel builds work on Windows.
    # Note: `make` is not installed on Windows by default.
    return [
        output_api.PresubmitNotifyResult(
            'Skipping Bazel=>GNI export check on Windows (unsupported platform).'
        )
    ]
  if 'darwin' in sys.platform:
    # This takes too long on Mac with default settings. Probably due to sandboxing.
    return []
  files = []
  for affected_file in input_api.AffectedFiles(include_deletes=True):
    affected_file_path = affected_file.LocalPath()
    if affected_file_path.endswith('BUILD.bazel') or affected_file_path.endswith('.gni'):
      files.append(affected_file)
  # Generate GNI files and verify no changes.
  if not files:
    # No Bazel build files changed.
    return []
  return _RunCommandAndCheckDiff(
      output_api, ['make', '-C', 'bazel', 'generate_gni'], files
  )


def _CheckBuildifier(input_api, output_api):
  """Runs Buildifier and fails on linting errors, or if it produces any diffs.

  This check only runs if the affected files include any WORKSPACE, BUILD,
  BUILD.bazel or *.bzl files.
  """
  files = []
  # Please keep the below exclude patterns in sync with those in the //:buildifier rule definition.
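  # Only BUILD.bazel and *.bzl files are linted; vendored or generated Starlark
  # (e.g. under third_party/externals/ or node_modules/) is filtered out below.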
  for affected_file in input_api.AffectedFiles(include_deletes=False):
    affected_file_path = affected_file.LocalPath()
    if affected_file_path.endswith('BUILD.bazel') or affected_file_path.endswith('.bzl'):
      if not affected_file_path.endswith('public.bzl') and \
         not affected_file_path.endswith('go_repositories.bzl') and \
         not "bazel/rbe/gce_linux/" in affected_file_path and \
         not affected_file_path.startswith("third_party/externals/") and \
         not "node_modules/" in affected_file_path:  # Skip generated files.
        files.append(affected_file)
  if not files:
    return []
  try:
    subprocess.check_output(
        ['buildifier', '--version'],
        stderr=subprocess.STDOUT)
  except:
    return [output_api.PresubmitNotifyResult(
        'Skipping buildifier check because it is not on PATH. \n' +
        'You can download it from https://github.com/bazelbuild/buildtools/releases')]

  return _RunCommandAndCheckDiff(
      # Please keep the below arguments in sync with those in the //:buildifier rule definition.
      output_api, [
          'buildifier',
          '--mode=fix',
          '--lint=fix',
          '--warnings',
          ','.join([
              '-native-android',
              '-native-cc',
              '-native-py',
          ])
      ] + [f.LocalPath() for f in files], files)


def _CheckBannedAPIs(input_api, output_api):
  """Check source code for functions, packages, and symbols that should not be used."""

  # A list of tuples of a regex to match an API and a suggested replacement for
  # that API. There is an optional third parameter for files which *can* use this
  # API without warning.
  banned_replacements = [
      (r'std::stof\(', 'std::strtof(), which does not throw'),
      (r'std::stod\(', 'std::strtod(), which does not throw'),
      (r'std::stold\(', 'std::strtold(), which does not throw'),

      # We used to have separate symbols for this, but coalesced them to make the
      # Bazel build easier.
      (r'GR_TEST_UTILS', 'GPU_TEST_UTILS'),
      (r'GRAPHITE_TEST_UTILS', 'GPU_TEST_UTILS'),
  ]

  # Our Bazel rules have special copies of our cc_library rules with GPU_TEST_UTILS
  # set. If GPU_TEST_UTILS is used outside of those files in Skia proper, the build
  # will break/crash in mysterious ways (because files may get compiled in multiple
  # conflicting ways as a result of the define being inconsistently set).
  allowed_test_util_paths = [
      'include/core/SkTypes.h',
      'include/gpu/',
      'include/private/gpu/',
      'src/gpu/ganesh',
      'src/gpu/graphite',
      'tests/',
      'tools/',
  ]
  gpu_test_utils_re = input_api.re.compile('GPU_TEST_UTILS')

  # These defines are either there or not, and using them with just an #if is a
  # subtle, frustrating bug.
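  # For example, '#if SK_GANESH' is banned in favor of '#if defined(SK_GANESH)';
  # the loop below builds one banned-pattern/replacement pair per define.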
  existence_defines = ['SK_GANESH', 'SK_GRAPHITE', 'SK_GL', 'SK_VULKAN', 'SK_DAWN', 'SK_METAL',
                       'SK_DIRECT3D', 'SK_DEBUG', 'GPU_TEST_UTILS']
  for d in existence_defines:
    banned_replacements.append(('#if {}'.format(d),
                                '#if defined({})'.format(d)))
  compiled_replacements = []
  for rep in banned_replacements:
    exceptions = []
    if len(rep) == 3:
      (re, replacement, exceptions) = rep
    else:
      (re, replacement) = rep

    compiled_re = input_api.re.compile(re)
    compiled_exceptions = [input_api.re.compile(exc) for exc in exceptions]
    compiled_replacements.append(
        (compiled_re, replacement, compiled_exceptions))

  errors = []
  file_filter = lambda x: (x.LocalPath().endswith('.h') or
                           x.LocalPath().endswith('.cpp') or
                           x.LocalPath().endswith('.cc') or
                           x.LocalPath().endswith('.m') or
                           x.LocalPath().endswith('.mm'))
  for affected_file in input_api.AffectedSourceFiles(file_filter):
    affected_filepath = affected_file.LocalPath()
    for (line_num, line) in affected_file.ChangedContents():
      for (re, replacement, exceptions) in compiled_replacements:
        match = re.search(line)
        if match:
          for exc in exceptions:
            if exc.search(affected_filepath):
              break
          else:
            errors.append('%s:%s: Instead of %s, please use %s.' % (
                affected_filepath, line_num, match.group(), replacement))
      # Now do an explicit search for uses of GPU_TEST_UTILS outside of the
      # files where our Bazel rules define it to be set.
      match = gpu_test_utils_re.search(line)
      if match:
        for exc in allowed_test_util_paths:
          if affected_filepath.startswith(exc):
            break
        else:
          errors.append('%s:%s: Only GPU code should use GPU_TEST_UTILS.' % (
              affected_filepath, line_num))

  if errors:
    return [output_api.PresubmitError('\n'.join(errors))]

  return []


def _CheckDEPS(input_api, output_api):
  """If DEPS was modified, run the deps_parser to update bazel/deps.bzl"""
  files = []
  for affected_file in input_api.AffectedFiles(include_deletes=False):
    affected_file_path = affected_file.LocalPath()
    if affected_file_path.endswith('DEPS') or affected_file_path.endswith('deps.bzl'):
      files.append(affected_file)
  if not files:
    return []
  try:
    subprocess.check_output(
        ['bazelisk', '--version'],
        stderr=subprocess.STDOUT)
  except:
    return [output_api.PresubmitNotifyResult(
        'Skipping DEPS check because bazelisk is not on PATH. \n' +
        'You can download it from https://github.com/bazelbuild/bazelisk/releases/tag/v1.14.0')]

  return _RunCommandAndCheckDiff(
      output_api, ['bazelisk', 'run', '//bazel/deps_parser'], files
  )


def _CommonChecks(input_api, output_api):
  """Presubmit checks common to upload and commit."""
  results = []
  sources = lambda x: (x.LocalPath().endswith('.h') or
                       x.LocalPath().endswith('.py') or
                       x.LocalPath().endswith('.sh') or
                       x.LocalPath().endswith('.m') or
                       x.LocalPath().endswith('.mm') or
                       x.LocalPath().endswith('.go') or
                       x.LocalPath().endswith('.c') or
                       x.LocalPath().endswith('.cc') or
                       x.LocalPath().endswith('.cpp'))
  results.extend(_CheckChangeHasEol(
      input_api, output_api, source_file_filter=sources))
  with _WarningsAsErrors(output_api):
    results.extend(input_api.canned_checks.CheckChangeHasNoCR(
        input_api, output_api, source_file_filter=sources))
    results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
        input_api, output_api, source_file_filter=sources))
  results.extend(_JsonChecks(input_api, output_api))
  results.extend(_IfDefChecks(input_api, output_api))
  results.extend(_CopyrightChecks(input_api, output_api,
                                  source_file_filter=sources))
  results.extend(_CheckIncludesFormatted(input_api, output_api))
  results.extend(_CheckGNFormatted(input_api, output_api))
  results.extend(_CheckGitConflictMarkers(input_api, output_api))
  results.extend(_RegenerateAllExamplesCPP(input_api, output_api))
  results.extend(_CheckExamplesForPrivateAPIs(input_api, output_api))
  results.extend(_CheckIncludeForOutsideDeps(input_api, output_api))
  results.extend(_CheckBazelBUILDFiles(input_api, output_api))
  results.extend(_CheckBannedAPIs(input_api, output_api))
  return results


def CheckChangeOnUpload(input_api, output_api):
  """Presubmit checks for the change on upload."""
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  # Run on upload, not commit, since the presubmit bot apparently doesn't have
  # coverage or Go installed.
  results.extend(_InfraTests(input_api, output_api))
  results.extend(_CheckTopReleaseNotesChanged(input_api, output_api))
  results.extend(_CheckReleaseNotesForPublicAPI(input_api, output_api))
  # Buildifier might not be on the CI machines.
  results.extend(_CheckBuildifier(input_api, output_api))
  # We don't want this to block the CQ (for now).
  results.extend(_CheckDEPS(input_api, output_api))
  # Bazelisk is not yet included in the Presubmit job.
  results.extend(_CheckGeneratedBazelBUILDFiles(input_api, output_api))
  results.extend(_CheckGNIGenerated(input_api, output_api))
  return results


class CodeReview(object):
  """Abstracts which codereview tool is used for the specified issue."""

  def __init__(self, input_api):
    self._issue = input_api.change.issue
    self._gerrit = input_api.gerrit

  def GetOwnerEmail(self):
    return self._gerrit.GetChangeOwner(self._issue)

  def GetSubject(self):
    return self._gerrit.GetChangeInfo(self._issue)['subject']

  def GetDescription(self):
    return self._gerrit.GetChangeDescription(self._issue)

  def GetReviewers(self):
    code_review_label = (
        self._gerrit.GetChangeInfo(self._issue)['labels']['Code-Review'])
    return [r['email'] for r in code_review_label.get('all', [])]

  def GetApprovers(self):
    approvers = []
    code_review_label = (
        self._gerrit.GetChangeInfo(self._issue)['labels']['Code-Review'])
    for m in code_review_label.get('all', []):
      if m.get("value") == 1:
        approvers.append(m["email"])
    return approvers


def _CheckReleaseNotesForPublicAPI(input_api, output_api):
  """Checks to see if a release notes file is added or edited with public API changes."""
  results = []
  public_api_changed = False
  release_file_changed = False
  for affected_file in input_api.AffectedFiles():
    affected_file_path = affected_file.LocalPath()
    file_path, file_ext = os.path.splitext(affected_file_path)
    # We only care about files that end in .h and are under the top-level
    # include dir, but not include/private.
    if (file_ext == '.h' and
        file_path.split(os.path.sep)[0] == 'include' and
        'private' not in file_path):
      public_api_changed = True
    elif os.path.dirname(file_path) == RELEASE_NOTES_DIR:
      release_file_changed = True

  if public_api_changed and not release_file_changed:
    results.append(output_api.PresubmitPromptWarning(
        'If this change affects a client API, please add a new summary '
        'file in the %s directory. More information can be found in '
        '%s.' % (RELEASE_NOTES_DIR, RELEASE_NOTES_README)))
  return results


def _CheckTopReleaseNotesChanged(input_api, output_api):
  """Warns if the top level release notes file was changed.

  The top level file is now auto-edited, and new release notes should
  be added to the RELEASE_NOTES_DIR directory"""
  results = []
  top_relnotes_changed = False
  release_file_changed = False
  for affected_file in input_api.AffectedFiles():
    affected_file_path = affected_file.LocalPath()
    file_path, file_ext = os.path.splitext(affected_file_path)
    if affected_file_path == RELEASE_NOTES_FILE_NAME:
      top_relnotes_changed = True
    elif os.path.dirname(file_path) == RELEASE_NOTES_DIR:
      release_file_changed = True
  # When relnotes_util is run it will modify RELEASE_NOTES_FILE_NAME
  # and delete the individual note files in RELEASE_NOTES_DIR.
  # So, if both paths are modified do not emit a warning.
  if top_relnotes_changed and not release_file_changed:
    results.append(output_api.PresubmitPromptWarning(
        'Do not edit %s directly. %s is automatically edited during the '
        'release process. Release notes should be added as new files in '
        'the %s directory. More information can be found in %s.'
        % (RELEASE_NOTES_FILE_NAME,
           RELEASE_NOTES_FILE_NAME,
           RELEASE_NOTES_DIR,
           RELEASE_NOTES_README)))
  return results


def PostUploadHook(gerrit, change, output_api):
  """git cl upload will call this hook after the issue is created/modified.

  This hook does the following:
  * Adds a link to preview docs changes if there are any docs changes in the CL.
  * Adds 'No-Try: true' if the CL contains only docs changes.
  """
  if not change.issue:
    return []

  # Skip PostUploadHooks for all auto-commit service account bots. New
  # patchsets (caused due to PostUploadHooks) invalidate the CQ+2 vote from
  # the "--use-commit-queue" flag to "git cl upload".
  for suffix in SERVICE_ACCOUNT_SUFFIX:
    if change.author_email.endswith(suffix):
      return []

  results = []
  at_least_one_docs_change = False
  all_docs_changes = True
  for affected_file in change.AffectedFiles():
    affected_file_path = affected_file.LocalPath()
    file_path, _ = os.path.splitext(affected_file_path)
    if 'site' == file_path.split(os.path.sep)[0]:
      at_least_one_docs_change = True
    else:
      all_docs_changes = False
    if at_least_one_docs_change and not all_docs_changes:
      break

  footers = change.GitFootersFromDescription()
  description_changed = False

  # If the change includes only doc changes then add No-Try: true in the
  # CL's description if it does not exist yet.
  if all_docs_changes and 'true' not in footers.get('No-Try', []):
    description_changed = True
    change.AddDescriptionFooter('No-Try', 'true')
    results.append(
        output_api.PresubmitNotifyResult(
            'This change has only doc changes. Automatically added '
            '\'No-Try: true\' to the CL\'s description'))

  # If the description has changed, update it.
  if description_changed:
    gerrit.UpdateDescription(
        change.FullDescriptionText(), change.issue)

  return results


def CheckChangeOnCommit(input_api, output_api):
  """Presubmit checks for the change on commit."""
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  # Checks for the presence of 'DO NOT''SUBMIT' in CL description and in
  # content of files.
  results.extend(
      input_api.canned_checks.CheckDoNotSubmit(input_api, output_api))
  return results