#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Parse non-Moblab test results and upload them to CPCon.

Provides two subcommands:
  config -- interactively set up GCS credentials and write the upload config.
  upload -- parse result directories with the TKO parser, upload them to a
            GCS bucket and notify CPCon via pubsub.
"""

import argparse
import logging as log
import os
import re
import shlex
import shutil
import subprocess
import multiprocessing
import sys
import time
import uuid
import json
import functools
import glob

from google.cloud import storage
from google.api_core import exceptions as cloud_exceptions
# pylint: disable=no-name-in-module, import-error

import common
from autotest_lib.client.common_lib import global_config
from autotest_lib.client.common_lib import mail, pidfile
from autotest_lib.tko.parse import parse_one, export_tko_job_to_file

# Appends the moblab source paths for the pubsub wrapper
sys.path.append('/mnt/host/source/src/platform/moblab/src')
from moblab_common import pubsub_client

STATUS_FILE = "status"
STATUS_LOG_FILE = "status.log"
KEYVAL_FILE = "keyval"
NEW_KEYVAL_FILE = "new_keyval"
UPLOADED_STATUS_FILE = ".uploader_status"
STATUS_GOOD = "PUBSUB_SENT"
FAKE_MOBLAB_ID_FILE = "fake_moblab_id_do_not_delete.txt"
GIT_HASH_FILE = "git_hash.txt"
GIT_COMMAND = ("git log --pretty=format:'%h -%d %s (%ci) <%an>'"
               " --abbrev-commit -20")
AUTOTEST_DIR = "/mnt/host/source/src/third_party/autotest/files/"
DEFAULT_SUITE_NAME = "default_suite"
# Raw string: the pattern contains escaped literal dots.
SUITE_NAME_REGEX = r"Fetching suite for suite named (.+?)\.\.\."
DEBUG_FILE_PATH = "debug/test_that.DEBUG"
CONFIG_DIR = os.path.dirname(os.path.abspath(__file__)) + "/config/"
DEFAULT_BOTO_CONFIG = CONFIG_DIR + ".boto_upload_utils"
UPLOAD_CONFIG = CONFIG_DIR + "upload_config.json"
SERVICE_ACCOUNT_CONFIG = CONFIG_DIR + ".service_account.json"

# Module-level logger; `log` is the stdlib logging module itself.
logging = log.getLogger(__name__)


def parse_arguments(argv):
    """Creates the argument parser.

    Args:
        argv: A list of input arguments.

    Returns:
        A parser object for input arguments.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    subparsers = parser.add_subparsers(
            help='select sub option for test result utility',
            dest='subcommand')
    subparsers.required = True
    parser.add_argument("-v",
                        "--verbose",
                        dest='verbose',
                        action='store_true',
                        help="Enable verbose (debug) logging.")
    parser.add_argument("-q",
                        "--quiet",
                        dest='quiet',
                        action='store_true',
                        help="Quiet mode for background call")
    def_logfile = "/tmp/" + os.path.basename(
            sys.argv[0].split(".")[0]) + ".log"
    parser.add_argument("-l",
                        "--logfile",
                        type=str,
                        required=False,
                        default=def_logfile,
                        help="Full path to logfile. Default: " + def_logfile)

    # configuration subcommand to create config file and populate environment
    config_parser = subparsers.add_parser(name="config",
                                          help='upload test results to CPCon')
    config_parser.add_argument(
            "-b",
            "--bucket",
            type=str,
            required=True,
            help="The GCS bucket that test results are uploaded to, e.g."
            "'gs://xxxx'.")
    config_parser.add_argument("-f",
                               "--force",
                               dest='force',
                               action="store_true",
                               help="Force overwrite of previous config files")

    upload_parser = subparsers.add_parser(name="upload",
                                          help='upload test results to CPCon')
    upload_parser.add_argument(
            "--bug",
            type=_valid_bug_id,
            required=False,
            help=
            "Write bug id to the test results. Each test entry can only have "
            "at most 1 bug id. Optional.")
    upload_parser.add_argument(
            "-d",
            "--directory",
            type=str,
            required=True,
            help="The directory of non-Moblab test results.")
    upload_parser.add_argument(
            "--parse_only",
            action='store_true',
            help="Generate job.serialize locally but do not upload test "
            "directories and not send pubsub messages.")
    upload_parser.add_argument(
            "--upload_only",
            action='store_true',
            help="Leave existing protobuf files as-is, only upload "
            "directories and send pubsub messages.")
    upload_parser.add_argument(
            "-f",
            "--force",
            dest='force',
            action='store_true',
            help=
            "force re-upload of results even if results were already successfully uploaded."
    )
    upload_parser.add_argument(
            "-s",
            "--suite",
            type=str,
            default=None,
            help="The suite is used to identify the type of test results,"
            "e.g. 'power' for platform power team. If not specific, the "
            "default value is 'default_suite'.")
    return parser.parse_args(argv)


def _confirm_option(question):
    """
    Get a yes/no answer from the user via command line.

    Args:
        question: string, question to ask the user.

    Returns:
        A boolean. True if yes; False if no.
    """
    expected_answers = ['y', 'yes', 'n', 'no']
    answer = ''
    while answer not in expected_answers:
        answer = input(question + "(y/n): ").lower().strip()
    return answer[0] == "y"


def _read_until_string(pipe, stop_string):
    """Read a byte pipe one character at a time until the current line
    equals `stop_string`.

    Returns the list of lines read so far, the last of which is
    `stop_string` (without a trailing newline).

    NOTE(review): if the pipe closes before `stop_string` appears this
    loops forever on empty reads -- acceptable for the interactive
    `gsutil config` flow it drives, but do not reuse it elsewhere.
    """
    lines = [""]
    while True:
        c = pipe.read(1)
        lines[-1] = lines[-1] + c.decode("utf-8")
        if stop_string == lines[-1]:
            return lines
        if c.decode("utf-8") == "\n":
            lines.append("")


def _configure_environment(parsed_args):
    """Interactively configure gsutil/pubsub credentials for `parsed_args.bucket`
    and persist the settings to UPLOAD_CONFIG.
    """
    # create config directory if not exists
    os.makedirs(CONFIG_DIR, exist_ok=True)

    if os.path.exists(UPLOAD_CONFIG) and not parsed_args.force:
        logging.error("Environment already configured, run with --force")
        sys.exit(1)

    # call the gsutil config tool to set up accounts; start from a clean
    # boto file so gsutil does not merge stale credentials
    if os.path.exists(DEFAULT_BOTO_CONFIG + ".bak"):
        os.remove(DEFAULT_BOTO_CONFIG + ".bak")

    if os.path.exists(DEFAULT_BOTO_CONFIG):
        os.remove(DEFAULT_BOTO_CONFIG)
    os.mknod(DEFAULT_BOTO_CONFIG)
    os.environ["BOTO_CONFIG"] = DEFAULT_BOTO_CONFIG
    os.environ[
            "GOOGLE_APPLICATION_CREDENTIALS"] = CONFIG_DIR + ".service_account.json"
    # Drive the interactive `gsutil config` prompts: relay the auth URL to
    # the user, feed back their auth code, then answer the project prompt.
    with subprocess.Popen(["gsutil", "config"],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          stdin=subprocess.PIPE) as sp:
        lines = _read_until_string(sp.stdout, "Enter the authorization code: ")
        code = input("enter auth code from " + str(lines[1]) + ": ")
        sp.stdin.write(bytes(code + '\n', "utf-8"))
        sp.stdin.flush()
        lines = _read_until_string(sp.stdout, "What is your project-id? ")
        sp.stdin.write(bytes(parsed_args.bucket + '\n', "utf-8"))
        sp.stdin.flush()

    # Fetch whichever service-account key exists in the bucket; one of the
    # two copies is expected to fail, so no check=True here.
    subprocess.run([
            "gsutil", "cp",
            "gs://" + parsed_args.bucket + "/.service_account.json", CONFIG_DIR
    ])
    subprocess.run([
            "gsutil", "cp",
            "gs://" + parsed_args.bucket + "/pubsub-key-do-not-delete.json",
            CONFIG_DIR
    ])

    sa_filename = ""
    if os.path.exists(CONFIG_DIR + "/.service_account.json"):
        sa_filename = ".service_account.json"
    elif os.path.exists(CONFIG_DIR + "/pubsub-key-do-not-delete.json"):
        sa_filename = "pubsub-key-do-not-delete.json"
    else:
        logging.error("No pubsub key found in bucket, failed config!")
        sys.exit(1)

    # deposit parsed_args.bucket to the json file
    with open(UPLOAD_CONFIG, "w") as cf:
        settings = {}
        settings["bucket"] = parsed_args.bucket
        settings["service_account"] = CONFIG_DIR + sa_filename
        settings["boto_key"] = DEFAULT_BOTO_CONFIG

        cf.write(json.dumps(settings))


def _load_config():
    """Load persisted upload settings and export credential env vars.

    Returns:
        An argparse.Namespace with at least `bucket`, `service_account`
        and `boto_key` attributes.
    Exits:
        Nonzero if the config file or any mandatory key is missing.
    """
    mandatory_keys = ["bucket", "service_account", "boto_key"]

    if not os.path.exists(UPLOAD_CONFIG):
        logging.error("Missing mandatory config file, run config command")
        sys.exit(1)
    with open(UPLOAD_CONFIG, "r") as cf:
        settings = json.load(cf)

    for key in mandatory_keys:
        if key not in settings:
            logging.error("Missing mandatory setting " + str(key) +
                          ", run config command")
            # BUGFIX: was bare exit() which exits with status 0 on an
            # error path; a missing setting must report failure.
            sys.exit(1)

    os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = settings["service_account"]
    os.environ["BOTO_CONFIG"] = settings["boto_key"]
    return argparse.Namespace(**settings)


class ResultsManager:
    """Coordinates enumeration, parsing and uploading of result dirs."""

    def __init__(self, results_parser, results_sender):
        self.parent_directories = []
        self.result_directories = set()
        self.results = []
        self.results_parser = results_parser
        self.results_sender = results_sender
        self.bug_id = None
        self.suite_name = ""

        self.moblab_id = self.get_fake_moblab_id()

    def new_directory(self, parent_dir: str):
        """Register a parent directory to scan for result directories."""
        self.parent_directories.append(parent_dir)

    def enumerate_all_directories(self):
        """Rebuild `result_directories` from all registered parents."""
        self.result_directories = set()
        for parent_dir in self.parent_directories:
            self.enumerate_result_directories(parent_dir)

    def enumerate_result_directories(self, parent_dir):
        """ Gets all test directories.

        Args:
            parent_dir: The parent directory of one or multiple test directories

        Adds every directory (recursively) containing a status.log file
        to `result_directories`.
        """
        if not os.path.exists(parent_dir) or not os.path.isdir(parent_dir):
            logging.warning('Test directory does not exist: %s' % parent_dir)
            return

        status_log_file = os.path.join(parent_dir, STATUS_LOG_FILE)
        if os.path.exists(status_log_file):
            self.result_directories.add(parent_dir)
            return

        for dir_name in os.listdir(parent_dir):
            subdir = os.path.join(parent_dir, dir_name)
            if os.path.isdir(subdir):
                self.enumerate_result_directories(subdir)

    def set_destination(self, destination):
        """Set the GCS bucket the sender uploads to."""
        self.results_sender.set_destination(destination)

    def get_fake_moblab_id(self):
        """Get or generate a fake moblab id.

        Moblab id is the unique id to a moblab device. Since the upload script runs
        from the chroot instead of a moblab device, we need to generate a fake
        moblab id to comply with the CPCon backend. If there is a previously saved
        fake moblab id, read and use it. Otherwise, generate a uuid to fake a moblab
        device, and store it in the same directory as the upload script.

        Returns:
            A string representing a fake moblab id.
        """
        script_dir = os.path.dirname(__file__)
        fake_moblab_id_path = os.path.join(script_dir, "config",
                                           FAKE_MOBLAB_ID_FILE)

        # Migrate from prior moblab ID location into config directory if possible
        old_moblab_id_file = os.path.join(script_dir, FAKE_MOBLAB_ID_FILE)
        if os.path.exists(old_moblab_id_file):
            logging.info(
                    'Found an existing moblab ID outside config directory, migrating now'
            )
            os.rename(old_moblab_id_file, fake_moblab_id_path)
        try:
            with open(fake_moblab_id_path, "r") as fake_moblab_id_file:
                # IDs are 32 hex chars (uuid4().hex); truncate defensively.
                fake_moblab_id = str(fake_moblab_id_file.read())[0:32]
                if fake_moblab_id:
                    return fake_moblab_id
        except IOError:
            logging.info(
                    'Cannot find a fake moblab id at %s, creating a new one.',
                    fake_moblab_id_path)
        fake_moblab_id = uuid.uuid4().hex
        try:
            with open(fake_moblab_id_path, "w") as fake_moblab_id_file:
                fake_moblab_id_file.write(fake_moblab_id)
        except IOError as e:
            logging.warning('Unable to write the fake moblab id to %s: %s',
                            fake_moblab_id_path, e)
        return fake_moblab_id

    def overwrite_suite_name(self, suite_name):
        """Force a suite name instead of parsing it from the logs."""
        self.suite_name = suite_name

    def annotate_results_with_bugid(self, bug_id):
        """Record a bug id to be written into every result's keyval."""
        self.bug_id = bug_id

    def parse_all_results(self, upload_only: bool = False):
        """Parse every enumerated result directory into (dir, job) pairs."""
        self.results = []
        self.enumerate_all_directories()

        for result_dir in self.result_directories:
            if self.bug_id is not None:
                self.results_parser.write_bug_id(result_dir, self.bug_id)
            self.results.append(
                    (result_dir,
                     self.results_parser.parse(result_dir,
                                               upload_only,
                                               suite_name=self.suite_name)))

    def upload_all_results(self, force):
        """Upload every parsed result and send its pubsub notification."""
        for result in self.results:
            self.results_sender.upload_result_and_notify(
                    result[0], self.moblab_id, result[1], force)


class FakeTkoDb:
    """Minimal stand-in for the TKO db object required by parse_one."""

    def find_job(self, tag):
        # No pre-existing job ever matches.
        return None

    def run_with_retry(self, fn, *args):
        fn(*args)


class ResultsParserClass:
    def __init__(self):
        pass

    def job_tag(self, job_id, machine):
        """Build the TKO job tag `<job_id>-moblab/<machine>`."""
        return str(job_id) + "-moblab/" + str(machine)

    def parse(self, path, upload_only: bool, suite_name=""):
        """Parse one result directory with the TKO parser.

        Args:
            path: The test result directory.
            upload_only: If True, skip regenerating job.serialize.
            suite_name: Overrides the suite name parsed from the logs.

        Returns:
            The parsed TKO job object.
        """
        #temporarily assign a fake job id until parsed
        fake_job_id = 1234
        fake_machine = "localhost"
        name = self.job_tag(fake_job_id, fake_machine)
        parse_options = argparse.Namespace(
                **{
                        "suite_report": False,
                        "dry_run": True,
                        "reparse": False,
                        "mail_on_failure": False
                })
        pid_file_manager = pidfile.PidFileManager("parser", path)
        self.print_autotest_git_history(path)
        job = parse_one(FakeTkoDb(), pid_file_manager, name, path,
                        parse_options)
        job.board = job.tests[0].attributes['host-board']
        # Derive a stable job id from the job start time (ms since epoch).
        job_id = int(job.started_time.timestamp() * 1000)
        job.afe_parent_job_id = job_id + 1
        if suite_name == "":
            job.suite = self.parse_suite_name(path)
        else:
            job.suite = suite_name
        job.build_version = self.get_build_version(job.tests)
        name = self.job_tag(job_id, job.machine)
        if not upload_only:
            export_tko_job_to_file(job, name, path + "/job.serialize")

        # autotest_lib appends additional global logger handlers
        # remove these handlers to avoid affecting logging for the google
        # storage library
        # BUGFIX: iterate over a copy -- removing from the live handler
        # list while iterating it skips every other handler.
        for handler in log.getLogger().handlers[:]:
            log.getLogger().removeHandler(handler)
        return job

    def print_autotest_git_history(self, path):
        """
        Print the hash of the latest git commit of the autotest directory.

        Args:
            path: The test directory for non-moblab test results.
        """
        git_hash = subprocess.check_output(shlex.split(GIT_COMMAND),
                                           cwd=AUTOTEST_DIR)
        git_hash_path = os.path.join(path, GIT_HASH_FILE)
        with open(git_hash_path, "w") as git_hash_file:
            git_hash_file.write(git_hash.decode("utf-8"))

    def parse_suite_name(self, path):
        """Get the suite name from a results directory.

        Returns None if test_that.DEBUG does not exist, and
        DEFAULT_SUITE_NAME if the suite name is not found in the first ten
        lines of test_that.DEBUG or the file cannot be read.

        Args:
            path: The directory specified on the command line.
        """
        # The debug log lives next to (one level above) the result dir.
        path = path.split('/')[:-1]
        path = '/'.join(path)

        debug_file = os.path.join(path, DEBUG_FILE_PATH)
        if not os.path.exists(debug_file) or not os.path.isfile(debug_file):
            return None
        exp = re.compile(SUITE_NAME_REGEX)
        try:
            with open(debug_file) as f:
                line_count = 0
                for line in f:
                    line_count += 1
                    if line_count > 10:
                        break
                    result = exp.search(line)
                    if not result:
                        continue
                    else:
                        return result.group(1)
        except IOError as e:
            logging.warning('Error trying to read test_that.DEBUG: %s', e)
        return DEFAULT_SUITE_NAME

    def get_build_version(self, tests):
        """Return "R<milestone>-<release>" from the first test that carries
        both keyvals, or "" if none does."""
        release_version_label = "CHROMEOS_RELEASE_VERSION"
        milestone_label = "CHROMEOS_RELEASE_CHROME_MILESTONE"
        for test in tests:
            if not test.subdir:
                continue

            release = None
            milestone = None
            if release_version_label in test.attributes:
                release = test.attributes[release_version_label]
            if milestone_label in test.attributes:
                milestone = test.attributes[milestone_label]
            if release and milestone:
                return "R%s-%s" % (milestone, release)

        return ""

    def valid_bug_id(self, v):
        """Check if user input bug id is in valid format.

        Args:
            v: User input bug id in string.
        Returns:
            An int representing the bug id.
        Raises:
            argparse.ArgumentTypeError: if user input bug id has wrong format.
        """
        try:
            bug_id = int(v)
        except ValueError as e:
            raise argparse.ArgumentTypeError(
                    "Bug id %s is not a positive integer: "
                    "%s" % (v, e))
        if bug_id <= 0:
            raise argparse.ArgumentTypeError(
                    "Bug id %s is not a positive integer" % v)
        return bug_id

    def write_bug_id(self, test_dir, bug_id):
        """
        Write the bug id to the test results.

        Args:
            test_dir: The test directory for non-moblab test results.
            bug_id: The bug id to write to the test results.
        Returns:
            A boolean. True if the bug id is written successfully or is the same as
            the old bug id already in test results; False if failed to write the
            bug id, or if the user decides not to overwrite the old bug id already
            in test results.
        """
        old_bug_id = None
        new_keyval = list()

        keyval_file = os.path.join(test_dir, KEYVAL_FILE)
        try:
            with open(keyval_file, 'r') as keyval_raw:
                for line in keyval_raw.readlines():
                    match = re.match(r'bug_id=(\d+)', line)
                    if match:
                        old_bug_id = self.valid_bug_id(match.group(1))
                    else:
                        new_keyval.append(line)
        except IOError as e:
            logging.error(
                    'Cannot read keyval file from %s, skip writing the bug '
                    'id %s: %s', test_dir, bug_id, e)
            return False

        if old_bug_id:
            if old_bug_id == bug_id:
                return True
            overwrite_bug_id = _confirm_option(
                    'Would you like to overwrite bug id '
                    '%s with new bug id %s?' % (old_bug_id, bug_id))
            if not overwrite_bug_id:
                return False

        new_keyval.append('bug_id=%s' % bug_id)
        new_keyval_file = os.path.join(test_dir, NEW_KEYVAL_FILE)
        try:
            # Write a fresh keyval then atomically replace the original.
            with open(new_keyval_file, 'w') as new_keyval_raw:
                for line in new_keyval:
                    new_keyval_raw.write(line)
                new_keyval_raw.write('\n')
            shutil.move(new_keyval_file, keyval_file)
            return True
        except Exception as e:
            logging.error(
                    'Cannot write bug id to keyval file in %s, skip writing '
                    'the bug id %s: %s', test_dir, bug_id, e)
            return False


ResultsParser = ResultsParserClass()
# Standalone validator usable as an argparse `type=` callable.
_valid_bug_id = functools.partial(ResultsParserClass.valid_bug_id,
                                  ResultsParser)


class ResultsSenderClass:
    def __init__(self):
        self.gcs_bucket = ""

    def set_destination(self, destination):
        """Set the target GCS bucket name."""
        self.gcs_bucket = destination

    def upload_result_and_notify(self, test_dir, moblab_id, job, force):
        """Upload one result directory and notify CPCon, unless it was
        already uploaded and `force` is False."""
        job_id = str(int(job.started_time.timestamp() * 1000))
        if self.uploaded(test_dir) and not force:
            return
        self.upload_result(test_dir, moblab_id, job_id, job.machine)
        self.send_pubsub_message(test_dir, moblab_id, job_id)

    def upload_batch_files(self, gs_path, test_dir, files):
        """Upload one batch of files to `gs_path`, preserving their layout
        relative to `test_dir`. Runs inside a multiprocessing worker."""
        # Hoist the client/bucket out of the loop: one client per batch,
        # not one per file.
        gs_client_bucket = storage.Client().bucket(self.gcs_bucket)
        # remove trailing slash to ensure dest_file path gets created properly
        test_dir = test_dir.rstrip('/')
        for file_path in files:
            if not os.path.isfile(file_path):
                continue
            dest_file = gs_path + file_path.replace(test_dir, "", 1)
            logging.info("uploading file: %s", dest_file)
            blob = gs_client_bucket.blob(dest_file)
            blob.upload_from_filename(file_path)

    def upload_result(self, test_dir, moblab_id, job_id, hostname):
        """
        Upload the test directory with job.serialize to GCS bucket.

        Args:
            test_dir: The test directory for non-moblab test results.
            moblab_id: A string that represents the unique id of a moblab device.
            job_id: A job id.
            hostname: The DUT hostname, used in the GCS path.
        """
        upload_status_file = os.path.join(test_dir, UPLOADED_STATUS_FILE)
        with open(upload_status_file, "w") as upload_status:
            upload_status.write("UPLOADING")

        fake_moblab_id = moblab_id
        fake_moblab_install_id = moblab_id

        gcs_bucket_path = os.path.join("results", fake_moblab_id,
                                       fake_moblab_install_id,
                                       "%s-moblab" % job_id, hostname)

        try:
            logging.info(
                    "Start to upload test directory: %s to GCS bucket path: %s",
                    test_dir, gcs_bucket_path)

            files_to_upload = glob.glob(test_dir + "/**", recursive=True)
            batch_size = 8
            with multiprocessing.Pool(4) as p:
                files_to_upload_batch = [
                        files_to_upload[i:i + batch_size]
                        for i in range(0, len(files_to_upload), batch_size)
                ]
                p.map(
                        functools.partial(
                                ResultsSenderClass.upload_batch_files, self,
                                gcs_bucket_path, test_dir),
                        files_to_upload_batch)

            # BUGFIX: record UPLOADED only after the upload finished; the
            # status was previously written before any file went out, so a
            # crash mid-upload left a falsely good status behind.
            with open(upload_status_file, "w") as upload_status:
                upload_status.write("UPLOADED")

            logging.info(
                    "Successfully uploaded test directory: %s to GCS bucket path: %s",
                    test_dir, gcs_bucket_path)
        except Exception as e:
            with open(upload_status_file, "w") as upload_status:
                upload_status.write("UPLOAD_FAILED")
            raise Exception(
                    "Failed to upload test directory: %s to GCS bucket "
                    "path: %s for the error: %s" %
                    (test_dir, gcs_bucket_path, e))

    def send_pubsub_message(self, test_dir, moblab_id, job_id):
        """
        Send pubsub messages to trigger CPCon pipeline to process non-moblab
        test results in the specific GCS bucket path.

        Args:
            test_dir: The test directory for non-moblab test results.
            moblab_id: A moblab id.
            job_id: A job id.
        """
        moblab_install_id = moblab_id
        console_client = pubsub_client.PubSubBasedClient()
        gsuri = "gs://%s/results/%s/%s/%s-moblab" % (
                self.gcs_bucket, moblab_id, moblab_install_id, job_id)

        try:
            logging.info("Start to send the pubsub message to GCS path: %s",
                         gsuri)
            message_id = \
                console_client.send_test_job_offloaded_message(gsuri,
                                                               moblab_id,
                                                               moblab_install_id)
            upload_status_file = os.path.join(test_dir, UPLOADED_STATUS_FILE)
            with open(upload_status_file, "w") as upload_status:
                upload_status.write(STATUS_GOOD)

            logging.info(
                    "Successfully sent the pubsub message with message id: %s to GCS "
                    "path: %s", message_id[0], gsuri)
        except Exception as e:
            raise Exception(
                    "Failed to send the pubsub message with moblab id: %s "
                    "and job id: %s to GCS path: %s for the error: %s" %
                    (moblab_id, job_id, gsuri, e))

    def uploaded(self, test_dir):
        """
        Checks if the message for the uploaded bucket has been sent.

        Args:
            test_dir: The test directory for non-moblab test results.
        """
        upload_status_file = os.path.join(test_dir, UPLOADED_STATUS_FILE)
        if not os.path.exists(upload_status_file):
            logging.debug("The upload status file %s does not exist.",
                          upload_status_file)
            return False

        with open(upload_status_file, "r") as upload_status:
            if upload_status.read() == STATUS_GOOD:
                # BUGFIX: logging.warn is a deprecated alias (removed in
                # Python 3.13); use warning().
                logging.warning(
                        "The test directory: %s status has already been "
                        "sent to CPCon and the .upload_status file has "
                        "been set to PUBSUB_SENT.", test_dir)
                return True
            else:
                logging.debug("The pubsub message was not successful")
                return False


ResultsSender = ResultsSenderClass()


def main(args):
    """Entry point: configure logging, then dispatch the subcommand."""
    parsed_args = parse_arguments(args)

    fmt = log.Formatter('%(asctime)s :: %(levelname)-8s :: %(message)s')
    # Keep this module's records off the (autotest-polluted) root logger.
    logging.propagate = False

    log_level = log.INFO
    if parsed_args.verbose:
        log_level = log.DEBUG
    if not parsed_args.quiet:
        stream_handler = log.StreamHandler(sys.stdout)
        stream_handler.setFormatter(fmt)
        stream_handler.setLevel(log_level)
        logging.addHandler(stream_handler)

    logging.info("logging to %s", parsed_args.logfile)
    file_handler = log.FileHandler(parsed_args.logfile, mode='w')
    file_handler.setFormatter(fmt)
    file_handler.setLevel(log.DEBUG)
    logging.addHandler(file_handler)

    if parsed_args.subcommand == "config":
        _configure_environment(parsed_args)
        return

    persistent_settings = _load_config()

    results_manager = ResultsManager(ResultsParser, ResultsSender)
    results_manager.set_destination(persistent_settings.bucket)
    results_manager.new_directory(parsed_args.directory)

    if parsed_args.bug:
        results_manager.annotate_results_with_bugid(parsed_args.bug)
    if parsed_args.suite:
        results_manager.overwrite_suite_name(parsed_args.suite)
    if parsed_args.parse_only:
        results_manager.parse_all_results()
    elif parsed_args.upload_only:
        results_manager.parse_all_results(upload_only=True)
        results_manager.upload_all_results(force=parsed_args.force)
    else:
        results_manager.parse_all_results()
        results_manager.upload_all_results(force=parsed_args.force)


if __name__ == "__main__":
    try:
        main(sys.argv[1:])
    except KeyboardInterrupt:
        sys.exit(0)