xref: /aosp_15_r20/tools/asuite/atest/atest_execution_info.py (revision c2e18aaa1096c836b086f94603d04f4eb9cf37f5)
1# Copyright 2019, The Android Open Source Project
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#     http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15"""ATest execution info generator."""
16
17
18from __future__ import print_function
19
20import argparse
21import glob
22import json
23import logging
24import os
25import pathlib
26import shutil
27import sys
28import time
29from typing import List
30
31from atest import atest_enum
32from atest import atest_utils
33from atest import constants
34from atest import usb_speed_detect as usb
35from atest.atest_enum import ExitCode
36from atest.logstorage import log_uploader
37from atest.metrics import metrics
38from atest.metrics import metrics_utils
39
# Keys used in the persisted test_result JSON document.
_ARGS_KEY = 'args'
# Per-status buckets inside each test group; also the summary counter keys.
_STATUS_PASSED_KEY = 'PASSED'
_STATUS_FAILED_KEY = 'FAILED'
_STATUS_IGNORED_KEY = 'IGNORED'
_SUMMARY_KEY = 'summary'
_TOTAL_SUMMARY_KEY = 'total_summary'
_TEST_RUNNER_KEY = 'test_runner'
_TEST_NAME_KEY = 'test_name'
_TEST_TIME_KEY = 'test_time'
_TEST_DETAILS_KEY = 'details'
# File name of the JSON result stored inside each result directory.
_TEST_RESULT_NAME = 'test_result'
_TEST_RESULT_LINK = 'test_result_link'
# Attribute looked up on the __main__ module to recover atest's exit code.
_EXIT_CODE_ATTR = 'EXIT_CODE'
_MAIN_MODULE_KEY = '__main__'
# Column widths for the tabular `atest --history` output.
_UUID_LEN = 30
_RESULT_LEN = 20
_RESULT_URL_LEN = 35
_COMMAND_LEN = 50
# Glob template: (result dir, test name) -> on-failure logcat files.
_LOGCAT_FMT = '{}/log/invocation_*/{}*device_logcat_test*'

# Fresh zeroed summary counters; always .copy() before mutating.
_SUMMARY_MAP_TEMPLATE = {
    _STATUS_PASSED_KEY: 0,
    _STATUS_FAILED_KEY: 0,
    _STATUS_IGNORED_KEY: 0,
}

# Set by the build stage (elsewhere) once preparation finishes; None until then.
PREPARE_END_TIME = None
67
68
def preparation_time(start_time):
  """Compute how long test preparation took.

  Args:
      start_time: The time preparation started.

  Returns:
      The elapsed preparation time when PREPARE_END_TIME has been set,
      otherwise None.
  """
  if not PREPARE_END_TIME:
    return None
  return PREPARE_END_TIME - start_time
79
80
def symlink_latest_result(test_result_dir):
  """Point the 'LATEST' symlink at the given result directory.

  Args:
      test_result_dir: A string of the dir path.
  """
  latest = os.path.join(constants.ATEST_RESULT_ROOT, 'LATEST')
  # islink() also catches dangling symlinks, which exists() reports as False.
  if os.path.islink(latest) or os.path.exists(latest):
    os.remove(latest)
  os.symlink(test_result_dir, latest)
91
92
def print_test_result(root, history_arg):
  """Make a list of latest n test result.

  Args:
      root: A string of the test result root path.
      history_arg: A string of an integer or uuid. If it's an integer string,
        the number of lines of test result will be given; else it will be
        treated a uuid and print test result accordingly in detail.
  """
  if not history_arg.isdigit():
    # Non-numeric argument: treat it as a result uuid and show that single
    # run in detail.
    path = os.path.join(constants.ATEST_RESULT_ROOT, history_arg, 'test_result')
    print_test_result_by_path(path)
    return
  # Result directories are timestamp-named (20yy-...); sort newest first.
  target = '%s/20*_*_*' % root
  paths = glob.glob(target)
  paths.sort(reverse=True)
  # has_url_results() walks every result dir on disk; call it once and reuse
  # the answer instead of re-walking the tree for every printed row.
  url_results = has_url_results()
  if url_results:
    print(
        '{:-^{uuid_len}} {:-^{result_len}} {:-^{result_url_len}}'
        ' {:-^{command_len}}'.format(
            'uuid',
            'result',
            'result_url',
            'command',
            uuid_len=_UUID_LEN,
            result_len=_RESULT_LEN,
            result_url_len=_RESULT_URL_LEN,
            command_len=_COMMAND_LEN,
        )
    )
  else:
    print(
        '{:-^{uuid_len}} {:-^{result_len}} {:-^{command_len}}'.format(
            'uuid',
            'result',
            'command',
            uuid_len=_UUID_LEN,
            result_len=_RESULT_LEN,
            command_len=_COMMAND_LEN,
        )
    )
  for path in paths[0 : int(history_arg) + 1]:
    result_path = os.path.join(path, 'test_result')
    # load_json_safely can return a falsy value for a broken/unreadable
    # file; fall back to {} so the row still prints instead of crashing.
    result = atest_utils.load_json_safely(result_path) or {}
    total_summary = result.get(_TOTAL_SUMMARY_KEY, {})
    # Abbreviate each status to its first letter, e.g. "P:2, F:1, I:0".
    summary_str = ', '.join(
        [k[:1] + ':' + str(v) for k, v in total_summary.items()]
    )
    test_result_url = result.get(_TEST_RESULT_LINK, '')
    if url_results:
      print(
          '{:<{uuid_len}} {:<{result_len}} '
          '{:<{result_url_len}} atest {:<{command_len}}'.format(
              os.path.basename(path),
              summary_str,
              test_result_url,
              result.get(_ARGS_KEY, ''),
              uuid_len=_UUID_LEN,
              result_len=_RESULT_LEN,
              result_url_len=_RESULT_URL_LEN,
              command_len=_COMMAND_LEN,
          )
      )
    else:
      print(
          '{:<{uuid_len}} {:<{result_len}} atest {:<{command_len}}'.format(
              os.path.basename(path),
              summary_str,
              result.get(_ARGS_KEY, ''),
              uuid_len=_UUID_LEN,
              result_len=_RESULT_LEN,
              command_len=_COMMAND_LEN,
          )
      )
167
168
def print_test_result_by_path(path):
  """Print latest test result.

  Args:
      path: A string of test result path.
  """
  result = atest_utils.load_json_safely(path)
  if not result:
    return
  print('\natest {}'.format(result.get(_ARGS_KEY, '')))
  test_result_url = result.get(_TEST_RESULT_LINK, '')
  if test_result_url:
    print('\nTest Result Link: {}'.format(test_result_url))
  print('\nTotal Summary:\n{}'.format(atest_utils.delimiter('-')))
  total_summary = result.get(_TOTAL_SUMMARY_KEY, {})
  print(', '.join([(k + ':' + str(v)) for k, v in total_summary.items()]))
  # Default to 0: a summary without a FAILED entry would otherwise yield
  # None and make the `> 0` comparison raise TypeError.
  fail_num = total_summary.get(_STATUS_FAILED_KEY, 0)
  if fail_num > 0:
    message = '%d test failed' % fail_num
    print(f'\n{atest_utils.mark_red(message)}\n{"-" * len(message)}')
    test_runner = result.get(_TEST_RUNNER_KEY, {})
    for runner_name in test_runner.keys():
      test_dict = test_runner.get(runner_name, {})
      for test_name in test_dict:
        test_details = test_dict.get(test_name, {})
        # Default to [] so a group with no FAILED bucket is iterable.
        for fail in test_details.get(_STATUS_FAILED_KEY, []):
          print(atest_utils.mark_red(f'{fail.get(_TEST_NAME_KEY)}'))
          # Look for the per-test on-failure logcat next to the result file.
          failure_files = glob.glob(
              _LOGCAT_FMT.format(
                  os.path.dirname(path), fail.get(_TEST_NAME_KEY)
              )
          )
          if failure_files:
            print(
                '{} {}'.format(
                    atest_utils.mark_cyan('LOGCAT-ON-FAILURES:'),
                    failure_files[0],
                )
            )
          print(
              '{} {}'.format(
                  atest_utils.mark_cyan('STACKTRACE:\n'),
                  fail.get(_TEST_DETAILS_KEY),
              )
          )
214
215
def has_non_test_options(args: argparse.Namespace):
  """Check whether any non-test option is present in the args.

  Args:
      args: An argparse.Namespace instance holding parsed args.

  Returns:
      True, if args has at least one non-test option.
      False, otherwise.
  """
  # Note: the original expression listed args.history twice; each option is
  # evaluated exactly once here.
  return (
      args.collect_tests_only
      or args.dry_run
      or args.history
      or args.version
      or args.latest_result
  )
234
235
def has_url_results():
  """Return True if any stored test_result file contains a result URL."""
  for root, _, files in os.walk(constants.ATEST_RESULT_ROOT):
    for file in files:
      if file != 'test_result':
        continue
      json_file = os.path.join(root, 'test_result')
      result = atest_utils.load_json_safely(json_file)
      # load_json_safely can return a falsy value (e.g. None) for a broken
      # file; guard before .get to avoid AttributeError.
      if result and result.get(_TEST_RESULT_LINK, ''):
        return True
  return False
248
249
class AtestExecutionInfo:
  """Class that stores the whole test progress information in JSON format.

  ----
  For example, running command
      atest hello_world_test HelloWorldTest

  will result in storing the execution detail in JSON:
  {
    "args": "hello_world_test HelloWorldTest",
    "test_runner": {
        "AtestTradefedTestRunner": {
            "hello_world_test": {
                "FAILED": [
                    {"test_time": "(5ms)",
                     "details": "Hello, Wor...",
                     "test_name": "HelloWorldTest#PrintHelloWorld"}
                    ],
                "summary": {"FAILED": 1, "PASSED": 0, "IGNORED": 0}
            },
            "HelloWorldTests": {
                "PASSED": [
                    {"test_time": "(27ms)",
                     "details": null,
                     "test_name": "...HelloWorldTest#testHalloWelt"},
                    {"test_time": "(1ms)",
                     "details": null,
                     "test_name": "....HelloWorldTest#testHelloWorld"}
                    ],
                "summary": {"FAILED": 0, "PASSED": 2, "IGNORED": 0}
            }
        }
    },
    "total_summary": {"FAILED": 1, "PASSED": 2, "IGNORED": 0}
  }
  """

  # Result reporters registered externally (presumably by the test runners —
  # not visible in this file); consumed by _generate_execution_detail on exit.
  result_reporters = []

  def __init__(
      self,
      args: List[str],
      work_dir: str,
      args_ns: argparse.Namespace,
      start_time: float = None,
      repo_out_dir: pathlib.Path = None,
  ):
    """Initialise an AtestExecutionInfo instance.

    Args:
        args: Command line parameters.
        work_dir: The directory for saving information.
        args_ns: An argparse.Namespace instance holding parsed args.
        start_time: The execution start time. Defaults to now when None.
        repo_out_dir: The repo output directory. Defaults to the build out
          directory when None.
    """
    self.args = args
    self.work_dir = work_dir
    # File object for <work_dir>/test_result; opened in __enter__.
    self.result_file_obj = None
    self.args_ns = args_ns
    self.test_result = os.path.join(self.work_dir, _TEST_RESULT_NAME)
    # Background process handle for the USB speed detection; see __enter__.
    self._proc_usb_speed = None
    logging.debug(
        'A %s object is created with args %s, work_dir %s',
        __class__,
        args,
        work_dir,
    )
    self._start_time = start_time if start_time is not None else time.time()
    self._repo_out_dir = (
        repo_out_dir
        if repo_out_dir is not None
        else atest_utils.get_build_out_dir()
    )

  def __enter__(self):
    """Create and return information file object."""
    try:
      self.result_file_obj = open(self.test_result, 'w')
    except IOError:
      atest_utils.print_and_log_error('Cannot open file %s', self.test_result)

    # USB speed detection runs in a separate process so it never blocks the
    # test run; it is best-effort and may be terminated in __exit__.
    self._proc_usb_speed = atest_utils.run_multi_proc(
        func=self._send_usb_metrics_and_warning
    )

    return self.result_file_obj

  def __exit__(self, exit_type, value, traceback):
    """Write execution information and close information file."""

    if self._proc_usb_speed:
      # Usb speed detection is not an obligatory function of atest,
      # so it can be skipped if the process hasn't finished by the time atest
      # is ready to exit.
      if self._proc_usb_speed.is_alive():
        self._proc_usb_speed.terminate()

    log_path = pathlib.Path(self.work_dir)

    # Collect build artifacts produced during this invocation into
    # <work_dir>/build_logs.
    # NOTE(review): mkdir() raises if the directory already exists; work_dir
    # is presumed to be a fresh result directory — confirm with callers.
    build_log_path = log_path / 'build_logs'
    build_log_path.mkdir()
    AtestExecutionInfo._copy_build_artifacts_to_log_dir(
        self._start_time,
        time.time(),
        self._repo_out_dir,
        build_log_path,
        'build.trace',
    )
    AtestExecutionInfo._copy_build_artifacts_to_log_dir(
        self._start_time,
        time.time(),
        self._repo_out_dir,
        build_log_path,
        'verbose.log',
    )

    html_path = None

    # Only persist a result file for real test runs; option-only invocations
    # (e.g. --history, --version) produce no test result.
    if self.result_file_obj and not has_non_test_options(self.args_ns):
      self.result_file_obj.write(
          AtestExecutionInfo._generate_execution_detail(self.args)
      )
      self.result_file_obj.close()
      atest_utils.prompt_suggestions(self.test_result)
      html_path = atest_utils.generate_result_html(self.test_result)
      symlink_latest_result(self.work_dir)
    main_module = sys.modules.get(_MAIN_MODULE_KEY)
    # Prefer the code carried by a SystemExit (atest exiting via sys.exit());
    # otherwise fall back to the EXIT_CODE attribute set on __main__.
    main_exit_code = (
        value.code
        if isinstance(value, SystemExit)
        else (getattr(main_module, _EXIT_CODE_ATTR, ExitCode.ERROR))
    )

    print()
    if log_path:
      print(f'Test logs: {log_path / "log"}')
    log_link = html_path if html_path else log_path
    if log_link:
      print(atest_utils.mark_magenta(f'Log file list: file://{log_link}'))
    bug_report_url = AtestExecutionInfo._create_bug_report_url()
    if bug_report_url:
      print(atest_utils.mark_magenta(f'Bug report: {bug_report_url}'))
    print()

    # Do not send stacktrace with send_exit_event when exit code is not
    # ERROR.
    if main_exit_code != ExitCode.ERROR:
      logging.debug('send_exit_event:%s', main_exit_code)
      metrics_utils.send_exit_event(main_exit_code)
    else:
      logging.debug('handle_exc_and_send_exit_event:%s', main_exit_code)
      metrics_utils.handle_exc_and_send_exit_event(main_exit_code)

    if log_uploader.is_uploading_logs():
      log_uploader.upload_logs_detached(log_path)

  def _send_usb_metrics_and_warning(self):
    """Detect USB speeds, warn the user, and report speed metrics.

    Best-effort: returns silently when no adb device identifiers or udc
    driver directory are available.
    """
    # Read the USB speed and send usb metrics.
    device_ids = usb.get_adb_device_identifiers()
    if not device_ids:
      return

    usb_speed_dir_name = usb.get_udc_driver_usb_device_dir_name()
    if not usb_speed_dir_name:
      return

    usb_negotiated_speed = usb.get_udc_driver_usb_device_attribute_speed_value(
        usb_speed_dir_name, usb.UsbAttributeName.NEGOTIATED_SPEED
    )
    usb_max_speed = usb.get_udc_driver_usb_device_attribute_speed_value(
        usb_speed_dir_name, usb.UsbAttributeName.MAXIMUM_SPEED
    )
    usb.verify_and_print_usb_speed_warning(
        device_ids, usb_negotiated_speed, usb_max_speed
    )

    metrics.LocalDetectEvent(
        detect_type=atest_enum.DetectType.USB_NEGOTIATED_SPEED,
        result=usb_negotiated_speed,
    )
    metrics.LocalDetectEvent(
        detect_type=atest_enum.DetectType.USB_MAX_SPEED,
        result=usb_max_speed,
    )

  @staticmethod
  def _create_bug_report_url() -> str:
    """Return a bug-report URL for internal users, or '' for external ones."""
    if not metrics.is_internal_user():
      return ''
    if not log_uploader.is_uploading_logs():
      return 'http://go/new-atest-issue'
    return f'http://go/from-atest-runid/{metrics.get_run_id()}'

  @staticmethod
  def _copy_build_artifacts_to_log_dir(
      start_time: float,
      end_time: float,
      repo_out_path: pathlib.Path,
      log_path: pathlib.Path,
      file_name_prefix: str,
  ):
    """Copy build trace files to log directory.

    Args:
      start_time: The start time of the build.
      end_time: The end time of the build.
      repo_out_path: The path to the repo out directory.
      log_path: The path to the log directory.
      file_name_prefix: The prefix of the file name.
    """
    # Only pick up files whose mtime falls inside this atest invocation, so
    # artifacts from older builds are not copied.
    for file in repo_out_path.iterdir():
      if (
          file.is_file()
          and file.name.startswith(file_name_prefix)
          and start_time <= file.stat().st_mtime <= end_time
      ):
        shutil.copy(file, log_path / file.name)

  @staticmethod
  def _generate_execution_detail(args):
    """Generate execution detail.

    Args:
        args: Command line parameters that you want to save.

    Returns:
        A json format string.
    """
    info_dict = {_ARGS_KEY: ' '.join(args)}
    try:
      AtestExecutionInfo._arrange_test_result(
          info_dict, AtestExecutionInfo.result_reporters
      )
      return json.dumps(info_dict)
    except ValueError as err:
      atest_utils.print_and_log_warning(
          'Parsing test result failed due to : %s', err
      )
    # Return a JSON string, not a dict: the caller writes this value to a
    # file object, and file.write({}) would raise TypeError.
    return '{}'

  @staticmethod
  def _arrange_test_result(info_dict, reporters):
    """Append test result information in given dict.

    Arrange test information to below
    "test_runner": {
        "test runner name": {
            "test name": {
                "FAILED": [
                    {"test time": "",
                     "details": "",
                     "test name": ""}
                ],
            "summary": {"FAILED": 0, "PASSED": 0, "IGNORED": 0}
            },
        },
    "total_summary": {"FAILED": 0, "PASSED": 0, "IGNORED": 0}

    Args:
        info_dict: A dict you want to add result information in.
        reporters: A list of result_reporter.

    Returns:
        A dict contains test result information data.
    """
    info_dict[_TEST_RUNNER_KEY] = {}
    # Bucket every test result under runner -> group -> status.
    for reporter in reporters:
      if reporter.test_result_link:
        info_dict[_TEST_RESULT_LINK] = reporter.test_result_link
      for test in reporter.all_test_results:
        runner = info_dict[_TEST_RUNNER_KEY].setdefault(test.runner_name, {})
        group = runner.setdefault(test.group_name, {})
        result_dict = {
            _TEST_NAME_KEY: test.test_name,
            _TEST_TIME_KEY: test.test_time,
            _TEST_DETAILS_KEY: test.details,
        }
        group.setdefault(test.status, []).append(result_dict)

    # Count per-group and overall totals for the known statuses only.
    total_test_group_summary = _SUMMARY_MAP_TEMPLATE.copy()
    for runner in info_dict[_TEST_RUNNER_KEY]:
      for group in info_dict[_TEST_RUNNER_KEY][runner]:
        group_summary = _SUMMARY_MAP_TEMPLATE.copy()
        for status in info_dict[_TEST_RUNNER_KEY][runner][group]:
          count = len(info_dict[_TEST_RUNNER_KEY][runner][group][status])
          if status in _SUMMARY_MAP_TEMPLATE:
            group_summary[status] = count
            total_test_group_summary[status] += count
        info_dict[_TEST_RUNNER_KEY][runner][group][_SUMMARY_KEY] = group_summary
    info_dict[_TOTAL_SUMMARY_KEY] = total_test_group_summary
    return info_dict
545