# Lint as: python2, python3
# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging
import os
import stat

from autotest_lib.client.common_lib import log
from autotest_lib.client.common_lib import error, utils, global_config
from autotest_lib.client.bin import base_sysinfo, utils
from autotest_lib.client.cros import constants
from autotest_lib.client.cros import tpm


get_value = global_config.global_config.get_config_value
collect_corefiles = get_value('CLIENT', 'collect_corefiles',
                              type=bool, default=False)


logfile = base_sysinfo.logfile
command = base_sysinfo.command


class logdir(base_sysinfo.loggable):
    """Represents a log directory."""

    DEFAULT_EXCLUDES = ("**autoserv*", "**.journal",)

    def __init__(self, directory, excludes=DEFAULT_EXCLUDES):
        super(logdir, self).__init__(directory, log_in_keyval=False)
        self.dir = directory
        self._excludes = excludes
        self._infer_old_attributes()


    def __setstate__(self, state):
        """Unpickle handler.

        When client tests are run without SSP, we pickle this object on the
        server side (using the version of the class deployed in the lab) and
        unpickle it on the DUT (using the version of the class from the build).
        This means that when adding a new attribute to this class, for a while
        the server-side code does not populate that attribute. So, deal with
        missing attributes in a valid way.
        """
        self.__dict__ = state
        if '_excludes' not in state:
            self._excludes = self.DEFAULT_EXCLUDES
            if self.additional_exclude:
                # additional_exclude is a single pattern string; append it as
                # one exclude rather than iterating over its characters.
                self._excludes += (self.additional_exclude,)


    def __repr__(self):
        return "site_sysinfo.logdir(%r, %s)" % (self.dir,
                                                self._excludes)


    def __eq__(self, other):
        if isinstance(other, logdir):
            return (self.dir == other.dir and
                    self._excludes == other._excludes)
        elif isinstance(other, base_sysinfo.loggable):
            return False
        return NotImplemented


    def __ne__(self, other):
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result


    def __hash__(self):
        return hash(self.dir) + hash(self._excludes)


    def run(self, log_dir):
        """Copies this log directory to the specified directory.

        @param log_dir: The destination log directory.
        """
        from_dir = os.path.realpath(self.dir)
        if os.path.exists(from_dir):
            parent_dir = os.path.dirname(from_dir)
            utils.system("mkdir -p %s%s" % (log_dir, parent_dir))

            excludes = [
                    "--exclude=%s" % self._anchored_exclude_pattern(from_dir, x)
                    for x in self._excludes]
            # Take source permissions and add ugo+r so files are accessible
            # via the archive server.
            utils.system(
                    "rsync --no-perms --chmod=ugo+r -a --safe-links %s %s %s%s"
                    % (" ".join(excludes), from_dir, log_dir, parent_dir))


    def _anchored_exclude_pattern(self, from_dir, pattern):
        return '/%s/%s' % (os.path.basename(from_dir), pattern)


    def _infer_old_attributes(self):
        """Infer attributes kept for backwards compatibility.

        YOU MUST NEVER DROP / REINTERPRET THESE.
        A logdir object is pickled on the server side and unpickled on the
        client side. This means that, when running against client-side code
        from an older build, we need to be able to unpickle an instance of
        logdir pickled from a newer version of the class.

        Some old attributes are not accurately handled via __setstate__, so
        we can't drop them without breaking compatibility.
        """
        additional_excludes = list(set(self._excludes) -
                                   set(self.DEFAULT_EXCLUDES))
        if additional_excludes:
            # The old API only allowed a single additional exclude.
            # Best effort: keep the first one and drop the rest.
            self.additional_exclude = additional_excludes[0]
        else:
            self.additional_exclude = None

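# Illustrative sketch of how logdir.run() above builds its rsync command; the
# directory path and resulting command are hypothetical.
#
#   example = logdir('/var/log/my_component')
#   example._anchored_exclude_pattern('/var/log/my_component', '**autoserv*')
#   # -> '/my_component/**autoserv*', i.e. each pattern is anchored to the
#   # copied directory, so run(log_dir) invokes roughly:
#   #   rsync --no-perms --chmod=ugo+r -a --safe-links \
#   #       --exclude=/my_component/**autoserv* \
#   #       --exclude=/my_component/**.journal \
#   #       /var/log/my_component <log_dir>/var/log
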
class file_stat(object):
    """Stores a file's size and inode, used for retrieving new data in a file."""
    def __init__(self, file_path):
        """Collect the size and inode information of a file.

        @param file_path: full path to the file.

        """
        stat_info = os.stat(file_path)
        # Starting size of the file; skip that many bytes when doing the diff.
        self.st_size = stat_info.st_size
        # Inode of the file. If the inode has changed, treat this as a new
        # file and copy the whole file.
        self.st_ino = stat_info.st_ino

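# Illustrative sketch of how the recorded st_ino/st_size pair is used by
# diffable_logdir._copy_new_data_in_file below; the path is hypothetical.
#
#   before = file_stat('/var/log/messages')   # e.g. st_size=1000, st_ino=42
#   # ... the test runs, the log grows or gets rotated ...
#   after = os.stat('/var/log/messages')
#   # same inode, larger size  -> copy only the bytes after offset 1000
#   # same inode, smaller size -> file was truncated, copy the whole file
#   # different inode          -> file was rotated/replaced, copy the whole file
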
class diffable_logdir(logdir):
    """Represents a log directory from which only new content is copied.

    An instance of this class should be added to both
    before_iteration_loggables and after_iteration_loggables. This guarantees
    that the file status information is collected when the run method is
    called in before_iteration_loggables, and the diff is executed when the
    run method is called in after_iteration_loggables.

    """
    def __init__(self, directory, excludes=logdir.DEFAULT_EXCLUDES,
                 keep_file_hierarchy=True, append_diff_in_name=True):
        """
        Constructor of a diffable_logdir instance.

        @param directory: directory to be diffed after an iteration finishes.
        @param excludes: path patterns to exclude for rsync.
        @param keep_file_hierarchy: True to preserve the full path, e.g.,
                sysinfo/var/log/sysstat, vs. sysinfo/sysstat if it's False.
        @param append_diff_in_name: True to append '_diff' to the folder name
                to indicate it's a diff, e.g., var/log_diff. Option
                keep_file_hierarchy must be True for this to take effect.

        """
        super(diffable_logdir, self).__init__(directory, excludes)
        self.keep_file_hierarchy = keep_file_hierarchy
        self.append_diff_in_name = append_diff_in_name
        # Dictionary to store the status of all files in the directory.
        self._log_stats = {}


    def _get_init_status_of_src_dir(self, src_dir):
        """Get the initial status of files in the src_dir folder.

        @param src_dir: directory to be diff-ed.

        """
        # Record the initial status of every file in src_dir.
        for file_path in self._get_all_files(src_dir):
            self._log_stats[file_path] = file_stat(file_path)
        self.file_stats_collected = True


    def _get_all_files(self, path):
        """Iterate through files in the given path, including subdirectories.

        @param path: root directory.
        @return: an iterator that iterates through all files in the given
                 path, including subdirectories.

        """
        if not os.path.exists(path):
            return
        for root, dirs, files in os.walk(path):
            for f in files:
                if f.startswith('autoserv'):
                    continue
                if f.endswith('.journal') or f.endswith('.journal~'):
                    continue
                full_path = os.path.join(root, f)
                # Only list regular files or symlinks to them (os.stat follows
                # symlinks).
                try:
                    if stat.S_ISREG(os.stat(full_path).st_mode):
                        yield full_path
                except OSError:
                    # Every so often the target of a symlink gets deleted,
                    # which makes os.stat raise and would otherwise break this
                    # hook. Instead of crashing, just skip the missing file.
                    logging.debug('File %s could not be stat()ed and will not '
                                  'be collected.', full_path)
                    continue


    def _copy_new_data_in_file(self, file_path, src_dir, dest_dir):
        """Copy all new data in a file to the target directory.

        @param file_path: full path to the file to be copied.
        @param src_dir: source directory to do the diff.
        @param dest_dir: target directory to store new data of src_dir.

        """
        bytes_to_skip = 0
        if file_path in self._log_stats:
            prev_stat = self._log_stats[file_path]
            new_stat = os.stat(file_path)
            if new_stat.st_ino == prev_stat.st_ino:
                bytes_to_skip = prev_stat.st_size
            if new_stat.st_size == bytes_to_skip:
                return
            elif new_stat.st_size < prev_stat.st_size:
                # The file shrank to a smaller size; copy the whole file.
                bytes_to_skip = 0
        try:
            with open(file_path, 'rb') as in_log:
                if bytes_to_skip > 0:
                    in_log.seek(bytes_to_skip)
                # Skip src_dir in the path, e.g., src_dir/[sub_dir]/file_name.
                target_path = os.path.join(dest_dir,
                                           os.path.relpath(file_path, src_dir))
                target_dir = os.path.dirname(target_path)
                if not os.path.exists(target_dir):
                    os.makedirs(target_dir)
                with open(target_path, 'wb') as out_log:
                    out_log.write(in_log.read())
        except IOError as e:
            logging.error('Diff %s failed with error: %s', file_path, e)


    def _log_diff(self, src_dir, dest_dir):
        """Log all of the new data in src_dir to dest_dir.

        @param src_dir: source directory to do the diff.
        @param dest_dir: target directory to store new data of src_dir.

        """
        if self.keep_file_hierarchy:
            dir = src_dir.lstrip('/')
            if self.append_diff_in_name:
                dir = dir.rstrip('/') + '_diff'
            dest_dir = os.path.join(dest_dir, dir)

        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        for src_file in self._get_all_files(src_dir):
            self._copy_new_data_in_file(src_file, src_dir, dest_dir)


    def run(self, log_dir, collect_init_status=True, collect_all=False):
        """Copies new content from self.dir to the destination log_dir.

        @param log_dir: The destination log directory.
        @param collect_init_status: Set to True if the run method is called to
                collect the initial status of files.
        @param collect_all: Set to True to force collecting all files.

        """
        if collect_init_status:
            self._get_init_status_of_src_dir(self.dir)
        elif os.path.exists(self.dir):
            # Always create a copy of the new logs to help debugging.
            self._log_diff(self.dir, log_dir)
            if collect_all:
                logdir_temp = logdir(self.dir)
                logdir_temp.run(log_dir)

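# Illustrative sketch of the intended before/after pairing for a
# diffable_logdir, mirroring the log_before_each_test/log_after_each_test
# hooks below; the destination path is hypothetical.
#
#   var_log = diffable_logdir('/var/log')
#   var_log.run(log_dir=None, collect_init_status=True)   # snapshot sizes/inodes
#   # ... the test iteration runs ...
#   var_log.run(log_dir='/tmp/results/sysinfo',            # copy only new bytes
#               collect_init_status=False)                 # into .../var/log_diff
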
class purgeable_logdir(logdir):
    """Represents a log directory that will be purged."""
    def __init__(self, directory, excludes=logdir.DEFAULT_EXCLUDES):
        super(purgeable_logdir, self).__init__(directory, excludes)

    def run(self, log_dir):
        """Copies this log dir to the destination dir, then purges the source.

        @param log_dir: The destination log directory.
        """
        super(purgeable_logdir, self).run(log_dir)

        if os.path.exists(self.dir):
            utils.system("rm -rf %s/*" % (self.dir))


class purged_on_init_logdir(logdir):
    """Represents a log directory that is purged *when initialized*."""

    def __init__(self, directory, excludes=logdir.DEFAULT_EXCLUDES):
        super(purged_on_init_logdir, self).__init__(directory, excludes)

        if os.path.exists(self.dir):
            utils.system("rm -rf %s/*" % (self.dir))

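# Illustrative note: the two purging loggables above differ only in *when* the
# source directory is emptied; the path is hypothetical.
#
#   purgeable_logdir('/var/spool/crash')       # purges after run() copies it
#   purged_on_init_logdir('/var/spool/crash')  # purges as soon as it is
#                                              # constructed, before the test
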
class site_sysinfo(base_sysinfo.base_sysinfo):
    """Represents site system info."""
    def __init__(self, job_resultsdir, version=None):
        super(site_sysinfo, self).__init__(job_resultsdir)
        crash_exclude_string = None
        if not collect_corefiles:
            crash_exclude_string = "*.core"

        # This is added to both before_ and after_iteration_loggables. When
        # run is called in before_iteration_loggables, it collects the file
        # status in the directory. When run is called in
        # after_iteration_loggables, the diff is executed.
        # self.diffable_loggables is only initialized if the instance does not
        # have this attribute yet. The sysinfo instance could be loaded from
        # an earlier pickle dump, which has already initialized the attribute
        # self.diffable_loggables.
        if not hasattr(self, 'diffable_loggables'):
            diffable_log = diffable_logdir(constants.LOG_DIR)
            self.diffable_loggables = set()
            self.diffable_loggables.add(diffable_log)

        # Add in some extra command logging.
        self.boot_loggables.add(command("ls -l /boot",
                                        "boot_file_list"))
        self.before_iteration_loggables.add(
                command(constants.CHROME_VERSION_COMMAND, "chrome_version"))
        self.boot_loggables.add(command("crossystem", "crossystem"))
        self.test_loggables.add(
                purgeable_logdir(
                        os.path.join(constants.CRYPTOHOME_MOUNT_PT, "log")))

        # We do *not* want to purge crashes after an iteration, so that
        # post-test infrastructure can collect them as well. Instead, purge
        # them before.
        # TODO(mutexlox, ayatane): test_runner should handle the purging.
        self.after_iteration_loggables.add(
                purged_on_init_logdir(
                        os.path.join(constants.CRYPTOHOME_MOUNT_PT, "crash"),
                        excludes=logdir.DEFAULT_EXCLUDES +
                        (crash_exclude_string, )))

        self.test_loggables.add(
                purgeable_logdir(constants.CRASH_DIR,
                                 excludes=logdir.DEFAULT_EXCLUDES +
                                 (crash_exclude_string, )))

        self.test_loggables.add(
                logfile(os.path.join(
                        constants.USER_DATA_DIR,
                        ".Google/Google Talk Plugin/gtbplugin.log")))

        # purged_on_init_logdir is not compatible with client R86 and prior.
        if version and int(version) > 86:
            self.test_loggables.add(
                    purged_on_init_logdir(constants.CRASH_DIR,
                                          excludes=logdir.DEFAULT_EXCLUDES +
                                          (crash_exclude_string, )))
        # Collect files under /tmp/crash_reporter, which contain the procfs
        # copy of those crashed processes whose core file didn't get converted
        # into a minidump. We need these additional files for retrospective
        # analysis of the conversion failure.
        self.test_loggables.add(
                purgeable_logdir(constants.CRASH_REPORTER_RESIDUE_DIR))


    @log.log_and_ignore_errors("pre-test sysinfo error:")
    def log_before_each_test(self, test):
        """Logging hook called before a test starts.

        @param test: A test object.
        """
        super(site_sysinfo, self).log_before_each_test(test)

        try:
            for log in self.diffable_loggables:
                log.run(log_dir=None, collect_init_status=True)
        except Exception as e:
            logging.warning("Exception hit during log_before_each_test: %s", e)

    @log.log_and_ignore_errors("post-test sysinfo error:")
    def log_after_each_test(self, test):
        """Logging hook called after a test finishes.

        @param test: A test object.
        """
        super(site_sysinfo, self).log_after_each_test(test)

        test_sysinfodir = self._get_sysinfodir(test.outputdir)

        for log in self.diffable_loggables:
            log.run(log_dir=test_sysinfodir,
                    collect_init_status=False,
                    collect_all=not test.success or test.collect_full_logs)


    def _get_chrome_version(self):
        """Gets the Chrome version number and milestone as strings.

        Invokes "chrome --version" to get the version number and milestone.

        @return A tuple (chrome_ver, milestone) where "chrome_ver" is the
            current Chrome version number as a string (in the form "W.X.Y.Z")
            and "milestone" is the first component of the version number
            (the "W" from "W.X.Y.Z"). If the version number cannot be parsed
            in the "W.X.Y.Z" format, "chrome_ver" will be the full output of
            "chrome --version" and the milestone will be the empty string.

        """
        version_string = utils.system_output(constants.CHROME_VERSION_COMMAND,
                                             ignore_status=True)
        return utils.parse_chrome_version(version_string)

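    # Illustrative example, per the _get_chrome_version docstring above; the
    # version string is hypothetical.
    #
    #   utils.parse_chrome_version('Google Chrome 99.0.4844.35')
    #   # -> ('99.0.4844.35', '99')
    #
    # Output without a W.X.Y.Z version number yields the raw string and an
    # empty milestone.
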
    def log_test_keyvals(self, test_sysinfodir):
        """Generate keyvals for the sysinfo.

        Collects keyval entries to be written into the test keyval.

        @param test_sysinfodir: The test's system info directory.
        """
        keyval = super(site_sysinfo, self).log_test_keyvals(test_sysinfodir)

        lsb_lines = utils.system_output(
                "cat /etc/lsb-release",
                ignore_status=True).splitlines()
        lsb_dict = dict(item.split("=") for item in lsb_lines)

        for lsb_key in lsb_dict.keys():
            # Special handling for the build number.
            if lsb_key == "CHROMEOS_RELEASE_DESCRIPTION":
                keyval["CHROMEOS_BUILD"] = (
                        lsb_dict[lsb_key].rstrip(")").split(" ")[3])
            keyval[lsb_key] = lsb_dict[lsb_key]

        # Get the hwid (hardware ID), if applicable.
        try:
            keyval["hwid"] = utils.system_output('crossystem hwid')
        except error.CmdError:
            # The hwid may not be available (e.g., when running on a VM).
            # If the output of 'crossystem mainfw_type' is 'nonchrome', then
            # we expect the hwid not to be available, and we can proceed in
            # this case. Otherwise, the hwid is missing unexpectedly.
            mainfw_type = utils.system_output('crossystem mainfw_type')
            if mainfw_type == 'nonchrome':
                logging.info(
                        'HWID not available; not logging it as a test keyval.')
            else:
                logging.exception(
                        'HWID expected but could not be identified; output of '
                        '"crossystem mainfw_type" is "%s"', mainfw_type)
                raise

        # Get the chrome version and milestone numbers.
        keyval["CHROME_VERSION"], keyval["MILESTONE"] = (
                self._get_chrome_version())

        # Get the dictionary attack counter.
        keyval["TPM_DICTIONARY_ATTACK_COUNTER"] = (
                tpm.get_tpm_da_info().get(
                        'dictionary_attack_counter',
                        'Failed to query tpm_manager'))

        # Return the updated keyvals.
        return keyval


    def add_logdir(self, loggable):
        """Collect files under a log directory into the sysinfo folder.

        This method can be called from a test control file to collect files
        under a specified folder. Autotest creates a
        [test result dir]/sysinfo folder mirroring the full path of that
        folder and copies all files under it into the sysinfo folder.

        @param loggable: A logdir instance corresponding to the logs to
                collect.
        """
        self.test_loggables.add(loggable)
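
# Illustrative sketch of registering an extra directory for collection via
# add_logdir, typically on the sysinfo object the job already owns (assumed
# here to be exposed as job.sysinfo); the path is hypothetical.
#
#   job.sysinfo.add_logdir(logdir('/var/log/my_component'))
#   # After the test, the contents of /var/log/my_component end up under
#   # <test result dir>/sysinfo/var/log/my_component.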