# Lint as: python2, python3
# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import abc
import logging
import os
import re
import subprocess
import time

from six.moves import range

import common

from autotest_lib.client.bin import utils
from autotest_lib.client.common_lib import error
from autotest_lib.client.cros.constants import CLEANUP_LOGS_PAUSED_FILE


def strip_timestamp(msg):
    """
    Strips timestamps and PIDs from a syslog message to facilitate
    failure reason aggregation when this message is used as an autotest
    exception text.
    """
    kernel = re.search(r' kernel: \[ *\d+\.\d+\] (.*)$', msg)
    if kernel:
        return 'kernel: ' + kernel.group(1)

    user = re.match(r'[^a-z]+ [^ ]* ([^\[ ]+)(?:\[.*\])?(: .*)$', msg)
    if user:
        return user.group(1) + user.group(2)

    logging.warning('Could not parse syslog message: %s', msg)
    return msg


def extract_kernel_timestamp(msg):
    """Extract a timestamp that appears in kernel log messages and looks
    like this:
    ... kernel: [78791.721832] ...

    Returns:
        The timestamp as float in seconds since last boot.
    """
    match = re.search(r'\[\s*([0-9]+\.[0-9]+)\] ', msg)
    if match:
        return float(match.group(1))
    raise error.TestError('Could not extract timestamp from message: ' + msg)


class AbstractLogReader(object):

    def __init__(self):
        self._start_line = 1

    @abc.abstractmethod
    def read_all_logs(self):
        """Read all content from log files.

        Generator function.
        Return an iterator on the content of files.

        This generator can peek a line once (and only once!) by using
        .send(offset). Must iterate over the peeked line before you can
        peek again.
        """
        pass

    def set_start_by_regexp(self, index, regexp):
        """Set the start of logs based on a regular expression.

        @param index: line matching regexp to start at, earliest log at 0.
                Negative numbers indicate matches since end of log.
        @param regexp: regular expression that lines are matched against.

        @return True if a matching line was found, False otherwise.
        """
        regexp_compiled = re.compile(regexp)
        starts = []
        line_number = 1
        self._start_line = 1
        for line in self.read_all_logs():
            if regexp_compiled.search(line):
                starts.append(line_number)
            line_number += 1
        if index < -len(starts):
            self._start_line = 1
            return False
        elif index >= len(starts):
            self._start_line = line_number
        else:
            self._start_line = starts[index]
        return True

    def set_start_by_reboot(self, index):
        """Set the start of logs (must be system log) based on reboot.

        @param index: reboot to start at, earliest log at 0. Negative
                numbers indicate reboots since end of log.

        @return True if the boot message line was found, False otherwise.
        """
        # Include a 'kernel' tag to avoid matching boot messages logged by
        # crosvm: https://crbug.com/817946
        return self.set_start_by_regexp(index,
                                        r'kernel:(.*000\])? Linux version \d')

    def set_start_by_current(self, relative=0):
        """Set start of logs based on the current last line.

        @param relative: line relative to current to start at. 1 means
                to start the log after this line.
        """
        count = self._start_line + relative
        for line in self.read_all_logs():
            count += 1
        self._start_line = count

    def get_logs(self):
        """Get logs since the start line.

        The start line is set by the set_start_* functions, or defaults to
        the start of the file if none of them were called.

        Lines are normalized to str, since Python 3 differentiates between
        bytes and str and some readers yield raw bytes.

        @return string of contents of file since start line.
        """
        logs = []
        for line in self.read_all_logs():
            # Subprocess-backed readers yield bytes; file-backed readers
            # yield str. Normalize so callers always get a single string.
            if isinstance(line, bytes):
                line = line.decode()
            logs.append(line)
        return ''.join(logs)

    def can_find(self, string):
        """Try to find string in the logs.

        @return boolean indicating if we found the string.
        """
        return string in self.get_logs()

    def get_last_msg(self, patterns, retries=0, sleep_seconds=0.2):
        """Search the logs and return the latest occurrence of a message
        matching one of the patterns.

        Args:
            patterns: A regexp or a list of regexps to search the logs with.
                The function returns as soon as it finds any match to one of
                the patterns; it will not search for the other patterns.
            retries: Number of times to retry if none of the patterns were
                found. Default is one attempt.
            sleep_seconds: Time to sleep between retries.

        Returns:
            The last occurrence of the first matching pattern. "None" if none
            of the patterns matched.
        """
        if not isinstance(patterns, (list, tuple)):
            patterns = [patterns]

        for retry in range(retries + 1):
            for pattern in patterns:
                regexp_compiled = re.compile(pattern)
                last_match = None
                for line in self.read_all_logs():
                    if regexp_compiled.search(line):
                        last_match = line
                if last_match:
                    return last_match
            time.sleep(sleep_seconds)

        return None

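# Illustrative use of the reader API above (a sketch only, not exercised by
# this module; the marker string and pattern are made up, and LogReader
# requires log rotation to already be paused, see LogRotationPauser below):
#
#   reader = LogReader()                  # or make_system_log_reader()
#   reader.set_start_by_current()         # remember where the log ends now
#   # ...exercise the device under test...
#   if not reader.can_find('made-up marker string'):
#       raise error.TestFail('expected log marker was not emitted')
#   last_line = reader.get_last_msg([r'made-up pattern \d+'], retries=3)
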
class LogRotationPauser(object):
    """
    Class to control when logs are rotated from either server or client.

    Assumes all setting of CLEANUP_LOGS_PAUSED_FILE is done by this class
    and that all calls to begin and end are properly nested. For instance,
    [ a.begin(), b.begin(), b.end(), a.end() ] is supported, but
    [ a.begin(), b.begin(), a.end(), b.end() ] is not. Redundant calls on
    the same instance, such as [ a.begin(), a.begin(), a.end() ], are
    supported.
    """
    def __init__(self, host=None):
        self._host = host
        self._begun = False
        self._is_nested = True

    def _run(self, command, *args, **dargs):
        if self._host:
            return self._host.run(command, *args, **dargs).exit_status
        else:
            return utils.system(command, *args, **dargs)

    def begin(self):
        """Make sure that log rotation is disabled."""
        if self._begun:
            return
        self._is_nested = (self._run(('[ -r %s ]' %
                                      CLEANUP_LOGS_PAUSED_FILE),
                                     ignore_status=True) == 0)
        if self._is_nested:
            logging.info('File %s was already present',
                         CLEANUP_LOGS_PAUSED_FILE)
        else:
            self._run('touch ' + CLEANUP_LOGS_PAUSED_FILE)
        self._begun = True

    def end(self):
        """Re-enable log rotation unless an outer caller still needs it off."""
        assert self._begun
        if not self._is_nested:
            self._run('rm -f ' + CLEANUP_LOGS_PAUSED_FILE)
        else:
            logging.info('Leaving existing %s file', CLEANUP_LOGS_PAUSED_FILE)
        self._begun = False

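# A usage sketch for LogRotationPauser (illustrative only; 'some_host' stands
# in for an autotest host object, while host=None runs the commands locally):
#
#   pauser = LogRotationPauser(host=some_host)
#   pauser.begin()
#   try:
#       pass  # ...collect logs, reboot the device, etc. ...
#   finally:
#       pauser.end()
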
class LogReader(AbstractLogReader):
    """Class to read traditional text log files.

    By default reads all logs from /var/log/messages.
    """

    def __init__(self, filename='/var/log/messages', include_rotated_logs=True):
        AbstractLogReader.__init__(self)
        self._filename = filename
        self._include_rotated_logs = include_rotated_logs
        if not os.path.exists(CLEANUP_LOGS_PAUSED_FILE):
            raise error.TestError('LogReader created without ' +
                                  CLEANUP_LOGS_PAUSED_FILE)

    def read_all_logs(self):
        log_files = []
        line_number = 0
        if self._include_rotated_logs:
            log_files.extend(utils.system_output(
                    'ls -tr1 %s.*' % self._filename,
                    ignore_status=True).splitlines())
        log_files.append(self._filename)
        for log_file in log_files:
            f = open(log_file)
            for line in f:
                line_number += 1
                if line_number < self._start_line:
                    continue
                # A caller may .send(offset) to peek 'offset' lines ahead;
                # buffer and replay those lines so none are skipped.
                peek = yield line
                if peek:
                    buf = [next(f) for _ in range(peek)]
                    yield buf[-1]
                    while buf:
                        yield buf.pop(0)
            f.close()


class SystemLogReader(AbstractLogReader):
    """A class to read plain-text system logs using the croslog command."""

    def read_all_logs(self):
        proc = subprocess.Popen(['croslog'], stdout=subprocess.PIPE)
        line_number = 0
        for line in proc.stdout:
            line_number += 1
            if line_number < self._start_line:
                continue
            yield line
        proc.terminate()


class JournalLogReader(AbstractLogReader):
    """A class to read logs stored by systemd-journald."""

    def read_all_logs(self):
        proc = subprocess.Popen(['journalctl'], stdout=subprocess.PIPE)
        line_number = 0
        for line in proc.stdout:
            line_number += 1
            if line_number < self._start_line:
                continue
            yield line
        proc.terminate()


class ContinuousLogReader(AbstractLogReader):
    """Reads a log file from where it left off last time.

    Useful when reading a growing log file.
    """
    def __init__(self, filename):
        AbstractLogReader.__init__(self)
        self._filename = filename
        # Keeps file offset of last read.
        self._last_pos = 0

    def read_all_logs(self):
        try:
            with open(self._filename, 'r') as f:
                f.seek(self._last_pos)
                for line in f:
                    yield line
                # Note that _last_pos is only recorded if every line in the
                # file has been read. Maybe put this into a context manager
                # so it's always recorded when needed.
                self._last_pos = f.tell()
        except IOError:
            logging.error('Could not read log file %s', self._filename)


def make_system_log_reader():
    """Create a system log reader.

    This will create a SystemLogReader(), JournalLogReader() or LogReader()
    depending on how logging is configured on the system.
    """
    if os.path.exists("/usr/sbin/croslog"):
        return SystemLogReader()
    elif os.path.exists("/var/log/journal"):
        return JournalLogReader()
    else:
        return LogReader()

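# Example of picking a reader without caring which logging backend is present
# (a sketch; 'process' is a placeholder for whatever the caller does with a
# line):
#
#   reader = make_system_log_reader()
#   reader.set_start_by_reboot(-1)        # logs since the most recent boot
#   print(reader.get_logs())
#
# ContinuousLogReader is useful for polling a growing file; each call to
# read_all_logs() yields only the lines appended since the previous call:
#
#   tail = ContinuousLogReader('/var/log/messages')
#   for line in tail.read_all_logs():
#       process(line)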