# Lint as: python2, python3
# Copyright (c) 2017 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import collections
import json
import logging
import operator
import os
import re
import time

import numpy
from six.moves import range
from six.moves import urllib
from six.moves import zip

from autotest_lib.client.bin import utils
from autotest_lib.client.common_lib import error
from autotest_lib.client.common_lib import lsbrelease_utils
from autotest_lib.client.common_lib.cros import retry
from autotest_lib.client.cros.power import power_status
from autotest_lib.client.cros.power import power_utils

_HTML_CHART_STR = '''
<!DOCTYPE html>
<html>
<head>
<script type="text/javascript" src="https://www.gstatic.com/charts/loader.js">
</script>
<script type="text/javascript">
    google.charts.load('current', {{'packages':['corechart', 'table']}});
    google.charts.setOnLoadCallback(drawChart);
    function drawChart() {{
        var dataArray = [
{data}
        ];
        var data = google.visualization.arrayToDataTable(dataArray);
        var numDataCols = data.getNumberOfColumns() - 1;
        var unit = '{unit}';
        var options = {{
            width: 1600,
            height: 1200,
            lineWidth: 1,
            legend: {{ position: 'top', maxLines: 3 }},
            vAxis: {{ viewWindow: {{ min: 0 }}, title: '{type} ({unit})' }},
            hAxis: {{ viewWindow: {{ min: 0 }}, title: 'time (second)' }},
        }};
        var element = document.getElementById('{type}');
        var chart;
        if (unit == 'percent') {{
            options['isStacked'] = true;
            if (numDataCols == 2) {{
                options['colors'] = ['#d32f2f', '#43a047'];
            }} else if (numDataCols <= 4) {{
                options['colors'] = ['#d32f2f', '#f4c7c3', '#cddc39',
                                     '#43a047'];
            }} else if (numDataCols <= 9) {{
                options['colors'] = ['#d32f2f', '#e57373', '#f4c7c3',
                                     '#ffccbc', '#f0f4c3', '#c8e6c9',
                                     '#cddc39', '#81c784', '#43a047'];
            }}
            chart = new google.visualization.SteppedAreaChart(element);
        }} else if (data.getNumberOfRows() == 2 && unit == 'point') {{
            var newArray = [['key', 'value']];
            for (var i = 1; i < dataArray[0].length; i++) {{
                newArray.push([dataArray[0][i], dataArray[1][i]]);
            }}
            data = google.visualization.arrayToDataTable(newArray);
            delete options.width;
            delete options.height;
            chart = new google.visualization.Table(element);
        }} else {{
            chart = new google.visualization.LineChart(element);
        }}
        chart.draw(data, options);
    }}
</script>
</head>
<body>
<div id="{type}"></div>
</body>
</html>
'''

_HWID_LINK_STR = '''
<a href="http://goto.google.com/pdash-hwid?query={hwid}">
    Link to hwid lookup.
</a><br />
'''

_PDASH_LINK_STR = '''
<a href="http://chrome-power.appspot.com/dashboard?board={board}&test={test}&datetime={datetime}">
    Link to power dashboard.
</a><br />
'''

_TDASH_LINK_STR = '''
<a href="http://chrome-power.appspot.com/thermal_dashboard?note={note}">
    Link to thermal dashboard.
</a><br />
'''

# Global variable used to avoid emitting a duplicate dashboard link in
# BaseDashboard._save_html.
generated_dashboard_link = False
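
# Illustrative sketch, not part of the original module: the {data} slot in
# _HTML_CHART_STR above is filled by BaseDashboard._save_html with a Google
# Charts style array -- one header row of rail names followed by one row per
# sample. The rail names and readings below are made-up placeholders.
#
#     var dataArray = [
#             ['time', 'ppvar_vbat', 'pp3300_wlan'],
#             [0.0, 3.21, 0.05],
#             [1.0, 3.18, 0.04],
#     ];
#
# {unit} (e.g. 'watt') and {type} (e.g. 'power') are substituted from the
# powerlog dict, so each measurement type gets its own chart <div>.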


class BaseDashboard(object):
    """Base class that implements methods to prepare and upload data to the
    power dashboard.
    """

    def __init__(self, logger, testname, start_ts=None, resultsdir=None,
                 uploadurl=None):
        """Create BaseDashboard objects.

        Args:
            logger: object that stores the log. This will get converted to a
                    dictionary by self._convert()
            testname: name of current test
            start_ts: timestamp of when test started in seconds since epoch
            resultsdir: directory to save the power json
            uploadurl: url to upload power data
        """
        self._logger = logger
        self._testname = testname
        self._start_ts = start_ts if start_ts else time.time()
        self._resultsdir = resultsdir
        self._uploadurl = uploadurl

    def _create_powerlog_dict(self, raw_measurement):
        """Create powerlog dictionary from raw measurement data.
        Data format in go/power-dashboard-data.

        Args:
            raw_measurement: dictionary containing raw measurement data

        Returns:
            A dictionary of powerlog
        """
        powerlog_dict = {
                'format_version': 6,
                'timestamp': self._start_ts,
                'test': self._testname,
                'dut': self._create_dut_info_dict(
                        list(raw_measurement['data'].keys())),
                'power': raw_measurement,
        }

        return powerlog_dict

    def _create_dut_info_dict(self, power_rails):
        """Create a dictionary that contains information about the DUT.

        MUST be implemented in subclass.

        Args:
            power_rails: list of measured power rails

        Returns:
            DUT info dictionary
        """
        raise NotImplementedError

    def _save_json(self, powerlog_dict, resultsdir, filename='power_log.json'):
        """Convert powerlog dict to human-readable, formatted JSON and append
        it to <resultsdir>/<filename>.

        Args:
            powerlog_dict: dictionary of power data
            resultsdir: directory to save formatted JSON object
            filename: filename to append to
        """
        if not os.path.exists(resultsdir):
            raise error.TestError('resultsdir %s does not exist.' % resultsdir)
        filename = os.path.join(resultsdir, filename)
        json_str = json.dumps(powerlog_dict, indent=4, separators=(',', ': '),
                              ensure_ascii=False)
        json_str = utils.strip_non_printable(json_str)
        with open(filename, 'a') as f:
            f.write(json_str)

    def _generate_dashboard_link(self, powerlog_dict):
        """Generate links to the power and thermal dashboards."""
        # Use a global variable to generate this only once.
        global generated_dashboard_link
        if generated_dashboard_link:
            return ''
        generated_dashboard_link = True

        board = powerlog_dict['dut']['board']
        test = powerlog_dict['test']
        datetime = time.strftime('%Y%m%d%H%M',
                                 time.gmtime(powerlog_dict['timestamp']))
        hwid = powerlog_dict['dut']['sku']['hwid']
        note = powerlog_dict['dut']['note']

        html_str = '<!DOCTYPE html><html><body>'
        html_str += _HWID_LINK_STR.format(hwid=hwid)
        html_str += _PDASH_LINK_STR.format(board=board,
                                           test=test,
                                           datetime=datetime)

        if re.match('ThermalQual.(full|lab).*', note):
            html_str += _TDASH_LINK_STR.format(note=note)

        html_str += '</body></html>'

        return html_str
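
    # Illustrative sketch, not in the original source: shape of the powerlog
    # dict that _create_powerlog_dict() builds and that _save_json(),
    # _save_html() and _upload() consume. The keys come straight from the
    # methods in this file; the concrete values are made-up placeholders.
    #
    #     {
    #         'format_version': 6,
    #         'timestamp': 1513000000.0,
    #         'test': 'power_LoadTest',
    #         'dut': {...},              # from _create_dut_info_dict()
    #         'power': {                 # raw measurement from _convert()
    #             'sample_count': 2,
    #             'sample_duration': 1.0,
    #             'average': {'system': 3.2},
    #             'data': {'system': [3.1, 3.3]},
    #             'unit': {'system': 'watt'},
    #             'type': {'system': 'power'},
    #             'checkpoint': [['power_LoadTest'], ['power_LoadTest']],
    #         },
    #     }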

    def _save_html(self, powerlog_dict, resultsdir, filename='power_log.html'):
        """Convert powerlog dict to charts in an HTML page and append it to
        <resultsdir>/<filename>.

        Note that this results in multiple HTML documents in one file, but
        Chrome can render all of them in one page.

        Args:
            powerlog_dict: dictionary of power data
            resultsdir: directory to save HTML page
            filename: filename to append to
        """
        html_str = self._generate_dashboard_link(powerlog_dict)

        # Create dict from type to sorted list of rail names.
        rail_type = collections.defaultdict(list)
        for r, t in powerlog_dict['power']['type'].items():
            rail_type[t].append(r)
        for t in rail_type:
            rail_type[t] = sorted(rail_type[t])

        row_indent = ' ' * 12
        for t in rail_type:
            data_str_list = []

            # Generate rail name data string.
            header = ['time'] + rail_type[t]
            header_str = row_indent + "['" + "', '".join(header) + "']"
            data_str_list.append(header_str)

            # Generate measurements data string.
            for i in range(powerlog_dict['power']['sample_count']):
                row = [str(i * powerlog_dict['power']['sample_duration'])]
                for r in rail_type[t]:
                    row.append(str(powerlog_dict['power']['data'][r][i]))
                row_str = row_indent + '[' + ', '.join(row) + ']'
                data_str_list.append(row_str)

            data_str = ',\n'.join(data_str_list)
            unit = powerlog_dict['power']['unit'][rail_type[t][0]]
            html_str += _HTML_CHART_STR.format(data=data_str, unit=unit,
                                               type=t)

        if not os.path.exists(resultsdir):
            raise error.TestError('resultsdir %s does not exist.' % resultsdir)
        filename = os.path.join(resultsdir, filename)
        with open(filename, 'a') as f:
            f.write(html_str)

    def _upload(self, powerlog_dict, uploadurl):
        """Convert powerlog dict to minimal-size JSON and upload it to the
        dashboard.

        Args:
            powerlog_dict: dictionary of power data
            uploadurl: url to upload the power data
        """
        json_str = json.dumps(powerlog_dict, ensure_ascii=False)
        data_obj = {'data': utils.strip_non_printable(json_str)}
        encoded = urllib.parse.urlencode(data_obj).encode('utf-8')
        req = urllib.request.Request(uploadurl, encoded)

        @retry.retry(urllib.error.URLError,
                     raiselist=[urllib.error.HTTPError],
                     timeout_min=5.0,
                     delay_sec=1,
                     backoff=2)
        def _do_upload():
            urllib.request.urlopen(req)

        _do_upload()

    def _create_checkpoint_dict(self):
        """Create dictionary for checkpoint.

        MUST be implemented in subclass.

        @returns a dictionary of tags to their corresponding intervals in the
                 following format:
                 {
                     tag1: [(start1, end1), (start2, end2), ...],
                     tag2: [(start3, end3), (start4, end4), ...],
                     ...
                 }
        """
        raise NotImplementedError
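
    # Illustrative sketch, not in the original source: a worked example of how
    # _tag_with_checkpoint() below turns checkpoint intervals into a
    # per-sample tag list. The tag names and times are made up.
    #
    #     checkpoint_dict = {'load': [(0, 2.5)], 'idle': [(2.5, 5)]}
    #     power_dict = {'sample_count': 5, 'sample_duration': 1.0, ...}
    #
    # Samples fall at t = 0, 1, 2, 3, 4, so after tagging:
    #
    #     power_dict['checkpoint'] == [['load'], ['load'], ['load'],
    #                                  ['idle'], ['idle']]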

    def _tag_with_checkpoint(self, power_dict):
        """Tag power_dict with checkpoint data.

        This function translates the checkpoint intervals into a list of tags
        for each data point.

        @param power_dict: a dictionary with power data; assume this dictionary
                           has attributes 'sample_count' and 'sample_duration'.
        """
        checkpoint_dict = self._create_checkpoint_dict()

        # Create a list of checkpoint event tuples.
        # Tuple format: (checkpoint_name:str, event_time:float, is_start:bool)
        checkpoint_event_list = []
        for name, intervals in checkpoint_dict.items():
            for start, finish in intervals:
                checkpoint_event_list.append((name, start, True))
                checkpoint_event_list.append((name, finish, False))

        checkpoint_event_list = sorted(checkpoint_event_list,
                                       key=operator.itemgetter(1))

        # Add a placeholder checkpoint event at 1e9 seconds so that the loop
        # below never scans past the end of the list.
        checkpoint_event_list.append(('dummy', 1e9, True))

        interval_set = set()
        event_index = 0
        checkpoint_list = []
        for i in range(power_dict['sample_count']):
            curr_time = i * power_dict['sample_duration']

            # Process every checkpoint event up to the current point in time.
            while checkpoint_event_list[event_index][1] <= curr_time:
                name, _, is_start = checkpoint_event_list[event_index]
                if is_start:
                    interval_set.add(name)
                else:
                    interval_set.discard(name)
                event_index += 1

            checkpoint_list.append(list(interval_set))
        power_dict['checkpoint'] = checkpoint_list

    def _convert(self):
        """Convert data from the self._logger object to a raw power
        measurement dictionary.

        MUST be implemented in subclass.

        Return:
            raw measurement dictionary
        """
        raise NotImplementedError

    def upload(self):
        """Upload powerlog to dashboard and save data to results directory."""
        raw_measurement = self._convert()
        if raw_measurement is None:
            return

        powerlog_dict = self._create_powerlog_dict(raw_measurement)
        if self._resultsdir is not None:
            self._save_json(powerlog_dict, self._resultsdir)
            self._save_html(powerlog_dict, self._resultsdir)
        if self._uploadurl is not None:
            self._upload(powerlog_dict, self._uploadurl)
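
# Illustrative sketch, not in the original source: the minimal contract for a
# BaseDashboard subclass. A concrete dashboard implements _convert() and
# _create_dut_info_dict() (and _create_checkpoint_dict() if its _convert()
# calls _tag_with_checkpoint()), then callers simply invoke upload(). The
# class and argument names below are hypothetical.
#
#     class MyLoggerDashboard(BaseDashboard):
#         def _convert(self):
#             return {'sample_count': ..., 'sample_duration': ...,
#                     'average': {...}, 'data': {...},
#                     'unit': {...}, 'type': {...}}
#
#         def _create_dut_info_dict(self, power_rails):
#             return {'board': ..., 'version': {...}, 'sku': {...},
#                     'ina': {...}, 'note': ...}
#
#     MyLoggerDashboard(logger, 'my_test', resultsdir='/tmp/results').upload()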


class ClientTestDashboard(BaseDashboard):
    """Dashboard class for autotests that run on the client side."""

    def __init__(self, logger, testname, start_ts, resultsdir, uploadurl, note):
        """Create ClientTestDashboard objects.

        Args:
            logger: object that stores the log. This will get converted to a
                    dictionary by self._convert()
            testname: name of current test
            start_ts: timestamp of when test started in seconds since epoch
            resultsdir: directory to save the power json
            uploadurl: url to upload power data
            note: note for current test run
        """
        super(ClientTestDashboard, self).__init__(logger, testname, start_ts,
                                                  resultsdir, uploadurl)
        self._note = note

    def _create_dut_info_dict(self, power_rails):
        """Create a dictionary that contains information about the DUT.

        Args:
            power_rails: list of measured power rails

        Returns:
            DUT info dictionary
        """
        board = utils.get_board()
        platform = utils.get_platform()

        if not platform.startswith(board):
            board += '_' + platform

        if power_utils.has_hammer():
            board += '_hammer'

        dut_info_dict = {
                'board': board,
                'version': {
                        'hw': utils.get_hardware_revision(),
                        'milestone':
                        lsbrelease_utils.get_chromeos_release_milestone(),
                        'os': lsbrelease_utils.get_chromeos_release_version(),
                        'channel': lsbrelease_utils.get_chromeos_channel(),
                        'firmware': utils.get_firmware_version(),
                        'ec': utils.get_ec_version(),
                        'kernel': utils.get_kernel_version(),
                },
                'sku': {
                        'cpu': utils.get_cpu_name(),
                        'memory_size': utils.get_mem_total_gb(),
                        'storage_size':
                        utils.get_disk_size_gb(utils.get_root_device()),
                        'display_resolution': utils.get_screen_resolution(),
                        'hwid': utils.get_hardware_id(),
                },
                'ina': {
                        'version': 0,
                        'ina': power_rails,
                },
                'note': self._note,
        }

        if power_utils.has_battery():
            status = power_status.get_status()
            if status.battery:
                # Round the battery size to the nearest tenth because it
                # fluctuates on platforms without battery nominal voltage
                # data.
                dut_info_dict['sku']['battery_size'] = round(
                        status.battery.energy_full_design, 1)
                dut_info_dict['sku']['battery_shutdown_percent'] = \
                        power_utils.get_low_battery_shutdown_percent()
        return dut_info_dict


class MeasurementLoggerDashboard(ClientTestDashboard):
    """Dashboard class for power_status.MeasurementLogger."""

    def __init__(self, logger, testname, resultsdir, uploadurl, note):
        super(MeasurementLoggerDashboard, self).__init__(logger, testname,
                                                         None, resultsdir,
                                                         uploadurl, note)
        self._unit = None
        self._type = None
        self._padded_domains = None

    def _create_powerlog_dict(self, raw_measurement):
        """Create powerlog dictionary from raw measurement data.
        Data format in go/power-dashboard-data.

        Args:
            raw_measurement: dictionary containing raw measurement data

        Returns:
            A dictionary of powerlog
        """
        powerlog_dict = \
                super(MeasurementLoggerDashboard, self)._create_powerlog_dict(
                        raw_measurement)

        # Use the start time of the logger as the timestamp of powerlog dict.
        powerlog_dict['timestamp'] = self._logger.times[0]

        return powerlog_dict

    def _create_padded_domains(self):
        """Pad the domain names so that they sort better alphabetically on the
        dashboard."""
        pass

    def _create_checkpoint_dict(self):
        """Create dictionary for checkpoint."""
        start_time = self._logger.times[0]
        return self._logger._checkpoint_logger.convert_relative(start_time)

    def _convert(self):
        """Convert data from a power_status.MeasurementLogger object to a raw
        power measurement dictionary.

        Return:
            raw measurement dictionary, or None if there are no readings
        """
        if len(self._logger.readings) == 0:
            logging.warning('No readings in logger ... ignoring')
            return None

        power_dict = collections.defaultdict(dict, {
                'sample_count': len(self._logger.readings),
                'sample_duration': 0,
                'average': dict(),
                'data': dict(),
        })
        if power_dict['sample_count'] > 1:
            total_duration = self._logger.times[-1] - self._logger.times[0]
            power_dict['sample_duration'] = \
                    1.0 * total_duration / (power_dict['sample_count'] - 1)

        self._create_padded_domains()
        for i, domain_readings in enumerate(zip(*self._logger.readings)):
            if self._padded_domains:
                domain = self._padded_domains[i]
            else:
                domain = self._logger.domains[i]
            power_dict['data'][domain] = domain_readings
            power_dict['average'][domain] = \
                    numpy.average(power_dict['data'][domain])
            if self._unit:
                power_dict['unit'][domain] = self._unit
            if self._type:
                power_dict['type'][domain] = self._type

        self._tag_with_checkpoint(power_dict)
        return power_dict
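
# Illustrative sketch, not in the original source: a small worked example of
# MeasurementLoggerDashboard._convert() above. The numbers are made up. With
# logger.times == [0.0, 2.0, 4.0] and logger.readings == [[3.0], [3.2], [3.4]]
# for a single domain 'system':
#
#     sample_count      == 3
#     sample_duration   == 1.0 * (4.0 - 0.0) / (3 - 1) == 2.0
#     data['system']    == (3.0, 3.2, 3.4)
#     average['system'] == numpy.average((3.0, 3.2, 3.4)) == 3.2  # to within
#                                                                 # float error
#
# The per-domain unit/type come from self._unit and self._type, which the
# concrete subclasses below (e.g. PowerLoggerDashboard) set.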


class PowerLoggerDashboard(MeasurementLoggerDashboard):
    """Dashboard class for power_status.PowerLogger."""

    def __init__(self, logger, testname, resultsdir, uploadurl, note):
        super(PowerLoggerDashboard, self).__init__(logger, testname,
                                                   resultsdir, uploadurl, note)
        self._unit = 'watt'
        self._type = 'power'


class TempLoggerDashboard(MeasurementLoggerDashboard):
    """Dashboard class for power_status.TempLogger."""

    def __init__(self, logger, testname, resultsdir, uploadurl, note):
        super(TempLoggerDashboard, self).__init__(logger, testname, resultsdir,
                                                  uploadurl, note)
        self._unit = 'celsius'
        self._type = 'temperature'


class KeyvalLogger(power_status.MeasurementLogger):
    """Class for logging custom keyval data to the power dashboard.

    Each key should be unique and map to only one value.
    See power_SpeedoMeter2 for an implementation example.
    """

    def __init__(self, start_ts, end_ts=None):
        # Do not call the parent constructor to avoid making a new thread.
        self.times = [start_ts]
        self._start_ts = start_ts
        self._fixed_end_ts = end_ts  # prefer this (end time set by tests)
        self._updating_end_ts = time.time()  # updated when a new item is added
        self.keys = []
        self.values = []
        self.units = []
        self.types = []

    def is_unit_valid(self, unit):
        """Make sure that the unit of the data is a supported unit."""
        pattern = re.compile(r'^((kilo|mega|giga)hertz|'
                             r'percent|celsius|fps|rpm|point|'
                             r'(milli|micro)?(watt|volt|amp))$')
        return pattern.match(unit) is not None

    def add_item(self, key, value, unit, type_):
        """Add a data point to the logger.

        @param key: string, key of the data.
        @param value: float, measurement value.
        @param unit: string, unit for the data.
        @param type_: string, type of the data.
        """
        if not self.is_unit_valid(unit):
            raise error.TestError(
                    'Unit %s is not supported in power dashboard.' % unit)
        self.keys.append(key)
        self.values.append(value)
        self.units.append(unit)
        self.types.append(type_)
        self._updating_end_ts = time.time()

    def set_end(self, end_ts):
        """Set the end timestamp.

        If the end timestamp is not set explicitly by tests, the timestamp of
        the last added item is used instead.

        @param end_ts: end timestamp for KeyvalLogger.
        """
        self._fixed_end_ts = end_ts

    def calc(self, mtype=None):
        return {}

    def save_results(self, resultsdir=None, fname_prefix=None):
        pass


class KeyvalLoggerDashboard(MeasurementLoggerDashboard):
    """Dashboard class for custom keyval data in the KeyvalLogger class."""

    def _convert(self):
        """Convert KeyvalLogger data to a power dict."""
        if self._logger._fixed_end_ts:
            duration = self._logger._fixed_end_ts - self._logger._start_ts
        else:
            duration = self._logger._updating_end_ts - self._logger._start_ts

        power_dict = {
                # 2 samples to show a flat value spanning the duration of the
                # test.
                'sample_count': 2,
                'sample_duration': duration,
                'average': dict(zip(self._logger.keys, self._logger.values)),
                'data': dict(zip(self._logger.keys,
                                 ([v, v] for v in self._logger.values))),
                'unit': dict(zip(self._logger.keys, self._logger.units)),
                'type': dict(zip(self._logger.keys, self._logger.types)),
                'checkpoint': [[self._testname], [self._testname]],
        }
        return power_dict
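
# Illustrative usage sketch, not in the original source, loosely following
# what a test such as power_SpeedoMeter2 would do with KeyvalLogger and
# KeyvalLoggerDashboard above. The key names and values are made up.
#
#     logger = KeyvalLogger(start_ts=time.time())
#     logger.add_item('speedometer2_score', 150.0, 'point', 'perf')
#     logger.add_item('avg_temp_sensor0', 42.5, 'celsius', 'temperature')
#     logger.set_end(time.time())
#     KeyvalLoggerDashboard(logger, 'power_SpeedoMeter2', resultsdir,
#                           uploadurl, note='').upload()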


class CPUStatsLoggerDashboard(MeasurementLoggerDashboard):
    """Dashboard class for power_status.CPUStatsLogger."""

    @staticmethod
    def _split_domain(domain):
        """Return domain_type and domain_name for a given domain.

        Example: Split ................... to ........... and .......
                       cpuidle_C1E-SKL       cpuidle         C1E-SKL
                       cpuidle_0_3_C0        cpuidle_0_3     C0
                       cpupkg_C0_C1          cpupkg          C0_C1
                       cpufreq_0_3_1512000   cpufreq_0_3     1512000

        Args:
            domain: cpu stat domain name to split

        Return:
            tuple of domain_type and domain_name
        """
        # Regex explanation
        # .*?        matches the type non-greedily                  (cpuidle)
        # (?:_\d+)*  matches the cpu part; ?: makes it non-capturing (_0_1_2_3)
        # .*         matches the name greedily                      (C0_C1)
        return re.match(r'(.*?(?:_\d+)*)_(.*)', domain).groups()

    def _convert(self):
        power_dict = super(CPUStatsLoggerDashboard, self)._convert()
        if not power_dict or not power_dict['data']:
            return None
        remove_rail = []
        for rail in power_dict['data']:
            if rail.startswith('wavg_cpu'):
                power_dict['type'][rail] = 'cpufreq_wavg'
                power_dict['unit'][rail] = 'kilohertz'
            elif rail.startswith('wavg_gpu'):
                power_dict['type'][rail] = 'gpufreq_wavg'
                power_dict['unit'][rail] = 'megahertz'
            else:
                # Remove all aggregate stats; currently only 'non-c0' and
                # 'non-C0_C1'.
                if self._split_domain(rail)[1].startswith('non'):
                    remove_rail.append(rail)
                    continue
                power_dict['type'][rail] = self._split_domain(rail)[0]
                power_dict['unit'][rail] = 'percent'
        for rail in remove_rail:
            del power_dict['data'][rail]
            del power_dict['average'][rail]
        return power_dict

    def _create_padded_domains(self):
        """Pad the numbers in the domain names with dots so that they sort
        alphabetically.

        Example:
            cpuidle_C1-SKL, cpuidle_C1E-SKL, cpuidle_C2-SKL, cpuidle_C10-SKL
            will be changed to
            cpuidle_C.1-SKL, cpuidle_C.1E-SKL, cpuidle_C.2-SKL, cpuidle_C10-SKL
            which puts them in alphabetical order.
        """
        longest = collections.defaultdict(int)
        searcher = re.compile(r'\d+')
        number_strs = []
        splitted_domains = \
                [self._split_domain(domain) for domain in self._logger.domains]
        for domain_type, domain_name in splitted_domains:
            result = searcher.search(domain_name)
            if not result:
                number_strs.append('')
                continue
            number_str = result.group(0)
            number_strs.append(number_str)
            longest[domain_type] = max(longest[domain_type], len(number_str))

        self._padded_domains = []
        for i in range(len(self._logger.domains)):
            if not number_strs[i]:
                self._padded_domains.append(self._logger.domains[i])
                continue

            domain_type, domain_name = splitted_domains[i]
            formatter_component = '{:.>%ds}' % longest[domain_type]

            # Change "cpuidle_C1E-SKL" to "cpuidle_C{:.>2s}E-SKL".
            formatter_str = domain_type + '_' + \
                    searcher.sub(formatter_component, domain_name, count=1)

            # Run "cpuidle_C{:.>2s}E-SKL".format("1") to get "cpuidle_C.1E-SKL".
            self._padded_domains.append(formatter_str.format(number_strs[i]))


class VideoFpsLoggerDashboard(MeasurementLoggerDashboard):
    """Dashboard class for power_status.VideoFpsLogger."""

    def __init__(self, logger, testname, resultsdir, uploadurl, note):
        super(VideoFpsLoggerDashboard, self).__init__(
                logger, testname, resultsdir, uploadurl, note)
        self._unit = 'fps'
        self._type = 'fps'


class FanRpmLoggerDashboard(MeasurementLoggerDashboard):
    """Dashboard class for power_status.FanRpmLogger."""

    def __init__(self, logger, testname, resultsdir, uploadurl, note):
        super(FanRpmLoggerDashboard, self).__init__(
                logger, testname, resultsdir, uploadurl, note)
        self._unit = 'rpm'
        self._type = 'fan'


class FreeMemoryLoggerDashboard(MeasurementLoggerDashboard):
    """Dashboard class for power_status.FreeMemoryLogger."""

    def __init__(self, logger, testname, resultsdir, uploadurl, note):
        # Don't upload to the dashboard.
        uploadurl = None
        super(FreeMemoryLoggerDashboard,
              self).__init__(logger, testname, resultsdir, uploadurl, note)
        self._unit = 'point'
        self._type = 'mem'


dashboard_factory = None


def get_dashboard_factory():
    """Return the shared LoggerDashboardFactory, creating it on first use."""
    global dashboard_factory
    if not dashboard_factory:
        dashboard_factory = LoggerDashboardFactory()
    return dashboard_factory


class LoggerDashboardFactory(object):
    """Class to generate client test dashboard objects from a logger."""

    loggerToDashboardDict = {
            power_status.CPUStatsLogger: CPUStatsLoggerDashboard,
            power_status.PowerLogger: PowerLoggerDashboard,
            power_status.TempLogger: TempLoggerDashboard,
            power_status.VideoFpsLogger: VideoFpsLoggerDashboard,
            power_status.FanRpmLogger: FanRpmLoggerDashboard,
            power_status.FreeMemoryLogger: FreeMemoryLoggerDashboard,
            KeyvalLogger: KeyvalLoggerDashboard,
    }

    def registerDataType(self, logger_type, dashboard_type):
        """Register a new type of dashboard to the factory.

        @param logger_type: Type of logger to register
        @param dashboard_type: Type of dashboard to register
        """
        self.loggerToDashboardDict[logger_type] = dashboard_type

    def createDashboard(self, logger, testname, resultsdir=None,
                        uploadurl=None, note=''):
        """Create a dashboard object for the given logger."""
        if uploadurl is None:
            uploadurl = 'http://chrome-power.appspot.com/rapl'
        dashboard = self.loggerToDashboardDict[type(logger)]
        return dashboard(logger, testname, resultsdir, uploadurl, note)


def generate_parallax_report(output_dir):
    """Generate a parallax report in the result directory."""
    parallax_url = 'http://crospower.page.link/parallax'
    local_dir = '/usr/local'
    parallax_tar = os.path.join(local_dir, 'parallax.tar.xz')
    parallax_dir = os.path.join(local_dir, 'report_analysis')
    parallax_exe = os.path.join(parallax_dir, 'process.py')
    results_dir = os.path.join(output_dir, 'results')
    parallax_html = os.path.join(results_dir, 'parallax.html')

    # Download the source.
    cmd = ' '.join(['wget', parallax_url, '-O', parallax_tar])
    utils.run(cmd)

    # Extract the tool.
    cmd = ' '.join(['tar', 'xf', parallax_tar, '-C', local_dir])
    utils.run(cmd)

    # Run the tool.
    cmd = ' '.join([
            'python', parallax_exe, '-t', 'PowerQual', '-p', output_dir, '-o',
            parallax_html
    ])
    utils.run(cmd)

    # Clean up the tool.
    cmd = ' '.join(['rm', '-rf', parallax_tar, parallax_dir])
    utils.run(cmd)
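
# Illustrative usage sketch, not in the original source: tests typically
# obtain a dashboard through the factory above rather than instantiating the
# dashboard classes directly. The test name and paths below are hypothetical.
#
#     logger = power_status.PowerLogger(...)
#     ...  # run the workload while the logger collects readings
#     dashboard = get_dashboard_factory().createDashboard(
#             logger, testname='power_Example', resultsdir=self.resultsdir,
#             note='example run')
#     dashboard.upload()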