1*01826a49SYabin Cui# ################################################################ 2*01826a49SYabin Cui# Copyright (c) Meta Platforms, Inc. and affiliates. 3*01826a49SYabin Cui# All rights reserved. 4*01826a49SYabin Cui# 5*01826a49SYabin Cui# This source code is licensed under both the BSD-style license (found in the 6*01826a49SYabin Cui# LICENSE file in the root directory of this source tree) and the GPLv2 (found 7*01826a49SYabin Cui# in the COPYING file in the root directory of this source tree). 8*01826a49SYabin Cui# You may select, at your option, one of the above-listed licenses. 9*01826a49SYabin Cui# ########################################################################## 10*01826a49SYabin Cui 11*01826a49SYabin Cuiimport argparse 12*01826a49SYabin Cuiimport glob 13*01826a49SYabin Cuiimport json 14*01826a49SYabin Cuiimport os 15*01826a49SYabin Cuiimport time 16*01826a49SYabin Cuiimport pickle as pk 17*01826a49SYabin Cuiimport subprocess 18*01826a49SYabin Cuiimport urllib.request 19*01826a49SYabin Cui 20*01826a49SYabin Cui 21*01826a49SYabin CuiGITHUB_API_PR_URL = "https://api.github.com/repos/facebook/zstd/pulls?state=open" 22*01826a49SYabin CuiGITHUB_URL_TEMPLATE = "https://github.com/{}/zstd" 23*01826a49SYabin CuiRELEASE_BUILD = {"user": "facebook", "branch": "dev", "hash": None} 24*01826a49SYabin Cui 25*01826a49SYabin Cui# check to see if there are any new PRs every minute 26*01826a49SYabin CuiDEFAULT_MAX_API_CALL_FREQUENCY_SEC = 60 27*01826a49SYabin CuiPREVIOUS_PRS_FILENAME = "prev_prs.pk" 28*01826a49SYabin Cui 29*01826a49SYabin Cui# Not sure what the threshold for triggering alarms should be 30*01826a49SYabin Cui# 1% regression sounds like a little too sensitive but the desktop 31*01826a49SYabin Cui# that I'm running it on is pretty stable so I think this is fine 32*01826a49SYabin CuiCSPEED_REGRESSION_TOLERANCE = 0.01 33*01826a49SYabin CuiDSPEED_REGRESSION_TOLERANCE = 0.01 34*01826a49SYabin Cui 35*01826a49SYabin Cui 36*01826a49SYabin Cuidef get_new_open_pr_builds(prev_state=True): 37*01826a49SYabin Cui prev_prs = None 38*01826a49SYabin Cui if os.path.exists(PREVIOUS_PRS_FILENAME): 39*01826a49SYabin Cui with open(PREVIOUS_PRS_FILENAME, "rb") as f: 40*01826a49SYabin Cui prev_prs = pk.load(f) 41*01826a49SYabin Cui data = json.loads(urllib.request.urlopen(GITHUB_API_PR_URL).read().decode("utf-8")) 42*01826a49SYabin Cui prs = { 43*01826a49SYabin Cui d["url"]: { 44*01826a49SYabin Cui "user": d["user"]["login"], 45*01826a49SYabin Cui "branch": d["head"]["ref"], 46*01826a49SYabin Cui "hash": d["head"]["sha"].strip(), 47*01826a49SYabin Cui } 48*01826a49SYabin Cui for d in data 49*01826a49SYabin Cui } 50*01826a49SYabin Cui with open(PREVIOUS_PRS_FILENAME, "wb") as f: 51*01826a49SYabin Cui pk.dump(prs, f) 52*01826a49SYabin Cui if not prev_state or prev_prs == None: 53*01826a49SYabin Cui return list(prs.values()) 54*01826a49SYabin Cui return [pr for url, pr in prs.items() if url not in prev_prs or prev_prs[url] != pr] 55*01826a49SYabin Cui 56*01826a49SYabin Cui 57*01826a49SYabin Cuidef get_latest_hashes(): 58*01826a49SYabin Cui tmp = subprocess.run(["git", "log", "-1"], stdout=subprocess.PIPE).stdout.decode( 59*01826a49SYabin Cui "utf-8" 60*01826a49SYabin Cui ) 61*01826a49SYabin Cui sha1 = tmp.split("\n")[0].split(" ")[1] 62*01826a49SYabin Cui tmp = subprocess.run( 63*01826a49SYabin Cui ["git", "show", "{}^1".format(sha1)], stdout=subprocess.PIPE 64*01826a49SYabin Cui ).stdout.decode("utf-8") 65*01826a49SYabin Cui sha2 = tmp.split("\n")[0].split(" ")[1] 66*01826a49SYabin Cui tmp = subprocess.run( 67*01826a49SYabin Cui ["git", "show", "{}^2".format(sha1)], stdout=subprocess.PIPE 68*01826a49SYabin Cui ).stdout.decode("utf-8") 69*01826a49SYabin Cui sha3 = "" if len(tmp) == 0 else tmp.split("\n")[0].split(" ")[1] 70*01826a49SYabin Cui return [sha1.strip(), sha2.strip(), sha3.strip()] 71*01826a49SYabin Cui 72*01826a49SYabin Cui 73*01826a49SYabin Cuidef get_builds_for_latest_hash(): 74*01826a49SYabin Cui hashes = get_latest_hashes() 75*01826a49SYabin Cui for b in get_new_open_pr_builds(False): 76*01826a49SYabin Cui if b["hash"] in hashes: 77*01826a49SYabin Cui return [b] 78*01826a49SYabin Cui return [] 79*01826a49SYabin Cui 80*01826a49SYabin Cui 81*01826a49SYabin Cuidef clone_and_build(build): 82*01826a49SYabin Cui if build["user"] != None: 83*01826a49SYabin Cui github_url = GITHUB_URL_TEMPLATE.format(build["user"]) 84*01826a49SYabin Cui os.system( 85*01826a49SYabin Cui """ 86*01826a49SYabin Cui rm -rf zstd-{user}-{sha} && 87*01826a49SYabin Cui git clone {github_url} zstd-{user}-{sha} && 88*01826a49SYabin Cui cd zstd-{user}-{sha} && 89*01826a49SYabin Cui {checkout_command} 90*01826a49SYabin Cui make -j && 91*01826a49SYabin Cui cd ../ 92*01826a49SYabin Cui """.format( 93*01826a49SYabin Cui user=build["user"], 94*01826a49SYabin Cui github_url=github_url, 95*01826a49SYabin Cui sha=build["hash"], 96*01826a49SYabin Cui checkout_command="git checkout {} &&".format(build["hash"]) 97*01826a49SYabin Cui if build["hash"] != None 98*01826a49SYabin Cui else "", 99*01826a49SYabin Cui ) 100*01826a49SYabin Cui ) 101*01826a49SYabin Cui return "zstd-{user}-{sha}/zstd".format(user=build["user"], sha=build["hash"]) 102*01826a49SYabin Cui else: 103*01826a49SYabin Cui os.system("cd ../ && make -j && cd tests") 104*01826a49SYabin Cui return "../zstd" 105*01826a49SYabin Cui 106*01826a49SYabin Cui 107*01826a49SYabin Cuidef parse_benchmark_output(output): 108*01826a49SYabin Cui idx = [i for i, d in enumerate(output) if d == "MB/s"] 109*01826a49SYabin Cui return [float(output[idx[0] - 1]), float(output[idx[1] - 1])] 110*01826a49SYabin Cui 111*01826a49SYabin Cui 112*01826a49SYabin Cuidef benchmark_single(executable, level, filename): 113*01826a49SYabin Cui return parse_benchmark_output(( 114*01826a49SYabin Cui subprocess.run( 115*01826a49SYabin Cui [executable, "-qb{}".format(level), filename], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, 116*01826a49SYabin Cui ) 117*01826a49SYabin Cui .stdout.decode("utf-8") 118*01826a49SYabin Cui .split(" ") 119*01826a49SYabin Cui )) 120*01826a49SYabin Cui 121*01826a49SYabin Cui 122*01826a49SYabin Cuidef benchmark_n(executable, level, filename, n): 123*01826a49SYabin Cui speeds_arr = [benchmark_single(executable, level, filename) for _ in range(n)] 124*01826a49SYabin Cui cspeed, dspeed = max(b[0] for b in speeds_arr), max(b[1] for b in speeds_arr) 125*01826a49SYabin Cui print( 126*01826a49SYabin Cui "Bench (executable={} level={} filename={}, iterations={}):\n\t[cspeed: {} MB/s, dspeed: {} MB/s]".format( 127*01826a49SYabin Cui os.path.basename(executable), 128*01826a49SYabin Cui level, 129*01826a49SYabin Cui os.path.basename(filename), 130*01826a49SYabin Cui n, 131*01826a49SYabin Cui cspeed, 132*01826a49SYabin Cui dspeed, 133*01826a49SYabin Cui ) 134*01826a49SYabin Cui ) 135*01826a49SYabin Cui return (cspeed, dspeed) 136*01826a49SYabin Cui 137*01826a49SYabin Cui 138*01826a49SYabin Cuidef benchmark(build, filenames, levels, iterations): 139*01826a49SYabin Cui executable = clone_and_build(build) 140*01826a49SYabin Cui return [ 141*01826a49SYabin Cui [benchmark_n(executable, l, f, iterations) for f in filenames] for l in levels 142*01826a49SYabin Cui ] 143*01826a49SYabin Cui 144*01826a49SYabin Cui 145*01826a49SYabin Cuidef benchmark_dictionary_single(executable, filenames_directory, dictionary_filename, level, iterations): 146*01826a49SYabin Cui cspeeds, dspeeds = [], [] 147*01826a49SYabin Cui for _ in range(iterations): 148*01826a49SYabin Cui output = subprocess.run([executable, "-qb{}".format(level), "-D", dictionary_filename, "-r", filenames_directory], stdout=subprocess.PIPE).stdout.decode("utf-8").split(" ") 149*01826a49SYabin Cui cspeed, dspeed = parse_benchmark_output(output) 150*01826a49SYabin Cui cspeeds.append(cspeed) 151*01826a49SYabin Cui dspeeds.append(dspeed) 152*01826a49SYabin Cui max_cspeed, max_dspeed = max(cspeeds), max(dspeeds) 153*01826a49SYabin Cui print( 154*01826a49SYabin Cui "Bench (executable={} level={} filenames_directory={}, dictionary_filename={}, iterations={}):\n\t[cspeed: {} MB/s, dspeed: {} MB/s]".format( 155*01826a49SYabin Cui os.path.basename(executable), 156*01826a49SYabin Cui level, 157*01826a49SYabin Cui os.path.basename(filenames_directory), 158*01826a49SYabin Cui os.path.basename(dictionary_filename), 159*01826a49SYabin Cui iterations, 160*01826a49SYabin Cui max_cspeed, 161*01826a49SYabin Cui max_dspeed, 162*01826a49SYabin Cui ) 163*01826a49SYabin Cui ) 164*01826a49SYabin Cui return (max_cspeed, max_dspeed) 165*01826a49SYabin Cui 166*01826a49SYabin Cui 167*01826a49SYabin Cuidef benchmark_dictionary(build, filenames_directory, dictionary_filename, levels, iterations): 168*01826a49SYabin Cui executable = clone_and_build(build) 169*01826a49SYabin Cui return [benchmark_dictionary_single(executable, filenames_directory, dictionary_filename, l, iterations) for l in levels] 170*01826a49SYabin Cui 171*01826a49SYabin Cui 172*01826a49SYabin Cuidef parse_regressions_and_labels(old_cspeed, new_cspeed, old_dspeed, new_dspeed, baseline_build, test_build): 173*01826a49SYabin Cui cspeed_reg = (old_cspeed - new_cspeed) / old_cspeed 174*01826a49SYabin Cui dspeed_reg = (old_dspeed - new_dspeed) / old_dspeed 175*01826a49SYabin Cui baseline_label = "{}:{} ({})".format( 176*01826a49SYabin Cui baseline_build["user"], baseline_build["branch"], baseline_build["hash"] 177*01826a49SYabin Cui ) 178*01826a49SYabin Cui test_label = "{}:{} ({})".format( 179*01826a49SYabin Cui test_build["user"], test_build["branch"], test_build["hash"] 180*01826a49SYabin Cui ) 181*01826a49SYabin Cui return cspeed_reg, dspeed_reg, baseline_label, test_label 182*01826a49SYabin Cui 183*01826a49SYabin Cui 184*01826a49SYabin Cuidef get_regressions(baseline_build, test_build, iterations, filenames, levels): 185*01826a49SYabin Cui old = benchmark(baseline_build, filenames, levels, iterations) 186*01826a49SYabin Cui new = benchmark(test_build, filenames, levels, iterations) 187*01826a49SYabin Cui regressions = [] 188*01826a49SYabin Cui for j, level in enumerate(levels): 189*01826a49SYabin Cui for k, filename in enumerate(filenames): 190*01826a49SYabin Cui old_cspeed, old_dspeed = old[j][k] 191*01826a49SYabin Cui new_cspeed, new_dspeed = new[j][k] 192*01826a49SYabin Cui cspeed_reg, dspeed_reg, baseline_label, test_label = parse_regressions_and_labels( 193*01826a49SYabin Cui old_cspeed, new_cspeed, old_dspeed, new_dspeed, baseline_build, test_build 194*01826a49SYabin Cui ) 195*01826a49SYabin Cui if cspeed_reg > CSPEED_REGRESSION_TOLERANCE: 196*01826a49SYabin Cui regressions.append( 197*01826a49SYabin Cui "[COMPRESSION REGRESSION] (level={} filename={})\n\t{} -> {}\n\t{} -> {} ({:0.2f}%)".format( 198*01826a49SYabin Cui level, 199*01826a49SYabin Cui filename, 200*01826a49SYabin Cui baseline_label, 201*01826a49SYabin Cui test_label, 202*01826a49SYabin Cui old_cspeed, 203*01826a49SYabin Cui new_cspeed, 204*01826a49SYabin Cui cspeed_reg * 100.0, 205*01826a49SYabin Cui ) 206*01826a49SYabin Cui ) 207*01826a49SYabin Cui if dspeed_reg > DSPEED_REGRESSION_TOLERANCE: 208*01826a49SYabin Cui regressions.append( 209*01826a49SYabin Cui "[DECOMPRESSION REGRESSION] (level={} filename={})\n\t{} -> {}\n\t{} -> {} ({:0.2f}%)".format( 210*01826a49SYabin Cui level, 211*01826a49SYabin Cui filename, 212*01826a49SYabin Cui baseline_label, 213*01826a49SYabin Cui test_label, 214*01826a49SYabin Cui old_dspeed, 215*01826a49SYabin Cui new_dspeed, 216*01826a49SYabin Cui dspeed_reg * 100.0, 217*01826a49SYabin Cui ) 218*01826a49SYabin Cui ) 219*01826a49SYabin Cui return regressions 220*01826a49SYabin Cui 221*01826a49SYabin Cuidef get_regressions_dictionary(baseline_build, test_build, filenames_directory, dictionary_filename, levels, iterations): 222*01826a49SYabin Cui old = benchmark_dictionary(baseline_build, filenames_directory, dictionary_filename, levels, iterations) 223*01826a49SYabin Cui new = benchmark_dictionary(test_build, filenames_directory, dictionary_filename, levels, iterations) 224*01826a49SYabin Cui regressions = [] 225*01826a49SYabin Cui for j, level in enumerate(levels): 226*01826a49SYabin Cui old_cspeed, old_dspeed = old[j] 227*01826a49SYabin Cui new_cspeed, new_dspeed = new[j] 228*01826a49SYabin Cui cspeed_reg, dspeed_reg, baesline_label, test_label = parse_regressions_and_labels( 229*01826a49SYabin Cui old_cspeed, new_cspeed, old_dspeed, new_dspeed, baseline_build, test_build 230*01826a49SYabin Cui ) 231*01826a49SYabin Cui if cspeed_reg > CSPEED_REGRESSION_TOLERANCE: 232*01826a49SYabin Cui regressions.append( 233*01826a49SYabin Cui "[COMPRESSION REGRESSION] (level={} filenames_directory={} dictionary_filename={})\n\t{} -> {}\n\t{} -> {} ({:0.2f}%)".format( 234*01826a49SYabin Cui level, 235*01826a49SYabin Cui filenames_directory, 236*01826a49SYabin Cui dictionary_filename, 237*01826a49SYabin Cui baseline_label, 238*01826a49SYabin Cui test_label, 239*01826a49SYabin Cui old_cspeed, 240*01826a49SYabin Cui new_cspeed, 241*01826a49SYabin Cui cspeed_reg * 100.0, 242*01826a49SYabin Cui ) 243*01826a49SYabin Cui ) 244*01826a49SYabin Cui if dspeed_reg > DSPEED_REGRESSION_TOLERANCE: 245*01826a49SYabin Cui regressions.append( 246*01826a49SYabin Cui "[DECOMPRESSION REGRESSION] (level={} filenames_directory={} dictionary_filename={})\n\t{} -> {}\n\t{} -> {} ({:0.2f}%)".format( 247*01826a49SYabin Cui level, 248*01826a49SYabin Cui filenames_directory, 249*01826a49SYabin Cui dictionary_filename, 250*01826a49SYabin Cui baseline_label, 251*01826a49SYabin Cui test_label, 252*01826a49SYabin Cui old_dspeed, 253*01826a49SYabin Cui new_dspeed, 254*01826a49SYabin Cui dspeed_reg * 100.0, 255*01826a49SYabin Cui ) 256*01826a49SYabin Cui ) 257*01826a49SYabin Cui return regressions 258*01826a49SYabin Cui 259*01826a49SYabin Cui 260*01826a49SYabin Cuidef main(filenames, levels, iterations, builds=None, emails=None, continuous=False, frequency=DEFAULT_MAX_API_CALL_FREQUENCY_SEC, dictionary_filename=None): 261*01826a49SYabin Cui if builds == None: 262*01826a49SYabin Cui builds = get_new_open_pr_builds() 263*01826a49SYabin Cui while True: 264*01826a49SYabin Cui for test_build in builds: 265*01826a49SYabin Cui if dictionary_filename == None: 266*01826a49SYabin Cui regressions = get_regressions( 267*01826a49SYabin Cui RELEASE_BUILD, test_build, iterations, filenames, levels 268*01826a49SYabin Cui ) 269*01826a49SYabin Cui else: 270*01826a49SYabin Cui regressions = get_regressions_dictionary( 271*01826a49SYabin Cui RELEASE_BUILD, test_build, filenames, dictionary_filename, levels, iterations 272*01826a49SYabin Cui ) 273*01826a49SYabin Cui body = "\n".join(regressions) 274*01826a49SYabin Cui if len(regressions) > 0: 275*01826a49SYabin Cui if emails != None: 276*01826a49SYabin Cui os.system( 277*01826a49SYabin Cui """ 278*01826a49SYabin Cui echo "{}" | mutt -s "[zstd regression] caused by new pr" {} 279*01826a49SYabin Cui """.format( 280*01826a49SYabin Cui body, emails 281*01826a49SYabin Cui ) 282*01826a49SYabin Cui ) 283*01826a49SYabin Cui print("Emails sent to {}".format(emails)) 284*01826a49SYabin Cui print(body) 285*01826a49SYabin Cui if not continuous: 286*01826a49SYabin Cui break 287*01826a49SYabin Cui time.sleep(frequency) 288*01826a49SYabin Cui 289*01826a49SYabin Cui 290*01826a49SYabin Cuiif __name__ == "__main__": 291*01826a49SYabin Cui parser = argparse.ArgumentParser() 292*01826a49SYabin Cui 293*01826a49SYabin Cui parser.add_argument("--directory", help="directory with files to benchmark", default="golden-compression") 294*01826a49SYabin Cui parser.add_argument("--levels", help="levels to test e.g. ('1,2,3')", default="1") 295*01826a49SYabin Cui parser.add_argument("--iterations", help="number of benchmark iterations to run", default="1") 296*01826a49SYabin Cui parser.add_argument("--emails", help="email addresses of people who will be alerted upon regression. Only for continuous mode", default=None) 297*01826a49SYabin Cui parser.add_argument("--frequency", help="specifies the number of seconds to wait before each successive check for new PRs in continuous mode", default=DEFAULT_MAX_API_CALL_FREQUENCY_SEC) 298*01826a49SYabin Cui parser.add_argument("--mode", help="'fastmode', 'onetime', 'current', or 'continuous' (see README.md for details)", default="current") 299*01826a49SYabin Cui parser.add_argument("--dict", help="filename of dictionary to use (when set, this dictionary will be used to compress the files provided inside --directory)", default=None) 300*01826a49SYabin Cui 301*01826a49SYabin Cui args = parser.parse_args() 302*01826a49SYabin Cui filenames = args.directory 303*01826a49SYabin Cui levels = [int(l) for l in args.levels.split(",")] 304*01826a49SYabin Cui mode = args.mode 305*01826a49SYabin Cui iterations = int(args.iterations) 306*01826a49SYabin Cui emails = args.emails 307*01826a49SYabin Cui frequency = int(args.frequency) 308*01826a49SYabin Cui dictionary_filename = args.dict 309*01826a49SYabin Cui 310*01826a49SYabin Cui if dictionary_filename == None: 311*01826a49SYabin Cui filenames = glob.glob("{}/**".format(filenames)) 312*01826a49SYabin Cui 313*01826a49SYabin Cui if (len(filenames) == 0): 314*01826a49SYabin Cui print("0 files found") 315*01826a49SYabin Cui quit() 316*01826a49SYabin Cui 317*01826a49SYabin Cui if mode == "onetime": 318*01826a49SYabin Cui main(filenames, levels, iterations, frequency=frequenc, dictionary_filename=dictionary_filename) 319*01826a49SYabin Cui elif mode == "current": 320*01826a49SYabin Cui builds = [{"user": None, "branch": "None", "hash": None}] 321*01826a49SYabin Cui main(filenames, levels, iterations, builds, frequency=frequency, dictionary_filename=dictionary_filename) 322*01826a49SYabin Cui elif mode == "fastmode": 323*01826a49SYabin Cui builds = [{"user": "facebook", "branch": "release", "hash": None}] 324*01826a49SYabin Cui main(filenames, levels, iterations, builds, frequency=frequency, dictionary_filename=dictionary_filename) 325*01826a49SYabin Cui else: 326*01826a49SYabin Cui main(filenames, levels, iterations, None, emails, True, frequency=frequency, dictionary_filename=dictionary_filename) 327