"""Tests for the compileall module (bytecode-compiling whole directory trees)."""
import compileall
import contextlib
import filecmp
import importlib.util
import io
import os
import pathlib
import py_compile
import shutil
import struct
import sys
import tempfile
import test.test_importlib.util
import time
import unittest

from unittest import mock, skipUnless
try:
    # compileall relies on ProcessPoolExecutor if ProcessPoolExecutor exists
    # and it can function.
    from concurrent.futures import ProcessPoolExecutor
    from concurrent.futures.process import _check_system_limits
    _check_system_limits()
    _have_multiprocessing = True
except (NotImplementedError, ModuleNotFoundError):
    # Platforms without working process pools (e.g. no os.fork / sem support)
    # fall back to serial compilation; parallel tests are skipped.
    _have_multiprocessing = False

from test import support
from test.support import os_helper
from test.support import script_helper
from test.test_py_compile import without_source_date_epoch
from test.test_py_compile import SourceDateEpochTestMeta


def get_pyc(script, opt):
    """Return the PEP 3147 cache path for *script* at optimization *opt*."""
    if not opt:
        # Replace None and 0 with ''
        opt = ''
    return importlib.util.cache_from_source(script, optimization=opt)


def get_pycs(script):
    """Return the cache paths for *script* at optimization levels 0, 1 and 2."""
    return [get_pyc(script, opt) for opt in (0, 1, 2)]


def is_hardlink(filename1, filename2):
    """Returns True if two files have the same inode (hardlink)"""
    inode1 = os.stat(filename1).st_ino
    inode2 = os.stat(filename2).st_ino
    return inode1 == inode2


class CompileallTestsBase:
    # Shared tests for compileall's Python API; concrete subclasses below
    # run them with and without SOURCE_DATE_EPOCH set.

    def setUp(self):
        # Layout: directory/_test.py, directory/_test2.py,
        # directory/_subdir/_test3.py — all with identical contents.
        self.directory = tempfile.mkdtemp()
        self.source_path = os.path.join(self.directory, '_test.py')
        self.bc_path = importlib.util.cache_from_source(self.source_path)
        with open(self.source_path, 'w', encoding="utf-8") as file:
            file.write('x = 123\n')
        self.source_path2 = os.path.join(self.directory, '_test2.py')
        self.bc_path2 = importlib.util.cache_from_source(self.source_path2)
        shutil.copyfile(self.source_path, self.source_path2)
        self.subdirectory = os.path.join(self.directory, '_subdir')
        os.mkdir(self.subdirectory)
        self.source_path3 = os.path.join(self.subdirectory, '_test3.py')
        shutil.copyfile(self.source_path, self.source_path3)

    def tearDown(self):
        shutil.rmtree(self.directory)

    def add_bad_source_file(self):
        """Create a module with a syntax error next to the good ones."""
        self.bad_source_path = os.path.join(self.directory, '_test_bad.py')
        with open(self.bad_source_path, 'w', encoding="utf-8") as file:
            file.write('x (\n')

    def timestamp_metadata(self):
        """Return (actual, expected) 12-byte pyc headers for self.source_path.

        The header is magic number + flags word + 32-bit source mtime
        (timestamp-based invalidation layout, PEP 552 flags == 0).
        """
        with open(self.bc_path, 'rb') as file:
            data = file.read(12)
        mtime = int(os.stat(self.source_path).st_mtime)
        compare = struct.pack('<4sLL', importlib.util.MAGIC_NUMBER, 0,
                              mtime & 0xFFFF_FFFF)
        return data, compare

    def test_year_2038_mtime_compilation(self):
        # Test to make sure we can handle mtimes larger than what a 32-bit
        # signed number can hold as part of bpo-34990
        try:
            os.utime(self.source_path, (2**32 - 1, 2**32 - 1))
        except (OverflowError, OSError):
            self.skipTest("filesystem doesn't support timestamps near 2**32")
        with contextlib.redirect_stdout(io.StringIO()):
            self.assertTrue(compileall.compile_file(self.source_path))

    def test_larger_than_32_bit_times(self):
        # This is similar to the test above but we skip it if the OS doesn't
        # support modification times larger than 32-bits.
98 try: 99 os.utime(self.source_path, (2**35, 2**35)) 100 except (OverflowError, OSError): 101 self.skipTest("filesystem doesn't support large timestamps") 102 with contextlib.redirect_stdout(io.StringIO()): 103 self.assertTrue(compileall.compile_file(self.source_path)) 104 105 def recreation_check(self, metadata): 106 """Check that compileall recreates bytecode when the new metadata is 107 used.""" 108 if os.environ.get('SOURCE_DATE_EPOCH'): 109 raise unittest.SkipTest('SOURCE_DATE_EPOCH is set') 110 py_compile.compile(self.source_path) 111 self.assertEqual(*self.timestamp_metadata()) 112 with open(self.bc_path, 'rb') as file: 113 bc = file.read()[len(metadata):] 114 with open(self.bc_path, 'wb') as file: 115 file.write(metadata) 116 file.write(bc) 117 self.assertNotEqual(*self.timestamp_metadata()) 118 compileall.compile_dir(self.directory, force=False, quiet=True) 119 self.assertTrue(*self.timestamp_metadata()) 120 121 def test_mtime(self): 122 # Test a change in mtime leads to a new .pyc. 123 self.recreation_check(struct.pack('<4sLL', importlib.util.MAGIC_NUMBER, 124 0, 1)) 125 126 def test_magic_number(self): 127 # Test a change in mtime leads to a new .pyc. 
128 self.recreation_check(b'\0\0\0\0') 129 130 def test_compile_files(self): 131 # Test compiling a single file, and complete directory 132 for fn in (self.bc_path, self.bc_path2): 133 try: 134 os.unlink(fn) 135 except: 136 pass 137 self.assertTrue(compileall.compile_file(self.source_path, 138 force=False, quiet=True)) 139 self.assertTrue(os.path.isfile(self.bc_path) and 140 not os.path.isfile(self.bc_path2)) 141 os.unlink(self.bc_path) 142 self.assertTrue(compileall.compile_dir(self.directory, force=False, 143 quiet=True)) 144 self.assertTrue(os.path.isfile(self.bc_path) and 145 os.path.isfile(self.bc_path2)) 146 os.unlink(self.bc_path) 147 os.unlink(self.bc_path2) 148 # Test against bad files 149 self.add_bad_source_file() 150 self.assertFalse(compileall.compile_file(self.bad_source_path, 151 force=False, quiet=2)) 152 self.assertFalse(compileall.compile_dir(self.directory, 153 force=False, quiet=2)) 154 155 def test_compile_file_pathlike(self): 156 self.assertFalse(os.path.isfile(self.bc_path)) 157 # we should also test the output 158 with support.captured_stdout() as stdout: 159 self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path))) 160 self.assertRegex(stdout.getvalue(), r'Compiling ([^WindowsPath|PosixPath].*)') 161 self.assertTrue(os.path.isfile(self.bc_path)) 162 163 def test_compile_file_pathlike_ddir(self): 164 self.assertFalse(os.path.isfile(self.bc_path)) 165 self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path), 166 ddir=pathlib.Path('ddir_path'), 167 quiet=2)) 168 self.assertTrue(os.path.isfile(self.bc_path)) 169 170 def test_compile_file_pathlike_stripdir(self): 171 self.assertFalse(os.path.isfile(self.bc_path)) 172 self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path), 173 stripdir=pathlib.Path('stripdir_path'), 174 quiet=2)) 175 self.assertTrue(os.path.isfile(self.bc_path)) 176 177 def test_compile_file_pathlike_prependdir(self): 178 self.assertFalse(os.path.isfile(self.bc_path)) 179 
        self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path),
                                                prependdir=pathlib.Path('prependdir_path'),
                                                quiet=2))
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_path(self):
        with test.test_importlib.util.import_state(path=[self.directory]):
            self.assertTrue(compileall.compile_path(quiet=2))

        with test.test_importlib.util.import_state(path=[self.directory]):
            self.add_bad_source_file()
            # A syntax error anywhere on the path must make compile_path
            # report failure.
            self.assertFalse(compileall.compile_path(skip_curdir=False,
                                                     force=True, quiet=2))

    def test_no_pycache_in_non_package(self):
        # Bug 8563 reported that __pycache__ directories got created by
        # compile_file() for non-.py files.
        data_dir = os.path.join(self.directory, 'data')
        data_file = os.path.join(data_dir, 'file')
        os.mkdir(data_dir)
        # touch data/file
        with open(data_file, 'wb'):
            pass
        compileall.compile_file(data_file)
        self.assertFalse(os.path.exists(os.path.join(data_dir, '__pycache__')))

    def test_compile_file_encoding_fallback(self):
        # Bug 44666 reported that compile_file failed when sys.stdout.encoding is None
        self.add_bad_source_file()
        with contextlib.redirect_stdout(io.StringIO()):
            self.assertFalse(compileall.compile_file(self.bad_source_path))

    def test_optimize(self):
        # make sure compiling with different optimization settings than the
        # interpreter's creates the correct file names
        optimize, opt = (1, 1) if __debug__ else (0, '')
        compileall.compile_dir(self.directory, quiet=True, optimize=optimize)
        cached = importlib.util.cache_from_source(self.source_path,
                                                  optimization=opt)
        self.assertTrue(os.path.isfile(cached))
        cached2 = importlib.util.cache_from_source(self.source_path2,
                                                   optimization=opt)
        self.assertTrue(os.path.isfile(cached2))
        cached3 = importlib.util.cache_from_source(self.source_path3,
                                                   optimization=opt)
        self.assertTrue(os.path.isfile(cached3))

    def test_compile_dir_pathlike(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        with support.captured_stdout() as stdout:
            compileall.compile_dir(pathlib.Path(self.directory))
        line = stdout.getvalue().splitlines()[0]
        # NOTE(review): character class, not alternation — it only checks the
        # listed path does not start with a Path repr. Kept as written.
        self.assertRegex(line, r'Listing ([^WindowsPath|PosixPath].*)')
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_dir_pathlike_stripdir(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        self.assertTrue(compileall.compile_dir(pathlib.Path(self.directory),
                                               stripdir=pathlib.Path('stripdir_path'),
                                               quiet=2))
        self.assertTrue(os.path.isfile(self.bc_path))

    def test_compile_dir_pathlike_prependdir(self):
        self.assertFalse(os.path.isfile(self.bc_path))
        self.assertTrue(compileall.compile_dir(pathlib.Path(self.directory),
                                               prependdir=pathlib.Path('prependdir_path'),
                                               quiet=2))
        self.assertTrue(os.path.isfile(self.bc_path))

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor')
    def test_compile_pool_called(self, pool_mock):
        # workers > 1 must go through the process pool.
        compileall.compile_dir(self.directory, quiet=True, workers=5)
        self.assertTrue(pool_mock.called)

    def test_compile_workers_non_positive(self):
        with self.assertRaisesRegex(ValueError,
                                    "workers must be greater or equal to 0"):
            compileall.compile_dir(self.directory, workers=-1)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor')
    def test_compile_workers_cpu_count(self, pool_mock):
        # workers=0 delegates worker-count selection to the executor.
        compileall.compile_dir(self.directory, quiet=True, workers=0)
        self.assertEqual(pool_mock.call_args[1]['max_workers'], None)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor')
    @mock.patch('compileall.compile_file')
    def test_compile_one_worker(self, compile_file_mock, pool_mock):
        # The default (workers=1) must compile in-process, without a pool.
        compileall.compile_dir(self.directory, quiet=True)
        self.assertFalse(pool_mock.called)
        self.assertTrue(compile_file_mock.called)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor', new=None)
    @mock.patch('compileall.compile_file')
    def test_compile_missing_multiprocessing(self, compile_file_mock):
        # With the executor unavailable, compile_dir must fall back to
        # serial compilation instead of failing.
        compileall.compile_dir(self.directory, quiet=True, workers=5)
        self.assertTrue(compile_file_mock.called)

    def test_compile_dir_maxlevels(self):
        # Test the actual impact of maxlevels parameter
        depth = 3
        path = self.directory
        for i in range(1, depth + 1):
            path = os.path.join(path, f"dir_{i}")
            source = os.path.join(path, 'script.py')
            os.mkdir(path)
            shutil.copyfile(self.source_path, source)
        # `source` is now the deepest script (depth levels down).
        pyc_filename = importlib.util.cache_from_source(source)

        compileall.compile_dir(self.directory, quiet=True, maxlevels=depth - 1)
        self.assertFalse(os.path.isfile(pyc_filename))

        compileall.compile_dir(self.directory, quiet=True, maxlevels=depth)
        self.assertTrue(os.path.isfile(pyc_filename))

    def _test_ddir_only(self, *, ddir, parallel=True):
        """Recursive compile_dir ddir must contain package paths; bpo39769."""
        fullpath = ["test", "foo"]
        path = self.directory
        mods = []
        for subdir in fullpath:
            path = os.path.join(path, subdir)
            os.mkdir(path)
            script_helper.make_script(path, "__init__", "")
            # Each module raises ZeroDivisionError on import so the traceback
            # reveals the co_filename baked into the pyc.
            mods.append(script_helper.make_script(path, "mod",
                                                  "def fn(): 1/0\nfn()\n"))
        compileall.compile_dir(
            self.directory, quiet=True, ddir=ddir,
            workers=2 if parallel else 1)
        self.assertTrue(mods)
        for mod in mods:
            self.assertTrue(mod.startswith(self.directory), mod)
            modcode = importlib.util.cache_from_source(mod)
            modpath = mod[len(self.directory+os.sep):]
            _, _, err = script_helper.assert_python_failure(modcode)
            expected_in = os.path.join(ddir, modpath)
            mod_code_obj = test.test_importlib.util.get_code_from_pyc(modcode)
            self.assertEqual(mod_code_obj.co_filename, expected_in)
            self.assertIn(f'"{expected_in}"', os.fsdecode(err))

    def test_ddir_only_one_worker(self):
        """Recursive compile_dir ddir= contains package paths; bpo39769."""
        return self._test_ddir_only(ddir="<a prefix>", parallel=False)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    def test_ddir_multiple_workers(self):
        """Recursive compile_dir ddir= contains package paths; bpo39769."""
        return self._test_ddir_only(ddir="<a prefix>", parallel=True)

    def test_ddir_empty_only_one_worker(self):
        """Recursive compile_dir ddir='' contains package paths; bpo39769."""
        return self._test_ddir_only(ddir="", parallel=False)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    def test_ddir_empty_multiple_workers(self):
        """Recursive compile_dir ddir='' contains package paths; bpo39769."""
        return self._test_ddir_only(ddir="", parallel=True)

    def test_strip_only(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        stripdir = os.path.join(self.directory, *fullpath[:2])
        compileall.compile_dir(path, quiet=True, stripdir=stripdir)
        rc, out, err = script_helper.assert_python_failure(bc)
        # The stripped prefix must not appear in the traceback path.
        expected_in = os.path.join(*fullpath[2:])
        self.assertIn(
            expected_in,
            str(err, encoding=sys.getdefaultencoding())
        )
        self.assertNotIn(
            stripdir,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_prepend_only(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        prependdir = "/foo"
        compileall.compile_dir(path, quiet=True, prependdir=prependdir)
        rc, out, err = script_helper.assert_python_failure(bc)
        # The prepended prefix must show up before the full original path.
        expected_in = os.path.join(prependdir, self.directory, *fullpath)
        self.assertIn(
            expected_in,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_strip_and_prepend(self):
        fullpath = ["test", "build", "real", "path"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script = script_helper.make_script(path, "test", "1 / 0")
        bc = importlib.util.cache_from_source(script)
        stripdir = os.path.join(self.directory, *fullpath[:2])
        prependdir = "/foo"
        compileall.compile_dir(path, quiet=True,
                               stripdir=stripdir, prependdir=prependdir)
        rc, out, err = script_helper.assert_python_failure(bc)
        # Stripped prefix replaced by the prepended one in the traceback.
        expected_in = os.path.join(prependdir, *fullpath[2:])
        self.assertIn(
            expected_in,
            str(err, encoding=sys.getdefaultencoding())
        )
        self.assertNotIn(
            stripdir,
            str(err, encoding=sys.getdefaultencoding())
        )

    def test_strip_prepend_and_ddir(self):
        # ddir is mutually exclusive with stripdir/prependdir.
        fullpath = ["test", "build", "real", "path", "ddir"]
        path = os.path.join(self.directory, *fullpath)
        os.makedirs(path)
        script_helper.make_script(path, "test", "1 / 0")
        with self.assertRaises(ValueError):
            compileall.compile_dir(path, quiet=True, ddir="/bar",
                                   stripdir="/foo", prependdir="/bar")

    def test_multiple_optimization_levels(self):
        script = script_helper.make_script(self.directory,
                                           "test_optimization",
                                           "a = 0")
        bc = []
        # bc[0] is the unoptimized pyc; bc[1..3] the opt-N variants.
        for opt_level in "", 1, 2, 3:
            bc.append(importlib.util.cache_from_source(script,
                                                       optimization=opt_level))
        test_combinations = [[0, 1], [1, 2], [0, 2], [0, 1, 2]]
        for opt_combination in test_combinations:
            compileall.compile_file(script, quiet=True,
                                    optimize=opt_combination)
            for opt_level in opt_combination:
                self.assertTrue(os.path.isfile(bc[opt_level]))
                try:
                    # Best-effort cleanup between combinations.
                    os.unlink(bc[opt_level])
                except Exception:
                    pass

    @os_helper.skip_unless_symlink
    def test_ignore_symlink_destination(self):
        # Create folders for allowed files, symlinks and prohibited area
        allowed_path = os.path.join(self.directory, "test", "dir", "allowed")
        symlinks_path = os.path.join(self.directory, "test", "dir", "symlinks")
        prohibited_path = os.path.join(self.directory, "test", "dir", "prohibited")
        os.makedirs(allowed_path)
        os.makedirs(symlinks_path)
        os.makedirs(prohibited_path)

        # Create scripts and symlinks and remember their byte-compiled versions
        allowed_script = script_helper.make_script(allowed_path, "test_allowed", "a = 0")
        prohibited_script = script_helper.make_script(prohibited_path, "test_prohibited", "a = 0")
        allowed_symlink = os.path.join(symlinks_path, "test_allowed.py")
        prohibited_symlink = os.path.join(symlinks_path, "test_prohibited.py")
        os.symlink(allowed_script, allowed_symlink)
        os.symlink(prohibited_script, prohibited_symlink)
        allowed_bc = importlib.util.cache_from_source(allowed_symlink)
        prohibited_bc = importlib.util.cache_from_source(prohibited_symlink)

        # Only symlinks resolving under limit_sl_dest may be compiled.
        compileall.compile_dir(symlinks_path, quiet=True, limit_sl_dest=allowed_path)

        self.assertTrue(os.path.isfile(allowed_bc))
        self.assertFalse(os.path.isfile(prohibited_bc))


class CompileallTestsWithSourceEpoch(CompileallTestsBase,
                                     unittest.TestCase,
                                     metaclass=SourceDateEpochTestMeta,
                                     source_date_epoch=True):
    # Runs every base test with SOURCE_DATE_EPOCH set (hash-based pycs).
    pass


class CompileallTestsWithoutSourceEpoch(CompileallTestsBase,
                                        unittest.TestCase,
                                        metaclass=SourceDateEpochTestMeta,
                                        source_date_epoch=False):
    # Runs every base test with SOURCE_DATE_EPOCH unset (timestamp pycs).
    pass


# WASI does not have a temp directory and uses cwd instead. The cwd contains
# non-ASCII chars, so _walk_dir() fails to encode self.directory.
@unittest.skipIf(support.is_wasi, "tempdir is not encodable on WASI")
class EncodingTest(unittest.TestCase):
    """Issue 6716: compileall should escape source code when printing errors
    to stdout."""

    def setUp(self):
        self.directory = tempfile.mkdtemp()
        self.source_path = os.path.join(self.directory, '_test.py')
        with open(self.source_path, 'w', encoding='utf-8') as file:
            file.write('# -*- coding: utf-8 -*-\n')
            # Deliberately invalid Python 3 syntax containing a non-ASCII
            # character (EURO SIGN), so the error report must handle it.
            file.write('print u"\u20ac"\n')

    def tearDown(self):
        shutil.rmtree(self.directory)

    def test_error(self):
        # Must not raise even though stdout cannot encode the euro sign.
        try:
            orig_stdout = sys.stdout
            sys.stdout = io.TextIOWrapper(io.BytesIO(), encoding='ascii')
            compileall.compile_dir(self.directory)
        finally:
            sys.stdout = orig_stdout


class CommandLineTestsBase:
    """Test compileall's CLI."""

    def setUp(self):
        self.directory = tempfile.mkdtemp()
        self.addCleanup(os_helper.rmtree, self.directory)
        self.pkgdir = os.path.join(self.directory, 'foo')
        os.mkdir(self.pkgdir)
        self.pkgdir_cachedir = os.path.join(self.pkgdir, '__pycache__')
        # Create the __init__.py and a package module.
        self.initfn = script_helper.make_script(self.pkgdir, '__init__', '')
        self.barfn = script_helper.make_script(self.pkgdir, 'bar', '')

    @contextlib.contextmanager
    def temporary_pycache_prefix(self):
        """Adjust and restore sys.pycache_prefix."""
        old_prefix = sys.pycache_prefix
        new_prefix = os.path.join(self.directory, '__testcache__')
        try:
            sys.pycache_prefix = new_prefix
            # Yield the env vars a child interpreter needs to see the
            # same prefix and path.
            yield {
                'PYTHONPATH': self.directory,
                'PYTHONPYCACHEPREFIX': new_prefix,
            }
        finally:
            sys.pycache_prefix = old_prefix

    def _get_run_args(self, args):
        # -S keeps site imports out; optimization flags mirror this process.
        return [*support.optim_args_from_interpreter_flags(),
                '-S', '-m', 'compileall',
                *args]

    def assertRunOK(self, *args, **env_vars):
        """Run the CLI expecting success and empty stderr; return stdout."""
        rc, out, err = script_helper.assert_python_ok(
            *self._get_run_args(args), **env_vars,
            PYTHONIOENCODING='utf-8')
        self.assertEqual(b'', err)
        return out

    def assertRunNotOK(self, *args, **env_vars):
        """Run the CLI expecting failure; return (rc, stdout, stderr)."""
        rc, out, err = script_helper.assert_python_failure(
            *self._get_run_args(args), **env_vars,
            PYTHONIOENCODING='utf-8')
        return rc, out, err

    def assertCompiled(self, fn):
        path = importlib.util.cache_from_source(fn)
        self.assertTrue(os.path.exists(path))

    def assertNotCompiled(self, fn):
        path = importlib.util.cache_from_source(fn)
        self.assertFalse(os.path.exists(path))

    def test_no_args_compiles_path(self):
        # Note that -l is implied for the no args case.
        bazfn = script_helper.make_script(self.directory, 'baz', '')
        with self.temporary_pycache_prefix() as env:
            self.assertRunOK(**env)
            self.assertCompiled(bazfn)
            self.assertNotCompiled(self.initfn)
            self.assertNotCompiled(self.barfn)

    @without_source_date_epoch  # timestamp invalidation test
    def test_no_args_respects_force_flag(self):
        bazfn = script_helper.make_script(self.directory, 'baz', '')
        with self.temporary_pycache_prefix() as env:
            self.assertRunOK(**env)
            pycpath = importlib.util.cache_from_source(bazfn)
            # Set atime/mtime backward to avoid file timestamp resolution issues
            os.utime(pycpath, (time.time()-60,)*2)
            mtime = os.stat(pycpath).st_mtime
            # Without force, no recompilation
            self.assertRunOK(**env)
            mtime2 = os.stat(pycpath).st_mtime
            self.assertEqual(mtime, mtime2)
            # Now force it.
            self.assertRunOK('-f', **env)
            mtime2 = os.stat(pycpath).st_mtime
            self.assertNotEqual(mtime, mtime2)

    def test_no_args_respects_quiet_flag(self):
        script_helper.make_script(self.directory, 'baz', '')
        with self.temporary_pycache_prefix() as env:
            noisy = self.assertRunOK(**env)
            self.assertIn(b'Listing ', noisy)
            quiet = self.assertRunOK('-q', **env)
            self.assertNotIn(b'Listing ', quiet)

    # Ensure that the default behavior of compileall's CLI is to create
    # PEP 3147/PEP 488 pyc files.
    for name, ext, switch in [
        ('normal', 'pyc', []),
        ('optimize', 'opt-1.pyc', ['-O']),
        ('doubleoptimize', 'opt-2.pyc', ['-OO']),
    ]:
        # ext/switch are bound as defaults to avoid the late-binding
        # closure pitfall; the function is installed under a generated name.
        def f(self, ext=ext, switch=switch):
            script_helper.assert_python_ok(*(switch +
                ['-m', 'compileall', '-q', self.pkgdir]))
            # Verify the __pycache__ directory contents.
            self.assertTrue(os.path.exists(self.pkgdir_cachedir))
            expected = sorted(base.format(sys.implementation.cache_tag, ext)
                              for base in ('__init__.{}.{}', 'bar.{}.{}'))
            self.assertEqual(sorted(os.listdir(self.pkgdir_cachedir)), expected)
            # Make sure there are no .pyc files in the source directory.
            self.assertFalse([fn for fn in os.listdir(self.pkgdir)
                              if fn.endswith(ext)])
        locals()['test_pep3147_paths_' + name] = f

    def test_legacy_paths(self):
        # Ensure that with the proper switch, compileall leaves legacy
        # pyc files, and no __pycache__ directory.
        self.assertRunOK('-b', '-q', self.pkgdir)
        # Verify the __pycache__ directory contents.
        self.assertFalse(os.path.exists(self.pkgdir_cachedir))
        expected = sorted(['__init__.py', '__init__.pyc', 'bar.py',
                           'bar.pyc'])
        self.assertEqual(sorted(os.listdir(self.pkgdir)), expected)

    def test_multiple_runs(self):
        # Bug 8527 reported that multiple calls produced empty
        # __pycache__/__pycache__ directories.
        self.assertRunOK('-q', self.pkgdir)
        # Verify the __pycache__ directory contents.
        self.assertTrue(os.path.exists(self.pkgdir_cachedir))
        cachecachedir = os.path.join(self.pkgdir_cachedir, '__pycache__')
        self.assertFalse(os.path.exists(cachecachedir))
        # Call compileall again.
        self.assertRunOK('-q', self.pkgdir)
        self.assertTrue(os.path.exists(self.pkgdir_cachedir))
        self.assertFalse(os.path.exists(cachecachedir))

    @without_source_date_epoch  # timestamp invalidation test
    def test_force(self):
        self.assertRunOK('-q', self.pkgdir)
        pycpath = importlib.util.cache_from_source(self.barfn)
        # set atime/mtime backward to avoid file timestamp resolution issues
        os.utime(pycpath, (time.time()-60,)*2)
        mtime = os.stat(pycpath).st_mtime
        # without force, no recompilation
        self.assertRunOK('-q', self.pkgdir)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertEqual(mtime, mtime2)
        # now force it.
        self.assertRunOK('-q', '-f', self.pkgdir)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertNotEqual(mtime, mtime2)

    def test_recursion_control(self):
        subpackage = os.path.join(self.pkgdir, 'spam')
        os.mkdir(subpackage)
        subinitfn = script_helper.make_script(subpackage, '__init__', '')
        hamfn = script_helper.make_script(subpackage, 'ham', '')
        # -l limits compilation to the listed directory itself.
        self.assertRunOK('-q', '-l', self.pkgdir)
        self.assertNotCompiled(subinitfn)
        self.assertFalse(os.path.exists(os.path.join(subpackage, '__pycache__')))
        self.assertRunOK('-q', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)

    def test_recursion_limit(self):
        subpackage = os.path.join(self.pkgdir, 'spam')
        subpackage2 = os.path.join(subpackage, 'ham')
        subpackage3 = os.path.join(subpackage2, 'eggs')
        for pkg in (subpackage, subpackage2, subpackage3):
            script_helper.make_pkg(pkg)

        subinitfn = os.path.join(subpackage, '__init__.py')
        hamfn = script_helper.make_script(subpackage, 'ham', '')
        spamfn = script_helper.make_script(subpackage2, 'spam', '')
        eggfn = script_helper.make_script(subpackage3, 'egg', '')

        # -r N limits recursion depth to N levels below the argument.
        self.assertRunOK('-q', '-r 0', self.pkgdir)
        self.assertNotCompiled(subinitfn)
        self.assertFalse(
            os.path.exists(os.path.join(subpackage, '__pycache__')))

        self.assertRunOK('-q', '-r 1', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertNotCompiled(spamfn)

        self.assertRunOK('-q', '-r 2', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertCompiled(spamfn)
        self.assertNotCompiled(eggfn)

        self.assertRunOK('-q', '-r 5', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertCompiled(spamfn)
        self.assertCompiled(eggfn)

    @os_helper.skip_unless_symlink
    def test_symlink_loop(self):
        # Currently, compileall ignores symlinks to directories.
        # If that limitation is ever lifted, it should protect against
        # recursion in symlink loops.
        pkg = os.path.join(self.pkgdir, 'spam')
        script_helper.make_pkg(pkg)
        os.symlink('.', os.path.join(pkg, 'evil'))
        os.symlink('.', os.path.join(pkg, 'evil2'))
        self.assertRunOK('-q', self.pkgdir)
        self.assertCompiled(os.path.join(
            self.pkgdir, 'spam', 'evil', 'evil2', '__init__.py'
        ))

    def test_quiet(self):
        noisy = self.assertRunOK(self.pkgdir)
        quiet = self.assertRunOK('-q', self.pkgdir)
        self.assertNotEqual(b'', noisy)
        self.assertEqual(b'', quiet)

    def test_silent(self):
        # -q suppresses listings but still prints errors; -qq prints nothing.
        script_helper.make_script(self.pkgdir, 'crunchyfrog', 'bad(syntax')
        _, quiet, _ = self.assertRunNotOK('-q', self.pkgdir)
        _, silent, _ = self.assertRunNotOK('-qq', self.pkgdir)
        self.assertNotEqual(b'', quiet)
        self.assertEqual(b'', silent)

    def test_regexp(self):
        # -x excludes paths matching the regexp.
        self.assertRunOK('-q', '-x', r'ba[^\\/]*$', self.pkgdir)
        self.assertNotCompiled(self.barfn)
        self.assertCompiled(self.initfn)

    def test_multiple_dirs(self):
        pkgdir2 = os.path.join(self.directory, 'foo2')
        os.mkdir(pkgdir2)
        init2fn = script_helper.make_script(pkgdir2, '__init__', '')
        bar2fn = script_helper.make_script(pkgdir2, 'bar2', '')
        self.assertRunOK('-q', self.pkgdir, pkgdir2)
        self.assertCompiled(self.initfn)
        self.assertCompiled(self.barfn)
        self.assertCompiled(init2fn)
        self.assertCompiled(bar2fn)

    def test_d_compile_error(self):
        # -d substitutes the given path in compile-time error messages.
        script_helper.make_script(self.pkgdir, 'crunchyfrog', 'bad(syntax')
        rc, out, err = self.assertRunNotOK('-q', '-d', 'dinsdale', self.pkgdir)
        self.assertRegex(out, b'File "dinsdale')

    def test_d_runtime_error(self):
        # -d also controls the filename in runtime tracebacks of the pyc.
        bazfn = script_helper.make_script(self.pkgdir, 'baz', 'raise Exception')
        self.assertRunOK('-q', '-d', 'dinsdale', self.pkgdir)
        fn = script_helper.make_script(self.pkgdir, 'bing', 'import baz')
        pyc = importlib.util.cache_from_source(bazfn)
        # Leave only the legacy pyc so the import uses the compiled file.
        os.rename(pyc, os.path.join(self.pkgdir, 'baz.pyc'))
        os.remove(bazfn)
        rc, out, err = script_helper.assert_python_failure(fn, __isolated=False)
        self.assertRegex(err, b'File "dinsdale')

    def test_include_bad_file(self):
        rc, out, err = self.assertRunNotOK(
            '-i', os.path.join(self.directory, 'nosuchfile'), self.pkgdir)
        self.assertRegex(out, b'rror.*nosuchfile')
        self.assertNotRegex(err, b'Traceback')
        self.assertFalse(os.path.exists(importlib.util.cache_from_source(
                                            self.pkgdir_cachedir)))

    def test_include_file_with_arg(self):
        # -i FILE adds the paths listed in FILE to the positional args.
        f1 = script_helper.make_script(self.pkgdir, 'f1', '')
        f2 = script_helper.make_script(self.pkgdir, 'f2', '')
        f3 = script_helper.make_script(self.pkgdir, 'f3', '')
        f4 = script_helper.make_script(self.pkgdir, 'f4', '')
        with open(os.path.join(self.directory, 'l1'), 'w', encoding="utf-8") as l1:
            l1.write(os.path.join(self.pkgdir, 'f1.py')+os.linesep)
            l1.write(os.path.join(self.pkgdir, 'f2.py')+os.linesep)
        self.assertRunOK('-i', os.path.join(self.directory, 'l1'), f4)
        self.assertCompiled(f1)
        self.assertCompiled(f2)
        self.assertNotCompiled(f3)
        self.assertCompiled(f4)

    def test_include_file_no_arg(self):
        f1 = script_helper.make_script(self.pkgdir, 'f1', '')
        f2 = script_helper.make_script(self.pkgdir, 'f2', '')
        f3 = script_helper.make_script(self.pkgdir, 'f3', '')
        f4 = script_helper.make_script(self.pkgdir, 'f4', '')
        with open(os.path.join(self.directory, 'l1'), 'w', encoding="utf-8") as l1:
            l1.write(os.path.join(self.pkgdir, 'f2.py')+os.linesep)
        self.assertRunOK('-i', os.path.join(self.directory, 'l1'))
        self.assertNotCompiled(f1)
        self.assertCompiled(f2)
        self.assertNotCompiled(f3)
        self.assertNotCompiled(f4)

    def test_include_on_stdin(self):
        # '-i -' reads the file list from standard input.
        f1 = script_helper.make_script(self.pkgdir, 'f1', '')
        f2 = script_helper.make_script(self.pkgdir, 'f2', '')
        f3 = script_helper.make_script(self.pkgdir, 'f3', '')
        f4 = script_helper.make_script(self.pkgdir, 'f4', '')
        p = script_helper.spawn_python(*(self._get_run_args(()) + ['-i', '-']))
        p.stdin.write((f3+os.linesep).encode('ascii'))
        script_helper.kill_python(p)
        self.assertNotCompiled(f1)
        self.assertNotCompiled(f2)
        self.assertCompiled(f3)
        self.assertNotCompiled(f4)

    def test_compiles_as_much_as_possible(self):
        # One bad file must not stop the rest from being compiled.
        bingfn = script_helper.make_script(self.pkgdir, 'bing', 'syntax(error')
        rc, out, err = self.assertRunNotOK('nosuchfile', self.initfn,
                                           bingfn, self.barfn)
        self.assertRegex(out, b'rror')
        self.assertNotCompiled(bingfn)
        self.assertCompiled(self.initfn)
        self.assertCompiled(self.barfn)

    def test_invalid_arg_produces_message(self):
        out = self.assertRunOK('badfilename')
        self.assertRegex(out, b"Can't list 'badfilename'")

    def test_pyc_invalidation_mode(self):
        # Check the PEP 552 flags word written for each invalidation mode:
        # 0b11 = checked-hash, 0b01 = unchecked-hash.
        script_helper.make_script(self.pkgdir, 'f1', '')
        pyc = importlib.util.cache_from_source(
            os.path.join(self.pkgdir, 'f1.py'))
        self.assertRunOK('--invalidation-mode=checked-hash', self.pkgdir)
        with open(pyc, 'rb') as fp:
            data = fp.read()
        self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b11)
        self.assertRunOK('--invalidation-mode=unchecked-hash', self.pkgdir)
        with open(pyc, 'rb') as fp:
            data = fp.read()
        self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b01)

    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    def test_workers(self):
        bar2fn = script_helper.make_script(self.directory, 'bar2', '')
        files = []
        for suffix in range(5):
            pkgdir = os.path.join(self.directory, 'foo{}'.format(suffix))
            os.mkdir(pkgdir)
            # NOTE(review): fn is unused; the __init__ script only exists to
            # make pkgdir a package.
            fn = script_helper.make_script(pkgdir, '__init__', '')
            files.append(script_helper.make_script(pkgdir, 'bar2', ''))

        self.assertRunOK(self.directory, '-j', '0')
        self.assertCompiled(bar2fn)
        for file in files:
self.assertCompiled(file) 830 831 @mock.patch('compileall.compile_dir') 832 def test_workers_available_cores(self, compile_dir): 833 with mock.patch("sys.argv", 834 new=[sys.executable, self.directory, "-j0"]): 835 compileall.main() 836 self.assertTrue(compile_dir.called) 837 self.assertEqual(compile_dir.call_args[-1]['workers'], 0) 838 839 def test_strip_and_prepend(self): 840 fullpath = ["test", "build", "real", "path"] 841 path = os.path.join(self.directory, *fullpath) 842 os.makedirs(path) 843 script = script_helper.make_script(path, "test", "1 / 0") 844 bc = importlib.util.cache_from_source(script) 845 stripdir = os.path.join(self.directory, *fullpath[:2]) 846 prependdir = "/foo" 847 self.assertRunOK("-s", stripdir, "-p", prependdir, path) 848 rc, out, err = script_helper.assert_python_failure(bc) 849 expected_in = os.path.join(prependdir, *fullpath[2:]) 850 self.assertIn( 851 expected_in, 852 str(err, encoding=sys.getdefaultencoding()) 853 ) 854 self.assertNotIn( 855 stripdir, 856 str(err, encoding=sys.getdefaultencoding()) 857 ) 858 859 def test_multiple_optimization_levels(self): 860 path = os.path.join(self.directory, "optimizations") 861 os.makedirs(path) 862 script = script_helper.make_script(path, 863 "test_optimization", 864 "a = 0") 865 bc = [] 866 for opt_level in "", 1, 2, 3: 867 bc.append(importlib.util.cache_from_source(script, 868 optimization=opt_level)) 869 test_combinations = [["0", "1"], 870 ["1", "2"], 871 ["0", "2"], 872 ["0", "1", "2"]] 873 for opt_combination in test_combinations: 874 self.assertRunOK(path, *("-o" + str(n) for n in opt_combination)) 875 for opt_level in opt_combination: 876 self.assertTrue(os.path.isfile(bc[int(opt_level)])) 877 try: 878 os.unlink(bc[opt_level]) 879 except Exception: 880 pass 881 882 @os_helper.skip_unless_symlink 883 def test_ignore_symlink_destination(self): 884 # Create folders for allowed files, symlinks and prohibited area 885 allowed_path = os.path.join(self.directory, "test", "dir", "allowed") 886 
symlinks_path = os.path.join(self.directory, "test", "dir", "symlinks") 887 prohibited_path = os.path.join(self.directory, "test", "dir", "prohibited") 888 os.makedirs(allowed_path) 889 os.makedirs(symlinks_path) 890 os.makedirs(prohibited_path) 891 892 # Create scripts and symlinks and remember their byte-compiled versions 893 allowed_script = script_helper.make_script(allowed_path, "test_allowed", "a = 0") 894 prohibited_script = script_helper.make_script(prohibited_path, "test_prohibited", "a = 0") 895 allowed_symlink = os.path.join(symlinks_path, "test_allowed.py") 896 prohibited_symlink = os.path.join(symlinks_path, "test_prohibited.py") 897 os.symlink(allowed_script, allowed_symlink) 898 os.symlink(prohibited_script, prohibited_symlink) 899 allowed_bc = importlib.util.cache_from_source(allowed_symlink) 900 prohibited_bc = importlib.util.cache_from_source(prohibited_symlink) 901 902 self.assertRunOK(symlinks_path, "-e", allowed_path) 903 904 self.assertTrue(os.path.isfile(allowed_bc)) 905 self.assertFalse(os.path.isfile(prohibited_bc)) 906 907 def test_hardlink_bad_args(self): 908 # Bad arguments combination, hardlink deduplication make sense 909 # only for more than one optimization level 910 self.assertRunNotOK(self.directory, "-o 1", "--hardlink-dupes") 911 912 def test_hardlink(self): 913 # 'a = 0' code produces the same bytecode for the 3 optimization 914 # levels. All three .pyc files must have the same inode (hardlinks). 915 # 916 # If deduplication is disabled, all pyc files must have different 917 # inodes. 
918 for dedup in (True, False): 919 with tempfile.TemporaryDirectory() as path: 920 with self.subTest(dedup=dedup): 921 script = script_helper.make_script(path, "script", "a = 0") 922 pycs = get_pycs(script) 923 924 args = ["-q", "-o 0", "-o 1", "-o 2"] 925 if dedup: 926 args.append("--hardlink-dupes") 927 self.assertRunOK(path, *args) 928 929 self.assertEqual(is_hardlink(pycs[0], pycs[1]), dedup) 930 self.assertEqual(is_hardlink(pycs[1], pycs[2]), dedup) 931 self.assertEqual(is_hardlink(pycs[0], pycs[2]), dedup) 932 933 934class CommandLineTestsWithSourceEpoch(CommandLineTestsBase, 935 unittest.TestCase, 936 metaclass=SourceDateEpochTestMeta, 937 source_date_epoch=True): 938 pass 939 940 941class CommandLineTestsNoSourceEpoch(CommandLineTestsBase, 942 unittest.TestCase, 943 metaclass=SourceDateEpochTestMeta, 944 source_date_epoch=False): 945 pass 946 947 948 949@unittest.skipUnless(hasattr(os, 'link'), 'requires os.link') 950class HardlinkDedupTestsBase: 951 # Test hardlink_dupes parameter of compileall.compile_dir() 952 953 def setUp(self): 954 self.path = None 955 956 @contextlib.contextmanager 957 def temporary_directory(self): 958 with tempfile.TemporaryDirectory() as path: 959 self.path = path 960 yield path 961 self.path = None 962 963 def make_script(self, code, name="script"): 964 return script_helper.make_script(self.path, name, code) 965 966 def compile_dir(self, *, dedup=True, optimize=(0, 1, 2), force=False): 967 compileall.compile_dir(self.path, quiet=True, optimize=optimize, 968 hardlink_dupes=dedup, force=force) 969 970 def test_bad_args(self): 971 # Bad arguments combination, hardlink deduplication make sense 972 # only for more than one optimization level 973 with self.temporary_directory(): 974 self.make_script("pass") 975 with self.assertRaises(ValueError): 976 compileall.compile_dir(self.path, quiet=True, optimize=0, 977 hardlink_dupes=True) 978 with self.assertRaises(ValueError): 979 # same optimization level specified twice: 980 # 
compile_dir() removes duplicates 981 compileall.compile_dir(self.path, quiet=True, optimize=[0, 0], 982 hardlink_dupes=True) 983 984 def create_code(self, docstring=False, assertion=False): 985 lines = [] 986 if docstring: 987 lines.append("'module docstring'") 988 lines.append('x = 1') 989 if assertion: 990 lines.append("assert x == 1") 991 return '\n'.join(lines) 992 993 def iter_codes(self): 994 for docstring in (False, True): 995 for assertion in (False, True): 996 code = self.create_code(docstring=docstring, assertion=assertion) 997 yield (code, docstring, assertion) 998 999 def test_disabled(self): 1000 # Deduplication disabled, no hardlinks 1001 for code, docstring, assertion in self.iter_codes(): 1002 with self.subTest(docstring=docstring, assertion=assertion): 1003 with self.temporary_directory(): 1004 script = self.make_script(code) 1005 pycs = get_pycs(script) 1006 self.compile_dir(dedup=False) 1007 self.assertFalse(is_hardlink(pycs[0], pycs[1])) 1008 self.assertFalse(is_hardlink(pycs[0], pycs[2])) 1009 self.assertFalse(is_hardlink(pycs[1], pycs[2])) 1010 1011 def check_hardlinks(self, script, docstring=False, assertion=False): 1012 pycs = get_pycs(script) 1013 self.assertEqual(is_hardlink(pycs[0], pycs[1]), 1014 not assertion) 1015 self.assertEqual(is_hardlink(pycs[0], pycs[2]), 1016 not assertion and not docstring) 1017 self.assertEqual(is_hardlink(pycs[1], pycs[2]), 1018 not docstring) 1019 1020 def test_hardlink(self): 1021 # Test deduplication on all combinations 1022 for code, docstring, assertion in self.iter_codes(): 1023 with self.subTest(docstring=docstring, assertion=assertion): 1024 with self.temporary_directory(): 1025 script = self.make_script(code) 1026 self.compile_dir() 1027 self.check_hardlinks(script, docstring, assertion) 1028 1029 def test_only_two_levels(self): 1030 # Don't build the 3 optimization levels, but only 2 1031 for opts in ((0, 1), (1, 2), (0, 2)): 1032 with self.subTest(opts=opts): 1033 with self.temporary_directory(): 
1034 # code with no dostring and no assertion: 1035 # same bytecode for all optimization levels 1036 script = self.make_script(self.create_code()) 1037 self.compile_dir(optimize=opts) 1038 pyc1 = get_pyc(script, opts[0]) 1039 pyc2 = get_pyc(script, opts[1]) 1040 self.assertTrue(is_hardlink(pyc1, pyc2)) 1041 1042 def test_duplicated_levels(self): 1043 # compile_dir() must not fail if optimize contains duplicated 1044 # optimization levels and/or if optimization levels are not sorted. 1045 with self.temporary_directory(): 1046 # code with no dostring and no assertion: 1047 # same bytecode for all optimization levels 1048 script = self.make_script(self.create_code()) 1049 self.compile_dir(optimize=[1, 0, 1, 0]) 1050 pyc1 = get_pyc(script, 0) 1051 pyc2 = get_pyc(script, 1) 1052 self.assertTrue(is_hardlink(pyc1, pyc2)) 1053 1054 def test_recompilation(self): 1055 # Test compile_dir() when pyc files already exists and the script 1056 # content changed 1057 with self.temporary_directory(): 1058 script = self.make_script("a = 0") 1059 self.compile_dir() 1060 # All three levels have the same inode 1061 self.check_hardlinks(script) 1062 1063 pycs = get_pycs(script) 1064 inode = os.stat(pycs[0]).st_ino 1065 1066 # Change of the module content 1067 script = self.make_script("print(0)") 1068 1069 # Recompilation without -o 1 1070 self.compile_dir(optimize=[0, 2], force=True) 1071 1072 # opt-1.pyc should have the same inode as before and others should not 1073 self.assertEqual(inode, os.stat(pycs[1]).st_ino) 1074 self.assertTrue(is_hardlink(pycs[0], pycs[2])) 1075 self.assertNotEqual(inode, os.stat(pycs[2]).st_ino) 1076 # opt-1.pyc and opt-2.pyc have different content 1077 self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True)) 1078 1079 def test_import(self): 1080 # Test that import updates a single pyc file when pyc files already 1081 # exists and the script content changed 1082 with self.temporary_directory(): 1083 script = self.make_script(self.create_code(), 
name="module") 1084 self.compile_dir() 1085 # All three levels have the same inode 1086 self.check_hardlinks(script) 1087 1088 pycs = get_pycs(script) 1089 inode = os.stat(pycs[0]).st_ino 1090 1091 # Change of the module content 1092 script = self.make_script("print(0)", name="module") 1093 1094 # Import the module in Python with -O (optimization level 1) 1095 script_helper.assert_python_ok( 1096 "-O", "-c", "import module", __isolated=False, PYTHONPATH=self.path 1097 ) 1098 1099 # Only opt-1.pyc is changed 1100 self.assertEqual(inode, os.stat(pycs[0]).st_ino) 1101 self.assertEqual(inode, os.stat(pycs[2]).st_ino) 1102 self.assertFalse(is_hardlink(pycs[1], pycs[2])) 1103 # opt-1.pyc and opt-2.pyc have different content 1104 self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True)) 1105 1106 1107class HardlinkDedupTestsWithSourceEpoch(HardlinkDedupTestsBase, 1108 unittest.TestCase, 1109 metaclass=SourceDateEpochTestMeta, 1110 source_date_epoch=True): 1111 pass 1112 1113 1114class HardlinkDedupTestsNoSourceEpoch(HardlinkDedupTestsBase, 1115 unittest.TestCase, 1116 metaclass=SourceDateEpochTestMeta, 1117 source_date_epoch=False): 1118 pass 1119 1120 1121if __name__ == "__main__": 1122 unittest.main() 1123