#!/usr/bin/env python3
# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Tests for the database module."""

import io
import json
import os
from pathlib import Path
import shutil
import stat
import subprocess
import sys
import tempfile
import unittest
from unittest import mock

from pw_tokenizer import database

# This is an ELF file with only the pw_tokenizer sections. It was created
# from a tokenize_test binary built for the STM32F429i Discovery board. The
# pw_tokenizer sections were extracted with this command:
#
#   arm-none-eabi-objcopy -S --only-section ".pw_tokenize*" <ELF> <OUTPUT>
#
TOKENIZED_ENTRIES_ELF = Path(__file__).with_name(
    'example_binary_with_tokenized_strings.elf'
)

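# The CSV constants below use the four-column database format that the
# `create` command emits: an 8-digit hex token, a removal date (blank while
# the entry is still present in the inputs), a quoted domain, and the quoted
# original string. For example, a removed entry would look roughly like this
# (the date shown is illustrative only):
#
#   2e668cd6,2023-01-01,"","Jello, world!"
#
# The older three-column format, which has no domain column, is exercised in
# test_create_csv_from_three_column_csv below.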
CSV_DEFAULT_DOMAIN = '''\
00000000, ,"",""
141c35d5, ,"","The answer: ""%s"""
29aef586, ,"","1234"
2b78825f, ,"","[:-)"
2e668cd6, ,"","Jello, world!"
31631781, ,"","%d"
61fd1e26, ,"","%ld"
68ab92da, ,"","%s there are %x (%.2f) of them%c"
7b940e2a, ,"","Hello %s! %hd %e"
7da55d52, ,"",">:-[]"
7f35a9a5, ,"","TestName"
851beeb6, ,"","%u %d"
881436a0, ,"","The answer is: %s"
88808930, ,"","%u%d%02x%X%hu%hhd%d%ld%lu%lld%llu%c%c%c"
92723f44, ,"","???"
a09d6698, ,"","won-won-won-wonderful"
aa9ffa66, ,"","void pw::tokenizer::{anonymous}::TestName()"
ad002c97, ,"","%llx"
b3653e13, ,"","Jello!"
cc6d3131, ,"","Jello?"
e13b0f94, ,"","%llu"
e65aefef, ,"","Won't fit : %s%d"
'''

CSV_TEST_DOMAIN = """\
17fa86d3, ,"TEST_DOMAIN","hello"
18c5017c, ,"TEST_DOMAIN","yes"
59b2701c, ,"TEST_DOMAIN","The answer was: %s"
881436a0, ,"TEST_DOMAIN","The answer is: %s"
d18ada0f, ,"TEST_DOMAIN","something"
"""

CSV_ALL_DOMAINS = '''\
00000000, ,"",""
141c35d5, ,"","The answer: ""%s"""
29aef586, ,"","1234"
2b78825f, ,"","[:-)"
2e668cd6, ,"","Jello, world!"
31631781, ,"","%d"
61fd1e26, ,"","%ld"
68ab92da, ,"","%s there are %x (%.2f) of them%c"
7b940e2a, ,"","Hello %s! %hd %e"
7da55d52, ,"",">:-[]"
7f35a9a5, ,"","TestName"
851beeb6, ,"","%u %d"
881436a0, ,"","The answer is: %s"
88808930, ,"","%u%d%02x%X%hu%hhd%d%ld%lu%lld%llu%c%c%c"
92723f44, ,"","???"
a09d6698, ,"","won-won-won-wonderful"
aa9ffa66, ,"","void pw::tokenizer::{anonymous}::TestName()"
ad002c97, ,"","%llx"
b3653e13, ,"","Jello!"
cc6d3131, ,"","Jello?"
e13b0f94, ,"","%llu"
e65aefef, ,"","Won't fit : %s%d"
17fa86d3, ,"TEST_DOMAIN","hello"
18c5017c, ,"TEST_DOMAIN","yes"
59b2701c, ,"TEST_DOMAIN","The answer was: %s"
881436a0, ,"TEST_DOMAIN","The answer is: %s"
d18ada0f, ,"TEST_DOMAIN","something"
'''

JSON_SOURCE_STRINGS = '''\
[
  "pigweed/pw_polyfill/standard_library_public/pw_polyfill/standard_library/assert.h",
  "protocol_buffer/gen/pigweed/pw_protobuf/common_protos.proto_library/nanopb/pw_protobuf_protos/status.pb.h",
  "pigweed/pw_rpc/client_server.cc",
  "pigweed/pw_rpc/public/pw_rpc/client_server.h",
  "This is a very long string that will produce two tokens; one for C++ and one for C. This is because this string exceeds the default C hash length."
]
'''

CSV_STRINGS = '''\
2cbf627a, ,"","pigweed/pw_rpc/client_server.cc"
666562a1, ,"","protocol_buffer/gen/pigweed/pw_protobuf/common_protos.proto_library/nanopb/pw_protobuf_protos/status.pb.h"
6c1e6eb3, ,"","pigweed/pw_rpc/public/pw_rpc/client_server.h"
b25a9932, ,"","This is a very long string that will produce two tokens; one for C++ and one for C. This is because this string exceeds the default C hash length."
eadf017f, ,"","pigweed/pw_polyfill/standard_library_public/pw_polyfill/standard_library/assert.h"
f815dc5c, ,"","This is a very long string that will produce two tokens; one for C++ and one for C. This is because this string exceeds the default C hash length."
'''

EXPECTED_REPORT = {
    str(TOKENIZED_ENTRIES_ELF): {
        '': {
            'present_entries': 22,
            'present_size_bytes': 289,
            'total_entries': 22,
            'total_size_bytes': 289,
            'collisions': {},
        },
        'TEST_DOMAIN': {
            'present_entries': 5,
            'present_size_bytes': 57,
            'total_entries': 5,
            'total_size_bytes': 57,
            'collisions': {},
        },
    }
}


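# run_cli() drives the database.py command line in-process by swapping out
# sys.argv instead of spawning a subprocess. As a hypothetical illustration
# (the file names are made up and the tests only ever go through _main()
# directly), a call such as run_cli('create', '--database', 'db.csv',
# 'app.elf') corresponds roughly to running:
#
#   python database.py create --database db.csv app.elf
#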
def run_cli(*args) -> None:
    original_argv = sys.argv
    sys.argv = ['database.py', *(str(a) for a in args)]
    # pylint: disable=protected-access
    try:
        database._main(*database._parse_args())
    finally:
        # Remove the log handler added by _main to avoid duplicate logs.
        if database._LOG.handlers:
            database._LOG.handlers.pop()
        # pylint: enable=protected-access

        sys.argv = original_argv


def _mock_output() -> io.TextIOWrapper:
    output = io.BytesIO()
    output.name = '<fake stdout>'
    return io.TextIOWrapper(output, write_through=True)


def _remove_readonly(  # pylint: disable=unused-argument
    func, path, excinfo
) -> None:
    """Makes the path writable and retries the removal function that failed."""
    print('Path attempted to be deleted:', path)
    if not os.access(path, os.W_OK):
        # Change the file permissions to writable.
        os.chmod(path, stat.S_IWUSR)
        # Retry the function that failed (e.g. os.unlink) on the same path.
        func(path)


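# Several tests below pass ELF arguments of the form '<path>#<domain>', such
# as f'{elf}#TEST_DOMAIN' or f'{elf}#.*'. Based on how these tests use the
# syntax, the text after '#' selects which tokenization domains to load and
# is treated as a regular expression ('.*' loads every domain, and a second
# '#' is rejected); a bare path with no '#' loads all domains.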
class DatabaseCommandLineTest(unittest.TestCase):
    """Tests the database.py command line interface."""

    def setUp(self) -> None:
        self._dir = Path(tempfile.mkdtemp('_pw_tokenizer_test'))
        self._csv = self._dir / 'db.csv'
        self._elf = TOKENIZED_ENTRIES_ELF

    def tearDown(self) -> None:
        shutil.rmtree(self._dir)

    def test_create_csv(self) -> None:
        run_cli('create', '--database', self._csv, self._elf)

        self.assertEqual(
            CSV_ALL_DOMAINS.splitlines(), self._csv.read_text().splitlines()
        )

    def test_create_csv_from_three_column_csv(self) -> None:
        three_col_db = self._dir.joinpath('legacy.csv')
        # Remove the domain column.
        three_col_db.write_text(CSV_TEST_DOMAIN.replace('"TEST_DOMAIN",', ''))
        run_cli('create', '--database', self._csv, three_col_db)

        tokens_in_default = CSV_TEST_DOMAIN.replace('"TEST_DOMAIN",', '"",')
        self.assertEqual(
            tokens_in_default.splitlines(), self._csv.read_text().splitlines()
        )

    def test_create_csv_test_domain(self) -> None:
        run_cli('create', '--database', self._csv, f'{self._elf}#TEST_DOMAIN')

        self.assertEqual(
            CSV_TEST_DOMAIN.splitlines(),
            self._csv.read_text().splitlines(),
        )

    def test_create_csv_all_domains(self) -> None:
        run_cli('create', '--database', self._csv, self._elf)

        self.assertEqual(
            CSV_ALL_DOMAINS.splitlines(), self._csv.read_text().splitlines()
        )

    def test_create_csv_all_domains_regex(self) -> None:
        run_cli('create', '--database', self._csv, f'{self._elf}#.*')

        self.assertEqual(
            CSV_ALL_DOMAINS.splitlines(), self._csv.read_text().splitlines()
        )

    def test_invalid_domain_pattern(self) -> None:
        with self.assertRaises(SystemExit):
            run_cli('create', '--database', self._csv, f'{self._elf}#.*#')

    def test_invalid_glob(self) -> None:
        with self.assertRaises(SystemExit):
            run_cli('create', '--database', self._csv, 'INVALID PATH')

    def test_create_force(self) -> None:
        self._csv.write_text(CSV_ALL_DOMAINS)

        with self.assertRaises(FileExistsError):
            run_cli('create', '--database', self._csv, self._elf)

        run_cli('create', '--force', '--database', self._csv, self._elf)

    def test_create_binary(self) -> None:
        binary = self._dir / 'db.bin'
        run_cli(
            'create',
            '--type',
            'binary',
            '--database',
            binary,
            f'{self._elf}#',  # Only default domain since v1 DB excludes domain.
        )

        # Write the binary database as CSV to verify its contents.
        run_cli('create', '--database', self._csv, binary)

        self.assertEqual(
            CSV_DEFAULT_DOMAIN.splitlines(), self._csv.read_text().splitlines()
        )

    def test_add_does_not_recalculate_tokens(self) -> None:
        db_with_custom_token = '01234567, ,"","hello"'

        to_add = self._dir / 'add_this.csv'
        to_add.write_text(db_with_custom_token + '\n')
        self._csv.touch()

        run_cli('add', '--database', self._csv, to_add)
        self.assertEqual(
            db_with_custom_token.splitlines(),
            self._csv.read_text().splitlines(),
        )

    def test_mark_removed(self) -> None:
        """Tests adding a removal date to tokens in a CSV database."""
        self._csv.write_text(CSV_ALL_DOMAINS)

        run_cli(
            'mark_removed',
            '--database',
            self._csv,
            '--date',
            '1998-09-04',
            f'{self._elf}#',  # Only load the default domain for this test.
        )

        # Add the removal date to the tokens that are not in the default
        # domain, since only the default domain was loaded from the ELF.
        new_csv = CSV_ALL_DOMAINS
        new_csv = new_csv.replace(
            '17fa86d3, ,"TEST_DOMAIN","hello"',
            '17fa86d3,1998-09-04,"TEST_DOMAIN","hello"',
        )
        new_csv = new_csv.replace(
            '18c5017c, ,"TEST_DOMAIN","yes"',
            '18c5017c,1998-09-04,"TEST_DOMAIN","yes"',
        )
        new_csv = new_csv.replace(
            '59b2701c, ,"TEST_DOMAIN","The answer was: %s"',
            '59b2701c,1998-09-04,"TEST_DOMAIN","The answer was: %s"',
        )
        new_csv = new_csv.replace(
            'd18ada0f, ,"TEST_DOMAIN","something"',
            'd18ada0f,1998-09-04,"TEST_DOMAIN","something"',
        )
        new_csv = new_csv.replace(
            '881436a0, ,"TEST_DOMAIN","The answer is: %s"',
            '881436a0,1998-09-04,"TEST_DOMAIN","The answer is: %s"',
        )
        self.assertNotEqual(CSV_ALL_DOMAINS, new_csv)

        self.assertEqual(
            new_csv.splitlines(), self._csv.read_text().splitlines()
        )

    def test_purge(self) -> None:
        self._csv.write_text(CSV_DEFAULT_DOMAIN)

        first_5_csv = self._dir / 'first_5.csv'
        first_5_csv.write_text(
            ''.join(CSV_DEFAULT_DOMAIN.splitlines(keepends=True)[:5])
        )

        self._csv.write_text(CSV_DEFAULT_DOMAIN)

        # Mark everything except for the first 5 entries as removed.
        run_cli('mark_removed', '--database', self._csv, first_5_csv)

        # Purge the entries marked as removed, leaving only the first 5.
        run_cli('purge', '--database', self._csv)

        self.assertEqual(
            first_5_csv.read_text().splitlines(),
            self._csv.read_text().splitlines(),
        )

    @mock.patch('sys.stdout', new_callable=_mock_output)
    def test_report(self, mock_stdout) -> None:
        run_cli('report', self._elf)

        self.assertEqual(
            json.loads(mock_stdout.buffer.getvalue()), EXPECTED_REPORT
        )

    def test_replace(self) -> None:
        sub = 'replace/ment'
        run_cli(
            'create',
            '--database',
            self._csv,
            f'{self._elf}#',  # Only load the default domain for this test.
            '--replace',
            r'(?i)\b[jh]ello\b/' + sub,
        )
        self.assertEqual(
            CSV_DEFAULT_DOMAIN.replace('Jello', sub).replace('Hello', sub),
            self._csv.read_text(),
        )

    def test_json_strings(self) -> None:
        strings_file = self._dir / 'strings.json'

        with open(strings_file, 'w') as file:
            file.write(JSON_SOURCE_STRINGS)

        run_cli('create', '--force', '--database', self._csv, strings_file)
        self.assertEqual(
            CSV_STRINGS.splitlines(), self._csv.read_text().splitlines()
        )


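# The tests below target the directory flavor of the database: --database
# points at a directory rather than a single CSV file. As the assertions
# below illustrate, each `add` either creates a new *.pw_tokenizer.csv file
# inside that directory for tokens it has not seen before or reuses an
# existing one, and git (when a repository is present) is consulted to tell
# committed entries apart from temporary, untracked ones.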
class DirectoryDatabaseCommandLineTest(unittest.TestCase):
    """Tests the directory database command line interface."""

    def setUp(self) -> None:
        self._dir = Path(tempfile.mkdtemp('_pw_tokenizer_test'))
        self._db_dir = self._dir / '_dir_database_test'
        self._db_dir.mkdir(exist_ok=True)
        self._db_csv = self._db_dir / '8123913.pw_tokenizer.csv'
        self._elf = TOKENIZED_ENTRIES_ELF

    def _git(self, *command: str) -> None:
        """Runs git in self._dir with forced user name and email values.

        Prevents accidentally running git in the wrong directory and avoids
        errors if the name and email are not configured.
        """
        subprocess.run(
            [
                'git',
                '-c',
                'user.name=pw_tokenizer tests',
                '-c',
                # A placeholder address; any value works for test commits.
                'user.email=[email protected]',
                *command,
            ],
            cwd=self._dir,
            check=True,
        )

    def tearDown(self) -> None:
        shutil.rmtree(self._dir, onerror=_remove_readonly)

    def test_add_csv_to_dir(self) -> None:
        """Tests a CSV can be created within the database."""
        run_cli('add', '--database', self._db_dir, f'{self._elf}#TEST_DOMAIN')
        directory = list(self._db_dir.iterdir())

        self.assertEqual(1, len(directory))

        self._db_csv = directory.pop()

        self.assertEqual(
            CSV_TEST_DOMAIN.splitlines(),
            self._db_csv.read_text().splitlines(),
        )

    def test_add_all_domains_to_dir(self) -> None:
        """Tests a CSV with all domains can be added to the database."""
        run_cli('add', '--database', self._db_dir, f'{self._elf}#.*')
        directory = list(self._db_dir.iterdir())

        self.assertEqual(1, len(directory))

        self._db_csv = directory.pop()

        self.assertEqual(
            CSV_ALL_DOMAINS.splitlines(), self._db_csv.read_text().splitlines()
        )

    def test_not_adding_existing_tokens(self) -> None:
        """Tests duplicate tokens are not added to the database."""
        run_cli('add', '--database', self._db_dir, self._elf)
        run_cli('add', '--database', self._db_dir, self._elf)
        directory = list(self._db_dir.iterdir())

        self.assertEqual(1, len(directory))

        self._db_csv = directory.pop()

        self.assertEqual(
            CSV_ALL_DOMAINS.splitlines(),
            self._db_csv.read_text().splitlines(),
        )

    def test_adding_tokens_without_git_repo(self):
        """Tests creating new files with new entries when no repo exists."""
        # Add a subset of entries to a new CSV in the directory database.
        entry_subset = self._dir / 'entry_subset.csv'
        entry_subset.write_text(CSV_TEST_DOMAIN)

        run_cli('add', '--database', self._db_dir, entry_subset)
        directory = list(self._db_dir.iterdir())

        self.assertEqual(1, len(directory))

        first_csv_in_db = directory.pop()

        self.assertEqual(
            CSV_TEST_DOMAIN.splitlines(),
            first_csv_in_db.read_text().splitlines(),
        )
        # Add a superset of entries to a new CSV in the directory database.
        entry_superset = self._dir / 'entry_superset.csv'
        entry_superset.write_text(CSV_ALL_DOMAINS)

        run_cli('add', '--database', self._db_dir, entry_superset)
        directory = list(self._db_dir.iterdir())
        # Assert that a second CSV was created to store the new tokens.
        self.assertEqual(2, len(directory))
        # Retrieve the other CSV in the directory.
        second_csv_in_db = (
            directory[0] if directory[0] != first_csv_in_db else directory[1]
        )

        self.assertNotEqual(first_csv_in_db, second_csv_in_db)
        self.assertEqual(
            CSV_TEST_DOMAIN.splitlines(),
            first_csv_in_db.read_text().splitlines(),
        )

        # Retrieve the entries that are exclusively in the superset.
        entries_exclusively_in_superset = set(
            CSV_ALL_DOMAINS.splitlines()
        ) - set(CSV_TEST_DOMAIN.splitlines())
        # Ensure only the new tokens, those not in the subset, were written to
        # the second CSV in the directory database.
        self.assertEqual(
            entries_exclusively_in_superset,
            set(second_csv_in_db.read_text().splitlines()),
        )

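    # Several of the remaining tests exercise the --discard-temporary option.
    # Judging by the behavior asserted below, `--discard-temporary <git ref>`
    # lets `add` treat entries that are not in the database as of <git ref> as
    # temporary: the untracked or newer CSV is reused in place, and any of its
    # entries that the current inputs no longer produce are dropped rather
    # than a new CSV being created alongside it.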
    def test_untracked_files_in_dir(self):
        """Tests untracked CSVs are reused by the database."""
        self._git('init')
        # Add CSV_TEST_DOMAIN to a new CSV in the directory database.
        run_cli(
            'add',
            '--database',
            self._db_dir,
            '--discard-temporary',
            'HEAD',
            f'{self._elf}#TEST_DOMAIN',
        )
        directory = list(self._db_dir.iterdir())

        self.assertEqual(1, len(directory))

        first_path_in_db = directory.pop()

        self.assertEqual(
            CSV_TEST_DOMAIN.splitlines(),
            first_path_in_db.read_text().splitlines(),
        )
        # Re-add the ELF with all domains; the untracked CSV in the Git
        # repository is reused, and any tokens the new inputs do not produce
        # would be discarded.
        run_cli(
            'add',
            '--database',
            self._db_dir,
            '--discard-temporary',
            'HEAD',
            self._elf,
        )
        directory = list(self._db_dir.iterdir())

        self.assertEqual(1, len(directory))

        reused_path_in_db = directory.pop()
        # Ensure the first path created is the one being reused and that its
        # contents now match CSV_ALL_DOMAINS.
        self.assertEqual(first_path_in_db, reused_path_in_db)
        self.assertEqual(
            CSV_ALL_DOMAINS.splitlines(),
            reused_path_in_db.read_text().splitlines(),
        )

    def test_adding_multiple_elf_files(self) -> None:
        """Tests adding multiple ELF files to a file in the database."""
        # Add the TEST_DOMAIN tokens and the full ELF to a new CSV in the
        # directory database.
        run_cli(
            'add',
            '--database',
            self._db_dir,
            f'{self._elf}#TEST_DOMAIN',
            self._elf,
        )
        directory = list(self._db_dir.iterdir())

        self.assertEqual(1, len(directory))
        # Combine CSV_DEFAULT_DOMAIN and CSV_TEST_DOMAIN into a unique set
        # of token entries.
        entries_from_default_and_test_domain = set(
            CSV_DEFAULT_DOMAIN.splitlines()
        ).union(set(CSV_TEST_DOMAIN.splitlines()))
        # Multiple ELF files were added at once to a single CSV.
        self.assertEqual(
            entries_from_default_and_test_domain,
            set(directory.pop().read_text().splitlines()),
        )

    def test_discarding_old_entries(self) -> None:
        """Tests that stale entries are discarded when re-adding."""
        self._git('init')
        # Add CSV_ALL_DOMAINS to a new CSV in the directory database.
        run_cli(
            'add',
            '--database',
            self._db_dir,
            '--discard-temporary',
            'HEAD',
            f'{self._elf}#.*',
        )
        directory = list(self._db_dir.iterdir())

        self.assertEqual(1, len(directory))

        untracked_path_in_db = directory.pop()

        self.assertEqual(
            CSV_ALL_DOMAINS.splitlines(),
            untracked_path_in_db.read_text().splitlines(),
        )
        # Add CSV_DEFAULT_DOMAIN and CSV_TEST_DOMAIN to the CSV in the
        # directory database, discarding any CSV_ALL_DOMAINS entries that
        # the new inputs no longer produce.
        run_cli(
            'add',
            '--database',
            self._db_dir,
            '--discard-temporary',
            'HEAD',
            f'{self._elf}#TEST_DOMAIN',
            self._elf,
        )
        directory = list(self._db_dir.iterdir())

        self.assertEqual(1, len(directory))

        reused_path_in_db = directory.pop()
        # Combine CSV_DEFAULT_DOMAIN and CSV_TEST_DOMAIN.
        entries_from_default_and_test_domain = set(
            CSV_DEFAULT_DOMAIN.splitlines()
        ).union(set(CSV_TEST_DOMAIN.splitlines()))

        self.assertEqual(untracked_path_in_db, reused_path_in_db)
        self.assertEqual(
            entries_from_default_and_test_domain,
            set(reused_path_in_db.read_text().splitlines()),
        )

    def test_retrieving_csv_from_commit(self) -> None:
        """Tests retrieving a CSV from a commit and removing temp tokens."""
        self._git('init')
        self._git('commit', '--allow-empty', '-m', 'First Commit')
        # Add CSV_ALL_DOMAINS to a new CSV in the directory database.
        run_cli('add', '--database', self._db_dir, f'{self._elf}#.*')
        directory = list(self._db_dir.iterdir())

        self.assertEqual(1, len(directory))

        tracked_path_in_db = directory.pop()

        self.assertEqual(
            CSV_ALL_DOMAINS.splitlines(),
            tracked_path_in_db.read_text().splitlines(),
        )
        # Commit the CSV so it is not picked up by the checks for untracked
        # changes.
        self._git('add', '--all')
        self._git('commit', '-m', 'Adding a CSV to a new commit.')
        # Retrieve the CSV from the earlier commit and discard the tokens that
        # are in CSV_ALL_DOMAINS but not in CSV_TEST_DOMAIN.
        run_cli(
            'add',
            '--database',
            self._db_dir,
            '--discard-temporary',
            'HEAD~2',
            f'{self._elf}#TEST_DOMAIN',
        )
        directory = list(self._db_dir.iterdir())

        self.assertEqual(1, len(directory))

        reused_path_in_db = directory.pop()

        self.assertEqual(
            CSV_TEST_DOMAIN.splitlines(),
            reused_path_in_db.read_text().splitlines(),
        )


if __name__ == '__main__':
    unittest.main()