1 import compileall
2 import contextlib
3 import filecmp
4 import importlib.util
5 import io
6 import os
7 import pathlib
8 import py_compile
9 import shutil
10 import struct
11 import sys
12 import tempfile
13 import test.test_importlib.util
14 import time
15 import unittest
16
17 from unittest import mock, skipUnless
18 try:
19 # compileall relies on ProcessPoolExecutor if ProcessPoolExecutor exists
20 # and it can function.
21 from multiprocessing.util import _cleanup_tests as multiprocessing_cleanup_tests
22 from concurrent.futures import ProcessPoolExecutor
23 from concurrent.futures.process import _check_system_limits
24 _check_system_limits()
25 _have_multiprocessing = True
26 except (NotImplementedError, ModuleNotFoundError):
27 _have_multiprocessing = False
28
29 from test import support
30 from test.support import os_helper
31 from test.support import script_helper
32 from test.test_py_compile import without_source_date_epoch
33 from test.test_py_compile import SourceDateEpochTestMeta
34
35
def get_pyc(script, opt):
    """Return the cached bytecode path for *script* at optimization *opt*.

    Falsy levels (None and 0) map to '', the suffix-less cache name.
    """
    level = opt if opt else ''
    return importlib.util.cache_from_source(script, optimization=level)
41
42
def get_pycs(script):
    """Return the cache paths for *script* at optimization levels 0, 1, 2."""
    return [get_pyc(script, level) for level in range(3)]
45
46
def is_hardlink(filename1, filename2):
    """Return True if the two paths refer to the same inode (hardlink)."""
    return os.stat(filename1).st_ino == os.stat(filename2).st_ino
52
53
54 class ESC[4;38;5;81mCompileallTestsBase:
55
    def setUp(self):
        # Scratch tree: two modules at the top level and one in a
        # subdirectory, plus the expected PEP 3147 bytecode paths for the
        # first two.
        self.directory = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.directory)

        self.source_path = os.path.join(self.directory, '_test.py')
        self.bc_path = importlib.util.cache_from_source(self.source_path)
        with open(self.source_path, 'w', encoding="utf-8") as file:
            file.write('x = 123\n')
        self.source_path2 = os.path.join(self.directory, '_test2.py')
        self.bc_path2 = importlib.util.cache_from_source(self.source_path2)
        shutil.copyfile(self.source_path, self.source_path2)
        self.subdirectory = os.path.join(self.directory, '_subdir')
        os.mkdir(self.subdirectory)
        self.source_path3 = os.path.join(self.subdirectory, '_test3.py')
        shutil.copyfile(self.source_path, self.source_path3)
71
72 def add_bad_source_file(self):
73 self.bad_source_path = os.path.join(self.directory, '_test_bad.py')
74 with open(self.bad_source_path, 'w', encoding="utf-8") as file:
75 file.write('x (\n')
76
    def timestamp_metadata(self):
        """Return (actual, expected) 12-byte pyc headers for self.bc_path.

        Layout for timestamp-invalidation pycs: 4-byte magic number,
        4-byte flags word (zero), 4-byte source mtime (truncated to
        32 bits, matching what the writer stores).
        """
        with open(self.bc_path, 'rb') as file:
            data = file.read(12)
        mtime = int(os.stat(self.source_path).st_mtime)
        compare = struct.pack('<4sLL', importlib.util.MAGIC_NUMBER, 0,
                              mtime & 0xFFFF_FFFF)
        return data, compare
84
85 def test_year_2038_mtime_compilation(self):
86 # Test to make sure we can handle mtimes larger than what a 32-bit
87 # signed number can hold as part of bpo-34990
88 try:
89 os.utime(self.source_path, (2**32 - 1, 2**32 - 1))
90 except (OverflowError, OSError):
91 self.skipTest("filesystem doesn't support timestamps near 2**32")
92 with contextlib.redirect_stdout(io.StringIO()):
93 self.assertTrue(compileall.compile_file(self.source_path))
94
95 def test_larger_than_32_bit_times(self):
96 # This is similar to the test above but we skip it if the OS doesn't
97 # support modification times larger than 32-bits.
98 try:
99 os.utime(self.source_path, (2**35, 2**35))
100 except (OverflowError, OSError):
101 self.skipTest("filesystem doesn't support large timestamps")
102 with contextlib.redirect_stdout(io.StringIO()):
103 self.assertTrue(compileall.compile_file(self.source_path))
104
105 def recreation_check(self, metadata):
106 """Check that compileall recreates bytecode when the new metadata is
107 used."""
108 if os.environ.get('SOURCE_DATE_EPOCH'):
109 raise unittest.SkipTest('SOURCE_DATE_EPOCH is set')
110 py_compile.compile(self.source_path)
111 self.assertEqual(*self.timestamp_metadata())
112 with open(self.bc_path, 'rb') as file:
113 bc = file.read()[len(metadata):]
114 with open(self.bc_path, 'wb') as file:
115 file.write(metadata)
116 file.write(bc)
117 self.assertNotEqual(*self.timestamp_metadata())
118 compileall.compile_dir(self.directory, force=False, quiet=True)
119 self.assertTrue(*self.timestamp_metadata())
120
121 def test_mtime(self):
122 # Test a change in mtime leads to a new .pyc.
123 self.recreation_check(struct.pack('<4sLL', importlib.util.MAGIC_NUMBER,
124 0, 1))
125
    def test_magic_number(self):
        # Test that a change in the magic number leads to a new .pyc.
        self.recreation_check(b'\0\0\0\0')
129
130 def test_compile_files(self):
131 # Test compiling a single file, and complete directory
132 for fn in (self.bc_path, self.bc_path2):
133 try:
134 os.unlink(fn)
135 except:
136 pass
137 self.assertTrue(compileall.compile_file(self.source_path,
138 force=False, quiet=True))
139 self.assertTrue(os.path.isfile(self.bc_path) and
140 not os.path.isfile(self.bc_path2))
141 os.unlink(self.bc_path)
142 self.assertTrue(compileall.compile_dir(self.directory, force=False,
143 quiet=True))
144 self.assertTrue(os.path.isfile(self.bc_path) and
145 os.path.isfile(self.bc_path2))
146 os.unlink(self.bc_path)
147 os.unlink(self.bc_path2)
148 # Test against bad files
149 self.add_bad_source_file()
150 self.assertFalse(compileall.compile_file(self.bad_source_path,
151 force=False, quiet=2))
152 self.assertFalse(compileall.compile_dir(self.directory,
153 force=False, quiet=2))
154
155 def test_compile_file_pathlike(self):
156 self.assertFalse(os.path.isfile(self.bc_path))
157 # we should also test the output
158 with support.captured_stdout() as stdout:
159 self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path)))
160 self.assertRegex(stdout.getvalue(), r'Compiling ([^WindowsPath|PosixPath].*)')
161 self.assertTrue(os.path.isfile(self.bc_path))
162
163 def test_compile_file_pathlike_ddir(self):
164 self.assertFalse(os.path.isfile(self.bc_path))
165 self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path),
166 ddir=pathlib.Path('ddir_path'),
167 quiet=2))
168 self.assertTrue(os.path.isfile(self.bc_path))
169
170 def test_compile_file_pathlike_stripdir(self):
171 self.assertFalse(os.path.isfile(self.bc_path))
172 self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path),
173 stripdir=pathlib.Path('stripdir_path'),
174 quiet=2))
175 self.assertTrue(os.path.isfile(self.bc_path))
176
177 def test_compile_file_pathlike_prependdir(self):
178 self.assertFalse(os.path.isfile(self.bc_path))
179 self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path),
180 prependdir=pathlib.Path('prependdir_path'),
181 quiet=2))
182 self.assertTrue(os.path.isfile(self.bc_path))
183
    def test_compile_path(self):
        # compile_path() walks sys.path; point the import state at our
        # scratch directory only.
        with test.test_importlib.util.import_state(path=[self.directory]):
            self.assertTrue(compileall.compile_path(quiet=2))

        with test.test_importlib.util.import_state(path=[self.directory]):
            # A file with a syntax error must make compile_path() fail.
            self.add_bad_source_file()
            self.assertFalse(compileall.compile_path(skip_curdir=False,
                                                     force=True, quiet=2))
192
193 def test_no_pycache_in_non_package(self):
194 # Bug 8563 reported that __pycache__ directories got created by
195 # compile_file() for non-.py files.
196 data_dir = os.path.join(self.directory, 'data')
197 data_file = os.path.join(data_dir, 'file')
198 os.mkdir(data_dir)
199 # touch data/file
200 with open(data_file, 'wb'):
201 pass
202 compileall.compile_file(data_file)
203 self.assertFalse(os.path.exists(os.path.join(data_dir, '__pycache__')))
204
205
    def test_compile_file_encoding_fallback(self):
        # Bug 44666 reported that compile_file failed when sys.stdout.encoding is None
        self.add_bad_source_file()
        # io.StringIO reports encoding=None, exercising the fallback path
        # while the syntax error is printed.
        with contextlib.redirect_stdout(io.StringIO()):
            self.assertFalse(compileall.compile_file(self.bad_source_path))
211
212
213 def test_optimize(self):
214 # make sure compiling with different optimization settings than the
215 # interpreter's creates the correct file names
216 optimize, opt = (1, 1) if __debug__ else (0, '')
217 compileall.compile_dir(self.directory, quiet=True, optimize=optimize)
218 cached = importlib.util.cache_from_source(self.source_path,
219 optimization=opt)
220 self.assertTrue(os.path.isfile(cached))
221 cached2 = importlib.util.cache_from_source(self.source_path2,
222 optimization=opt)
223 self.assertTrue(os.path.isfile(cached2))
224 cached3 = importlib.util.cache_from_source(self.source_path3,
225 optimization=opt)
226 self.assertTrue(os.path.isfile(cached3))
227
228 def test_compile_dir_pathlike(self):
229 self.assertFalse(os.path.isfile(self.bc_path))
230 with support.captured_stdout() as stdout:
231 compileall.compile_dir(pathlib.Path(self.directory))
232 line = stdout.getvalue().splitlines()[0]
233 self.assertRegex(line, r'Listing ([^WindowsPath|PosixPath].*)')
234 self.assertTrue(os.path.isfile(self.bc_path))
235
236 def test_compile_dir_pathlike_stripdir(self):
237 self.assertFalse(os.path.isfile(self.bc_path))
238 self.assertTrue(compileall.compile_dir(pathlib.Path(self.directory),
239 stripdir=pathlib.Path('stripdir_path'),
240 quiet=2))
241 self.assertTrue(os.path.isfile(self.bc_path))
242
243 def test_compile_dir_pathlike_prependdir(self):
244 self.assertFalse(os.path.isfile(self.bc_path))
245 self.assertTrue(compileall.compile_dir(pathlib.Path(self.directory),
246 prependdir=pathlib.Path('prependdir_path'),
247 quiet=2))
248 self.assertTrue(os.path.isfile(self.bc_path))
249
    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor')
    def test_compile_pool_called(self, pool_mock):
        # workers > 1 must route compilation through a process pool.
        compileall.compile_dir(self.directory, quiet=True, workers=5)
        self.assertTrue(pool_mock.called)
255
256 def test_compile_workers_non_positive(self):
257 with self.assertRaisesRegex(ValueError,
258 "workers must be greater or equal to 0"):
259 compileall.compile_dir(self.directory, workers=-1)
260
261 @skipUnless(_have_multiprocessing, "requires multiprocessing")
262 @mock.patch('concurrent.futures.ProcessPoolExecutor')
263 def test_compile_workers_cpu_count(self, pool_mock):
264 compileall.compile_dir(self.directory, quiet=True, workers=0)
265 self.assertEqual(pool_mock.call_args[1]['max_workers'], None)
266
    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor')
    @mock.patch('compileall.compile_file')
    def test_compile_one_worker(self, compile_file_mock, pool_mock):
        # The default (workers=1) must compile in-process, without a pool.
        compileall.compile_dir(self.directory, quiet=True)
        self.assertFalse(pool_mock.called)
        self.assertTrue(compile_file_mock.called)
274
    @skipUnless(_have_multiprocessing, "requires multiprocessing")
    @mock.patch('concurrent.futures.ProcessPoolExecutor', new=None)
    @mock.patch('compileall.compile_file')
    def test_compile_missing_multiprocessing(self, compile_file_mock):
        # With ProcessPoolExecutor unavailable, workers>1 must silently
        # fall back to serial compilation instead of failing.
        compileall.compile_dir(self.directory, quiet=True, workers=5)
        self.assertTrue(compile_file_mock.called)
281
282 def test_compile_dir_maxlevels(self):
283 # Test the actual impact of maxlevels parameter
284 depth = 3
285 path = self.directory
286 for i in range(1, depth + 1):
287 path = os.path.join(path, f"dir_{i}")
288 source = os.path.join(path, 'script.py')
289 os.mkdir(path)
290 shutil.copyfile(self.source_path, source)
291 pyc_filename = importlib.util.cache_from_source(source)
292
293 compileall.compile_dir(self.directory, quiet=True, maxlevels=depth - 1)
294 self.assertFalse(os.path.isfile(pyc_filename))
295
296 compileall.compile_dir(self.directory, quiet=True, maxlevels=depth)
297 self.assertTrue(os.path.isfile(pyc_filename))
298
    def _test_ddir_only(self, *, ddir, parallel=True):
        """Recursive compile_dir ddir must contain package paths; bpo39769."""
        fullpath = ["test", "foo"]
        path = self.directory
        mods = []
        # Build nested packages test/ and test/foo/, each with a module
        # whose execution raises (so we can inspect the traceback path).
        for subdir in fullpath:
            path = os.path.join(path, subdir)
            os.mkdir(path)
            script_helper.make_script(path, "__init__", "")
            mods.append(script_helper.make_script(path, "mod",
                                                  "def fn(): 1/0\nfn()\n"))

        if parallel:
            # Worker processes may linger; make sure they get reaped.
            self.addCleanup(multiprocessing_cleanup_tests)
        compileall.compile_dir(
            self.directory, quiet=True, ddir=ddir,
            workers=2 if parallel else 1)

        self.assertTrue(mods)
        for mod in mods:
            self.assertTrue(mod.startswith(self.directory), mod)
            modcode = importlib.util.cache_from_source(mod)
            modpath = mod[len(self.directory+os.sep):]
            _, _, err = script_helper.assert_python_failure(modcode)
            # ddir must be joined with the *package-relative* path, both in
            # the code object and in the runtime traceback.
            expected_in = os.path.join(ddir, modpath)
            mod_code_obj = test.test_importlib.util.get_code_from_pyc(modcode)
            self.assertEqual(mod_code_obj.co_filename, expected_in)
            self.assertIn(f'"{expected_in}"', os.fsdecode(err))
327
328 def test_ddir_only_one_worker(self):
329 """Recursive compile_dir ddir= contains package paths; bpo39769."""
330 return self._test_ddir_only(ddir="<a prefix>", parallel=False)
331
332 @skipUnless(_have_multiprocessing, "requires multiprocessing")
333 def test_ddir_multiple_workers(self):
334 """Recursive compile_dir ddir= contains package paths; bpo39769."""
335 return self._test_ddir_only(ddir="<a prefix>", parallel=True)
336
337 def test_ddir_empty_only_one_worker(self):
338 """Recursive compile_dir ddir='' contains package paths; bpo39769."""
339 return self._test_ddir_only(ddir="", parallel=False)
340
341 @skipUnless(_have_multiprocessing, "requires multiprocessing")
342 def test_ddir_empty_multiple_workers(self):
343 """Recursive compile_dir ddir='' contains package paths; bpo39769."""
344 return self._test_ddir_only(ddir="", parallel=True)
345
346 def test_strip_only(self):
347 fullpath = ["test", "build", "real", "path"]
348 path = os.path.join(self.directory, *fullpath)
349 os.makedirs(path)
350 script = script_helper.make_script(path, "test", "1 / 0")
351 bc = importlib.util.cache_from_source(script)
352 stripdir = os.path.join(self.directory, *fullpath[:2])
353 compileall.compile_dir(path, quiet=True, stripdir=stripdir)
354 rc, out, err = script_helper.assert_python_failure(bc)
355 expected_in = os.path.join(*fullpath[2:])
356 self.assertIn(
357 expected_in,
358 str(err, encoding=sys.getdefaultencoding())
359 )
360 self.assertNotIn(
361 stripdir,
362 str(err, encoding=sys.getdefaultencoding())
363 )
364
365 def test_prepend_only(self):
366 fullpath = ["test", "build", "real", "path"]
367 path = os.path.join(self.directory, *fullpath)
368 os.makedirs(path)
369 script = script_helper.make_script(path, "test", "1 / 0")
370 bc = importlib.util.cache_from_source(script)
371 prependdir = "/foo"
372 compileall.compile_dir(path, quiet=True, prependdir=prependdir)
373 rc, out, err = script_helper.assert_python_failure(bc)
374 expected_in = os.path.join(prependdir, self.directory, *fullpath)
375 self.assertIn(
376 expected_in,
377 str(err, encoding=sys.getdefaultencoding())
378 )
379
380 def test_strip_and_prepend(self):
381 fullpath = ["test", "build", "real", "path"]
382 path = os.path.join(self.directory, *fullpath)
383 os.makedirs(path)
384 script = script_helper.make_script(path, "test", "1 / 0")
385 bc = importlib.util.cache_from_source(script)
386 stripdir = os.path.join(self.directory, *fullpath[:2])
387 prependdir = "/foo"
388 compileall.compile_dir(path, quiet=True,
389 stripdir=stripdir, prependdir=prependdir)
390 rc, out, err = script_helper.assert_python_failure(bc)
391 expected_in = os.path.join(prependdir, *fullpath[2:])
392 self.assertIn(
393 expected_in,
394 str(err, encoding=sys.getdefaultencoding())
395 )
396 self.assertNotIn(
397 stripdir,
398 str(err, encoding=sys.getdefaultencoding())
399 )
400
401 def test_strip_prepend_and_ddir(self):
402 fullpath = ["test", "build", "real", "path", "ddir"]
403 path = os.path.join(self.directory, *fullpath)
404 os.makedirs(path)
405 script_helper.make_script(path, "test", "1 / 0")
406 with self.assertRaises(ValueError):
407 compileall.compile_dir(path, quiet=True, ddir="/bar",
408 stripdir="/foo", prependdir="/bar")
409
410 def test_multiple_optimization_levels(self):
411 script = script_helper.make_script(self.directory,
412 "test_optimization",
413 "a = 0")
414 bc = []
415 for opt_level in "", 1, 2, 3:
416 bc.append(importlib.util.cache_from_source(script,
417 optimization=opt_level))
418 test_combinations = [[0, 1], [1, 2], [0, 2], [0, 1, 2]]
419 for opt_combination in test_combinations:
420 compileall.compile_file(script, quiet=True,
421 optimize=opt_combination)
422 for opt_level in opt_combination:
423 self.assertTrue(os.path.isfile(bc[opt_level]))
424 try:
425 os.unlink(bc[opt_level])
426 except Exception:
427 pass
428
    @os_helper.skip_unless_symlink
    def test_ignore_symlink_destination(self):
        # Create folders for allowed files, symlinks and prohibited area
        allowed_path = os.path.join(self.directory, "test", "dir", "allowed")
        symlinks_path = os.path.join(self.directory, "test", "dir", "symlinks")
        prohibited_path = os.path.join(self.directory, "test", "dir", "prohibited")
        os.makedirs(allowed_path)
        os.makedirs(symlinks_path)
        os.makedirs(prohibited_path)

        # Create scripts and symlinks and remember their byte-compiled versions
        allowed_script = script_helper.make_script(allowed_path, "test_allowed", "a = 0")
        prohibited_script = script_helper.make_script(prohibited_path, "test_prohibited", "a = 0")
        allowed_symlink = os.path.join(symlinks_path, "test_allowed.py")
        prohibited_symlink = os.path.join(symlinks_path, "test_prohibited.py")
        os.symlink(allowed_script, allowed_symlink)
        os.symlink(prohibited_script, prohibited_symlink)
        allowed_bc = importlib.util.cache_from_source(allowed_symlink)
        prohibited_bc = importlib.util.cache_from_source(prohibited_symlink)

        # limit_sl_dest restricts compilation to symlinks whose target
        # lies under the given directory.
        compileall.compile_dir(symlinks_path, quiet=True, limit_sl_dest=allowed_path)

        self.assertTrue(os.path.isfile(allowed_bc))
        self.assertFalse(os.path.isfile(prohibited_bc))
453
454
class CompileallTestsWithSourceEpoch(CompileallTestsBase,
                                     unittest.TestCase,
                                     metaclass=SourceDateEpochTestMeta,
                                     source_date_epoch=True):
    """Run the shared compileall tests with SOURCE_DATE_EPOCH set."""
460
461
class CompileallTestsWithoutSourceEpoch(CompileallTestsBase,
                                        unittest.TestCase,
                                        metaclass=SourceDateEpochTestMeta,
                                        source_date_epoch=False):
    """Run the shared compileall tests with SOURCE_DATE_EPOCH unset."""
467
468
469 # WASI does not have a temp directory and uses cwd instead. The cwd contains
470 # non-ASCII chars, so _walk_dir() fails to encode self.directory.
471 @unittest.skipIf(support.is_wasi, "tempdir is not encodable on WASI")
class EncodingTest(unittest.TestCase):
    """Issue 6716: compileall should escape source code when printing errors
    to stdout."""

    def setUp(self):
        self.directory = tempfile.mkdtemp()
        self.source_path = os.path.join(self.directory, '_test.py')
        # Python 2 print syntax plus a non-ASCII character: guaranteed
        # SyntaxError whose offending line cannot be encoded to ASCII.
        with open(self.source_path, 'w', encoding='utf-8') as stream:
            stream.write('# -*- coding: utf-8 -*-\n')
            stream.write('print u"\u20ac"\n')

    def tearDown(self):
        shutil.rmtree(self.directory)

    def test_error(self):
        # Printing the error report to an ASCII-only stdout must not raise.
        orig_stdout = sys.stdout
        try:
            sys.stdout = io.TextIOWrapper(io.BytesIO(), encoding='ascii')
            compileall.compile_dir(self.directory)
        finally:
            sys.stdout = orig_stdout
493
494
495 class ESC[4;38;5;81mCommandLineTestsBase:
496 """Test compileall's CLI."""
497
    def setUp(self):
        # Throw-away tree: one package ('foo') with __init__.py and bar.py.
        self.directory = tempfile.mkdtemp()
        self.addCleanup(os_helper.rmtree, self.directory)
        self.pkgdir = os.path.join(self.directory, 'foo')
        os.mkdir(self.pkgdir)
        self.pkgdir_cachedir = os.path.join(self.pkgdir, '__pycache__')
        # Create the __init__.py and a package module.
        self.initfn = script_helper.make_script(self.pkgdir, '__init__', '')
        self.barfn = script_helper.make_script(self.pkgdir, 'bar', '')
507
    @contextlib.contextmanager
    def temporary_pycache_prefix(self):
        """Adjust and restore sys.pycache_prefix."""
        old_prefix = sys.pycache_prefix
        new_prefix = os.path.join(self.directory, '__testcache__')
        try:
            sys.pycache_prefix = new_prefix
            # Yield the env vars a child interpreter needs to share this
            # process's cache prefix and import path.
            yield {
                'PYTHONPATH': self.directory,
                'PYTHONPYCACHEPREFIX': new_prefix,
            }
        finally:
            sys.pycache_prefix = old_prefix
521
    def _get_run_args(self, args):
        """Build interpreter arguments to run compileall as a module."""
        # -S keeps site imports out of the child to speed up the run.
        return [*support.optim_args_from_interpreter_flags(),
                '-S', '-m', 'compileall',
                *args]
526
    def assertRunOK(self, *args, **env_vars):
        # Run the compileall CLI; require success and an empty stderr,
        # returning captured stdout for further assertions.
        rc, out, err = script_helper.assert_python_ok(
            *self._get_run_args(args), **env_vars,
            PYTHONIOENCODING='utf-8')
        self.assertEqual(b'', err)
        return out
533
    def assertRunNotOK(self, *args, **env_vars):
        # Run the compileall CLI, expecting a nonzero exit status.
        rc, out, err = script_helper.assert_python_failure(
            *self._get_run_args(args), **env_vars,
            PYTHONIOENCODING='utf-8')
        return rc, out, err
539
540 def assertCompiled(self, fn):
541 path = importlib.util.cache_from_source(fn)
542 self.assertTrue(os.path.exists(path))
543
544 def assertNotCompiled(self, fn):
545 path = importlib.util.cache_from_source(fn)
546 self.assertFalse(os.path.exists(path))
547
    def test_no_args_compiles_path(self):
        # Note that -l is implied for the no args case.
        bazfn = script_helper.make_script(self.directory, 'baz', '')
        with self.temporary_pycache_prefix() as env:
            self.assertRunOK(**env)
            self.assertCompiled(bazfn)
            # The package's contents are below the sys.path entry and -l
            # suppresses recursion, so they stay uncompiled.
            self.assertNotCompiled(self.initfn)
            self.assertNotCompiled(self.barfn)
556
    @without_source_date_epoch # timestamp invalidation test
    @support.requires_resource('cpu')
    def test_no_args_respects_force_flag(self):
        # With no path arguments, -f must still force recompilation.
        bazfn = script_helper.make_script(self.directory, 'baz', '')
        with self.temporary_pycache_prefix() as env:
            self.assertRunOK(**env)
            pycpath = importlib.util.cache_from_source(bazfn)
            # Set atime/mtime backward to avoid file timestamp resolution issues
            os.utime(pycpath, (time.time()-60,)*2)
            mtime = os.stat(pycpath).st_mtime
            # Without force, no recompilation
            self.assertRunOK(**env)
            mtime2 = os.stat(pycpath).st_mtime
            self.assertEqual(mtime, mtime2)
            # Now force it.
            self.assertRunOK('-f', **env)
            mtime2 = os.stat(pycpath).st_mtime
            self.assertNotEqual(mtime, mtime2)
575
    @support.requires_resource('cpu')
    def test_no_args_respects_quiet_flag(self):
        script_helper.make_script(self.directory, 'baz', '')
        with self.temporary_pycache_prefix() as env:
            # The default run announces each directory; -q silences that.
            noisy = self.assertRunOK(**env)
            self.assertIn(b'Listing ', noisy)
            quiet = self.assertRunOK('-q', **env)
            self.assertNotIn(b'Listing ', quiet)
584
    # Ensure that the default behavior of compileall's CLI is to create
    # PEP 3147/PEP 488 pyc files.
    for name, ext, switch in [
        ('normal', 'pyc', []),
        ('optimize', 'opt-1.pyc', ['-O']),
        ('doubleoptimize', 'opt-2.pyc', ['-OO']),
    ]:
        # ext/switch are bound as default arguments to sidestep the
        # late-binding-closure pitfall; each generated test keeps its pair.
        def f(self, ext=ext, switch=switch):
            script_helper.assert_python_ok(*(switch +
                ['-m', 'compileall', '-q', self.pkgdir]))
            # Verify the __pycache__ directory contents.
            self.assertTrue(os.path.exists(self.pkgdir_cachedir))
            expected = sorted(base.format(sys.implementation.cache_tag, ext)
                              for base in ('__init__.{}.{}', 'bar.{}.{}'))
            self.assertEqual(sorted(os.listdir(self.pkgdir_cachedir)), expected)
            # Make sure there are no .pyc files in the source directory.
            self.assertFalse([fn for fn in os.listdir(self.pkgdir)
                              if fn.endswith(ext)])
        # Install the generated function as a test method on the class.
        locals()['test_pep3147_paths_' + name] = f
604
    def test_legacy_paths(self):
        # Ensure that with the proper switch, compileall leaves legacy
        # pyc files, and no __pycache__ directory.
        self.assertRunOK('-b', '-q', self.pkgdir)
        # Verify the __pycache__ directory contents.
        self.assertFalse(os.path.exists(self.pkgdir_cachedir))
        # Legacy pyc files live directly next to their sources.
        expected = sorted(['__init__.py', '__init__.pyc', 'bar.py',
                           'bar.pyc'])
        self.assertEqual(sorted(os.listdir(self.pkgdir)), expected)
614
    def test_multiple_runs(self):
        # Bug 8527 reported that multiple calls produced empty
        # __pycache__/__pycache__ directories.
        self.assertRunOK('-q', self.pkgdir)
        # Verify the __pycache__ directory contents.
        self.assertTrue(os.path.exists(self.pkgdir_cachedir))
        cachecachedir = os.path.join(self.pkgdir_cachedir, '__pycache__')
        self.assertFalse(os.path.exists(cachecachedir))
        # Call compileall again: the cache dir itself must not be treated
        # as a source directory.
        self.assertRunOK('-q', self.pkgdir)
        self.assertTrue(os.path.exists(self.pkgdir_cachedir))
        self.assertFalse(os.path.exists(cachecachedir))
627
    @without_source_date_epoch # timestamp invalidation test
    def test_force(self):
        # -f recompiles even when the timestamps claim the pyc is current.
        self.assertRunOK('-q', self.pkgdir)
        pycpath = importlib.util.cache_from_source(self.barfn)
        # set atime/mtime backward to avoid file timestamp resolution issues
        os.utime(pycpath, (time.time()-60,)*2)
        mtime = os.stat(pycpath).st_mtime
        # without force, no recompilation
        self.assertRunOK('-q', self.pkgdir)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertEqual(mtime, mtime2)
        # now force it.
        self.assertRunOK('-q', '-f', self.pkgdir)
        mtime2 = os.stat(pycpath).st_mtime
        self.assertNotEqual(mtime, mtime2)
643
    def test_recursion_control(self):
        # -l limits compilation to the listed directory (no recursion).
        subpackage = os.path.join(self.pkgdir, 'spam')
        os.mkdir(subpackage)
        subinitfn = script_helper.make_script(subpackage, '__init__', '')
        hamfn = script_helper.make_script(subpackage, 'ham', '')
        self.assertRunOK('-q', '-l', self.pkgdir)
        self.assertNotCompiled(subinitfn)
        self.assertFalse(os.path.exists(os.path.join(subpackage, '__pycache__')))
        # Without -l the subpackage is compiled too.
        self.assertRunOK('-q', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
655
    def test_recursion_limit(self):
        # -r N bounds the recursion depth below the given directory.
        subpackage = os.path.join(self.pkgdir, 'spam')
        subpackage2 = os.path.join(subpackage, 'ham')
        subpackage3 = os.path.join(subpackage2, 'eggs')
        for pkg in (subpackage, subpackage2, subpackage3):
            script_helper.make_pkg(pkg)

        subinitfn = os.path.join(subpackage, '__init__.py')
        hamfn = script_helper.make_script(subpackage, 'ham', '')
        spamfn = script_helper.make_script(subpackage2, 'spam', '')
        eggfn = script_helper.make_script(subpackage3, 'egg', '')

        # -r 0: only the top-level directory itself.
        self.assertRunOK('-q', '-r 0', self.pkgdir)
        self.assertNotCompiled(subinitfn)
        self.assertFalse(
            os.path.exists(os.path.join(subpackage, '__pycache__')))

        self.assertRunOK('-q', '-r 1', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertNotCompiled(spamfn)

        self.assertRunOK('-q', '-r 2', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertCompiled(spamfn)
        self.assertNotCompiled(eggfn)

        # A limit beyond the actual depth compiles everything.
        self.assertRunOK('-q', '-r 5', self.pkgdir)
        self.assertCompiled(subinitfn)
        self.assertCompiled(hamfn)
        self.assertCompiled(spamfn)
        self.assertCompiled(eggfn)
689
    @os_helper.skip_unless_symlink
    def test_symlink_loop(self):
        # Currently, compileall ignores symlinks to directories.
        # If that limitation is ever lifted, it should protect against
        # recursion in symlink loops.
        pkg = os.path.join(self.pkgdir, 'spam')
        script_helper.make_pkg(pkg)
        os.symlink('.', os.path.join(pkg, 'evil'))
        os.symlink('.', os.path.join(pkg, 'evil2'))
        self.assertRunOK('-q', self.pkgdir)
        # The symlinked path resolves to spam/__init__.py, which must have
        # been compiled exactly as if addressed directly.
        self.assertCompiled(os.path.join(
            self.pkgdir, 'spam', 'evil', 'evil2', '__init__.py'
        ))
703
    def test_quiet(self):
        # -q suppresses the normal listing/compiling chatter entirely.
        noisy = self.assertRunOK(self.pkgdir)
        quiet = self.assertRunOK('-q', self.pkgdir)
        self.assertNotEqual(b'', noisy)
        self.assertEqual(b'', quiet)
709
    def test_silent(self):
        # -q still prints errors; -qq prints nothing at all.
        script_helper.make_script(self.pkgdir, 'crunchyfrog', 'bad(syntax')
        _, quiet, _ = self.assertRunNotOK('-q', self.pkgdir)
        _, silent, _ = self.assertRunNotOK('-qq', self.pkgdir)
        self.assertNotEqual(b'', quiet)
        self.assertEqual(b'', silent)
716
    def test_regexp(self):
        # -x skips any path matching the regex (here: basenames in 'ba*').
        self.assertRunOK('-q', '-x', r'ba[^\\/]*$', self.pkgdir)
        self.assertNotCompiled(self.barfn)
        self.assertCompiled(self.initfn)
721
    def test_multiple_dirs(self):
        # All directory arguments passed on the command line are compiled.
        pkgdir2 = os.path.join(self.directory, 'foo2')
        os.mkdir(pkgdir2)
        init2fn = script_helper.make_script(pkgdir2, '__init__', '')
        bar2fn = script_helper.make_script(pkgdir2, 'bar2', '')
        self.assertRunOK('-q', self.pkgdir, pkgdir2)
        self.assertCompiled(self.initfn)
        self.assertCompiled(self.barfn)
        self.assertCompiled(init2fn)
        self.assertCompiled(bar2fn)
732
    def test_d_compile_error(self):
        # -d substitutes the given path into compile-time error output.
        script_helper.make_script(self.pkgdir, 'crunchyfrog', 'bad(syntax')
        rc, out, err = self.assertRunNotOK('-q', '-d', 'dinsdale', self.pkgdir)
        self.assertRegex(out, b'File "dinsdale')
737
    def test_d_runtime_error(self):
        # -d also shows up in runtime tracebacks of the compiled pyc.
        bazfn = script_helper.make_script(self.pkgdir, 'baz', 'raise Exception')
        self.assertRunOK('-q', '-d', 'dinsdale', self.pkgdir)
        fn = script_helper.make_script(self.pkgdir, 'bing', 'import baz')
        pyc = importlib.util.cache_from_source(bazfn)
        # Promote the pyc to a legacy path and drop the source so the pyc
        # itself is what gets imported.
        os.rename(pyc, os.path.join(self.pkgdir, 'baz.pyc'))
        os.remove(bazfn)
        rc, out, err = script_helper.assert_python_failure(fn, __isolated=False)
        self.assertRegex(err, b'File "dinsdale')
747
    def test_include_bad_file(self):
        # A missing file in the -i list is reported without a traceback.
        rc, out, err = self.assertRunNotOK(
            '-i', os.path.join(self.directory, 'nosuchfile'), self.pkgdir)
        self.assertRegex(out, b'rror.*nosuchfile')
        self.assertNotRegex(err, b'Traceback')
        # NOTE(review): cache_from_source() of a directory path looks odd;
        # presumably this just asserts no stray cache entry — confirm intent.
        self.assertFalse(os.path.exists(importlib.util.cache_from_source(
            self.pkgdir_cachedir)))
755
    def test_include_file_with_arg(self):
        # -i adds the listed files to any files given as arguments.
        f1 = script_helper.make_script(self.pkgdir, 'f1', '')
        f2 = script_helper.make_script(self.pkgdir, 'f2', '')
        f3 = script_helper.make_script(self.pkgdir, 'f3', '')
        f4 = script_helper.make_script(self.pkgdir, 'f4', '')
        with open(os.path.join(self.directory, 'l1'), 'w', encoding="utf-8") as l1:
            l1.write(os.path.join(self.pkgdir, 'f1.py')+os.linesep)
            l1.write(os.path.join(self.pkgdir, 'f2.py')+os.linesep)
        self.assertRunOK('-i', os.path.join(self.directory, 'l1'), f4)
        self.assertCompiled(f1)
        self.assertCompiled(f2)
        self.assertNotCompiled(f3)
        self.assertCompiled(f4)
769
    def test_include_file_no_arg(self):
        # With no positional arguments, only the files in the -i list are
        # compiled.
        f1 = script_helper.make_script(self.pkgdir, 'f1', '')
        f2 = script_helper.make_script(self.pkgdir, 'f2', '')
        f3 = script_helper.make_script(self.pkgdir, 'f3', '')
        f4 = script_helper.make_script(self.pkgdir, 'f4', '')
        with open(os.path.join(self.directory, 'l1'), 'w', encoding="utf-8") as l1:
            l1.write(os.path.join(self.pkgdir, 'f2.py')+os.linesep)
        self.assertRunOK('-i', os.path.join(self.directory, 'l1'))
        self.assertNotCompiled(f1)
        self.assertCompiled(f2)
        self.assertNotCompiled(f3)
        self.assertNotCompiled(f4)
782
    def test_include_on_stdin(self):
        # '-i -' reads the list of files to compile from standard input.
        f1 = script_helper.make_script(self.pkgdir, 'f1', '')
        f2 = script_helper.make_script(self.pkgdir, 'f2', '')
        f3 = script_helper.make_script(self.pkgdir, 'f3', '')
        f4 = script_helper.make_script(self.pkgdir, 'f4', '')
        p = script_helper.spawn_python(*(self._get_run_args(()) + ['-i', '-']))
        p.stdin.write((f3+os.linesep).encode('ascii'))
        script_helper.kill_python(p)
        self.assertNotCompiled(f1)
        self.assertNotCompiled(f2)
        self.assertCompiled(f3)
        self.assertNotCompiled(f4)
795
    def test_compiles_as_much_as_possible(self):
        # One bad file must not stop the remaining files from compiling.
        bingfn = script_helper.make_script(self.pkgdir, 'bing', 'syntax(error')
        rc, out, err = self.assertRunNotOK('nosuchfile', self.initfn,
                                           bingfn, self.barfn)
        self.assertRegex(out, b'rror')
        self.assertNotCompiled(bingfn)
        self.assertCompiled(self.initfn)
        self.assertCompiled(self.barfn)
804
    def test_invalid_arg_produces_message(self):
        # A nonexistent path is reported on stdout but does not crash.
        out = self.assertRunOK('badfilename')
        self.assertRegex(out, b"Can't list 'badfilename'")
808
809 def test_pyc_invalidation_mode(self):
810 script_helper.make_script(self.pkgdir, 'f1', '')
811 pyc = importlib.util.cache_from_source(
812 os.path.join(self.pkgdir, 'f1.py'))
813 self.assertRunOK('--invalidation-mode=checked-hash', self.pkgdir)
814 with open(pyc, 'rb') as fp:
815 data = fp.read()
816 self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b11)
817 self.assertRunOK('--invalidation-mode=unchecked-hash', self.pkgdir)
818 with open(pyc, 'rb') as fp:
819 data = fp.read()
820 self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b01)
821
822 @skipUnless(_have_multiprocessing, "requires multiprocessing")
823 def test_workers(self):
824 bar2fn = script_helper.make_script(self.directory, 'bar2', '')
825 files = []
826 for suffix in range(5):
827 pkgdir = os.path.join(self.directory, 'foo{}'.format(suffix))
828 os.mkdir(pkgdir)
829 fn = script_helper.make_script(pkgdir, '__init__', '')
830 files.append(script_helper.make_script(pkgdir, 'bar2', ''))
831
832 self.assertRunOK(self.directory, '-j', '0')
833 self.assertCompiled(bar2fn)
834 for file in files:
835 self.assertCompiled(file)
836
837 @mock.patch('compileall.compile_dir')
838 def test_workers_available_cores(self, compile_dir):
839 with mock.patch("sys.argv",
840 new=[sys.executable, self.directory, "-j0"]):
841 compileall.main()
842 self.assertTrue(compile_dir.called)
843 self.assertEqual(compile_dir.call_args[-1]['workers'], 0)
844
845 def test_strip_and_prepend(self):
846 fullpath = ["test", "build", "real", "path"]
847 path = os.path.join(self.directory, *fullpath)
848 os.makedirs(path)
849 script = script_helper.make_script(path, "test", "1 / 0")
850 bc = importlib.util.cache_from_source(script)
851 stripdir = os.path.join(self.directory, *fullpath[:2])
852 prependdir = "/foo"
853 self.assertRunOK("-s", stripdir, "-p", prependdir, path)
854 rc, out, err = script_helper.assert_python_failure(bc)
855 expected_in = os.path.join(prependdir, *fullpath[2:])
856 self.assertIn(
857 expected_in,
858 str(err, encoding=sys.getdefaultencoding())
859 )
860 self.assertNotIn(
861 stripdir,
862 str(err, encoding=sys.getdefaultencoding())
863 )
864
865 def test_multiple_optimization_levels(self):
866 path = os.path.join(self.directory, "optimizations")
867 os.makedirs(path)
868 script = script_helper.make_script(path,
869 "test_optimization",
870 "a = 0")
871 bc = []
872 for opt_level in "", 1, 2, 3:
873 bc.append(importlib.util.cache_from_source(script,
874 optimization=opt_level))
875 test_combinations = [["0", "1"],
876 ["1", "2"],
877 ["0", "2"],
878 ["0", "1", "2"]]
879 for opt_combination in test_combinations:
880 self.assertRunOK(path, *("-o" + str(n) for n in opt_combination))
881 for opt_level in opt_combination:
882 self.assertTrue(os.path.isfile(bc[int(opt_level)]))
883 try:
884 os.unlink(bc[opt_level])
885 except Exception:
886 pass
887
888 @os_helper.skip_unless_symlink
889 def test_ignore_symlink_destination(self):
890 # Create folders for allowed files, symlinks and prohibited area
891 allowed_path = os.path.join(self.directory, "test", "dir", "allowed")
892 symlinks_path = os.path.join(self.directory, "test", "dir", "symlinks")
893 prohibited_path = os.path.join(self.directory, "test", "dir", "prohibited")
894 os.makedirs(allowed_path)
895 os.makedirs(symlinks_path)
896 os.makedirs(prohibited_path)
897
898 # Create scripts and symlinks and remember their byte-compiled versions
899 allowed_script = script_helper.make_script(allowed_path, "test_allowed", "a = 0")
900 prohibited_script = script_helper.make_script(prohibited_path, "test_prohibited", "a = 0")
901 allowed_symlink = os.path.join(symlinks_path, "test_allowed.py")
902 prohibited_symlink = os.path.join(symlinks_path, "test_prohibited.py")
903 os.symlink(allowed_script, allowed_symlink)
904 os.symlink(prohibited_script, prohibited_symlink)
905 allowed_bc = importlib.util.cache_from_source(allowed_symlink)
906 prohibited_bc = importlib.util.cache_from_source(prohibited_symlink)
907
908 self.assertRunOK(symlinks_path, "-e", allowed_path)
909
910 self.assertTrue(os.path.isfile(allowed_bc))
911 self.assertFalse(os.path.isfile(prohibited_bc))
912
913 def test_hardlink_bad_args(self):
914 # Bad arguments combination, hardlink deduplication make sense
915 # only for more than one optimization level
916 self.assertRunNotOK(self.directory, "-o 1", "--hardlink-dupes")
917
918 def test_hardlink(self):
919 # 'a = 0' code produces the same bytecode for the 3 optimization
920 # levels. All three .pyc files must have the same inode (hardlinks).
921 #
922 # If deduplication is disabled, all pyc files must have different
923 # inodes.
924 for dedup in (True, False):
925 with tempfile.TemporaryDirectory() as path:
926 with self.subTest(dedup=dedup):
927 script = script_helper.make_script(path, "script", "a = 0")
928 pycs = get_pycs(script)
929
930 args = ["-q", "-o 0", "-o 1", "-o 2"]
931 if dedup:
932 args.append("--hardlink-dupes")
933 self.assertRunOK(path, *args)
934
935 self.assertEqual(is_hardlink(pycs[0], pycs[1]), dedup)
936 self.assertEqual(is_hardlink(pycs[1], pycs[2]), dedup)
937 self.assertEqual(is_hardlink(pycs[0], pycs[2]), dedup)
938
939
940 class ESC[4;38;5;81mCommandLineTestsWithSourceEpoch(ESC[4;38;5;149mCommandLineTestsBase,
941 ESC[4;38;5;149munittestESC[4;38;5;149m.ESC[4;38;5;149mTestCase,
942 metaclass=ESC[4;38;5;149mSourceDateEpochTestMeta,
943 source_date_epoch=ESC[4;38;5;149mTrue):
944 pass
945
946
947 class ESC[4;38;5;81mCommandLineTestsNoSourceEpoch(ESC[4;38;5;149mCommandLineTestsBase,
948 ESC[4;38;5;149munittestESC[4;38;5;149m.ESC[4;38;5;149mTestCase,
949 metaclass=ESC[4;38;5;149mSourceDateEpochTestMeta,
950 source_date_epoch=ESC[4;38;5;149mFalse):
951 pass
952
953
954
@unittest.skipUnless(hasattr(os, 'link'), 'requires os.link')
class HardlinkDedupTestsBase:
    # Test hardlink_dupes parameter of compileall.compile_dir()

    def setUp(self):
        # Path of the temporary directory currently in use (set by
        # temporary_directory()); None outside of that context.
        self.path = None

    @contextlib.contextmanager
    def temporary_directory(self):
        """Create a temporary directory and expose it as self.path."""
        with tempfile.TemporaryDirectory() as path:
            self.path = path
            yield path
            self.path = None

    def make_script(self, code, name="script"):
        """Write *code* to <self.path>/<name>.py and return its path."""
        return script_helper.make_script(self.path, name, code)

    def compile_dir(self, *, dedup=True, optimize=(0, 1, 2), force=False):
        """Byte-compile self.path at the given optimization levels."""
        compileall.compile_dir(self.path, quiet=True, optimize=optimize,
                               hardlink_dupes=dedup, force=force)

    def test_bad_args(self):
        # Bad arguments combination, hardlink deduplication makes sense
        # only for more than one optimization level
        with self.temporary_directory():
            self.make_script("pass")
            with self.assertRaises(ValueError):
                compileall.compile_dir(self.path, quiet=True, optimize=0,
                                       hardlink_dupes=True)
            with self.assertRaises(ValueError):
                # same optimization level specified twice:
                # compile_dir() removes duplicates
                compileall.compile_dir(self.path, quiet=True, optimize=[0, 0],
                                       hardlink_dupes=True)

    def create_code(self, docstring=False, assertion=False):
        """Return source whose bytecode can vary across optimization levels.

        A module docstring is stripped at level 2 (-OO); an assert
        statement is stripped at levels 1 and 2 (-O and -OO).
        """
        lines = []
        if docstring:
            lines.append("'module docstring'")
        lines.append('x = 1')
        if assertion:
            lines.append("assert x == 1")
        return '\n'.join(lines)

    def iter_codes(self):
        """Yield (code, docstring, assertion) for all four combinations."""
        for docstring in (False, True):
            for assertion in (False, True):
                code = self.create_code(docstring=docstring, assertion=assertion)
                yield (code, docstring, assertion)

    def test_disabled(self):
        # Deduplication disabled, no hardlinks
        for code, docstring, assertion in self.iter_codes():
            with self.subTest(docstring=docstring, assertion=assertion):
                with self.temporary_directory():
                    script = self.make_script(code)
                    pycs = get_pycs(script)
                    self.compile_dir(dedup=False)
                    self.assertFalse(is_hardlink(pycs[0], pycs[1]))
                    self.assertFalse(is_hardlink(pycs[0], pycs[2]))
                    self.assertFalse(is_hardlink(pycs[1], pycs[2]))

    def check_hardlinks(self, script, docstring=False, assertion=False):
        """Assert which pairs of pyc files share an inode.

        Two levels are hardlinked only when their bytecode is identical:
        an assertion distinguishes level 0 from levels 1 and 2, and a
        docstring distinguishes level 2 from levels 0 and 1.
        """
        pycs = get_pycs(script)
        self.assertEqual(is_hardlink(pycs[0], pycs[1]),
                         not assertion)
        self.assertEqual(is_hardlink(pycs[0], pycs[2]),
                         not assertion and not docstring)
        self.assertEqual(is_hardlink(pycs[1], pycs[2]),
                         not docstring)

    def test_hardlink(self):
        # Test deduplication on all combinations
        for code, docstring, assertion in self.iter_codes():
            with self.subTest(docstring=docstring, assertion=assertion):
                with self.temporary_directory():
                    script = self.make_script(code)
                    self.compile_dir()
                    self.check_hardlinks(script, docstring, assertion)

    def test_only_two_levels(self):
        # Don't build the 3 optimization levels, but only 2
        for opts in ((0, 1), (1, 2), (0, 2)):
            with self.subTest(opts=opts):
                with self.temporary_directory():
                    # code with no docstring and no assertion:
                    # same bytecode for all optimization levels
                    script = self.make_script(self.create_code())
                    self.compile_dir(optimize=opts)
                    pyc1 = get_pyc(script, opts[0])
                    pyc2 = get_pyc(script, opts[1])
                    self.assertTrue(is_hardlink(pyc1, pyc2))

    def test_duplicated_levels(self):
        # compile_dir() must not fail if optimize contains duplicated
        # optimization levels and/or if optimization levels are not sorted.
        with self.temporary_directory():
            # code with no docstring and no assertion:
            # same bytecode for all optimization levels
            script = self.make_script(self.create_code())
            self.compile_dir(optimize=[1, 0, 1, 0])
            pyc1 = get_pyc(script, 0)
            pyc2 = get_pyc(script, 1)
            self.assertTrue(is_hardlink(pyc1, pyc2))

    def test_recompilation(self):
        # Test compile_dir() when pyc files already exists and the script
        # content changed
        with self.temporary_directory():
            script = self.make_script("a = 0")
            self.compile_dir()
            # All three levels have the same inode
            self.check_hardlinks(script)

            pycs = get_pycs(script)
            inode = os.stat(pycs[0]).st_ino

            # Change of the module content
            script = self.make_script("print(0)")

            # Recompilation without -o 1
            self.compile_dir(optimize=[0, 2], force=True)

            # opt-1.pyc should have the same inode as before and others should not
            self.assertEqual(inode, os.stat(pycs[1]).st_ino)
            self.assertTrue(is_hardlink(pycs[0], pycs[2]))
            self.assertNotEqual(inode, os.stat(pycs[2]).st_ino)
            # opt-1.pyc and opt-2.pyc have different content
            self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True))

    def test_import(self):
        # Test that import updates a single pyc file when pyc files already
        # exists and the script content changed
        with self.temporary_directory():
            script = self.make_script(self.create_code(), name="module")
            self.compile_dir()
            # All three levels have the same inode
            self.check_hardlinks(script)

            pycs = get_pycs(script)
            inode = os.stat(pycs[0]).st_ino

            # Change of the module content
            script = self.make_script("print(0)", name="module")

            # Import the module in Python with -O (optimization level 1)
            script_helper.assert_python_ok(
                "-O", "-c", "import module", __isolated=False, PYTHONPATH=self.path
            )

            # Only opt-1.pyc is changed
            self.assertEqual(inode, os.stat(pycs[0]).st_ino)
            self.assertEqual(inode, os.stat(pycs[2]).st_ino)
            self.assertFalse(is_hardlink(pycs[1], pycs[2]))
            # opt-1.pyc and opt-2.pyc have different content
            self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True))
1111
1112
1113 class ESC[4;38;5;81mHardlinkDedupTestsWithSourceEpoch(ESC[4;38;5;149mHardlinkDedupTestsBase,
1114 ESC[4;38;5;149munittestESC[4;38;5;149m.ESC[4;38;5;149mTestCase,
1115 metaclass=ESC[4;38;5;149mSourceDateEpochTestMeta,
1116 source_date_epoch=ESC[4;38;5;149mTrue):
1117 pass
1118
1119
1120 class ESC[4;38;5;81mHardlinkDedupTestsNoSourceEpoch(ESC[4;38;5;149mHardlinkDedupTestsBase,
1121 ESC[4;38;5;149munittestESC[4;38;5;149m.ESC[4;38;5;149mTestCase,
1122 metaclass=ESC[4;38;5;149mSourceDateEpochTestMeta,
1123 source_date_epoch=ESC[4;38;5;149mFalse):
1124 pass
1125
1126
# Allow running this test module directly: python test_compileall.py
if __name__ == "__main__":
    unittest.main()