Commit 1110c5b

gh-108303: Move tokenize-related data to Lib/test/tokenizedata (GH-109265)
Parent: 8c813fa

18 files changed (+40 -25 lines)

.gitattributes (+1 -1)

@@ -24,7 +24,7 @@ PC/classicAppCompat.* binary
 [attr]noeol -text
 
 Lib/test/cjkencodings/* noeol
-Lib/test/coding20731.py noeol
+Lib/test/tokenizedata/coding20731.py noeol
 Lib/test/decimaltestdata/*.decTest noeol
 Lib/test/test_email/data/*.txt noeol
 Lib/test/test_importlib/resources/data01/* noeol

.pre-commit-config.yaml (+1 -1)

@@ -5,7 +5,7 @@ repos:
       - id: check-yaml
       - id: end-of-file-fixer
         types: [python]
-        exclude: Lib/test/coding20731.py
+        exclude: Lib/test/tokenizedata/coding20731.py
       - id: trailing-whitespace
         types_or: [c, python, rst]
 

Lib/test/test_py_compile.py (+12 -4)

@@ -132,7 +132,9 @@ def test_exceptions_propagate(self):
         os.chmod(self.directory, mode.st_mode)
 
     def test_bad_coding(self):
-        bad_coding = os.path.join(os.path.dirname(__file__), 'bad_coding2.py')
+        bad_coding = os.path.join(os.path.dirname(__file__),
+                                  'tokenizedata',
+                                  'bad_coding2.py')
         with support.captured_stderr():
             self.assertIsNone(py_compile.compile(bad_coding, doraise=False))
         self.assertFalse(os.path.exists(
@@ -195,7 +197,9 @@ def test_invalidation_mode(self):
         self.assertEqual(flags, 0b1)
 
     def test_quiet(self):
-        bad_coding = os.path.join(os.path.dirname(__file__), 'bad_coding2.py')
+        bad_coding = os.path.join(os.path.dirname(__file__),
+                                  'tokenizedata',
+                                  'bad_coding2.py')
         with support.captured_stderr() as stderr:
             self.assertIsNone(py_compile.compile(bad_coding, doraise=False, quiet=2))
             self.assertIsNone(py_compile.compile(bad_coding, doraise=True, quiet=2))
@@ -260,14 +264,18 @@ def test_with_files(self):
         self.assertTrue(os.path.exists(self.cache_path))
 
     def test_bad_syntax(self):
-        bad_syntax = os.path.join(os.path.dirname(__file__), 'badsyntax_3131.py')
+        bad_syntax = os.path.join(os.path.dirname(__file__),
+                                  'tokenizedata',
+                                  'badsyntax_3131.py')
         rc, stdout, stderr = self.pycompilecmd_failure(bad_syntax)
         self.assertEqual(rc, 1)
         self.assertEqual(stdout, b'')
         self.assertIn(b'SyntaxError', stderr)
 
     def test_bad_syntax_with_quiet(self):
-        bad_syntax = os.path.join(os.path.dirname(__file__), 'badsyntax_3131.py')
+        bad_syntax = os.path.join(os.path.dirname(__file__),
+                                  'tokenizedata',
+                                  'badsyntax_3131.py')
         rc, stdout, stderr = self.pycompilecmd_failure('-q', bad_syntax)
         self.assertEqual(rc, 1)
         self.assertEqual(stdout, b'')
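
For context, a minimal sketch of the py_compile behavior these tests rely on: with doraise=False the error is reported on stderr and compile() returns None, and quiet=2 suppresses the report entirely. The bogus coding cookie below is an illustrative stand-in for the real bad_coding2.py data file:

    import os
    import py_compile
    import tempfile

    # Illustrative stand-in for bad_coding2.py: a bogus coding cookie.
    src = b"# -*- coding: no-such-encoding -*-\nprint('hi')\n"
    with tempfile.NamedTemporaryFile('wb', suffix='.py', delete=False) as f:
        f.write(src)
    try:
        # doraise=False: no exception is raised; quiet=2 also silences stderr.
        assert py_compile.compile(f.name, doraise=False, quiet=2) is None
    finally:
        os.unlink(f.name)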

Lib/test/test_source_encoding.py (+3 -2)

@@ -68,6 +68,7 @@ def test_issue7820(self):
     def test_20731(self):
         sub = subprocess.Popen([sys.executable,
                         os.path.join(os.path.dirname(__file__),
+                                     'tokenizedata',
                                      'coding20731.py')],
                         stderr=subprocess.PIPE)
         err = sub.communicate()[1]
@@ -100,10 +101,10 @@ def test_bad_coding2(self):
         self.verify_bad_module(module_name)
 
     def verify_bad_module(self, module_name):
-        self.assertRaises(SyntaxError, __import__, 'test.' + module_name)
+        self.assertRaises(SyntaxError, __import__, 'test.tokenizedata.' + module_name)
 
         path = os.path.dirname(__file__)
-        filename = os.path.join(path, module_name + '.py')
+        filename = os.path.join(path, 'tokenizedata', module_name + '.py')
         with open(filename, "rb") as fp:
             bytes = fp.read()
         self.assertRaises(SyntaxError, compile, bytes, filename, 'exec')
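
The SyntaxError that verify_bad_module expects comes from the compiler's handling of the source's coding cookie, not from the code itself; a self-contained sketch (the encoding name is made up for illustration):

    # compile() decodes byte source via its coding cookie before parsing, so
    # an unknown encoding fails immediately with SyntaxError.
    bad = b"# -*- coding: no-such-encoding -*-\nprint('hi')\n"
    try:
        compile(bad, '<illustration>', 'exec')
    except SyntaxError as err:
        print(err)  # the message names the unknown encoding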

Lib/test/test_tarfile.py (+18 -11)

@@ -2587,16 +2587,17 @@ def tarfilecmd_failure(self, *args):
         return script_helper.assert_python_failure('-m', 'tarfile', *args)
 
     def make_simple_tarfile(self, tar_name):
-        files = [support.findfile('tokenize_tests.txt'),
+        files = [support.findfile('tokenize_tests.txt',
+                                  subdir='tokenizedata'),
                  support.findfile('tokenize_tests-no-coding-cookie-'
-                                  'and-utf8-bom-sig-only.txt')]
+                                  'and-utf8-bom-sig-only.txt',
+                                  subdir='tokenizedata')]
         self.addCleanup(os_helper.unlink, tar_name)
         with tarfile.open(tar_name, 'w') as tf:
             for tardata in files:
                 tf.add(tardata, arcname=os.path.basename(tardata))
 
     def make_evil_tarfile(self, tar_name):
-        files = [support.findfile('tokenize_tests.txt')]
         self.addCleanup(os_helper.unlink, tar_name)
         with tarfile.open(tar_name, 'w') as tf:
             benign = tarfile.TarInfo('benign')
@@ -2677,9 +2678,11 @@ def test_list_command_invalid_file(self):
         self.assertEqual(rc, 1)
 
     def test_create_command(self):
-        files = [support.findfile('tokenize_tests.txt'),
+        files = [support.findfile('tokenize_tests.txt',
+                                  subdir='tokenizedata'),
                  support.findfile('tokenize_tests-no-coding-cookie-'
-                                  'and-utf8-bom-sig-only.txt')]
+                                  'and-utf8-bom-sig-only.txt',
+                                  subdir='tokenizedata')]
         for opt in '-c', '--create':
             try:
                 out = self.tarfilecmd(opt, tmpname, *files)
@@ -2690,9 +2693,11 @@ def test_create_command(self):
             os_helper.unlink(tmpname)
 
     def test_create_command_verbose(self):
-        files = [support.findfile('tokenize_tests.txt'),
+        files = [support.findfile('tokenize_tests.txt',
+                                  subdir='tokenizedata'),
                  support.findfile('tokenize_tests-no-coding-cookie-'
-                                  'and-utf8-bom-sig-only.txt')]
+                                  'and-utf8-bom-sig-only.txt',
+                                  subdir='tokenizedata')]
         for opt in '-v', '--verbose':
             try:
                 out = self.tarfilecmd(opt, '-c', tmpname, *files,
@@ -2704,7 +2709,7 @@ def test_create_command_verbose(self):
             os_helper.unlink(tmpname)
 
     def test_create_command_dotless_filename(self):
-        files = [support.findfile('tokenize_tests.txt')]
+        files = [support.findfile('tokenize_tests.txt', subdir='tokenizedata')]
         try:
             out = self.tarfilecmd('-c', dotlessname, *files)
             self.assertEqual(out, b'')
@@ -2715,7 +2720,7 @@ def test_create_command_dotless_filename(self):
 
     def test_create_command_dot_started_filename(self):
         tar_name = os.path.join(TEMPDIR, ".testtar")
-        files = [support.findfile('tokenize_tests.txt')]
+        files = [support.findfile('tokenize_tests.txt', subdir='tokenizedata')]
         try:
             out = self.tarfilecmd('-c', tar_name, *files)
             self.assertEqual(out, b'')
@@ -2725,9 +2730,11 @@ def test_create_command_dot_started_filename(self):
             os_helper.unlink(tar_name)
 
     def test_create_command_compressed(self):
-        files = [support.findfile('tokenize_tests.txt'),
+        files = [support.findfile('tokenize_tests.txt',
+                                  subdir='tokenizedata'),
                  support.findfile('tokenize_tests-no-coding-cookie-'
-                                  'and-utf8-bom-sig-only.txt')]
+                                  'and-utf8-bom-sig-only.txt',
+                                  subdir='tokenizedata')]
         for filetype in (GzipTest, Bz2Test, LzmaTest):
             if not filetype.open:
                 continue
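
Every call site above follows the same pattern: the tests now pass findfile()'s subdir argument, which joins the subdirectory onto the filename before searching the test directory and sys.path. A minimal sketch, runnable from a CPython checkout:

    from test import support

    # Resolves to .../Lib/test/tokenizedata/tokenize_tests.txt, the file's
    # new location.
    fn = support.findfile('tokenize_tests.txt', subdir='tokenizedata')
    print(fn)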

Lib/test/test_tokenize.py (+3 -4)

@@ -1200,7 +1200,7 @@ class TestTokenizerAdheresToPep0263(TestCase):
     """
 
     def _testFile(self, filename):
-        path = os.path.join(os.path.dirname(__file__), filename)
+        path = os.path.join(os.path.dirname(__file__), 'tokenizedata', filename)
         with open(path, 'rb') as f:
             TestRoundtrip.check_roundtrip(self, f)
 
@@ -1794,7 +1794,7 @@ def test_roundtrip(self):
 
         self.check_roundtrip("if x == 1 : \n"
                             "  print(x)\n")
-        fn = support.findfile("tokenize_tests.txt")
+        fn = support.findfile("tokenize_tests.txt", subdir="tokenizedata")
         with open(fn, 'rb') as f:
             self.check_roundtrip(f)
         self.check_roundtrip("if x == 1:\n"
@@ -1849,8 +1849,7 @@ def test_random_files(self):
         # pass the '-ucpu' option to process the full directory.
 
         import glob, random
-        fn = support.findfile("tokenize_tests.txt")
-        tempdir = os.path.dirname(fn) or os.curdir
+        tempdir = os.path.dirname(__file__) or os.curdir
         testfiles = glob.glob(os.path.join(glob.escape(tempdir), "test*.py"))
 
         # Tokenize is broken on test_pep3131.py because regular expressions are
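
For reference, the roundtrip property check_roundtrip verifies, in self-contained form (a simple case where untokenize() reproduces the source exactly):

    import io
    import tokenize

    # Tokenize a snippet into full 5-tuple tokens, then feed them back
    # through untokenize(); for well-formed input the source comes back.
    src = "if x == 1:\n    print(x)\n"
    tokens = list(tokenize.generate_tokens(io.StringIO(src).readline))
    assert tokenize.untokenize(tokens) == src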

Lib/test/test_tools/test_reindent.py (+1 -1)

@@ -25,7 +25,7 @@ def test_help(self):
         self.assertGreater(err, b'')
 
     def test_reindent_file_with_bad_encoding(self):
-        bad_coding_path = findfile('bad_coding.py')
+        bad_coding_path = findfile('bad_coding.py', subdir='tokenizedata')
         rc, out, err = assert_python_ok(self.script, '-r', bad_coding_path)
         self.assertEqual(out, b'')
         self.assertNotEqual(err, b'')

Lib/test/test_unicode_identifiers.py (+1 -1)

@@ -19,7 +19,7 @@ def test_non_bmp_normalized(self):
 
     def test_invalid(self):
         try:
-            from test import badsyntax_3131
+            from test.tokenizedata import badsyntax_3131
         except SyntaxError as err:
             self.assertEqual(str(err),
                 "invalid character '€' (U+20AC) (badsyntax_3131.py, line 2)")

Lib/test/tokenizedata/__init__.py

Whitespace-only changes.
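
This new, empty __init__.py is what makes the directory importable as the test.tokenizedata package, which the dotted imports above depend on. A minimal sketch, assuming a CPython checkout on sys.path:

    import importlib

    # With the package marker in place, data modules resolve by dotted name;
    # importing one with bad syntax raises SyntaxError at import time, which
    # is exactly what the updated tests assert.
    try:
        importlib.import_module('test.tokenizedata.badsyntax_3131')
    except SyntaxError as err:
        print(err)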

4 files renamed without changes.
