Skip to content

Commit ed4ffd7

Browse files
vstinner and sobolevn authored
[3.12] gh-108303: Move tokenize-related data to Lib/test/tokenizedata (GH-109265) (#109677)
* gh-108303: Move tokenize-related data to Lib/test/tokenizedata (GH-109265) (cherry picked from commit 1110c5b) * gh-108303: Add `Lib/test/tokenizedata` to `TESTSUBDIRS` (#109314) (cherry picked from commit 42ab2cb) --------- Co-authored-by: Nikita Sobolev <[email protected]>
1 parent 9bceb8a commit ed4ffd7

19 files changed

+41
-25
lines changed

.gitattributes

+1-1
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ PC/classicAppCompat.* binary
2525
[attr]noeol -text
2626

2727
Lib/test/cjkencodings/* noeol
28-
Lib/test/coding20731.py noeol
28+
Lib/test/tokenizedata/coding20731.py noeol
2929
Lib/test/decimaltestdata/*.decTest noeol
3030
Lib/test/test_email/data/*.txt noeol
3131
Lib/test/test_importlib/resources/data01/* noeol

.pre-commit-config.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ repos:
1515
- id: check-yaml
1616
- id: end-of-file-fixer
1717
types: [python]
18-
exclude: Lib/test/coding20731.py
18+
exclude: Lib/test/tokenizedata/coding20731.py
1919
- id: trailing-whitespace
2020
types_or: [c, python, rst]
2121

Lib/test/test_py_compile.py

+12-4
Original file line numberDiff line numberDiff line change
@@ -132,7 +132,9 @@ def test_exceptions_propagate(self):
132132
os.chmod(self.directory, mode.st_mode)
133133

134134
def test_bad_coding(self):
135-
bad_coding = os.path.join(os.path.dirname(__file__), 'bad_coding2.py')
135+
bad_coding = os.path.join(os.path.dirname(__file__),
136+
'tokenizedata',
137+
'bad_coding2.py')
136138
with support.captured_stderr():
137139
self.assertIsNone(py_compile.compile(bad_coding, doraise=False))
138140
self.assertFalse(os.path.exists(
@@ -195,7 +197,9 @@ def test_invalidation_mode(self):
195197
self.assertEqual(flags, 0b1)
196198

197199
def test_quiet(self):
198-
bad_coding = os.path.join(os.path.dirname(__file__), 'bad_coding2.py')
200+
bad_coding = os.path.join(os.path.dirname(__file__),
201+
'tokenizedata',
202+
'bad_coding2.py')
199203
with support.captured_stderr() as stderr:
200204
self.assertIsNone(py_compile.compile(bad_coding, doraise=False, quiet=2))
201205
self.assertIsNone(py_compile.compile(bad_coding, doraise=True, quiet=2))
@@ -260,14 +264,18 @@ def test_with_files(self):
260264
self.assertTrue(os.path.exists(self.cache_path))
261265

262266
def test_bad_syntax(self):
263-
bad_syntax = os.path.join(os.path.dirname(__file__), 'badsyntax_3131.py')
267+
bad_syntax = os.path.join(os.path.dirname(__file__),
268+
'tokenizedata',
269+
'badsyntax_3131.py')
264270
rc, stdout, stderr = self.pycompilecmd_failure(bad_syntax)
265271
self.assertEqual(rc, 1)
266272
self.assertEqual(stdout, b'')
267273
self.assertIn(b'SyntaxError', stderr)
268274

269275
def test_bad_syntax_with_quiet(self):
270-
bad_syntax = os.path.join(os.path.dirname(__file__), 'badsyntax_3131.py')
276+
bad_syntax = os.path.join(os.path.dirname(__file__),
277+
'tokenizedata',
278+
'badsyntax_3131.py')
271279
rc, stdout, stderr = self.pycompilecmd_failure('-q', bad_syntax)
272280
self.assertEqual(rc, 1)
273281
self.assertEqual(stdout, b'')

Lib/test/test_source_encoding.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -68,6 +68,7 @@ def test_issue7820(self):
6868
def test_20731(self):
6969
sub = subprocess.Popen([sys.executable,
7070
os.path.join(os.path.dirname(__file__),
71+
'tokenizedata',
7172
'coding20731.py')],
7273
stderr=subprocess.PIPE)
7374
err = sub.communicate()[1]
@@ -100,10 +101,10 @@ def test_bad_coding2(self):
100101
self.verify_bad_module(module_name)
101102

102103
def verify_bad_module(self, module_name):
103-
self.assertRaises(SyntaxError, __import__, 'test.' + module_name)
104+
self.assertRaises(SyntaxError, __import__, 'test.tokenizedata.' + module_name)
104105

105106
path = os.path.dirname(__file__)
106-
filename = os.path.join(path, module_name + '.py')
107+
filename = os.path.join(path, 'tokenizedata', module_name + '.py')
107108
with open(filename, "rb") as fp:
108109
bytes = fp.read()
109110
self.assertRaises(SyntaxError, compile, bytes, filename, 'exec')

Lib/test/test_tarfile.py

+18-11
Original file line numberDiff line numberDiff line change
@@ -2564,16 +2564,17 @@ def tarfilecmd_failure(self, *args):
25642564
return script_helper.assert_python_failure('-m', 'tarfile', *args)
25652565

25662566
def make_simple_tarfile(self, tar_name):
2567-
files = [support.findfile('tokenize_tests.txt'),
2567+
files = [support.findfile('tokenize_tests.txt',
2568+
subdir='tokenizedata'),
25682569
support.findfile('tokenize_tests-no-coding-cookie-'
2569-
'and-utf8-bom-sig-only.txt')]
2570+
'and-utf8-bom-sig-only.txt',
2571+
subdir='tokenizedata')]
25702572
self.addCleanup(os_helper.unlink, tar_name)
25712573
with tarfile.open(tar_name, 'w') as tf:
25722574
for tardata in files:
25732575
tf.add(tardata, arcname=os.path.basename(tardata))
25742576

25752577
def make_evil_tarfile(self, tar_name):
2576-
files = [support.findfile('tokenize_tests.txt')]
25772578
self.addCleanup(os_helper.unlink, tar_name)
25782579
with tarfile.open(tar_name, 'w') as tf:
25792580
benign = tarfile.TarInfo('benign')
@@ -2654,9 +2655,11 @@ def test_list_command_invalid_file(self):
26542655
self.assertEqual(rc, 1)
26552656

26562657
def test_create_command(self):
2657-
files = [support.findfile('tokenize_tests.txt'),
2658+
files = [support.findfile('tokenize_tests.txt',
2659+
subdir='tokenizedata'),
26582660
support.findfile('tokenize_tests-no-coding-cookie-'
2659-
'and-utf8-bom-sig-only.txt')]
2661+
'and-utf8-bom-sig-only.txt',
2662+
subdir='tokenizedata')]
26602663
for opt in '-c', '--create':
26612664
try:
26622665
out = self.tarfilecmd(opt, tmpname, *files)
@@ -2667,9 +2670,11 @@ def test_create_command(self):
26672670
os_helper.unlink(tmpname)
26682671

26692672
def test_create_command_verbose(self):
2670-
files = [support.findfile('tokenize_tests.txt'),
2673+
files = [support.findfile('tokenize_tests.txt',
2674+
subdir='tokenizedata'),
26712675
support.findfile('tokenize_tests-no-coding-cookie-'
2672-
'and-utf8-bom-sig-only.txt')]
2676+
'and-utf8-bom-sig-only.txt',
2677+
subdir='tokenizedata')]
26732678
for opt in '-v', '--verbose':
26742679
try:
26752680
out = self.tarfilecmd(opt, '-c', tmpname, *files,
@@ -2681,7 +2686,7 @@ def test_create_command_verbose(self):
26812686
os_helper.unlink(tmpname)
26822687

26832688
def test_create_command_dotless_filename(self):
2684-
files = [support.findfile('tokenize_tests.txt')]
2689+
files = [support.findfile('tokenize_tests.txt', subdir='tokenizedata')]
26852690
try:
26862691
out = self.tarfilecmd('-c', dotlessname, *files)
26872692
self.assertEqual(out, b'')
@@ -2692,7 +2697,7 @@ def test_create_command_dotless_filename(self):
26922697

26932698
def test_create_command_dot_started_filename(self):
26942699
tar_name = os.path.join(TEMPDIR, ".testtar")
2695-
files = [support.findfile('tokenize_tests.txt')]
2700+
files = [support.findfile('tokenize_tests.txt', subdir='tokenizedata')]
26962701
try:
26972702
out = self.tarfilecmd('-c', tar_name, *files)
26982703
self.assertEqual(out, b'')
@@ -2702,9 +2707,11 @@ def test_create_command_dot_started_filename(self):
27022707
os_helper.unlink(tar_name)
27032708

27042709
def test_create_command_compressed(self):
2705-
files = [support.findfile('tokenize_tests.txt'),
2710+
files = [support.findfile('tokenize_tests.txt',
2711+
subdir='tokenizedata'),
27062712
support.findfile('tokenize_tests-no-coding-cookie-'
2707-
'and-utf8-bom-sig-only.txt')]
2713+
'and-utf8-bom-sig-only.txt',
2714+
subdir='tokenizedata')]
27082715
for filetype in (GzipTest, Bz2Test, LzmaTest):
27092716
if not filetype.open:
27102717
continue

Lib/test/test_tokenize.py

+3-4
Original file line numberDiff line numberDiff line change
@@ -1198,7 +1198,7 @@ class TestTokenizerAdheresToPep0263(TestCase):
11981198
"""
11991199

12001200
def _testFile(self, filename):
1201-
path = os.path.join(os.path.dirname(__file__), filename)
1201+
path = os.path.join(os.path.dirname(__file__), 'tokenizedata', filename)
12021202
with open(path, 'rb') as f:
12031203
TestRoundtrip.check_roundtrip(self, f)
12041204

@@ -1791,7 +1791,7 @@ def test_roundtrip(self):
17911791

17921792
self.check_roundtrip("if x == 1 : \n"
17931793
" print(x)\n")
1794-
fn = support.findfile("tokenize_tests.txt")
1794+
fn = support.findfile("tokenize_tests.txt", subdir="tokenizedata")
17951795
with open(fn, 'rb') as f:
17961796
self.check_roundtrip(f)
17971797
self.check_roundtrip("if x == 1:\n"
@@ -1846,8 +1846,7 @@ def test_random_files(self):
18461846
# pass the '-ucpu' option to process the full directory.
18471847

18481848
import glob, random
1849-
fn = support.findfile("tokenize_tests.txt")
1850-
tempdir = os.path.dirname(fn) or os.curdir
1849+
tempdir = os.path.dirname(__file__) or os.curdir
18511850
testfiles = glob.glob(os.path.join(glob.escape(tempdir), "test*.py"))
18521851

18531852
# Tokenize is broken on test_pep3131.py because regular expressions are

Lib/test/test_tools/test_reindent.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ def test_help(self):
2525
self.assertGreater(err, b'')
2626

2727
def test_reindent_file_with_bad_encoding(self):
28-
bad_coding_path = findfile('bad_coding.py')
28+
bad_coding_path = findfile('bad_coding.py', subdir='tokenizedata')
2929
rc, out, err = assert_python_ok(self.script, '-r', bad_coding_path)
3030
self.assertEqual(out, b'')
3131
self.assertNotEqual(err, b'')

Lib/test/test_unicode_identifiers.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ def test_non_bmp_normalized(self):
1919

2020
def test_invalid(self):
2121
try:
22-
from test import badsyntax_3131
22+
from test.tokenizedata import badsyntax_3131
2323
except SyntaxError as err:
2424
self.assertEqual(str(err),
2525
"invalid character '€' (U+20AC) (badsyntax_3131.py, line 2)")

Lib/test/tokenizedata/__init__.py

Whitespace-only changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.

Makefile.pre.in

+1
Original file line numberDiff line numberDiff line change
@@ -2226,6 +2226,7 @@ TESTSUBDIRS= idlelib/idle_test \
22262226
test/test_zipfile/_path \
22272227
test/test_zoneinfo \
22282228
test/test_zoneinfo/data \
2229+
test/tokenizedata \
22292230
test/tracedmodules \
22302231
test/typinganndata \
22312232
test/xmltestdata \

0 commit comments

Comments (0)