Skip to content

Commit cdfc30a

Browse files
authored
Merge branch 'main' into tokenizer-return-token
2 parents 35b6dd9 + 23e83a8 commit cdfc30a

File tree

2 files changed

+24
-2
lines changed

2 files changed

+24
-2
lines changed

Lib/asyncio/base_subprocess.py

+3-1
Original file line numberDiff line numberDiff line change
@@ -216,7 +216,9 @@ def _process_exited(self, returncode):
216216
self._proc.returncode = returncode
217217
self._call(self._protocol.process_exited)
218218
for p in self._pipes.values():
219-
p.pipe.close()
219+
if p is not None:
220+
p.pipe.close()
221+
220222
self._try_finish()
221223

222224
async def _wait(self):

Lib/test/test_tokenize.py

+21-1
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
44
STRING, ENDMARKER, ENCODING, tok_name, detect_encoding,
55
open as tokenize_open, Untokenizer, generate_tokens,
6-
NEWLINE, _generate_tokens_from_c_tokenizer)
6+
NEWLINE, _generate_tokens_from_c_tokenizer, DEDENT)
77
from io import BytesIO, StringIO
88
import unittest
99
from textwrap import dedent
@@ -2512,6 +2512,26 @@ def get_tokens(string):
25122512
self.assertRaises(SyntaxError, get_tokens, "("*1000+"a"+")"*1000)
25132513
self.assertRaises(SyntaxError, get_tokens, "]")
25142514

2515+
def test_max_indent(self):
    """Exercise the tokenizer/compiler at the indentation-depth limit.

    A source nested one level below the limit must tokenize fully (its
    final token is a DEDENT) and compile; a source at the limit stops
    tokenizing at the last NEWLINE and fails to compile with
    IndentationError.
    """
    MAXINDENT = 100

    def nested_ifs(depth):
        # Build `depth` nested `if True:` blocks, then a matching `pass`.
        lines = [(' ' * level) + 'if True:\n' for level in range(depth)]
        lines.append(' ' * depth + 'pass\n')
        return ''.join(lines)

    deepest_ok = nested_ifs(MAXINDENT - 1)
    self.assertEqual(
        list(_generate_tokens_from_c_tokenizer(deepest_ok))[-1].type,
        DEDENT,
    )
    compile(deepest_ok, "<string>", "exec")

    too_deep = nested_ifs(MAXINDENT)
    self.assertEqual(
        list(_generate_tokens_from_c_tokenizer(too_deep))[-1].type,
        NEWLINE,
    )
    self.assertRaises(
        IndentationError, compile, too_deep, "<string>", "exec"
    )
2534+
25152535
def test_continuation_lines_indentation(self):
25162536
def get_tokens(string):
25172537
return [(kind, string) for (kind, string, *_) in _generate_tokens_from_c_tokenizer(string)]

0 commit comments

Comments
 (0)