3
3
4
4
import argparse
5
5
import ast
6
- import contextlib
7
6
import logging
8
- import sys
9
7
import tokenize
10
8
from typing import Any
11
9
from typing import Generator
14
12
15
13
from flake8 import defaults
16
14
from flake8 import utils
15
+ from flake8 ._compat import FSTRING_END
16
+ from flake8 ._compat import FSTRING_MIDDLE
17
17
from flake8 .plugins .finder import LoadedPlugin
18
18
19
19
LOG = logging .getLogger (__name__ )
@@ -117,6 +117,7 @@ def __init__(
117
117
self ._file_tokens : list [tokenize .TokenInfo ] | None = None
118
118
# map from line number to the line we'll search for `noqa` in
119
119
self ._noqa_line_mapping : dict [int , str ] | None = None
120
+ self ._fstring_start = - 1
120
121
121
122
@property
122
123
def file_tokens (self ) -> list [tokenize .TokenInfo ]:
@@ -129,14 +130,26 @@ def file_tokens(self) -> list[tokenize.TokenInfo]:
129
130
130
131
return self ._file_tokens
131
132
132
- @contextlib .contextmanager
133
- def inside_multiline (
134
- self , line_number : int
135
- ) -> Generator [None , None , None ]:
136
- """Context-manager to toggle the multiline attribute."""
137
- self .line_number = line_number
133
def fstring_start(self, lineno: int) -> None:
    """Signal the beginning of an fstring.

    Records *lineno* in ``self._fstring_start`` so that a later
    FSTRING_END token can be mapped back to the physical line on
    which the f-string opened (see ``multiline_string``).
    """
    self._fstring_start = lineno
137
def multiline_string(
    self, token: tokenize.TokenInfo
) -> Generator[str, None, None]:
    """Iterate through the physical lines of a multiline string.

    For an FSTRING_END token the string started on the line recorded
    by ``fstring_start``; for a plain STRING token the token's own
    start line is used.  While iterating, ``self.multiline`` is True
    and ``self.line_number`` is advanced past each yielded line.
    """
    if token.type == FSTRING_END:
        # On 3.12+ an f-string is tokenized as several tokens; the
        # opening line was recorded when FSTRING_START was seen.
        start = self._fstring_start
    else:
        start = token.start[0]

    self.multiline = True
    self.line_number = start
    # intentionally don't include the last line, that line will be
    # terminated later by a future end-of-line
    for _ in range(start, token.end[0]):
        yield self.lines[self.line_number - 1]
        self.line_number += 1
    self.multiline = False
141
154
142
155
def reset_blank_before (self ) -> None :
@@ -196,10 +209,7 @@ def build_logical_line_tokens(self) -> _Logical: # noqa: C901
196
209
continue
197
210
if token_type == tokenize .STRING :
198
211
text = mutate_string (text )
199
- elif (
200
- sys .version_info >= (3 , 12 )
201
- and token_type == tokenize .FSTRING_MIDDLE
202
- ):
212
+ elif token_type == FSTRING_MIDDLE :
203
213
text = "x" * len (text )
204
214
if previous_row :
205
215
(start_row , start_column ) = start
@@ -231,19 +241,6 @@ def build_logical_line(self) -> tuple[str, str, _LogicalMapping]:
231
241
self .statistics ["logical lines" ] += 1
232
242
return joined_comments , self .logical_line , mapping_list
233
243
234
- def split_line (
235
- self , token : tokenize .TokenInfo
236
- ) -> Generator [str , None , None ]:
237
- """Split a physical line's line based on new-lines.
238
-
239
- This also auto-increments the line number for the caller.
240
- """
241
- # intentionally don't include the last line, that line will be
242
- # terminated later by a future end-of-line
243
- for line_no in range (token .start [0 ], token .end [0 ]):
244
- yield self .lines [line_no - 1 ]
245
- self .line_number += 1
246
-
247
244
def keyword_arguments_for (
248
245
self ,
249
246
parameters : dict [str , bool ],
@@ -398,7 +395,9 @@ def is_eol_token(token: tokenize.TokenInfo) -> bool:
398
395
399
396
def is_multiline_string(token: tokenize.TokenInfo) -> bool:
    """Check if this is a multiline string."""
    # An FSTRING_END token (3.12+) always terminates a (potentially
    # multiline) f-string; otherwise only a STRING token containing a
    # newline counts.
    if token.type == FSTRING_END:
        return True
    return token.type == tokenize.STRING and "\n" in token.string
402
401
403
402
404
403
def token_is_newline (token : tokenize .TokenInfo ) -> bool :
0 commit comments