
Commit e7a2c40

Authored Jun 20, 2025
Rollup merge of #142713 - tgross35:mbe-transcribe-refactor, r=petrochenkov
mbe: Refactor transcription

Introduce `TranscrCtx` that holds everything relevant to transcription. This allows for the following changes:

* Split `transcribe_sequence` and `transcribe_metavar` out of the heavily nested `transcribe`
* Split `metavar_expr_concat` out of `transcribe_metavar_expr`

This is a nonfunctional change.
2 parents 38600a6 + 67a9fb2 commit e7a2c40
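
To make the shape of the refactor concrete, here is a minimal, self-contained sketch of the pattern this commit applies (hypothetical names and simplified types, not rustc's actual API): state that used to be a pile of locals threaded through one deeply nested `transcribe` loop is bundled into a context struct, and the large match arms become small helper functions that borrow that context mutably.

```rust
// Sketch only: illustrates the "context struct + split helpers" refactor pattern.
// The types below are simplified stand-ins, not the real rustc_expand types.

struct TranscrCtx {
    /// Repetition counters, used as a stack (innermost last).
    repeats: Vec<(usize, usize)>,
    /// Output produced so far.
    result: Vec<String>,
}

/// Previously a long arm inside `transcribe`'s match; now a helper.
fn transcribe_sequence(tscx: &mut TranscrCtx, len: usize) -> Result<(), String> {
    if len == 0 {
        return Err("this must repeat at least once".to_string());
    }
    // 0 repetitions done so far, `len` to generate in total.
    tscx.repeats.push((0, len));
    Ok(())
}

/// Previously another arm of the same match.
fn transcribe_metavar(tscx: &mut TranscrCtx, name: &str) -> Result<(), String> {
    tscx.result.push(format!("${name}"));
    Ok(())
}

/// The top-level loop now only dispatches; behavior is unchanged.
fn transcribe(trees: &[&str]) -> Result<Vec<String>, String> {
    let mut tscx = TranscrCtx { repeats: Vec::new(), result: Vec::new() };
    for tree in trees {
        match *tree {
            "seq" => transcribe_sequence(&mut tscx, 3)?,
            name => transcribe_metavar(&mut tscx, name)?,
        }
    }
    Ok(tscx.result)
}

fn main() {
    println!("{:?}", transcribe(&["a", "seq", "b"]).unwrap());
}
```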

File tree

1 file changed (+404, -363 lines)


‎compiler/rustc_expand/src/mbe/transcribe.rs

Lines changed: 404 additions & 363 deletions
@@ -9,7 +9,7 @@ use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::{Diag, DiagCtxtHandle, PResult, pluralize};
 use rustc_parse::lexer::nfc_normalize;
 use rustc_parse::parser::ParseNtResult;
-use rustc_session::parse::{ParseSess, SymbolGallery};
+use rustc_session::parse::ParseSess;
 use rustc_span::hygiene::{LocalExpnId, Transparency};
 use rustc_span::{
     Ident, MacroRulesNormalizedIdent, Span, Symbol, SyntaxContext, sym, with_metavar_spans,
@@ -25,20 +25,77 @@ use crate::mbe::macro_parser::NamedMatch::*;
 use crate::mbe::metavar_expr::{MetaVarExprConcatElem, RAW_IDENT_ERR};
 use crate::mbe::{self, KleeneOp, MetaVarExpr};

-// A Marker adds the given mark to the syntax context.
-struct Marker(LocalExpnId, Transparency, FxHashMap<SyntaxContext, SyntaxContext>);
+/// Context needed to perform transcription of metavariable expressions.
+struct TranscrCtx<'psess, 'itp> {
+    psess: &'psess ParseSess,
+
+    /// Map from metavars to matched tokens
+    interp: &'itp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
+
+    /// Allow marking spans.
+    marker: Marker,
+
+    /// The stack of things yet to be completely expanded.
+    ///
+    /// We descend into the RHS (`src`), expanding things as we go. This stack contains the things
+    /// we have yet to expand/are still expanding. We start the stack off with the whole RHS. The
+    /// choice of spacing values doesn't matter.
+    stack: SmallVec<[Frame<'itp>; 1]>,
+
+    /// A stack of where we are in the repeat expansion.
+    ///
+    /// As we descend in the RHS, we will need to be able to match nested sequences of matchers.
+    /// `repeats` keeps track of where we are in matching at each level, with the last element
+    /// being the most deeply nested sequence. This is used as a stack.
+    repeats: Vec<(usize, usize)>,
+
+    /// The resulting token stream from the `TokenTree` we just finished processing.
+    ///
+    /// At the end, this will contain the full result of transcription, but at arbitrary points
+    /// during `transcribe`, `result` will contain subsets of the final result.
+    ///
+    /// Specifically, as we descend into each TokenTree, we will push the existing results onto the
+    /// `result_stack` and clear `results`. We will then produce the results of transcribing the
+    /// TokenTree into `results`. Then, as we unwind back out of the `TokenTree`, we will pop the
+    /// `result_stack` and append `results` too it to produce the new `results` up to that point.
+    ///
+    /// Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
+    /// again, and we are done transcribing.
+    result: Vec<TokenTree>,
+
+    /// The in-progress `result` lives at the top of this stack. Each entered `TokenTree` adds a
+    /// new entry.
+    result_stack: Vec<Vec<TokenTree>>,
+}
+
+impl<'psess> TranscrCtx<'psess, '_> {
+    /// Span marked with the correct expansion and transparency.
+    fn visited_dspan(&mut self, dspan: DelimSpan) -> Span {
+        let mut span = dspan.entire();
+        self.marker.mark_span(&mut span);
+        span
+    }
+}
+
+/// A Marker adds the given mark to the syntax context.
+struct Marker {
+    expand_id: LocalExpnId,
+    transparency: Transparency,
+    cache: FxHashMap<SyntaxContext, SyntaxContext>,
+}

 impl Marker {
+    /// Mark a span with the stored expansion ID and transparency.
     fn mark_span(&mut self, span: &mut Span) {
         // `apply_mark` is a relatively expensive operation, both due to taking hygiene lock, and
         // by itself. All tokens in a macro body typically have the same syntactic context, unless
         // it's some advanced case with macro-generated macros. So if we cache the marked version
         // of that context once, we'll typically have a 100% cache hit rate after that.
-        let Marker(expn_id, transparency, ref mut cache) = *self;
         *span = span.map_ctxt(|ctxt| {
-            *cache
+            *self
+                .cache
                 .entry(ctxt)
-                .or_insert_with(|| ctxt.apply_mark(expn_id.to_expn_id(), transparency))
+                .or_insert_with(|| ctxt.apply_mark(self.expand_id.to_expn_id(), self.transparency))
         });
     }
 }
@@ -116,52 +116,36 @@ pub(super) fn transcribe<'a>(
         return Ok(TokenStream::default());
     }

-    // We descend into the RHS (`src`), expanding things as we go. This stack contains the things
-    // we have yet to expand/are still expanding. We start the stack off with the whole RHS. The
-    // choice of spacing values doesn't matter.
-    let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new_delimited(
-        src,
-        src_span,
-        DelimSpacing::new(Spacing::Alone, Spacing::Alone)
-    )];
-
-    // As we descend in the RHS, we will need to be able to match nested sequences of matchers.
-    // `repeats` keeps track of where we are in matching at each level, with the last element being
-    // the most deeply nested sequence. This is used as a stack.
-    let mut repeats: Vec<(usize, usize)> = Vec::new();
-
-    // `result` contains resulting token stream from the TokenTree we just finished processing. At
-    // the end, this will contain the full result of transcription, but at arbitrary points during
-    // `transcribe`, `result` will contain subsets of the final result.
-    //
-    // Specifically, as we descend into each TokenTree, we will push the existing results onto the
-    // `result_stack` and clear `results`. We will then produce the results of transcribing the
-    // TokenTree into `results`. Then, as we unwind back out of the `TokenTree`, we will pop the
-    // `result_stack` and append `results` too it to produce the new `results` up to that point.
-    //
-    // Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
-    // again, and we are done transcribing.
-    let mut result: Vec<TokenTree> = Vec::new();
-    let mut result_stack = Vec::new();
-    let mut marker = Marker(expand_id, transparency, Default::default());
-
-    let dcx = psess.dcx();
+    let mut tscx = TranscrCtx {
+        psess,
+        interp,
+        marker: Marker { expand_id, transparency, cache: Default::default() },
+        repeats: Vec::new(),
+        stack: smallvec![Frame::new_delimited(
+            src,
+            src_span,
+            DelimSpacing::new(Spacing::Alone, Spacing::Alone)
+        )],
+        result: Vec::new(),
+        result_stack: Vec::new(),
+    };
+
     loop {
         // Look at the last frame on the stack.
         // If it still has a TokenTree we have not looked at yet, use that tree.
-        let Some(tree) = stack.last_mut().unwrap().next() else {
+        let Some(tree) = tscx.stack.last_mut().unwrap().next() else {
             // This else-case never produces a value for `tree` (it `continue`s or `return`s).

             // Otherwise, if we have just reached the end of a sequence and we can keep repeating,
             // go back to the beginning of the sequence.
-            let frame = stack.last_mut().unwrap();
+            let frame = tscx.stack.last_mut().unwrap();
             if let FrameKind::Sequence { sep, .. } = &frame.kind {
-                let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
+                let (repeat_idx, repeat_len) = tscx.repeats.last_mut().unwrap();
                 *repeat_idx += 1;
                 if repeat_idx < repeat_len {
                     frame.idx = 0;
                     if let Some(sep) = sep {
-                        result.push(TokenTree::Token(*sep, Spacing::Alone));
+                        tscx.result.push(TokenTree::Token(*sep, Spacing::Alone));
                     }
                     continue;
                 }
@@ -170,10 +170,10 @@ pub(super) fn transcribe<'a>(
             // We are done with the top of the stack. Pop it. Depending on what it was, we do
             // different things. Note that the outermost item must be the delimited, wrapped RHS
             // that was passed in originally to `transcribe`.
-            match stack.pop().unwrap().kind {
+            match tscx.stack.pop().unwrap().kind {
                 // Done with a sequence. Pop from repeats.
                 FrameKind::Sequence { .. } => {
-                    repeats.pop();
+                    tscx.repeats.pop();
                 }

                 // We are done processing a Delimited. If this is the top-level delimited, we are
@@ -185,15 +185,16 @@ pub(super) fn transcribe<'a>(
                     if delim == Delimiter::Bracket {
                         spacing.close = Spacing::Alone;
                     }
-                    if result_stack.is_empty() {
+                    if tscx.result_stack.is_empty() {
                         // No results left to compute! We are back at the top-level.
-                        return Ok(TokenStream::new(result));
+                        return Ok(TokenStream::new(tscx.result));
                     }

                     // Step back into the parent Delimited.
-                    let tree = TokenTree::Delimited(span, spacing, delim, TokenStream::new(result));
-                    result = result_stack.pop().unwrap();
-                    result.push(tree);
+                    let tree =
+                        TokenTree::Delimited(span, spacing, delim, TokenStream::new(tscx.result));
+                    tscx.result = tscx.result_stack.pop().unwrap();
+                    tscx.result.push(tree);
                 }
             }
             continue;
@@ -202,223 +202,19 @@ pub(super) fn transcribe<'a>(
         // At this point, we know we are in the middle of a TokenTree (the last one on `stack`).
         // `tree` contains the next `TokenTree` to be processed.
         match tree {
-            // We are descending into a sequence. We first make sure that the matchers in the RHS
-            // and the matches in `interp` have the same shape. Otherwise, either the caller or the
-            // macro writer has made a mistake.
+            // Replace the sequence with its expansion.
             seq @ mbe::TokenTree::Sequence(_, seq_rep) => {
-                match lockstep_iter_size(seq, interp, &repeats) {
-                    LockstepIterSize::Unconstrained => {
-                        return Err(dcx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() }));
-                    }
-
-                    LockstepIterSize::Contradiction(msg) => {
-                        // FIXME: this really ought to be caught at macro definition time... It
-                        // happens when two meta-variables are used in the same repetition in a
-                        // sequence, but they come from different sequence matchers and repeat
-                        // different amounts.
-                        return Err(
-                            dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg })
-                        );
-                    }
-
-                    LockstepIterSize::Constraint(len, _) => {
-                        // We do this to avoid an extra clone above. We know that this is a
-                        // sequence already.
-                        let mbe::TokenTree::Sequence(sp, seq) = seq else { unreachable!() };
-
-                        // Is the repetition empty?
-                        if len == 0 {
-                            if seq.kleene.op == KleeneOp::OneOrMore {
-                                // FIXME: this really ought to be caught at macro definition
-                                // time... It happens when the Kleene operator in the matcher and
-                                // the body for the same meta-variable do not match.
-                                return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
-                            }
-                        } else {
-                            // 0 is the initial counter (we have done 0 repetitions so far). `len`
-                            // is the total number of repetitions we should generate.
-                            repeats.push((0, len));
-
-                            // The first time we encounter the sequence we push it to the stack. It
-                            // then gets reused (see the beginning of the loop) until we are done
-                            // repeating.
-                            stack.push(Frame::new_sequence(
-                                seq_rep,
-                                seq.separator.clone(),
-                                seq.kleene.op,
-                            ));
-                        }
-                    }
-                }
+                transcribe_sequence(&mut tscx, seq, seq_rep)?;
             }

             // Replace the meta-var with the matched token tree from the invocation.
-            &mbe::TokenTree::MetaVar(mut sp, mut original_ident) => {
-                // Find the matched nonterminal from the macro invocation, and use it to replace
-                // the meta-var.
-                //
-                // We use `Spacing::Alone` everywhere here, because that's the conservative choice
-                // and spacing of declarative macros is tricky. E.g. in this macro:
-                // ```
-                // macro_rules! idents {
-                //     ($($a:ident,)*) => { stringify!($($a)*) }
-                // }
-                // ```
-                // `$a` has no whitespace after it and will be marked `JointHidden`. If you then
-                // call `idents!(x,y,z,)`, each of `x`, `y`, and `z` will be marked as `Joint`. So
-                // if you choose to use `$x`'s spacing or the identifier's spacing, you'll end up
-                // producing "xyz", which is bad because it effectively merges tokens.
-                // `Spacing::Alone` is the safer option. Fortunately, `space_between` will avoid
-                // some of the unnecessary whitespace.
-                let ident = MacroRulesNormalizedIdent::new(original_ident);
-                if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
-                    // We wrap the tokens in invisible delimiters, unless they are already wrapped
-                    // in invisible delimiters with the same `MetaVarKind`. Because some proc
-                    // macros can't handle multiple layers of invisible delimiters of the same
-                    // `MetaVarKind`. This loses some span info, though it hopefully won't matter.
-                    let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| {
-                        if stream.len() == 1 {
-                            let tree = stream.iter().next().unwrap();
-                            if let TokenTree::Delimited(_, _, delim, inner) = tree
-                                && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim
-                                && mv_kind == *mvk
-                            {
-                                stream = inner.clone();
-                            }
-                        }
-
-                        // Emit as a token stream within `Delimiter::Invisible` to maintain
-                        // parsing priorities.
-                        marker.mark_span(&mut sp);
-                        with_metavar_spans(|mspans| mspans.insert(mk_span, sp));
-                        // Both the open delim and close delim get the same span, which covers the
-                        // `$foo` in the decl macro RHS.
-                        TokenTree::Delimited(
-                            DelimSpan::from_single(sp),
-                            DelimSpacing::new(Spacing::Alone, Spacing::Alone),
-                            Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)),
-                            stream,
-                        )
-                    };
-                    let tt = match cur_matched {
-                        MatchedSingle(ParseNtResult::Tt(tt)) => {
-                            // `tt`s are emitted into the output stream directly as "raw tokens",
-                            // without wrapping them into groups. Other variables are emitted into
-                            // the output stream as groups with `Delimiter::Invisible` to maintain
-                            // parsing priorities.
-                            maybe_use_metavar_location(psess, &stack, sp, tt, &mut marker)
-                        }
-                        MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => {
-                            marker.mark_span(&mut sp);
-                            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
-                            let kind = token::NtIdent(*ident, *is_raw);
-                            TokenTree::token_alone(kind, sp)
-                        }
-                        MatchedSingle(ParseNtResult::Lifetime(ident, is_raw)) => {
-                            marker.mark_span(&mut sp);
-                            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
-                            let kind = token::NtLifetime(*ident, *is_raw);
-                            TokenTree::token_alone(kind, sp)
-                        }
-                        MatchedSingle(ParseNtResult::Item(item)) => {
-                            mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
-                        }
-                        MatchedSingle(ParseNtResult::Block(block)) => mk_delimited(
-                            block.span,
-                            MetaVarKind::Block,
-                            TokenStream::from_ast(block),
-                        ),
-                        MatchedSingle(ParseNtResult::Stmt(stmt)) => {
-                            let stream = if let StmtKind::Empty = stmt.kind {
-                                // FIXME: Properly collect tokens for empty statements.
-                                TokenStream::token_alone(token::Semi, stmt.span)
-                            } else {
-                                TokenStream::from_ast(stmt)
-                            };
-                            mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
-                        }
-                        MatchedSingle(ParseNtResult::Pat(pat, pat_kind)) => mk_delimited(
-                            pat.span,
-                            MetaVarKind::Pat(*pat_kind),
-                            TokenStream::from_ast(pat),
-                        ),
-                        MatchedSingle(ParseNtResult::Expr(expr, kind)) => {
-                            let (can_begin_literal_maybe_minus, can_begin_string_literal) =
-                                match &expr.kind {
-                                    ExprKind::Lit(_) => (true, true),
-                                    ExprKind::Unary(UnOp::Neg, e)
-                                        if matches!(&e.kind, ExprKind::Lit(_)) =>
-                                    {
-                                        (true, false)
-                                    }
-                                    _ => (false, false),
-                                };
-                            mk_delimited(
-                                expr.span,
-                                MetaVarKind::Expr {
-                                    kind: *kind,
-                                    can_begin_literal_maybe_minus,
-                                    can_begin_string_literal,
-                                },
-                                TokenStream::from_ast(expr),
-                            )
-                        }
-                        MatchedSingle(ParseNtResult::Literal(lit)) => {
-                            mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit))
-                        }
-                        MatchedSingle(ParseNtResult::Ty(ty)) => {
-                            let is_path = matches!(&ty.kind, TyKind::Path(None, _path));
-                            mk_delimited(
-                                ty.span,
-                                MetaVarKind::Ty { is_path },
-                                TokenStream::from_ast(ty),
-                            )
-                        }
-                        MatchedSingle(ParseNtResult::Meta(attr_item)) => {
-                            let has_meta_form = attr_item.meta_kind().is_some();
-                            mk_delimited(
-                                attr_item.span(),
-                                MetaVarKind::Meta { has_meta_form },
-                                TokenStream::from_ast(attr_item),
-                            )
-                        }
-                        MatchedSingle(ParseNtResult::Path(path)) => {
-                            mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
-                        }
-                        MatchedSingle(ParseNtResult::Vis(vis)) => {
-                            mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
-                        }
-                        MatchedSeq(..) => {
-                            // We were unable to descend far enough. This is an error.
-                            return Err(dcx.create_err(VarStillRepeating { span: sp, ident }));
-                        }
-                    };
-                    result.push(tt)
-                } else {
-                    // If we aren't able to match the meta-var, we push it back into the result but
-                    // with modified syntax context. (I believe this supports nested macros).
-                    marker.mark_span(&mut sp);
-                    marker.mark_span(&mut original_ident.span);
-                    result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
-                    result.push(TokenTree::Token(
-                        Token::from_ast_ident(original_ident),
-                        Spacing::Alone,
-                    ));
-                }
+            &mbe::TokenTree::MetaVar(sp, original_ident) => {
+                transcribe_metavar(&mut tscx, sp, original_ident)?;
             }

             // Replace meta-variable expressions with the result of their expansion.
-            mbe::TokenTree::MetaVarExpr(sp, expr) => {
-                transcribe_metavar_expr(
-                    dcx,
-                    expr,
-                    interp,
-                    &mut marker,
-                    &repeats,
-                    &mut result,
-                    sp,
-                    &psess.symbol_gallery,
-                )?;
+            mbe::TokenTree::MetaVarExpr(dspan, expr) => {
+                transcribe_metavar_expr(&mut tscx, *dspan, expr)?;
             }

             // If we are entering a new delimiter, we push its contents to the `stack` to be
@@ -427,21 +427,21 @@ pub(super) fn transcribe<'a>(
             // jump back out of the Delimited, pop the result_stack and add the new results back to
             // the previous results (from outside the Delimited).
             &mbe::TokenTree::Delimited(mut span, ref spacing, ref delimited) => {
-                marker.mark_span(&mut span.open);
-                marker.mark_span(&mut span.close);
-                stack.push(Frame::new_delimited(delimited, span, *spacing));
-                result_stack.push(mem::take(&mut result));
+                tscx.marker.mark_span(&mut span.open);
+                tscx.marker.mark_span(&mut span.close);
+                tscx.stack.push(Frame::new_delimited(delimited, span, *spacing));
+                tscx.result_stack.push(mem::take(&mut tscx.result));
             }

             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
             &mbe::TokenTree::Token(mut token) => {
-                marker.mark_span(&mut token.span);
+                tscx.marker.mark_span(&mut token.span);
                 if let token::NtIdent(ident, _) | token::NtLifetime(ident, _) = &mut token.kind {
-                    marker.mark_span(&mut ident.span);
+                    tscx.marker.mark_span(&mut ident.span);
                 }
                 let tt = TokenTree::Token(token, Spacing::Alone);
-                result.push(tt);
+                tscx.result.push(tt);
             }

             // There should be no meta-var declarations in the invocation of a macro.
@@ -450,6 +450,305 @@ pub(super) fn transcribe<'a>(
     }
 }

+/// Turn `$(...)*` sequences into tokens.
+fn transcribe_sequence<'tx, 'itp>(
+    tscx: &mut TranscrCtx<'tx, 'itp>,
+    seq: &mbe::TokenTree,
+    seq_rep: &'itp mbe::SequenceRepetition,
+) -> PResult<'tx, ()> {
+    let dcx = tscx.psess.dcx();
+
+    // We are descending into a sequence. We first make sure that the matchers in the RHS
+    // and the matches in `interp` have the same shape. Otherwise, either the caller or the
+    // macro writer has made a mistake.
+    match lockstep_iter_size(seq, tscx.interp, &tscx.repeats) {
+        LockstepIterSize::Unconstrained => {
+            return Err(dcx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() }));
+        }
+
+        LockstepIterSize::Contradiction(msg) => {
+            // FIXME: this really ought to be caught at macro definition time... It
+            // happens when two meta-variables are used in the same repetition in a
+            // sequence, but they come from different sequence matchers and repeat
+            // different amounts.
+            return Err(dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg }));
+        }
+
+        LockstepIterSize::Constraint(len, _) => {
+            // We do this to avoid an extra clone above. We know that this is a
+            // sequence already.
+            let mbe::TokenTree::Sequence(sp, seq) = seq else { unreachable!() };
+
+            // Is the repetition empty?
+            if len == 0 {
+                if seq.kleene.op == KleeneOp::OneOrMore {
+                    // FIXME: this really ought to be caught at macro definition
+                    // time... It happens when the Kleene operator in the matcher and
+                    // the body for the same meta-variable do not match.
+                    return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
+                }
+            } else {
+                // 0 is the initial counter (we have done 0 repetitions so far). `len`
+                // is the total number of repetitions we should generate.
+                tscx.repeats.push((0, len));
+
+                // The first time we encounter the sequence we push it to the stack. It
+                // then gets reused (see the beginning of the loop) until we are done
+                // repeating.
+                tscx.stack.push(Frame::new_sequence(seq_rep, seq.separator.clone(), seq.kleene.op));
+            }
+        }
+    }
+
+    Ok(())
+}
+
+/// Find the matched nonterminal from the macro invocation, and use it to replace
+/// the meta-var.
+///
+/// We use `Spacing::Alone` everywhere here, because that's the conservative choice
+/// and spacing of declarative macros is tricky. E.g. in this macro:
+/// ```
+/// macro_rules! idents {
+///     ($($a:ident,)*) => { stringify!($($a)*) }
+/// }
+/// ```
+/// `$a` has no whitespace after it and will be marked `JointHidden`. If you then
+/// call `idents!(x,y,z,)`, each of `x`, `y`, and `z` will be marked as `Joint`. So
+/// if you choose to use `$x`'s spacing or the identifier's spacing, you'll end up
+/// producing "xyz", which is bad because it effectively merges tokens.
+/// `Spacing::Alone` is the safer option. Fortunately, `space_between` will avoid
+/// some of the unnecessary whitespace.
+fn transcribe_metavar<'tx>(
+    tscx: &mut TranscrCtx<'tx, '_>,
+    mut sp: Span,
+    mut original_ident: Ident,
+) -> PResult<'tx, ()> {
+    let dcx = tscx.psess.dcx();
+
+    let ident = MacroRulesNormalizedIdent::new(original_ident);
+    let Some(cur_matched) = lookup_cur_matched(ident, tscx.interp, &tscx.repeats) else {
+        // If we aren't able to match the meta-var, we push it back into the result but
+        // with modified syntax context. (I believe this supports nested macros).
+        tscx.marker.mark_span(&mut sp);
+        tscx.marker.mark_span(&mut original_ident.span);
+        tscx.result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
+        tscx.result.push(TokenTree::Token(Token::from_ast_ident(original_ident), Spacing::Alone));
+        return Ok(());
+    };
+
+    // We wrap the tokens in invisible delimiters, unless they are already wrapped
+    // in invisible delimiters with the same `MetaVarKind`. Because some proc
+    // macros can't handle multiple layers of invisible delimiters of the same
+    // `MetaVarKind`. This loses some span info, though it hopefully won't matter.
+    let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| {
+        if stream.len() == 1 {
+            let tree = stream.iter().next().unwrap();
+            if let TokenTree::Delimited(_, _, delim, inner) = tree
+                && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim
+                && mv_kind == *mvk
+            {
+                stream = inner.clone();
+            }
+        }
+
+        // Emit as a token stream within `Delimiter::Invisible` to maintain
+        // parsing priorities.
+        tscx.marker.mark_span(&mut sp);
+        with_metavar_spans(|mspans| mspans.insert(mk_span, sp));
+        // Both the open delim and close delim get the same span, which covers the
+        // `$foo` in the decl macro RHS.
+        TokenTree::Delimited(
+            DelimSpan::from_single(sp),
+            DelimSpacing::new(Spacing::Alone, Spacing::Alone),
+            Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)),
+            stream,
+        )
+    };
+
+    let tt = match cur_matched {
+        MatchedSingle(ParseNtResult::Tt(tt)) => {
+            // `tt`s are emitted into the output stream directly as "raw tokens",
+            // without wrapping them into groups. Other variables are emitted into
+            // the output stream as groups with `Delimiter::Invisible` to maintain
+            // parsing priorities.
+            maybe_use_metavar_location(tscx.psess, &tscx.stack, sp, tt, &mut tscx.marker)
+        }
+        MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => {
+            tscx.marker.mark_span(&mut sp);
+            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
+            let kind = token::NtIdent(*ident, *is_raw);
+            TokenTree::token_alone(kind, sp)
+        }
+        MatchedSingle(ParseNtResult::Lifetime(ident, is_raw)) => {
+            tscx.marker.mark_span(&mut sp);
+            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
+            let kind = token::NtLifetime(*ident, *is_raw);
+            TokenTree::token_alone(kind, sp)
+        }
+        MatchedSingle(ParseNtResult::Item(item)) => {
+            mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
+        }
+        MatchedSingle(ParseNtResult::Block(block)) => {
+            mk_delimited(block.span, MetaVarKind::Block, TokenStream::from_ast(block))
+        }
+        MatchedSingle(ParseNtResult::Stmt(stmt)) => {
+            let stream = if let StmtKind::Empty = stmt.kind {
+                // FIXME: Properly collect tokens for empty statements.
+                TokenStream::token_alone(token::Semi, stmt.span)
+            } else {
+                TokenStream::from_ast(stmt)
+            };
+            mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
+        }
+        MatchedSingle(ParseNtResult::Pat(pat, pat_kind)) => {
+            mk_delimited(pat.span, MetaVarKind::Pat(*pat_kind), TokenStream::from_ast(pat))
+        }
+        MatchedSingle(ParseNtResult::Expr(expr, kind)) => {
+            let (can_begin_literal_maybe_minus, can_begin_string_literal) = match &expr.kind {
+                ExprKind::Lit(_) => (true, true),
+                ExprKind::Unary(UnOp::Neg, e) if matches!(&e.kind, ExprKind::Lit(_)) => {
+                    (true, false)
+                }
+                _ => (false, false),
+            };
+            mk_delimited(
+                expr.span,
+                MetaVarKind::Expr {
+                    kind: *kind,
+                    can_begin_literal_maybe_minus,
+                    can_begin_string_literal,
+                },
+                TokenStream::from_ast(expr),
+            )
+        }
+        MatchedSingle(ParseNtResult::Literal(lit)) => {
+            mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit))
+        }
+        MatchedSingle(ParseNtResult::Ty(ty)) => {
+            let is_path = matches!(&ty.kind, TyKind::Path(None, _path));
+            mk_delimited(ty.span, MetaVarKind::Ty { is_path }, TokenStream::from_ast(ty))
+        }
+        MatchedSingle(ParseNtResult::Meta(attr_item)) => {
+            let has_meta_form = attr_item.meta_kind().is_some();
+            mk_delimited(
+                attr_item.span(),
+                MetaVarKind::Meta { has_meta_form },
+                TokenStream::from_ast(attr_item),
+            )
+        }
+        MatchedSingle(ParseNtResult::Path(path)) => {
+            mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
+        }
+        MatchedSingle(ParseNtResult::Vis(vis)) => {
+            mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
+        }
+        MatchedSeq(..) => {
+            // We were unable to descend far enough. This is an error.
+            return Err(dcx.create_err(VarStillRepeating { span: sp, ident }));
+        }
+    };
+
+    tscx.result.push(tt);
+    Ok(())
+}
+
+/// Turn `${expr(...)}` metavariable expressionss into tokens.
+fn transcribe_metavar_expr<'tx>(
+    tscx: &mut TranscrCtx<'tx, '_>,
+    dspan: DelimSpan,
+    expr: &MetaVarExpr,
+) -> PResult<'tx, ()> {
+    let dcx = tscx.psess.dcx();
+    let tt = match *expr {
+        MetaVarExpr::Concat(ref elements) => metavar_expr_concat(tscx, dspan, elements)?,
+        MetaVarExpr::Count(original_ident, depth) => {
+            let matched = matched_from_ident(dcx, original_ident, tscx.interp)?;
+            let count = count_repetitions(dcx, depth, matched, &tscx.repeats, &dspan)?;
+            TokenTree::token_alone(
+                TokenKind::lit(token::Integer, sym::integer(count), None),
+                tscx.visited_dspan(dspan),
+            )
+        }
+        MetaVarExpr::Ignore(original_ident) => {
+            // Used to ensure that `original_ident` is present in the LHS
+            let _ = matched_from_ident(dcx, original_ident, tscx.interp)?;
+            return Ok(());
+        }
+        MetaVarExpr::Index(depth) => match tscx.repeats.iter().nth_back(depth) {
+            Some((index, _)) => TokenTree::token_alone(
+                TokenKind::lit(token::Integer, sym::integer(*index), None),
+                tscx.visited_dspan(dspan),
+            ),
+            None => {
+                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "index"));
+            }
+        },
+        MetaVarExpr::Len(depth) => match tscx.repeats.iter().nth_back(depth) {
+            Some((_, length)) => TokenTree::token_alone(
+                TokenKind::lit(token::Integer, sym::integer(*length), None),
+                tscx.visited_dspan(dspan),
+            ),
+            None => {
+                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "len"));
+            }
+        },
+    };
+    tscx.result.push(tt);
+    Ok(())
+}
+
+/// Handle the `${concat(...)}` metavariable expression.
+fn metavar_expr_concat<'tx>(
+    tscx: &mut TranscrCtx<'tx, '_>,
+    dspan: DelimSpan,
+    elements: &[MetaVarExprConcatElem],
+) -> PResult<'tx, TokenTree> {
+    let dcx = tscx.psess.dcx();
+    let mut concatenated = String::new();
+    for element in elements.into_iter() {
+        let symbol = match element {
+            MetaVarExprConcatElem::Ident(elem) => elem.name,
+            MetaVarExprConcatElem::Literal(elem) => *elem,
+            MetaVarExprConcatElem::Var(ident) => {
+                match matched_from_ident(dcx, *ident, tscx.interp)? {
+                    NamedMatch::MatchedSeq(named_matches) => {
+                        let Some((curr_idx, _)) = tscx.repeats.last() else {
+                            return Err(dcx.struct_span_err(dspan.entire(), "invalid syntax"));
+                        };
+                        match &named_matches[*curr_idx] {
+                            // FIXME(c410-f3r) Nested repetitions are unimplemented
+                            MatchedSeq(_) => unimplemented!(),
+                            MatchedSingle(pnr) => extract_symbol_from_pnr(dcx, pnr, ident.span)?,
+                        }
+                    }
+                    NamedMatch::MatchedSingle(pnr) => {
+                        extract_symbol_from_pnr(dcx, pnr, ident.span)?
+                    }
+                }
+            }
+        };
+        concatenated.push_str(symbol.as_str());
+    }
+    let symbol = nfc_normalize(&concatenated);
+    let concatenated_span = tscx.visited_dspan(dspan);
+    if !rustc_lexer::is_ident(symbol.as_str()) {
+        return Err(dcx.struct_span_err(
+            concatenated_span,
+            "`${concat(..)}` is not generating a valid identifier",
+        ));
+    }
+    tscx.psess.symbol_gallery.insert(symbol, concatenated_span);
+
+    // The current implementation marks the span as coming from the macro regardless of
+    // contexts of the concatenated identifiers but this behavior may change in the
+    // future.
+    Ok(TokenTree::Token(
+        Token::from_ast_ident(Ident::new(symbol, concatenated_span)),
+        Spacing::Alone,
+    ))
+}
+
 /// Store the metavariable span for this original span into a side table.
 /// FIXME: Try to put the metavariable span into `SpanData` instead of a side table (#118517).
 /// An optimal encoding for inlined spans will need to be selected to minimize regressions.
@@ -671,13 +671,13 @@ fn lockstep_iter_size(
 /// * `[ $( ${count(foo, 0)} ),* ]` will be the same as `[ $( ${count(foo)} ),* ]`
 /// * `[ $( ${count(foo, 1)} ),* ]` will return an error because `${count(foo, 1)}` is
 ///   declared inside a single repetition and the index `1` implies two nested repetitions.
-fn count_repetitions<'a>(
-    dcx: DiagCtxtHandle<'a>,
+fn count_repetitions<'dx>(
+    dcx: DiagCtxtHandle<'dx>,
     depth_user: usize,
     mut matched: &NamedMatch,
     repeats: &[(usize, usize)],
     sp: &DelimSpan,
-) -> PResult<'a, usize> {
+) -> PResult<'dx, usize> {
     // Recursively count the number of matches in `matched` at given depth
     // (or at the top-level of `matched` if no depth is given).
     fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> {
@@ -762,102 +762,6 @@ fn out_of_bounds_err<'a>(dcx: DiagCtxtHandle<'a>, max: usize, span: Span, ty: &s
     dcx.struct_span_err(span, msg)
 }

-fn transcribe_metavar_expr<'a>(
-    dcx: DiagCtxtHandle<'a>,
-    expr: &MetaVarExpr,
-    interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
-    marker: &mut Marker,
-    repeats: &[(usize, usize)],
-    result: &mut Vec<TokenTree>,
-    sp: &DelimSpan,
-    symbol_gallery: &SymbolGallery,
-) -> PResult<'a, ()> {
-    let mut visited_span = || {
-        let mut span = sp.entire();
-        marker.mark_span(&mut span);
-        span
-    };
-    match *expr {
-        MetaVarExpr::Concat(ref elements) => {
-            let mut concatenated = String::new();
-            for element in elements.into_iter() {
-                let symbol = match element {
-                    MetaVarExprConcatElem::Ident(elem) => elem.name,
-                    MetaVarExprConcatElem::Literal(elem) => *elem,
-                    MetaVarExprConcatElem::Var(ident) => {
-                        match matched_from_ident(dcx, *ident, interp)? {
-                            NamedMatch::MatchedSeq(named_matches) => {
-                                let Some((curr_idx, _)) = repeats.last() else {
-                                    return Err(dcx.struct_span_err(sp.entire(), "invalid syntax"));
-                                };
-                                match &named_matches[*curr_idx] {
-                                    // FIXME(c410-f3r) Nested repetitions are unimplemented
-                                    MatchedSeq(_) => unimplemented!(),
-                                    MatchedSingle(pnr) => {
-                                        extract_symbol_from_pnr(dcx, pnr, ident.span)?
-                                    }
-                                }
-                            }
-                            NamedMatch::MatchedSingle(pnr) => {
-                                extract_symbol_from_pnr(dcx, pnr, ident.span)?
-                            }
-                        }
-                    }
-                };
-                concatenated.push_str(symbol.as_str());
-            }
-            let symbol = nfc_normalize(&concatenated);
-            let concatenated_span = visited_span();
-            if !rustc_lexer::is_ident(symbol.as_str()) {
-                return Err(dcx.struct_span_err(
-                    concatenated_span,
-                    "`${concat(..)}` is not generating a valid identifier",
-                ));
-            }
-            symbol_gallery.insert(symbol, concatenated_span);
-            // The current implementation marks the span as coming from the macro regardless of
-            // contexts of the concatenated identifiers but this behavior may change in the
-            // future.
-            result.push(TokenTree::Token(
-                Token::from_ast_ident(Ident::new(symbol, concatenated_span)),
-                Spacing::Alone,
-            ));
-        }
-        MetaVarExpr::Count(original_ident, depth) => {
-            let matched = matched_from_ident(dcx, original_ident, interp)?;
-            let count = count_repetitions(dcx, depth, matched, repeats, sp)?;
-            let tt = TokenTree::token_alone(
-                TokenKind::lit(token::Integer, sym::integer(count), None),
-                visited_span(),
-            );
-            result.push(tt);
-        }
-        MetaVarExpr::Ignore(original_ident) => {
-            // Used to ensure that `original_ident` is present in the LHS
-            let _ = matched_from_ident(dcx, original_ident, interp)?;
-        }
-        MetaVarExpr::Index(depth) => match repeats.iter().nth_back(depth) {
-            Some((index, _)) => {
-                result.push(TokenTree::token_alone(
-                    TokenKind::lit(token::Integer, sym::integer(*index), None),
-                    visited_span(),
-                ));
-            }
-            None => return Err(out_of_bounds_err(dcx, repeats.len(), sp.entire(), "index")),
-        },
-        MetaVarExpr::Len(depth) => match repeats.iter().nth_back(depth) {
-            Some((_, length)) => {
-                result.push(TokenTree::token_alone(
-                    TokenKind::lit(token::Integer, sym::integer(*length), None),
-                    visited_span(),
-                ));
-            }
-            None => return Err(out_of_bounds_err(dcx, repeats.len(), sp.entire(), "len")),
-        },
-    }
-    Ok(())
-}
-
 /// Extracts an metavariable symbol that can be an identifier, a token tree or a literal.
 fn extract_symbol_from_pnr<'a>(
     dcx: DiagCtxtHandle<'a>,
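
The `result`/`result_stack` discipline described in the doc comments above is worth seeing in isolation. Below is a simplified, hypothetical sketch (plain `char` tokens and a stand-in `Tree` type rather than rustc's `TokenTree`) of the same push/pop pattern: entering a delimited group parks the current `result` on `result_stack`, and leaving the group wraps what was produced and appends it to the restored outer `result`.

```rust
// Simplified illustration of the result/result_stack stack discipline; the
// `Tree` type here is a stand-in, not rustc's TokenTree.
#[derive(Debug)]
enum Tree {
    Token(char),
    Delimited(Vec<Tree>),
}

fn parse_nested(src: &str) -> Vec<Tree> {
    let mut result: Vec<Tree> = Vec::new();
    let mut result_stack: Vec<Vec<Tree>> = Vec::new();

    for c in src.chars() {
        match c {
            '(' => {
                // Entering a delimited group: park the outer result, start fresh.
                result_stack.push(std::mem::take(&mut result));
            }
            ')' => {
                // Leaving the group: wrap what we produced, restore the outer
                // result, and append the wrapped group to it.
                let inner = std::mem::take(&mut result);
                result = result_stack.pop().expect("unbalanced delimiters");
                result.push(Tree::Delimited(inner));
            }
            other => result.push(Tree::Token(other)),
        }
    }
    // If result_stack is empty here, we are back at the top level and done.
    result
}

fn main() {
    println!("{:?}", parse_nested("a(b(c)d)e"));
}
```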
