
Commit f9d4d12

committed Apr 14, 2022
Auto merge of #95928 - nnethercote:rm-TokenTree-Clone, r=petrochenkov

Remove `<mbe::TokenTree as Clone>`

`mbe::TokenTree` doesn't really need to implement `Clone`, and getting rid of that impl leads to some speed-ups.

r? `@petrochenkov`

2 parents (f387c93 + dd9028a), commit f9d4d12
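
All five diffs below apply the same pattern: the `Lrc` wrappers and the `Clone` impl come off `mbe::TokenTree`, and consumers borrow the tree instead of cloning pieces of it. A minimal sketch of that pattern with toy types (`Node` and `leaves` are illustrative only, not rustc code):

```rust
// Toy sketch of the pattern this commit applies throughout `mbe` (not rustc code):
// the tree type owns its children directly, derives no `Clone`, and consumers
// walk it by reference instead of cloning subtrees.
#[derive(Debug)]
enum Node {
    Leaf(u32),
    Branch(Vec<Node>),
}

// Collect references to all leaves without cloning a single `Node`.
fn leaves<'a>(node: &'a Node, out: &mut Vec<&'a Node>) {
    match node {
        Node::Leaf(_) => out.push(node),
        Node::Branch(children) => {
            for child in children {
                leaves(child, out);
            }
        }
    }
}

fn main() {
    let tree = Node::Branch(vec![Node::Leaf(1), Node::Branch(vec![Node::Leaf(2)])]);
    let mut out = Vec::new();
    leaves(&tree, &mut out);
    assert_eq!(out.len(), 2);
}
```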

File tree: 5 files changed (164 additions, 106 deletions)

compiler/rustc_expand/src/mbe.rs (5 additions, 18 deletions)

@@ -13,32 +13,19 @@ crate mod transcribe;
 use metavar_expr::MetaVarExpr;
 use rustc_ast::token::{self, NonterminalKind, Token, TokenKind};
 use rustc_ast::tokenstream::DelimSpan;
-use rustc_data_structures::sync::Lrc;
 use rustc_span::symbol::Ident;
 use rustc_span::Span;
 
 /// Contains the sub-token-trees of a "delimited" token tree such as `(a b c)`. The delimiters
 /// might be `NoDelim`, but they are not represented explicitly.
-#[derive(Clone, PartialEq, Encodable, Decodable, Debug)]
+#[derive(PartialEq, Encodable, Decodable, Debug)]
 struct Delimited {
     delim: token::DelimToken,
     /// FIXME: #67062 has details about why this is sub-optimal.
     tts: Vec<TokenTree>,
 }
 
-impl Delimited {
-    /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
-    fn open_tt(&self, span: DelimSpan) -> TokenTree {
-        TokenTree::token(token::OpenDelim(self.delim), span.open)
-    }
-
-    /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
-    fn close_tt(&self, span: DelimSpan) -> TokenTree {
-        TokenTree::token(token::CloseDelim(self.delim), span.close)
-    }
-}
-
-#[derive(Clone, PartialEq, Encodable, Decodable, Debug)]
+#[derive(PartialEq, Encodable, Decodable, Debug)]
 struct SequenceRepetition {
     /// The sequence of token trees
     tts: Vec<TokenTree>,

@@ -76,13 +63,13 @@ enum KleeneOp {
 
 /// Similar to `tokenstream::TokenTree`, except that `Sequence`, `MetaVar`, `MetaVarDecl`, and
 /// `MetaVarExpr` are "first-class" token trees. Useful for parsing macros.
-#[derive(Debug, Clone, PartialEq, Encodable, Decodable)]
+#[derive(Debug, PartialEq, Encodable, Decodable)]
 enum TokenTree {
     Token(Token),
     /// A delimited sequence, e.g. `($e:expr)` (RHS) or `{ $e }` (LHS).
-    Delimited(DelimSpan, Lrc<Delimited>),
+    Delimited(DelimSpan, Delimited),
     /// A kleene-style repetition sequence, e.g. `$($e:expr)*` (RHS) or `$($e),*` (LHS).
-    Sequence(DelimSpan, Lrc<SequenceRepetition>),
+    Sequence(DelimSpan, SequenceRepetition),
     /// e.g., `$var`.
     MetaVar(Span, Ident),
     /// e.g., `$var:expr`. Only appears on the LHS.

compiler/rustc_expand/src/mbe/macro_parser.rs (5 additions, 2 deletions)

@@ -142,10 +142,13 @@ pub(super) fn compute_locs(sess: &ParseSess, matcher: &[TokenTree]) -> Vec<Match
                     locs.push(MatcherLoc::Token { token: token.clone() });
                 }
                 TokenTree::Delimited(span, delimited) => {
+                    let open_token = Token::new(token::OpenDelim(delimited.delim), span.open);
+                    let close_token = Token::new(token::CloseDelim(delimited.delim), span.close);
+
                     locs.push(MatcherLoc::Delimited);
-                    inner(sess, &[delimited.open_tt(*span)], locs, next_metavar, seq_depth);
+                    locs.push(MatcherLoc::Token { token: open_token });
                     inner(sess, &delimited.tts, locs, next_metavar, seq_depth);
-                    inner(sess, &[delimited.close_tt(*span)], locs, next_metavar, seq_depth);
+                    locs.push(MatcherLoc::Token { token: close_token });
                 }
                 TokenTree::Sequence(_, seq) => {
                     // We can't determine `idx_first_after` and construct the final

compiler/rustc_expand/src/mbe/macro_rules.rs (112 additions, 46 deletions)

@@ -8,13 +8,12 @@ use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc};
 use crate::mbe::transcribe::transcribe;
 
 use rustc_ast as ast;
-use rustc_ast::token::{self, NonterminalKind, Token, TokenKind::*};
+use rustc_ast::token::{self, NonterminalKind, Token, TokenKind, TokenKind::*};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream};
 use rustc_ast::{NodeId, DUMMY_NODE_ID};
 use rustc_ast_pretty::pprust;
 use rustc_attr::{self as attr, TransparencyError};
 use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder};
 use rustc_feature::Features;
 use rustc_lint_defs::builtin::{

@@ -263,14 +262,14 @@ fn generic_extension<'cx, 'tt>(
 
         // Ignore the delimiters on the RHS.
         let rhs = match &rhses[i] {
-            mbe::TokenTree::Delimited(_, delimited) => delimited.tts.to_vec(),
+            mbe::TokenTree::Delimited(_, delimited) => &delimited.tts,
             _ => cx.span_bug(sp, "malformed macro rhs"),
         };
         let arm_span = rhses[i].span();
 
         let rhs_spans = rhs.iter().map(|t| t.span()).collect::<Vec<_>>();
         // rhs has holes ( `$id` and `$(...)` that need filled)
-        let mut tts = match transcribe(cx, &named_matches, rhs, transparency) {
+        let mut tts = match transcribe(cx, &named_matches, &rhs, transparency) {
             Ok(tts) => tts,
             Err(mut err) => {
                 err.emit();

@@ -407,7 +406,7 @@ pub fn compile_declarative_macro(
     let argument_gram = vec![
         mbe::TokenTree::Sequence(
             DelimSpan::dummy(),
-            Lrc::new(mbe::SequenceRepetition {
+            mbe::SequenceRepetition {
                 tts: vec![
                     mbe::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
                     mbe::TokenTree::token(token::FatArrow, def.span),

@@ -419,20 +418,20 @@ pub fn compile_declarative_macro(
                 )),
                 kleene: mbe::KleeneToken::new(mbe::KleeneOp::OneOrMore, def.span),
                 num_captures: 2,
-            }),
+            },
         ),
         // to phase into semicolon-termination instead of semicolon-separation
         mbe::TokenTree::Sequence(
             DelimSpan::dummy(),
-            Lrc::new(mbe::SequenceRepetition {
+            mbe::SequenceRepetition {
                 tts: vec![mbe::TokenTree::token(
                     if macro_rules { token::Semi } else { token::Comma },
                     def.span,
                 )],
                 separator: None,
                 kleene: mbe::KleeneToken::new(mbe::KleeneOp::ZeroOrMore, def.span),
                 num_captures: 0,
-            }),
+            },
         ),
     ];
     // Convert it into `MatcherLoc` form.

@@ -658,18 +657,18 @@ fn check_matcher(
 // that do not try to inject artificial span information. My plan is
 // to try to catch such cases ahead of time and not include them in
 // the precomputed mapping.)
-struct FirstSets {
+struct FirstSets<'tt> {
     // this maps each TokenTree::Sequence `$(tt ...) SEP OP` that is uniquely identified by its
     // span in the original matcher to the First set for the inner sequence `tt ...`.
     //
     // If two sequences have the same span in a matcher, then map that
     // span to None (invalidating the mapping here and forcing the code to
     // use a slow path).
-    first: FxHashMap<Span, Option<TokenSet>>,
+    first: FxHashMap<Span, Option<TokenSet<'tt>>>,
 }
 
-impl FirstSets {
-    fn new(tts: &[mbe::TokenTree]) -> FirstSets {
+impl<'tt> FirstSets<'tt> {
+    fn new(tts: &'tt [mbe::TokenTree]) -> FirstSets<'tt> {
         use mbe::TokenTree;
 
         let mut sets = FirstSets { first: FxHashMap::default() };

@@ -679,19 +678,22 @@ impl FirstSets {
         // walks backward over `tts`, returning the FIRST for `tts`
         // and updating `sets` at the same time for all sequence
         // substructure we find within `tts`.
-        fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
+        fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
             let mut first = TokenSet::empty();
             for tt in tts.iter().rev() {
                 match *tt {
                     TokenTree::Token(..)
                     | TokenTree::MetaVar(..)
                     | TokenTree::MetaVarDecl(..)
                     | TokenTree::MetaVarExpr(..) => {
-                        first.replace_with(tt.clone());
+                        first.replace_with(TtHandle::TtRef(tt));
                     }
                     TokenTree::Delimited(span, ref delimited) => {
                         build_recur(sets, &delimited.tts);
-                        first.replace_with(delimited.open_tt(span));
+                        first.replace_with(TtHandle::from_token_kind(
+                            token::OpenDelim(delimited.delim),
+                            span.open,
+                        ));
                     }
                     TokenTree::Sequence(sp, ref seq_rep) => {
                         let subfirst = build_recur(sets, &seq_rep.tts);

@@ -715,7 +717,7 @@ impl FirstSets {
                         // token could be the separator token itself.
 
                         if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::Token(sep.clone()));
+                            first.add_one_maybe(TtHandle::from_token(sep.clone()));
                         }
 
                         // Reverse scan: Sequence comes before `first`.

@@ -741,7 +743,7 @@ impl FirstSets {
 
     // walks forward over `tts` until all potential FIRST tokens are
     // identified.
-    fn first(&self, tts: &[mbe::TokenTree]) -> TokenSet {
+    fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> {
         use mbe::TokenTree;
 
         let mut first = TokenSet::empty();

@@ -752,11 +754,14 @@ impl FirstSets {
                 | TokenTree::MetaVar(..)
                 | TokenTree::MetaVarDecl(..)
                 | TokenTree::MetaVarExpr(..) => {
-                    first.add_one(tt.clone());
+                    first.add_one(TtHandle::TtRef(tt));
                     return first;
                 }
                 TokenTree::Delimited(span, ref delimited) => {
-                    first.add_one(delimited.open_tt(span));
+                    first.add_one(TtHandle::from_token_kind(
+                        token::OpenDelim(delimited.delim),
+                        span.open,
+                    ));
                     return first;
                 }
                 TokenTree::Sequence(sp, ref seq_rep) => {

@@ -775,7 +780,7 @@ impl FirstSets {
                             // If the sequence contents can be empty, then the first
                             // token could be the separator token itself.
                             if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                                first.add_one_maybe(TokenTree::Token(sep.clone()));
+                                first.add_one_maybe(TtHandle::from_token(sep.clone()));
                             }
 
                             assert!(first.maybe_empty);

@@ -803,6 +808,62 @@ impl FirstSets {
     }
 }
 
+// Most `mbe::TokenTree`s are pre-existing in the matcher, but some are defined
+// implicitly, such as opening/closing delimiters and sequence repetition ops.
+// This type encapsulates both kinds. It implements `Clone` while avoiding the
+// need for `mbe::TokenTree` to implement `Clone`.
+#[derive(Debug)]
+enum TtHandle<'tt> {
+    /// This is used in most cases.
+    TtRef(&'tt mbe::TokenTree),
+
+    /// This is only used for implicit token trees. The `mbe::TokenTree` *must*
+    /// be `mbe::TokenTree::Token`. No other variants are allowed. We store an
+    /// `mbe::TokenTree` rather than a `Token` so that `get()` can return a
+    /// `&mbe::TokenTree`.
+    Token(mbe::TokenTree),
+}
+
+impl<'tt> TtHandle<'tt> {
+    fn from_token(tok: Token) -> Self {
+        TtHandle::Token(mbe::TokenTree::Token(tok))
+    }
+
+    fn from_token_kind(kind: TokenKind, span: Span) -> Self {
+        TtHandle::from_token(Token::new(kind, span))
+    }
+
+    // Get a reference to a token tree.
+    fn get(&'tt self) -> &'tt mbe::TokenTree {
+        match self {
+            TtHandle::TtRef(tt) => tt,
+            TtHandle::Token(token_tt) => &token_tt,
+        }
+    }
+}
+
+impl<'tt> PartialEq for TtHandle<'tt> {
+    fn eq(&self, other: &TtHandle<'tt>) -> bool {
+        self.get() == other.get()
+    }
+}
+
+impl<'tt> Clone for TtHandle<'tt> {
+    fn clone(&self) -> Self {
+        match self {
+            TtHandle::TtRef(tt) => TtHandle::TtRef(tt),
+
+            // This variant *must* contain a `mbe::TokenTree::Token`, and not
+            // any other variant of `mbe::TokenTree`.
+            TtHandle::Token(mbe::TokenTree::Token(tok)) => {
+                TtHandle::Token(mbe::TokenTree::Token(tok.clone()))
+            }
+
+            _ => unreachable!(),
+        }
+    }
+}
+
 // A set of `mbe::TokenTree`s, which may include `TokenTree::Match`s
 // (for macro-by-example syntactic variables). It also carries the
 // `maybe_empty` flag; that is true if and only if the matcher can

@@ -814,28 +875,28 @@ impl FirstSets {
 //
 // (Notably, we must allow for *-op to occur zero times.)
 #[derive(Clone, Debug)]
-struct TokenSet {
-    tokens: Vec<mbe::TokenTree>,
+struct TokenSet<'tt> {
+    tokens: Vec<TtHandle<'tt>>,
     maybe_empty: bool,
 }
 
-impl TokenSet {
+impl<'tt> TokenSet<'tt> {
     // Returns a set for the empty sequence.
     fn empty() -> Self {
         TokenSet { tokens: Vec::new(), maybe_empty: true }
     }
 
     // Returns the set `{ tok }` for the single-token (and thus
     // non-empty) sequence [tok].
-    fn singleton(tok: mbe::TokenTree) -> Self {
-        TokenSet { tokens: vec![tok], maybe_empty: false }
+    fn singleton(tt: TtHandle<'tt>) -> Self {
+        TokenSet { tokens: vec![tt], maybe_empty: false }
     }
 
     // Changes self to be the set `{ tok }`.
     // Since `tok` is always present, marks self as non-empty.
-    fn replace_with(&mut self, tok: mbe::TokenTree) {
+    fn replace_with(&mut self, tt: TtHandle<'tt>) {
         self.tokens.clear();
-        self.tokens.push(tok);
+        self.tokens.push(tt);
         self.maybe_empty = false;
     }
 

@@ -848,17 +909,17 @@ impl TokenSet {
     }
 
     // Adds `tok` to the set for `self`, marking sequence as non-empy.
-    fn add_one(&mut self, tok: mbe::TokenTree) {
-        if !self.tokens.contains(&tok) {
-            self.tokens.push(tok);
+    fn add_one(&mut self, tt: TtHandle<'tt>) {
+        if !self.tokens.contains(&tt) {
+            self.tokens.push(tt);
         }
         self.maybe_empty = false;
     }
 
     // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
-    fn add_one_maybe(&mut self, tok: mbe::TokenTree) {
-        if !self.tokens.contains(&tok) {
-            self.tokens.push(tok);
+    fn add_one_maybe(&mut self, tt: TtHandle<'tt>) {
+        if !self.tokens.contains(&tt) {
+            self.tokens.push(tt);
         }
     }
 

@@ -870,9 +931,9 @@ impl TokenSet {
     // setting of the empty flag of `self`. If `other` is guaranteed
     // non-empty, then `self` is marked non-empty.
     fn add_all(&mut self, other: &Self) {
-        for tok in &other.tokens {
-            if !self.tokens.contains(tok) {
-                self.tokens.push(tok.clone());
+        for tt in &other.tokens {
+            if !self.tokens.contains(tt) {
+                self.tokens.push(tt.clone());
             }
         }
         if !other.maybe_empty {

@@ -892,14 +953,14 @@ impl TokenSet {
 //
 // Requires that `first_sets` is pre-computed for `matcher`;
 // see `FirstSets::new`.
-fn check_matcher_core(
+fn check_matcher_core<'tt>(
     sess: &ParseSess,
     features: &Features,
     def: &ast::Item,
-    first_sets: &FirstSets,
-    matcher: &[mbe::TokenTree],
-    follow: &TokenSet,
-) -> TokenSet {
+    first_sets: &FirstSets<'tt>,
+    matcher: &'tt [mbe::TokenTree],
+    follow: &TokenSet<'tt>,
+) -> TokenSet<'tt> {
     use mbe::TokenTree;
 
     let mut last = TokenSet::empty();

@@ -938,12 +999,15 @@ fn check_matcher_core(
                     // followed by anything against SUFFIX.
                     continue 'each_token;
                 } else {
-                    last.replace_with(token.clone());
+                    last.replace_with(TtHandle::TtRef(token));
                     suffix_first = build_suffix_first();
                 }
             }
             TokenTree::Delimited(span, ref d) => {
-                let my_suffix = TokenSet::singleton(d.close_tt(span));
+                let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
+                    token::CloseDelim(d.delim),
+                    span.close,
+                ));
                 check_matcher_core(sess, features, def, first_sets, &d.tts, &my_suffix);
                 // don't track non NT tokens
                 last.replace_with_irrelevant();

@@ -967,7 +1031,7 @@ fn check_matcher_core(
                 let mut new;
                 let my_suffix = if let Some(sep) = &seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::Token(sep.clone()));
+                    new.add_one_maybe(TtHandle::from_token(sep.clone()));
                     &new
                 } else {
                     &suffix_first

@@ -994,9 +1058,11 @@ fn check_matcher_core(
 
     // Now `last` holds the complete set of NT tokens that could
    // end the sequence before SUFFIX. Check that every one works with `suffix`.
-    for token in &last.tokens {
-        if let TokenTree::MetaVarDecl(span, name, Some(kind)) = *token {
+    for tt in &last.tokens {
+        if let &TokenTree::MetaVarDecl(span, name, Some(kind)) = tt.get() {
            for next_token in &suffix_first.tokens {
+                let next_token = next_token.get();
+
                // Check if the old pat is used and the next token is `|`
                // to warn about incompatibility with Rust 2021.
                // We only emit this lint if we're parsing the original
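
The `TtHandle` type added above is what keeps the FIRST/FOLLOW bookkeeping in `check_matcher` working once `mbe::TokenTree` no longer implements `Clone`: most set entries borrow trees that already exist in the matcher, and only synthesized delimiter and separator tokens are owned. A stripped-down sketch of the same borrow-or-own idea, using toy `Node`/`Handle` types rather than the compiler's:

```rust
// Simplified borrow-or-own handle, mirroring the `TtHandle` idea above.
// `Node` itself is not `Clone`, but `Handle` is: the borrowed variant just
// copies the reference, and the owned variant only ever holds a cheap,
// synthesized `Node::Token`.
#[derive(Debug, PartialEq)]
enum Node {
    Token(u32),
    Group(Vec<Node>),
}

#[derive(Debug)]
enum Handle<'a> {
    /// Borrows a node that already exists in a longer-lived structure.
    Ref(&'a Node),
    /// Owns a node synthesized on the fly; restricted to `Node::Token`.
    Owned(Node),
}

impl<'a> Handle<'a> {
    fn get(&self) -> &Node {
        match self {
            Handle::Ref(n) => n,
            Handle::Owned(n) => n,
        }
    }
}

impl<'a> Clone for Handle<'a> {
    fn clone(&self) -> Self {
        match self {
            // Copying the reference is free and keeps the original lifetime.
            Handle::Ref(n) => Handle::Ref(n),
            // Only the cheap, synthesized variant is ever duplicated.
            Handle::Owned(Node::Token(t)) => Handle::Owned(Node::Token(*t)),
            _ => unreachable!("owned handles only hold `Node::Token`"),
        }
    }
}

fn main() {
    let existing = Node::Group(vec![Node::Token(1)]);
    let borrowed = Handle::Ref(&existing);
    let synthesized = Handle::Owned(Node::Token(42));
    assert_eq!(borrowed.clone().get(), borrowed.get());
    assert_eq!(synthesized.clone().get(), &Node::Token(42));
}
```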

compiler/rustc_expand/src/mbe/quoted.rs (3 additions, 10 deletions)

@@ -11,8 +11,6 @@ use rustc_span::symbol::{kw, sym, Ident};
 use rustc_span::edition::Edition;
 use rustc_span::{Span, SyntaxContext};
 
-use rustc_data_structures::sync::Lrc;
-
 const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
     `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, \
     `literal`, `path`, `meta`, `tt`, `item` and `vis`";

@@ -213,12 +211,7 @@ fn parse_tree(
                     if parsing_patterns { count_metavar_decls(&sequence) } else { 0 };
                 TokenTree::Sequence(
                     delim_span,
-                    Lrc::new(SequenceRepetition {
-                        tts: sequence,
-                        separator,
-                        kleene,
-                        num_captures,
-                    }),
+                    SequenceRepetition { tts: sequence, separator, kleene, num_captures },
                 )
             }
 

@@ -269,10 +262,10 @@ fn parse_tree(
         // descend into the delimited set and further parse it.
         tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
             span,
-            Lrc::new(Delimited {
+            Delimited {
                 delim,
                 tts: parse(tts, parsing_patterns, sess, node_id, features, edition),
-            }),
+            },
         ),
     }
 }

compiler/rustc_expand/src/mbe/transcribe.rs (39 additions, 30 deletions)

@@ -5,7 +5,6 @@ use rustc_ast::mut_visit::{self, MutVisitor};
 use rustc_ast::token::{self, Token, TokenKind};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing};
 use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::{pluralize, PResult};
 use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed};
 use rustc_span::hygiene::{LocalExpnId, Transparency};

@@ -27,31 +26,35 @@ impl MutVisitor for Marker {
 }
 
 /// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
-enum Frame {
-    Delimited { forest: Lrc<mbe::Delimited>, idx: usize, span: DelimSpan },
-    Sequence { forest: Lrc<mbe::SequenceRepetition>, idx: usize, sep: Option<Token> },
+enum Frame<'a> {
+    Delimited {
+        tts: &'a [mbe::TokenTree],
+        delim_token: token::DelimToken,
+        idx: usize,
+        span: DelimSpan,
+    },
+    Sequence {
+        tts: &'a [mbe::TokenTree],
+        idx: usize,
+        sep: Option<Token>,
+    },
 }
 
-impl Frame {
+impl<'a> Frame<'a> {
     /// Construct a new frame around the delimited set of tokens.
-    fn new(tts: Vec<mbe::TokenTree>) -> Frame {
-        let forest = Lrc::new(mbe::Delimited { delim: token::NoDelim, tts });
-        Frame::Delimited { forest, idx: 0, span: DelimSpan::dummy() }
+    fn new(tts: &'a [mbe::TokenTree]) -> Frame<'a> {
+        Frame::Delimited { tts, delim_token: token::NoDelim, idx: 0, span: DelimSpan::dummy() }
     }
 }
 
-impl Iterator for Frame {
-    type Item = mbe::TokenTree;
+impl<'a> Iterator for Frame<'a> {
+    type Item = &'a mbe::TokenTree;
 
-    fn next(&mut self) -> Option<mbe::TokenTree> {
-        match *self {
-            Frame::Delimited { ref forest, ref mut idx, .. } => {
-                let res = forest.tts.get(*idx).cloned();
-                *idx += 1;
-                res
-            }
-            Frame::Sequence { ref forest, ref mut idx, .. } => {
-                let res = forest.tts.get(*idx).cloned();
+    fn next(&mut self) -> Option<&'a mbe::TokenTree> {
+        match self {
+            Frame::Delimited { tts, ref mut idx, .. }
+            | Frame::Sequence { tts, ref mut idx, .. } => {
+                let res = tts.get(*idx);
                 *idx += 1;
                 res
             }

@@ -82,7 +85,7 @@ impl Iterator for Frame {
 pub(super) fn transcribe<'a>(
     cx: &ExtCtxt<'a>,
     interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
-    src: Vec<mbe::TokenTree>,
+    src: &[mbe::TokenTree],
     transparency: Transparency,
 ) -> PResult<'a, TokenStream> {
     // Nothing for us to transcribe...

@@ -92,7 +95,7 @@ pub(super) fn transcribe<'a>(
 
     // We descend into the RHS (`src`), expanding things as we go. This stack contains the things
    // we have yet to expand/are still expanding. We start the stack off with the whole RHS.
-    let mut stack: SmallVec<[Frame; 1]> = smallvec![Frame::new(src)];
+    let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new(&src)];
 
     // As we descend in the RHS, we will need to be able to match nested sequences of matchers.
     // `repeats` keeps track of where we are in matching at each level, with the last element being

@@ -146,14 +149,14 @@ pub(super) fn transcribe<'a>(
                 // We are done processing a Delimited. If this is the top-level delimited, we are
                 // done. Otherwise, we unwind the result_stack to append what we have produced to
                 // any previous results.
-                Frame::Delimited { forest, span, .. } => {
+                Frame::Delimited { delim_token, span, .. } => {
                     if result_stack.is_empty() {
                         // No results left to compute! We are back at the top-level.
                         return Ok(TokenStream::new(result));
                     }
 
                     // Step back into the parent Delimited.
-                    let tree = TokenTree::Delimited(span, forest.delim, TokenStream::new(result));
+                    let tree = TokenTree::Delimited(span, delim_token, TokenStream::new(result));
                     result = result_stack.pop().unwrap();
                     result.push(tree.into());
                 }

@@ -167,7 +170,7 @@ pub(super) fn transcribe<'a>(
             // We are descending into a sequence. We first make sure that the matchers in the RHS
             // and the matches in `interp` have the same shape. Otherwise, either the caller or the
             // macro writer has made a mistake.
-            seq @ mbe::TokenTree::Sequence(..) => {
+            seq @ mbe::TokenTree::Sequence(_, delimited) => {
                 match lockstep_iter_size(&seq, interp, &repeats) {
                     LockstepIterSize::Unconstrained => {
                         return Err(cx.struct_span_err(

@@ -214,7 +217,7 @@ pub(super) fn transcribe<'a>(
                     stack.push(Frame::Sequence {
                         idx: 0,
                         sep: seq.separator.clone(),
-                        forest: seq,
+                        tts: &delimited.tts,
                     });
                 }
             }

@@ -272,15 +275,21 @@ pub(super) fn transcribe<'a>(
             // the previous results (from outside the Delimited).
             mbe::TokenTree::Delimited(mut span, delimited) => {
                 mut_visit::visit_delim_span(&mut span, &mut marker);
-                stack.push(Frame::Delimited { forest: delimited, idx: 0, span });
+                stack.push(Frame::Delimited {
+                    tts: &delimited.tts,
+                    delim_token: delimited.delim,
+                    idx: 0,
+                    span,
+                });
                 result_stack.push(mem::take(&mut result));
             }
 
             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
             mbe::TokenTree::Token(token) => {
-                let mut tt = TokenTree::Token(token);
-                mut_visit::visit_tt(&mut tt, &mut marker);
+                let mut token = token.clone();
+                mut_visit::visit_token(&mut token, &mut marker);
+                let tt = TokenTree::Token(token);
                 result.push(tt.into());
             }
 

@@ -516,7 +525,7 @@ fn out_of_bounds_err<'a>(
 
 fn transcribe_metavar_expr<'a>(
     cx: &ExtCtxt<'a>,
-    expr: MetaVarExpr,
+    expr: &MetaVarExpr,
     interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
     marker: &mut Marker,
     repeats: &[(usize, usize)],

@@ -528,7 +537,7 @@ fn transcribe_metavar_expr<'a>(
         marker.visit_span(&mut span);
         span
     };
-    match expr {
+    match *expr {
        MetaVarExpr::Count(original_ident, depth_opt) => {
            let matched = matched_from_ident(cx, original_ident, interp)?;
            let count = count_repetitions(cx, depth_opt, matched, &repeats, sp)?;
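
The reworked `Frame` above is just an index over a borrowed slice that yields `&'a` items, so transcription never clones matcher token trees while walking them. A minimal standalone sketch of that iterator shape (`SliceFrame` is an illustrative name, not a rustc type):

```rust
// Minimal sketch of the borrowing-iterator shape used by `Frame` above:
// hold a slice and an index, and yield references rather than clones.
struct SliceFrame<'a, T> {
    items: &'a [T],
    idx: usize,
}

impl<'a, T> SliceFrame<'a, T> {
    fn new(items: &'a [T]) -> Self {
        SliceFrame { items, idx: 0 }
    }
}

impl<'a, T> Iterator for SliceFrame<'a, T> {
    // Items are references tied to the underlying slice, so nothing is cloned.
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        let res = self.items.get(self.idx);
        self.idx += 1;
        res
    }
}

fn main() {
    let tokens = vec![String::from("$"), String::from("x")];
    let mut frame = SliceFrame::new(&tokens);
    assert_eq!(frame.next(), Some(&tokens[0]));
    assert_eq!(frame.next(), Some(&tokens[1]));
    assert_eq!(frame.next(), None);
}
```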
