@@ -8,13 +8,12 @@ use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc};
 use crate::mbe::transcribe::transcribe;
 
 use rustc_ast as ast;
-use rustc_ast::token::{self, NonterminalKind, Token, TokenKind::*};
+use rustc_ast::token::{self, NonterminalKind, Token, TokenKind, TokenKind::*};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream};
 use rustc_ast::{NodeId, DUMMY_NODE_ID};
 use rustc_ast_pretty::pprust;
 use rustc_attr::{self as attr, TransparencyError};
 use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder};
 use rustc_feature::Features;
 use rustc_lint_defs::builtin::{
@@ -263,14 +262,14 @@ fn generic_extension<'cx, 'tt>(
 
                 // Ignore the delimiters on the RHS.
                 let rhs = match &rhses[i] {
-                    mbe::TokenTree::Delimited(_, delimited) => delimited.tts.to_vec(),
+                    mbe::TokenTree::Delimited(_, delimited) => &delimited.tts,
                     _ => cx.span_bug(sp, "malformed macro rhs"),
                 };
                 let arm_span = rhses[i].span();
 
                 let rhs_spans = rhs.iter().map(|t| t.span()).collect::<Vec<_>>();
                 // rhs has holes ( `$id` and `$(...)` that need filled)
-                let mut tts = match transcribe(cx, &named_matches, rhs, transparency) {
+                let mut tts = match transcribe(cx, &named_matches, &rhs, transparency) {
                     Ok(tts) => tts,
                     Err(mut err) => {
                         err.emit();
@@ -407,7 +406,7 @@ pub fn compile_declarative_macro(
     let argument_gram = vec![
         mbe::TokenTree::Sequence(
             DelimSpan::dummy(),
-            Lrc::new(mbe::SequenceRepetition {
+            mbe::SequenceRepetition {
                 tts: vec![
                     mbe::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
                     mbe::TokenTree::token(token::FatArrow, def.span),
@@ -419,20 +418,20 @@ pub fn compile_declarative_macro(
                 )),
                 kleene: mbe::KleeneToken::new(mbe::KleeneOp::OneOrMore, def.span),
                 num_captures: 2,
-            }),
+            },
         ),
         // to phase into semicolon-termination instead of semicolon-separation
         mbe::TokenTree::Sequence(
             DelimSpan::dummy(),
-            Lrc::new(mbe::SequenceRepetition {
+            mbe::SequenceRepetition {
                 tts: vec![mbe::TokenTree::token(
                     if macro_rules { token::Semi } else { token::Comma },
                     def.span,
                 )],
                 separator: None,
                 kleene: mbe::KleeneToken::new(mbe::KleeneOp::ZeroOrMore, def.span),
                 num_captures: 0,
-            }),
+            },
         ),
     ];
     // Convert it into `MatcherLoc` form.
@@ -658,18 +657,18 @@ fn check_matcher(
 // that do not try to inject artificial span information. My plan is
 // to try to catch such cases ahead of time and not include them in
 // the precomputed mapping.)
-struct FirstSets {
+struct FirstSets<'tt> {
     // this maps each TokenTree::Sequence `$(tt ...) SEP OP` that is uniquely identified by its
     // span in the original matcher to the First set for the inner sequence `tt ...`.
     //
     // If two sequences have the same span in a matcher, then map that
     // span to None (invalidating the mapping here and forcing the code to
     // use a slow path).
-    first: FxHashMap<Span, Option<TokenSet>>,
+    first: FxHashMap<Span, Option<TokenSet<'tt>>>,
 }
 
-impl FirstSets {
-    fn new(tts: &[mbe::TokenTree]) -> FirstSets {
+impl<'tt> FirstSets<'tt> {
+    fn new(tts: &'tt [mbe::TokenTree]) -> FirstSets<'tt> {
         use mbe::TokenTree;
 
         let mut sets = FirstSets { first: FxHashMap::default() };
@@ -679,19 +678,22 @@ impl FirstSets {
         // walks backward over `tts`, returning the FIRST for `tts`
         // and updating `sets` at the same time for all sequence
         // substructure we find within `tts`.
-        fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
+        fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
             let mut first = TokenSet::empty();
             for tt in tts.iter().rev() {
                 match *tt {
                     TokenTree::Token(..)
                     | TokenTree::MetaVar(..)
                     | TokenTree::MetaVarDecl(..)
                     | TokenTree::MetaVarExpr(..) => {
-                        first.replace_with(tt.clone());
+                        first.replace_with(TtHandle::TtRef(tt));
                     }
                     TokenTree::Delimited(span, ref delimited) => {
                         build_recur(sets, &delimited.tts);
-                        first.replace_with(delimited.open_tt(span));
+                        first.replace_with(TtHandle::from_token_kind(
+                            token::OpenDelim(delimited.delim),
+                            span.open,
+                        ));
                     }
                     TokenTree::Sequence(sp, ref seq_rep) => {
                         let subfirst = build_recur(sets, &seq_rep.tts);
@@ -715,7 +717,7 @@ impl FirstSets {
                         // token could be the separator token itself.
 
                         if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::Token(sep.clone()));
+                            first.add_one_maybe(TtHandle::from_token(sep.clone()));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -741,7 +743,7 @@ impl FirstSets {
 
     // walks forward over `tts` until all potential FIRST tokens are
     // identified.
-    fn first(&self, tts: &[mbe::TokenTree]) -> TokenSet {
+    fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> {
         use mbe::TokenTree;
 
         let mut first = TokenSet::empty();
@@ -752,11 +754,14 @@ impl FirstSets {
                 | TokenTree::MetaVar(..)
                 | TokenTree::MetaVarDecl(..)
                 | TokenTree::MetaVarExpr(..) => {
-                    first.add_one(tt.clone());
+                    first.add_one(TtHandle::TtRef(tt));
                     return first;
                 }
                 TokenTree::Delimited(span, ref delimited) => {
-                    first.add_one(delimited.open_tt(span));
+                    first.add_one(TtHandle::from_token_kind(
+                        token::OpenDelim(delimited.delim),
+                        span.open,
+                    ));
                     return first;
                 }
                 TokenTree::Sequence(sp, ref seq_rep) => {
@@ -775,7 +780,7 @@ impl FirstSets {
                     // If the sequence contents can be empty, then the first
                     // token could be the separator token itself.
                     if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                        first.add_one_maybe(TokenTree::Token(sep.clone()));
+                        first.add_one_maybe(TtHandle::from_token(sep.clone()));
                     }
 
                     assert!(first.maybe_empty);
@@ -803,6 +808,62 @@ impl FirstSets {
     }
 }
 
+// Most `mbe::TokenTree`s are pre-existing in the matcher, but some are defined
+// implicitly, such as opening/closing delimiters and sequence repetition ops.
+// This type encapsulates both kinds. It implements `Clone` while avoiding the
+// need for `mbe::TokenTree` to implement `Clone`.
+#[derive(Debug)]
+enum TtHandle<'tt> {
+    /// This is used in most cases.
+    TtRef(&'tt mbe::TokenTree),
+
+    /// This is only used for implicit token trees. The `mbe::TokenTree` *must*
+    /// be `mbe::TokenTree::Token`. No other variants are allowed. We store an
+    /// `mbe::TokenTree` rather than a `Token` so that `get()` can return a
+    /// `&mbe::TokenTree`.
+    Token(mbe::TokenTree),
+}
+
+impl<'tt> TtHandle<'tt> {
+    fn from_token(tok: Token) -> Self {
+        TtHandle::Token(mbe::TokenTree::Token(tok))
+    }
+
+    fn from_token_kind(kind: TokenKind, span: Span) -> Self {
+        TtHandle::from_token(Token::new(kind, span))
+    }
+
+    // Get a reference to a token tree.
+    fn get(&'tt self) -> &'tt mbe::TokenTree {
+        match self {
+            TtHandle::TtRef(tt) => tt,
+            TtHandle::Token(token_tt) => &token_tt,
+        }
+    }
+}
+
+impl<'tt> PartialEq for TtHandle<'tt> {
+    fn eq(&self, other: &TtHandle<'tt>) -> bool {
+        self.get() == other.get()
+    }
+}
+
+impl<'tt> Clone for TtHandle<'tt> {
+    fn clone(&self) -> Self {
+        match self {
+            TtHandle::TtRef(tt) => TtHandle::TtRef(tt),
+
+            // This variant *must* contain a `mbe::TokenTree::Token`, and not
+            // any other variant of `mbe::TokenTree`.
+            TtHandle::Token(mbe::TokenTree::Token(tok)) => {
+                TtHandle::Token(mbe::TokenTree::Token(tok.clone()))
+            }
+
+            _ => unreachable!(),
+        }
+    }
+}
+
 // A set of `mbe::TokenTree`s, which may include `TokenTree::Match`s
 // (for macro-by-example syntactic variables). It also carries the
 // `maybe_empty` flag; that is true if and only if the matcher can
@@ -814,28 +875,28 @@ impl FirstSets {
 //
 // (Notably, we must allow for *-op to occur zero times.)
 #[derive(Clone, Debug)]
-struct TokenSet {
-    tokens: Vec<mbe::TokenTree>,
+struct TokenSet<'tt> {
+    tokens: Vec<TtHandle<'tt>>,
     maybe_empty: bool,
 }
 
-impl TokenSet {
+impl<'tt> TokenSet<'tt> {
     // Returns a set for the empty sequence.
     fn empty() -> Self {
        TokenSet { tokens: Vec::new(), maybe_empty: true }
     }
 
     // Returns the set `{ tok }` for the single-token (and thus
     // non-empty) sequence [tok].
-    fn singleton(tok: mbe::TokenTree) -> Self {
-        TokenSet { tokens: vec![tok], maybe_empty: false }
+    fn singleton(tt: TtHandle<'tt>) -> Self {
+        TokenSet { tokens: vec![tt], maybe_empty: false }
     }
 
     // Changes self to be the set `{ tok }`.
     // Since `tok` is always present, marks self as non-empty.
-    fn replace_with(&mut self, tok: mbe::TokenTree) {
+    fn replace_with(&mut self, tt: TtHandle<'tt>) {
         self.tokens.clear();
-        self.tokens.push(tok);
+        self.tokens.push(tt);
         self.maybe_empty = false;
     }
 
@@ -848,17 +909,17 @@ impl TokenSet {
     }
 
     // Adds `tok` to the set for `self`, marking sequence as non-empy.
-    fn add_one(&mut self, tok: mbe::TokenTree) {
-        if !self.tokens.contains(&tok) {
-            self.tokens.push(tok);
+    fn add_one(&mut self, tt: TtHandle<'tt>) {
+        if !self.tokens.contains(&tt) {
+            self.tokens.push(tt);
         }
         self.maybe_empty = false;
     }
 
     // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
-    fn add_one_maybe(&mut self, tok: mbe::TokenTree) {
-        if !self.tokens.contains(&tok) {
-            self.tokens.push(tok);
+    fn add_one_maybe(&mut self, tt: TtHandle<'tt>) {
+        if !self.tokens.contains(&tt) {
+            self.tokens.push(tt);
         }
     }
 
@@ -870,9 +931,9 @@ impl TokenSet {
     // setting of the empty flag of `self`. If `other` is guaranteed
     // non-empty, then `self` is marked non-empty.
     fn add_all(&mut self, other: &Self) {
-        for tok in &other.tokens {
-            if !self.tokens.contains(tok) {
-                self.tokens.push(tok.clone());
+        for tt in &other.tokens {
+            if !self.tokens.contains(tt) {
+                self.tokens.push(tt.clone());
             }
         }
         if !other.maybe_empty {
@@ -892,14 +953,14 @@ impl TokenSet {
 //
 // Requires that `first_sets` is pre-computed for `matcher`;
 // see `FirstSets::new`.
-fn check_matcher_core(
+fn check_matcher_core<'tt>(
     sess: &ParseSess,
     features: &Features,
     def: &ast::Item,
-    first_sets: &FirstSets,
-    matcher: &[mbe::TokenTree],
-    follow: &TokenSet,
-) -> TokenSet {
+    first_sets: &FirstSets<'tt>,
+    matcher: &'tt [mbe::TokenTree],
+    follow: &TokenSet<'tt>,
+) -> TokenSet<'tt> {
     use mbe::TokenTree;
 
     let mut last = TokenSet::empty();
@@ -938,12 +999,15 @@ fn check_matcher_core(
                     // followed by anything against SUFFIX.
                     continue 'each_token;
                 } else {
-                    last.replace_with(token.clone());
+                    last.replace_with(TtHandle::TtRef(token));
                     suffix_first = build_suffix_first();
                 }
             }
             TokenTree::Delimited(span, ref d) => {
-                let my_suffix = TokenSet::singleton(d.close_tt(span));
+                let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
+                    token::CloseDelim(d.delim),
+                    span.close,
+                ));
                 check_matcher_core(sess, features, def, first_sets, &d.tts, &my_suffix);
                 // don't track non NT tokens
                 last.replace_with_irrelevant();
@@ -967,7 +1031,7 @@ fn check_matcher_core(
                 let mut new;
                 let my_suffix = if let Some(sep) = &seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::Token(sep.clone()));
+                    new.add_one_maybe(TtHandle::from_token(sep.clone()));
                     &new
                 } else {
                     &suffix_first
@@ -994,9 +1058,11 @@ fn check_matcher_core(
 
     // Now `last` holds the complete set of NT tokens that could
     // end the sequence before SUFFIX. Check that every one works with `suffix`.
-    for token in &last.tokens {
-        if let TokenTree::MetaVarDecl(span, name, Some(kind)) = *token {
+    for tt in &last.tokens {
+        if let &TokenTree::MetaVarDecl(span, name, Some(kind)) = tt.get() {
             for next_token in &suffix_first.tokens {
+                let next_token = next_token.get();
+
                 // Check if the old pat is used and the next token is `|`
                 // to warn about incompatibility with Rust 2021.
                 // We only emit this lint if we're parsing the original
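
Aside (not part of the diff): the `TtHandle` type introduced above is a borrow-or-own handle. Most entries borrow a token tree that already lives in the matcher, implicitly created tokens (delimiters, separators) are owned, and `Clone` just copies the reference in the common case instead of cloning a whole `mbe::TokenTree`. A minimal, self-contained sketch of that pattern, using illustrative names (`Node`, `Handle`) rather than rustc types:

// Stand-in for `mbe::TokenTree`.
#[derive(Debug, PartialEq)]
struct Node(u32);

enum Handle<'a> {
    Borrowed(&'a Node), // like `TtHandle::TtRef`: points into existing structure
    Owned(Node),        // like `TtHandle::Token`: holds an implicitly created node
}

impl<'a> Handle<'a> {
    // Uniform access, mirroring `TtHandle::get()`.
    fn get(&self) -> &Node {
        match *self {
            Handle::Borrowed(n) => n,
            Handle::Owned(ref n) => n,
        }
    }
}

impl<'a> Clone for Handle<'a> {
    fn clone(&self) -> Self {
        match *self {
            // Copying a shared reference is free.
            Handle::Borrowed(n) => Handle::Borrowed(n),
            // Only the small owned payload is rebuilt.
            Handle::Owned(ref n) => Handle::Owned(Node(n.0)),
        }
    }
}

fn main() {
    let stored = Node(7);
    let a = Handle::Borrowed(&stored);
    let b = Handle::Owned(Node(42));
    assert_eq!(a.clone().get(), &Node(7));
    assert_eq!(b.clone().get(), &Node(42));
}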