
Commit 85ed21e

Committed Jun 23, 2019
Auto merge of #62070 - ia0:rustfmt, r=petrochenkov
Run rustfmt on some libsyntax files

As part of #62008, run rustfmt on:

- src/libsyntax/ext/tt/macro_rules.rs
- src/libsyntax/ext/tt/quoted.rs

There is no semantic change. To fix potential merge conflicts, choose the other side, then run rustfmt and fix any tidy check failures (such as line length).
2 parents de7c4e4 + 0aeab41 commit 85ed21e
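The conflict-resolution recipe from the commit message, as a rough sketch (the commands are illustrative and not part of the commit; adjust the file path, which side you keep, and the tidy invocation to your own checkout and toolchain):

    # resolve the conflict by keeping the other side's version of the file
    git checkout --theirs src/libsyntax/ext/tt/macro_rules.rs
    # reformat the resolved file
    rustfmt src/libsyntax/ext/tt/macro_rules.rs
    # re-run the tidy checks (line length and similar)
    ./x.py test src/tools/tidy
    git add src/libsyntax/ext/tt/macro_rules.rs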

File tree: 2 files changed (+273, -235 lines)


src/libsyntax/ext/tt/macro_rules.rs

Lines changed: 260 additions & 206 deletions
@@ -1,37 +1,37 @@
-use crate::{ast, attr};
 use crate::edition::Edition;
-use crate::ext::base::{SyntaxExtension, SyntaxExtensionKind};
 use crate::ext::base::{DummyResult, ExtCtxt, MacResult, TTMacroExpander};
+use crate::ext::base::{SyntaxExtension, SyntaxExtensionKind};
 use crate::ext::expand::{AstFragment, AstFragmentKind};
 use crate::ext::hygiene::Transparency;
-use crate::ext::tt::macro_parser::{Success, Error, Failure};
-use crate::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
 use crate::ext::tt::macro_parser::{parse, parse_failure_msg};
+use crate::ext::tt::macro_parser::{Error, Failure, Success};
+use crate::ext::tt::macro_parser::{MatchedNonterminal, MatchedSeq};
 use crate::ext::tt::quoted;
 use crate::ext::tt::transcribe::transcribe;
 use crate::feature_gate::Features;
-use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::Parser;
-use crate::parse::token::{self, Token, NtTT};
 use crate::parse::token::TokenKind::*;
-use crate::symbol::{Symbol, kw, sym};
+use crate::parse::token::{self, NtTT, Token};
+use crate::parse::{Directory, ParseSess};
+use crate::symbol::{kw, sym, Symbol};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
+use crate::{ast, attr};

 use errors::FatalError;
-use syntax_pos::{Span, symbol::Ident};
 use log::debug;
+use syntax_pos::{symbol::Ident, Span};

-use rustc_data_structures::fx::{FxHashMap};
+use rustc_data_structures::fx::FxHashMap;
 use std::borrow::Cow;
 use std::collections::hash_map::Entry;
 use std::slice;

-use rustc_data_structures::sync::Lrc;
 use errors::Applicability;
+use rustc_data_structures::sync::Lrc;

 const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
-`ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, `literal`, \
-`path`, `meta`, `tt`, `item` and `vis`";
+`ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, \
+`literal`, `path`, `meta`, `tt`, `item` and `vis`";

 pub struct ParserAnyMacro<'a> {
 parser: Parser<'a>,
@@ -48,7 +48,8 @@ impl<'a> ParserAnyMacro<'a> {
 let ParserAnyMacro { site_span, macro_ident, ref mut parser, arm_span } = *self;
 let fragment = panictry!(parser.parse_ast_fragment(kind, true).map_err(|mut e| {
 if parser.token == token::Eof && e.message().ends_with(", found `<eof>`") {
-if !e.span.is_dummy() { // early end of macro arm (#52866)
+if !e.span.is_dummy() {
+// early end of macro arm (#52866)
 e.replace_span_with(parser.sess.source_map().next_point(parser.token.span));
 }
 let msg = &e.message[0];
@@ -60,7 +61,8 @@ impl<'a> ParserAnyMacro<'a> {
 msg.1,
 );
 }
-if e.span.is_dummy() { // Get around lack of span in error (#30128)
+if e.span.is_dummy() {
+// Get around lack of span in error (#30128)
 e.replace_span_with(site_span);
 if parser.sess.source_map().span_to_filename(arm_span).is_real() {
 e.span_label(arm_span, "in this macro arm");
@@ -99,17 +101,11 @@ impl TTMacroExpander for MacroRulesMacroExpander {
 sp: Span,
 input: TokenStream,
 def_span: Option<Span>,
-) -> Box<dyn MacResult+'cx> {
+) -> Box<dyn MacResult + 'cx> {
 if !self.valid {
 return DummyResult::any(sp);
 }
-generic_extension(cx,
-sp,
-def_span,
-self.name,
-input,
-&self.lhses,
-&self.rhses)
+generic_extension(cx, sp, def_span, self.name, input, &self.lhses, &self.rhses)
 }
 }

@@ -119,25 +115,27 @@ fn trace_macros_note(cx: &mut ExtCtxt<'_>, sp: Span, message: String) {
 }

 /// Given `lhses` and `rhses`, this is the new macro we create
-fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
-sp: Span,
-def_span: Option<Span>,
-name: ast::Ident,
-arg: TokenStream,
-lhses: &[quoted::TokenTree],
-rhses: &[quoted::TokenTree])
--> Box<dyn MacResult+'cx> {
+fn generic_extension<'cx>(
+cx: &'cx mut ExtCtxt<'_>,
+sp: Span,
+def_span: Option<Span>,
+name: ast::Ident,
+arg: TokenStream,
+lhses: &[quoted::TokenTree],
+rhses: &[quoted::TokenTree],
+) -> Box<dyn MacResult + 'cx> {
 if cx.trace_macros() {
 trace_macros_note(cx, sp, format!("expanding `{}! {{ {} }}`", name, arg));
 }

 // Which arm's failure should we report? (the one furthest along)
 let mut best_failure: Option<(Token, &str)> = None;

-for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
+for (i, lhs) in lhses.iter().enumerate() {
+// try each arm's matchers
 let lhs_tt = match *lhs {
 quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
-_ => cx.span_bug(sp, "malformed macro lhs")
+_ => cx.span_bug(sp, "malformed macro lhs"),
 };

 match TokenTree::parse(cx, lhs_tt, arg.clone()) {
@@ -173,8 +171,8 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
 ownership: cx.current_expansion.directory_ownership,
 };
 let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false, None);
-p.root_module_name = cx.current_expansion.module.mod_path.last()
-.map(|id| id.as_str().to_string());
+p.root_module_name =
+cx.current_expansion.module.mod_path.last().map(|id| id.as_str().to_string());

 p.process_potential_macro_variable();
 // Let the context choose how to interpret the result.
@@ -188,15 +186,13 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
 site_span: sp,
 macro_ident: name,
 arm_span,
-})
+});
 }
 Failure(token, msg) => match best_failure {
 Some((ref best_token, _)) if best_token.span.lo() >= token.span.lo() => {}
-_ => best_failure = Some((token, msg))
-}
-Error(err_sp, ref msg) => {
-cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..])
-}
+_ => best_failure = Some((token, msg)),
+},
+Error(err_sp, ref msg) => cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..]),
 }
 }

@@ -212,7 +208,8 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,

 // Check whether there's a missing comma in this macro call, like `println!("{}" a);`
 if let Some((arg, comma_span)) = arg.add_comma() {
-for lhs in lhses { // try each arm's matchers
+for lhs in lhses {
+// try each arm's matchers
 let lhs_tt = match *lhs {
 quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
 _ => continue,
@@ -249,7 +246,7 @@ pub fn compile(
 sess: &ParseSess,
 features: &Features,
 def: &ast::Item,
-edition: Edition
+edition: Edition,
 ) -> SyntaxExtension {
 let lhs_nm = ast::Ident::new(sym::lhs, def.span);
 let rhs_nm = ast::Ident::new(sym::rhs, def.span);
@@ -267,25 +264,32 @@ pub fn compile(
 // ...quasiquoting this would be nice.
 // These spans won't matter, anyways
 let argument_gram = vec![
-quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
-tts: vec![
-quoted::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
-quoted::TokenTree::token(token::FatArrow, def.span),
-quoted::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
-],
-separator: Some(Token::new(
-if body.legacy { token::Semi } else { token::Comma }, def.span
-)),
-op: quoted::KleeneOp::OneOrMore,
-num_captures: 2,
-})),
+quoted::TokenTree::Sequence(
+DelimSpan::dummy(),
+Lrc::new(quoted::SequenceRepetition {
+tts: vec![
+quoted::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
+quoted::TokenTree::token(token::FatArrow, def.span),
+quoted::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
+],
+separator: Some(Token::new(
+if body.legacy { token::Semi } else { token::Comma },
+def.span,
+)),
+op: quoted::KleeneOp::OneOrMore,
+num_captures: 2,
+}),
+),
 // to phase into semicolon-termination instead of semicolon-separation
-quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
-tts: vec![quoted::TokenTree::token(token::Semi, def.span)],
-separator: None,
-op: quoted::KleeneOp::ZeroOrMore,
-num_captures: 0
-})),
+quoted::TokenTree::Sequence(
+DelimSpan::dummy(),
+Lrc::new(quoted::SequenceRepetition {
+tts: vec![quoted::TokenTree::token(token::Semi, def.span)],
+separator: None,
+op: quoted::KleeneOp::ZeroOrMore,
+num_captures: 0,
+}),
+),
 ];

 let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) {
@@ -307,8 +311,9 @@ pub fn compile(

 // Extract the arguments:
 let lhses = match *argument_map[&lhs_nm] {
-MatchedSeq(ref s, _) => {
-s.iter().map(|m| {
+MatchedSeq(ref s, _) => s
+.iter()
+.map(|m| {
 if let MatchedNonterminal(ref nt) = *m {
 if let NtTT(ref tt) = **nt {
 let tt = quoted::parse(
@@ -327,14 +332,15 @@ pub fn compile(
 }
 }
 sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
-}).collect::<Vec<quoted::TokenTree>>()
-}
-_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
+})
+.collect::<Vec<quoted::TokenTree>>(),
+_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs"),
 };

 let rhses = match *argument_map[&rhs_nm] {
-MatchedSeq(ref s, _) => {
-s.iter().map(|m| {
+MatchedSeq(ref s, _) => s
+.iter()
+.map(|m| {
 if let MatchedNonterminal(ref nt) = *m {
 if let NtTT(ref tt) = **nt {
 return quoted::parse(
@@ -345,14 +351,15 @@ pub fn compile(
 &def.attrs,
 edition,
 def.id,
-).pop()
-.unwrap();
+)
+.pop()
+.unwrap();
 }
 }
 sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
-}).collect::<Vec<quoted::TokenTree>>()
-}
-_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
+})
+.collect::<Vec<quoted::TokenTree>>(),
+_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs"),
 };

 for rhs in &rhses {
@@ -366,16 +373,12 @@ pub fn compile(
 sess,
 slice::from_ref(lhs),
 &mut FxHashMap::default(),
-def.id
+def.id,
 );
 }

-let expander: Box<_> = Box::new(MacroRulesMacroExpander {
-name: def.ident,
-lhses,
-rhses,
-valid,
-});
+let expander: Box<_> =
+Box::new(MacroRulesMacroExpander { name: def.ident, lhses, rhses, valid });

 let default_transparency = if attr::contains_name(&def.attrs, sym::rustc_transparent_macro) {
 Transparency::Transparent
@@ -385,29 +388,34 @@ pub fn compile(
 Transparency::Opaque
 };

-let allow_internal_unstable = attr::find_by_name(&def.attrs, sym::allow_internal_unstable)
-.map(|attr| attr
-.meta_item_list()
-.map(|list| list.iter()
-.filter_map(|it| {
-let name = it.ident().map(|ident| ident.name);
-if name.is_none() {
-sess.span_diagnostic.span_err(it.span(),
-"allow internal unstable expects feature names")
-}
-name
+let allow_internal_unstable =
+attr::find_by_name(&def.attrs, sym::allow_internal_unstable).map(|attr| {
+attr.meta_item_list()
+.map(|list| {
+list.iter()
+.filter_map(|it| {
+let name = it.ident().map(|ident| ident.name);
+if name.is_none() {
+sess.span_diagnostic.span_err(
+it.span(),
+"allow internal unstable expects feature names",
+)
+}
+name
+})
+.collect::<Vec<Symbol>>()
+.into()
 })
-.collect::<Vec<Symbol>>().into()
-)
-.unwrap_or_else(|| {
-sess.span_diagnostic.span_warn(
-attr.span, "allow_internal_unstable expects list of feature names. In the \
-future this will become a hard error. Please use `allow_internal_unstable(\
-foo, bar)` to only allow the `foo` and `bar` features",
-);
-vec![sym::allow_internal_unstable_backcompat_hack].into()
-})
-);
+.unwrap_or_else(|| {
+sess.span_diagnostic.span_warn(
+attr.span,
+"allow_internal_unstable expects list of feature names. In the \
+future this will become a hard error. Please use `allow_internal_unstable(\
+foo, bar)` to only allow the `foo` and `bar` features",
+);
+vec![sym::allow_internal_unstable_backcompat_hack].into()
+})
+});

 let allow_internal_unsafe = attr::contains_name(&def.attrs, sym::allow_internal_unsafe);

@@ -418,14 +426,14 @@ pub fn compile(
 }
 }

-let unstable_feature = attr::find_stability(&sess,
-&def.attrs, def.span).and_then(|stability| {
-if let attr::StabilityLevel::Unstable { issue, .. } = stability.level {
-Some((stability.feature, issue))
-} else {
-None
-}
-});
+let unstable_feature =
+attr::find_stability(&sess, &def.attrs, def.span).and_then(|stability| {
+if let attr::StabilityLevel::Unstable { issue, .. } = stability.level {
+Some((stability.feature, issue))
+} else {
+None
+}
+});

 SyntaxExtension {
 kind: SyntaxExtensionKind::LegacyBang(expander),
@@ -440,10 +448,12 @@ pub fn compile(
 }
 }

-fn check_lhs_nt_follows(sess: &ParseSess,
-features: &Features,
-attrs: &[ast::Attribute],
-lhs: &quoted::TokenTree) -> bool {
+fn check_lhs_nt_follows(
+sess: &ParseSess,
+features: &Features,
+attrs: &[ast::Attribute],
+lhs: &quoted::TokenTree,
+) -> bool {
 // lhs is going to be like TokenTree::Delimited(...), where the
 // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
 if let quoted::TokenTree::Delimited(_, ref tts) = *lhs {
@@ -464,19 +474,22 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
 for tt in tts {
 match *tt {
 TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => (),
-TokenTree::Delimited(_, ref del) => if !check_lhs_no_empty_seq(sess, &del.tts) {
-return false;
-},
+TokenTree::Delimited(_, ref del) => {
+if !check_lhs_no_empty_seq(sess, &del.tts) {
+return false;
+}
+}
 TokenTree::Sequence(span, ref seq) => {
-if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| {
-match *seq_tt {
+if seq.separator.is_none()
+&& seq.tts.iter().all(|seq_tt| match *seq_tt {
 TokenTree::MetaVarDecl(_, _, id) => id.name == sym::vis,
-TokenTree::Sequence(_, ref sub_seq) =>
+TokenTree::Sequence(_, ref sub_seq) => {
 sub_seq.op == quoted::KleeneOp::ZeroOrMore
-|| sub_seq.op == quoted::KleeneOp::ZeroOrOne,
+|| sub_seq.op == quoted::KleeneOp::ZeroOrOne
+}
 _ => false,
-}
-}) {
+})
+{
 let sp = span.entire();
 sess.span_diagnostic.span_err(sp, "repetition matches empty token tree");
 return false;
@@ -517,7 +530,7 @@ fn check_lhs_duplicate_matcher_bindings(
 if !check_lhs_duplicate_matcher_bindings(sess, &del.tts, metavar_names, node_id) {
 return false;
 }
-},
+}
 TokenTree::Sequence(_, ref seq) => {
 if !check_lhs_duplicate_matcher_bindings(sess, &seq.tts, metavar_names, node_id) {
 return false;
@@ -533,15 +546,17 @@ fn check_lhs_duplicate_matcher_bindings(
 fn check_rhs(sess: &ParseSess, rhs: &quoted::TokenTree) -> bool {
 match *rhs {
 quoted::TokenTree::Delimited(..) => return true,
-_ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited")
+_ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited"),
 }
 false
 }

-fn check_matcher(sess: &ParseSess,
-features: &Features,
-attrs: &[ast::Attribute],
-matcher: &[quoted::TokenTree]) -> bool {
+fn check_matcher(
+sess: &ParseSess,
+features: &Features,
+attrs: &[ast::Attribute],
+matcher: &[quoted::TokenTree],
+) -> bool {
 let first_sets = FirstSets::new(matcher);
 let empty_suffix = TokenSet::empty();
 let err = sess.span_diagnostic.err_count();
@@ -620,8 +635,8 @@ impl FirstSets {

 // Reverse scan: Sequence comes before `first`.
 if subfirst.maybe_empty
-|| seq_rep.op == quoted::KleeneOp::ZeroOrMore
-|| seq_rep.op == quoted::KleeneOp::ZeroOrOne
+|| seq_rep.op == quoted::KleeneOp::ZeroOrMore
+|| seq_rep.op == quoted::KleeneOp::ZeroOrOne
 {
 // If sequence is potentially empty, then
 // union them (preserving first emptiness).
@@ -659,7 +674,6 @@ impl FirstSets {
 TokenTree::Sequence(sp, ref seq_rep) => {
 match self.first.get(&sp.entire()) {
 Some(&Some(ref subfirst)) => {
-
 // If the sequence contents can be empty, then the first
 // token could be the separator token itself.

@@ -670,8 +684,8 @@ impl FirstSets {
 assert!(first.maybe_empty);
 first.add_all(subfirst);
 if subfirst.maybe_empty
-|| seq_rep.op == quoted::KleeneOp::ZeroOrMore
-|| seq_rep.op == quoted::KleeneOp::ZeroOrOne
+|| seq_rep.op == quoted::KleeneOp::ZeroOrMore
+|| seq_rep.op == quoted::KleeneOp::ZeroOrOne
 {
 // continue scanning for more first
 // tokens, but also make sure we
@@ -720,7 +734,9 @@ struct TokenSet {

 impl TokenSet {
 // Returns a set for the empty sequence.
-fn empty() -> Self { TokenSet { tokens: Vec::new(), maybe_empty: true } }
+fn empty() -> Self {
+TokenSet { tokens: Vec::new(), maybe_empty: true }
+}

 // Returns the set `{ tok }` for the single-token (and thus
 // non-empty) sequence [tok].
@@ -789,12 +805,14 @@ impl TokenSet {
 //
 // Requires that `first_sets` is pre-computed for `matcher`;
 // see `FirstSets::new`.
-fn check_matcher_core(sess: &ParseSess,
-features: &Features,
-attrs: &[ast::Attribute],
-first_sets: &FirstSets,
-matcher: &[quoted::TokenTree],
-follow: &TokenSet) -> TokenSet {
+fn check_matcher_core(
+sess: &ParseSess,
+features: &Features,
+attrs: &[ast::Attribute],
+first_sets: &FirstSets,
+matcher: &[quoted::TokenTree],
+follow: &TokenSet,
+) -> TokenSet {
 use quoted::TokenTree;

 let mut last = TokenSet::empty();
@@ -804,11 +822,13 @@ fn check_matcher_core(sess: &ParseSess,
 // then ensure T can also be followed by any element of FOLLOW.
 'each_token: for i in 0..matcher.len() {
 let token = &matcher[i];
-let suffix = &matcher[i+1..];
+let suffix = &matcher[i + 1..];

 let build_suffix_first = || {
 let mut s = first_sets.first(suffix);
-if s.maybe_empty { s.add_all(follow); }
+if s.maybe_empty {
+s.add_all(follow);
+}
 s
 };

@@ -824,7 +844,8 @@ fn check_matcher_core(sess: &ParseSess,
 let can_be_followed_by_any;
 if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, attrs, token) {
 let msg = format!("invalid fragment specifier `{}`", bad_frag);
-sess.span_diagnostic.struct_span_err(token.span(), &msg)
+sess.span_diagnostic
+.struct_span_err(token.span(), &msg)
 .help(VALID_FRAGMENT_NAMES_MSG)
 .emit();
 // (This eliminates false positives and duplicates
@@ -879,12 +900,8 @@ fn check_matcher_core(sess: &ParseSess,
 // At this point, `suffix_first` is built, and
 // `my_suffix` is some TokenSet that we can use
 // for checking the interior of `seq_rep`.
-let next = check_matcher_core(sess,
-features,
-attrs,
-first_sets,
-&seq_rep.tts,
-my_suffix);
+let next =
+check_matcher_core(sess, features, attrs, first_sets, &seq_rep.tts, my_suffix);
 if next.maybe_empty {
 last.add_all(&next);
 } else {
@@ -906,16 +923,17 @@ fn check_matcher_core(sess: &ParseSess,
 for next_token in &suffix_first.tokens {
 match is_in_follow(next_token, &frag_spec.as_str()) {
 IsInFollow::Invalid(msg, help) => {
-sess.span_diagnostic.struct_span_err(next_token.span(), &msg)
-.help(help).emit();
+sess.span_diagnostic
+.struct_span_err(next_token.span(), &msg)
+.help(help)
+.emit();
 // don't bother reporting every source of
 // conflict for a particular element of `last`.
 continue 'each_last;
 }
 IsInFollow::Yes => {}
 IsInFollow::No(possible) => {
-let may_be = if last.tokens.len() == 1 &&
-suffix_first.tokens.len() == 1
+let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1
 {
 "is"
 } else {
@@ -925,12 +943,14 @@ fn check_matcher_core(sess: &ParseSess,
 let sp = next_token.span();
 let mut err = sess.span_diagnostic.struct_span_err(
 sp,
-&format!("`${name}:{frag}` {may_be} followed by `{next}`, which \
-is not allowed for `{frag}` fragments",
-name=name,
-frag=frag_spec,
-next=quoted_tt_to_string(next_token),
-may_be=may_be),
+&format!(
+"`${name}:{frag}` {may_be} followed by `{next}`, which \
+is not allowed for `{frag}` fragments",
+name = name,
+frag = frag_spec,
+next = quoted_tt_to_string(next_token),
+may_be = may_be
+),
 );
 err.span_label(
 sp,
@@ -942,16 +962,18 @@ fn check_matcher_core(sess: &ParseSess,
 &[t] => {
 err.note(&format!(
 "only {} is allowed after `{}` fragments",
-t,
-frag_spec,
+t, frag_spec,
 ));
 }
 ts => {
 err.note(&format!(
 "{}{} or {}",
 msg,
-ts[..ts.len() - 1].iter().map(|s| *s)
-.collect::<Vec<_>>().join(", "),
+ts[..ts.len() - 1]
+.iter()
+.map(|s| *s)
+.collect::<Vec<_>>()
+.join(", "),
 ts[ts.len() - 1],
 ));
 }
@@ -1026,13 +1048,13 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
 // since items *must* be followed by either a `;` or a `}`, we can
 // accept anything after them
 IsInFollow::Yes
-},
+}
 "block" => {
 // anything can follow block, the braces provide an easy boundary to
 // maintain
 IsInFollow::Yes
-},
-"stmt" | "expr" => {
+}
+"stmt" | "expr" => {
 const TOKENS: &[&str] = &["`=>`", "`,`", "`;`"];
 match tok {
 TokenTree::Token(token) => match token.kind {
@@ -1041,7 +1063,7 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
 },
 _ => IsInFollow::No(TOKENS),
 }
-},
+}
 "pat" => {
 const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
 match tok {
@@ -1052,71 +1074,88 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
 },
 _ => IsInFollow::No(TOKENS),
 }
-},
+}
 "path" | "ty" => {
 const TOKENS: &[&str] = &[
-"`{`", "`[`", "`=>`", "`,`", "`>`","`=`", "`:`", "`;`", "`|`", "`as`",
+"`{`", "`[`", "`=>`", "`,`", "`>`", "`=`", "`:`", "`;`", "`|`", "`as`",
 "`where`",
 ];
 match tok {
 TokenTree::Token(token) => match token.kind {
-OpenDelim(token::DelimToken::Brace) |
-OpenDelim(token::DelimToken::Bracket) |
-Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi |
-BinOp(token::Or) => IsInFollow::Yes,
-Ident(name, false) if name == kw::As ||
-name == kw::Where => IsInFollow::Yes,
+OpenDelim(token::DelimToken::Brace)
+| OpenDelim(token::DelimToken::Bracket)
+| Comma
+| FatArrow
+| Colon
+| Eq
+| Gt
+| BinOp(token::Shr)
+| Semi
+| BinOp(token::Or) => IsInFollow::Yes,
+Ident(name, false) if name == kw::As || name == kw::Where => {
+IsInFollow::Yes
+}
 _ => IsInFollow::No(TOKENS),
 },
-TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::block =>
-IsInFollow::Yes,
+TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::block => {
+IsInFollow::Yes
+}
 _ => IsInFollow::No(TOKENS),
 }
-},
+}
 "ident" | "lifetime" => {
 // being a single token, idents and lifetimes are harmless
 IsInFollow::Yes
-},
+}
 "literal" => {
 // literals may be of a single token, or two tokens (negative numbers)
 IsInFollow::Yes
-},
+}
 "meta" | "tt" => {
 // being either a single token or a delimited sequence, tt is
 // harmless
 IsInFollow::Yes
-},
+}
 "vis" => {
 // Explicitly disallow `priv`, on the off chance it comes back.
 const TOKENS: &[&str] = &["`,`", "an ident", "a type"];
 match tok {
 TokenTree::Token(token) => match token.kind {
 Comma => IsInFollow::Yes,
 Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes,
-_ => if token.can_begin_type() {
-IsInFollow::Yes
-} else {
-IsInFollow::No(TOKENS)
+_ => {
+if token.can_begin_type() {
+IsInFollow::Yes
+} else {
+IsInFollow::No(TOKENS)
+}
 }
 },
-TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::ident
-|| frag.name == sym::ty
-|| frag.name == sym::path =>
-IsInFollow::Yes,
+TokenTree::MetaVarDecl(_, _, frag)
+if frag.name == sym::ident
+|| frag.name == sym::ty
+|| frag.name == sym::path =>
+{
+IsInFollow::Yes
+}
 _ => IsInFollow::No(TOKENS),
 }
-},
+}
 "" => IsInFollow::Yes, // kw::Invalid
-_ => IsInFollow::Invalid(format!("invalid fragment specifier `{}`", frag),
-VALID_FRAGMENT_NAMES_MSG),
+_ => IsInFollow::Invalid(
+format!("invalid fragment specifier `{}`", frag),
+VALID_FRAGMENT_NAMES_MSG,
+),
 }
 }
 }

-fn has_legal_fragment_specifier(sess: &ParseSess,
-features: &Features,
-attrs: &[ast::Attribute],
-tok: &quoted::TokenTree) -> Result<(), String> {
+fn has_legal_fragment_specifier(
+sess: &ParseSess,
+features: &Features,
+attrs: &[ast::Attribute],
+tok: &quoted::TokenTree,
+) -> Result<(), String> {
 debug!("has_legal_fragment_specifier({:?})", tok);
 if let quoted::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok {
 let frag_span = tok.span();
@@ -1127,21 +1166,34 @@ fn has_legal_fragment_specifier(sess: &ParseSess,
 Ok(())
 }

-fn is_legal_fragment_specifier(_sess: &ParseSess,
-_features: &Features,
-_attrs: &[ast::Attribute],
-frag_name: Symbol,
-_frag_span: Span) -> bool {
+fn is_legal_fragment_specifier(
+_sess: &ParseSess,
+_features: &Features,
+_attrs: &[ast::Attribute],
+frag_name: Symbol,
+_frag_span: Span,
+) -> bool {
 /*
 * If new fragment specifiers are invented in nightly, `_sess`,
 * `_features`, `_attrs`, and `_frag_span` will be useful here
 * for checking against feature gates. See past versions of
 * this function.
 */
 match frag_name {
-sym::item | sym::block | sym::stmt | sym::expr | sym::pat |
-sym::lifetime | sym::path | sym::ty | sym::ident | sym::meta | sym::tt |
-sym::vis | sym::literal | kw::Invalid => true,
+sym::item
+| sym::block
+| sym::stmt
+| sym::expr
+| sym::pat
+| sym::lifetime
+| sym::path
+| sym::ty
+| sym::ident
+| sym::meta
+| sym::tt
+| sym::vis
+| sym::literal
+| kw::Invalid => true,
 _ => false,
 }
 }
@@ -1151,7 +1203,9 @@ fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
 quoted::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(&token),
 quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
 quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
-_ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
-in follow set checker"),
+_ => panic!(
+"unexpected quoted::TokenTree::{{Sequence or Delimited}} \
+in follow set checker"
+),
 }
 }

src/libsyntax/ext/tt/quoted.rs

Lines changed: 13 additions & 29 deletions
@@ -1,12 +1,12 @@
+use crate::ast;
 use crate::ast::NodeId;
 use crate::ext::tt::macro_parser;
 use crate::feature_gate::Features;
 use crate::parse::token::{self, Token, TokenKind};
 use crate::parse::ParseSess;
 use crate::print::pprust;
-use crate::tokenstream::{self, DelimSpan};
-use crate::ast;
 use crate::symbol::kw;
+use crate::tokenstream::{self, DelimSpan};

 use syntax_pos::{edition::Edition, BytePos, Span};

@@ -137,8 +137,7 @@ impl TokenTree {
 TokenTree::Token(Token { span, .. })
 | TokenTree::MetaVar(span, _)
 | TokenTree::MetaVarDecl(span, _, _) => span,
-TokenTree::Delimited(span, _)
-| TokenTree::Sequence(span, _) => span.entire(),
+TokenTree::Delimited(span, _) | TokenTree::Sequence(span, _) => span.entire(),
 }
 }

@@ -199,7 +198,7 @@ pub fn parse(
 match tree {
 TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
 let span = match trees.next() {
-Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) =>
+Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => {
 match trees.next() {
 Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
 Some((kind, _)) => {
@@ -209,22 +208,13 @@ pub fn parse(
 }
 _ => token.span,
 },
-tree => tree
-.as_ref()
-.map(tokenstream::TokenTree::span)
-.unwrap_or(span),
-},
-tree => tree
-.as_ref()
-.map(tokenstream::TokenTree::span)
-.unwrap_or(start_sp),
+tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+}
+}
+tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
 };
 sess.missing_fragment_specifiers.borrow_mut().insert(span);
-result.push(TokenTree::MetaVarDecl(
-span,
-ident,
-ast::Ident::invalid(),
-));
+result.push(TokenTree::MetaVarDecl(span, ident, ast::Ident::invalid()));
 }

 // Not a metavar or no matchers allowed, so just return the tree
@@ -311,10 +301,8 @@ fn parse_tree(

 // `tree` is followed by a random token. This is an error.
 Some(tokenstream::TokenTree::Token(token)) => {
-let msg = format!(
-"expected identifier, found `{}`",
-pprust::token_to_string(&token),
-);
+let msg =
+format!("expected identifier, found `{}`", pprust::token_to_string(&token),);
 sess.span_diagnostic.span_err(token.span, &msg);
 TokenTree::MetaVar(token.span, ast::Ident::invalid())
 }
@@ -371,10 +359,7 @@ fn parse_kleene_op(
 Some(op) => Ok(Ok((op, token.span))),
 None => Ok(Err(token)),
 },
-tree => Err(tree
-.as_ref()
-.map(tokenstream::TokenTree::span)
-.unwrap_or(span)),
+tree => Err(tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span)),
 }
 }

@@ -426,8 +411,7 @@ fn parse_sep_and_kleene_op(
 };

 // If we ever get to this point, we have experienced an "unexpected token" error
-sess.span_diagnostic
-.span_err(span, "expected one of: `*`, `+`, or `?`");
+sess.span_diagnostic.span_err(span, "expected one of: `*`, `+`, or `?`");

 // Return a dummy
 (None, KleeneOp::ZeroOrMore)
