diff --git a/src/doc/trpl/compiler-plugins.md b/src/doc/trpl/compiler-plugins.md index e1c9825111709..42dfaa1a8090a 100644 --- a/src/doc/trpl/compiler-plugins.md +++ b/src/doc/trpl/compiler-plugins.md @@ -46,7 +46,7 @@ extern crate rustc; use syntax::codemap::Span; use syntax::parse::token; -use syntax::ast::{TokenTree, TtToken}; +use syntax::ast::TokenTree; use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager}; use syntax::ext::build::AstBuilder; // trait for expr_usize use rustc::plugin::Registry; @@ -61,7 +61,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree]) ("I", 1)]; let text = match args { - [TtToken(_, token::Ident(s, _))] => s.to_string(), + [TokenTree::Token(_, token::Ident(s, _))] => s.to_string(), _ => { cx.span_err(sp, "argument should be a single identifier"); return DummyResult::any(sp); diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index e56e49c4f4902..8c9c883508703 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -36,7 +36,6 @@ pub use self::Sign::*; pub use self::Stmt_::*; pub use self::StrStyle::*; pub use self::StructFieldKind::*; -pub use self::TokenTree::*; pub use self::TraitItem_::*; pub use self::Ty_::*; pub use self::TyParamBound::*; @@ -954,12 +953,12 @@ impl Delimited { /// Returns the opening delimiter as a token tree. pub fn open_tt(&self) -> TokenTree { - TtToken(self.open_span, self.open_token()) + TokenTree::Token(self.open_span, self.open_token()) } /// Returns the closing delimiter as a token tree. 
pub fn close_tt(&self) -> TokenTree { - TtToken(self.close_span, self.close_token()) + TokenTree::Token(self.close_span, self.close_token()) } } @@ -999,61 +998,61 @@ pub enum KleeneOp { #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum TokenTree { /// A single token - TtToken(Span, token::Token), + Token(Span, token::Token), /// A delimited sequence of token trees - TtDelimited(Span, Rc<Delimited>), + Delimited(Span, Rc<Delimited>), // This only makes sense in MBE macros. /// A kleene-style repetition sequence with a span // FIXME(eddyb) #12938 Use DST. - TtSequence(Span, Rc<SequenceRepetition>), + Sequence(Span, Rc<SequenceRepetition>), } impl TokenTree { pub fn len(&self) -> usize { match *self { - TtToken(_, token::DocComment(name)) => { + TokenTree::Token(_, token::DocComment(name)) => { match doc_comment_style(&name.as_str()) { AttrStyle::Outer => 2, AttrStyle::Inner => 3 } } - TtToken(_, token::SpecialVarNt(..)) => 2, - TtToken(_, token::MatchNt(..)) => 3, - TtDelimited(_, ref delimed) => { + TokenTree::Token(_, token::SpecialVarNt(..)) => 2, + TokenTree::Token(_, token::MatchNt(..)) => 3, + TokenTree::Delimited(_, ref delimed) => { delimed.tts.len() + 2 } - TtSequence(_, ref seq) => { + TokenTree::Sequence(_, ref seq) => { seq.tts.len() } - TtToken(..) => 0 + TokenTree::Token(..) 
=> 0 } } pub fn get_tt(&self, index: usize) -> TokenTree { match (self, index) { - (&TtToken(sp, token::DocComment(_)), 0) => { - TtToken(sp, token::Pound) + (&TokenTree::Token(sp, token::DocComment(_)), 0) => { + TokenTree::Token(sp, token::Pound) } - (&TtToken(sp, token::DocComment(name)), 1) + (&TokenTree::Token(sp, token::DocComment(name)), 1) if doc_comment_style(&name.as_str()) == AttrStyle::Inner => { - TtToken(sp, token::Not) + TokenTree::Token(sp, token::Not) } - (&TtToken(sp, token::DocComment(name)), _) => { + (&TokenTree::Token(sp, token::DocComment(name)), _) => { let stripped = strip_doc_comment_decoration(&name.as_str()); - TtDelimited(sp, Rc::new(Delimited { + TokenTree::Delimited(sp, Rc::new(Delimited { delim: token::Bracket, open_span: sp, - tts: vec![TtToken(sp, token::Ident(token::str_to_ident("doc"), - token::Plain)), - TtToken(sp, token::Eq), - TtToken(sp, token::Literal( + tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"), + token::Plain)), + TokenTree::Token(sp, token::Eq), + TokenTree::Token(sp, token::Literal( token::StrRaw(token::intern(&stripped), 0), None))], close_span: sp, })) } - (&TtDelimited(_, ref delimed), _) => { + (&TokenTree::Delimited(_, ref delimed), _) => { if index == 0 { return delimed.open_tt(); } @@ -1062,19 +1061,19 @@ impl TokenTree { } delimed.tts[index - 1].clone() } - (&TtToken(sp, token::SpecialVarNt(var)), _) => { - let v = [TtToken(sp, token::Dollar), - TtToken(sp, token::Ident(token::str_to_ident(var.as_str()), + (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => { + let v = [TokenTree::Token(sp, token::Dollar), + TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str()), token::Plain))]; v[index].clone() } - (&TtToken(sp, token::MatchNt(name, kind, name_st, kind_st)), _) => { - let v = [TtToken(sp, token::SubstNt(name, name_st)), - TtToken(sp, token::Colon), - TtToken(sp, token::Ident(kind, kind_st))]; + (&TokenTree::Token(sp, token::MatchNt(name, kind, name_st, kind_st)), 
_) => { + let v = [TokenTree::Token(sp, token::SubstNt(name, name_st)), + TokenTree::Token(sp, token::Colon), + TokenTree::Token(sp, token::Ident(kind, kind_st))]; v[index].clone() } - (&TtSequence(_, ref seq), _) => { + (&TokenTree::Sequence(_, ref seq), _) => { seq.tts[index].clone() } _ => panic!("Cannot expand a token tree") @@ -1084,9 +1083,9 @@ impl TokenTree { /// Returns the `Span` corresponding to this token tree. pub fn get_span(&self) -> Span { match *self { - TtToken(span, _) => span, - TtDelimited(span, _) => span, - TtSequence(span, _) => span, + TokenTree::Token(span, _) => span, + TokenTree::Delimited(span, _) => span, + TokenTree::Sequence(span, _) => span, } } diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index a276765e21616..be0d5729c7009 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -54,7 +54,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, token_tree: &[TokenTree]) -> Box<MacResult+'cx> { let code = match (token_tree.len(), token_tree.get(0)) { - (1, Some(&ast::TtToken(_, token::Ident(code, _)))) => code, + (1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code, _ => unreachable!() }; @@ -92,12 +92,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, token_tree.get(1), token_tree.get(2) ) { - (1, Some(&ast::TtToken(_, token::Ident(ref code, _))), None, None) => { + (1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => { (code, None) }, - (3, Some(&ast::TtToken(_, token::Ident(ref code, _))), - Some(&ast::TtToken(_, token::Comma)), - Some(&ast::TtToken(_, token::Literal(token::StrRaw(description, _), None)))) => { + (3, Some(&TokenTree::Token(_, token::Ident(ref code, _))), + Some(&TokenTree::Token(_, token::Comma)), + Some(&TokenTree::Token(_, token::Literal(token::StrRaw(description, _), None)))) => { (code, Some(description)) } _ => unreachable!() @@ -160,9 +160,9 @@ pub fn 
expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, let (crate_name, name) = match (&token_tree[0], &token_tree[2]) { ( // Crate name. - &ast::TtToken(_, token::Ident(ref crate_name, _)), + &TokenTree::Token(_, token::Ident(ref crate_name, _)), // DIAGNOSTICS ident. - &ast::TtToken(_, token::Ident(ref name, _)) + &TokenTree::Token(_, token::Ident(ref name, _)) ) => (*&crate_name, name), _ => unreachable!() }; diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index c31a767300cf4..e9e36546ad6db 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast; +use ast::{self, TokenTree}; use codemap::Span; use ext::base::*; use ext::base; @@ -17,7 +17,7 @@ use parse::token; use parse::token::str_to_ident; use ptr::P; -pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<base::MacResult+'cx> { if !cx.ecfg.enable_concat_idents() { feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic, @@ -32,7 +32,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] for (i, e) in tts.iter().enumerate() { if i & 1 == 1 { match *e { - ast::TtToken(_, token::Comma) => {}, + TokenTree::Token(_, token::Comma) => {}, _ => { cx.span_err(sp, "concat_idents! 
expecting comma."); return DummyResult::expr(sp); @@ -40,7 +40,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] } } else { match *e { - ast::TtToken(_, token::Ident(ident, _)) => { + TokenTree::Token(_, token::Ident(ident, _)) => { res_str.push_str(&ident.name.as_str()) }, _ => { diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 59e8533a83d31..5e1d233916419 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast; +use ast::{self, TokenTree}; use codemap::Span; use ext::base::ExtCtxt; use ext::base; @@ -71,67 +71,69 @@ pub mod rt { impl ToTokens for ast::Ident { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { - vec![ast::TtToken(DUMMY_SP, token::Ident(*self, token::Plain))] + vec![TokenTree::Token(DUMMY_SP, token::Ident(*self, token::Plain))] } } impl ToTokens for ast::Path { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { - vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtPath(Box::new(self.clone()))))] + vec![TokenTree::Token(DUMMY_SP, + token::Interpolated(token::NtPath(Box::new(self.clone()))))] } } impl ToTokens for ast::Ty { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { - vec![ast::TtToken(self.span, token::Interpolated(token::NtTy(P(self.clone()))))] + vec![TokenTree::Token(self.span, token::Interpolated(token::NtTy(P(self.clone()))))] } } impl ToTokens for ast::Block { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { - vec![ast::TtToken(self.span, token::Interpolated(token::NtBlock(P(self.clone()))))] + vec![TokenTree::Token(self.span, token::Interpolated(token::NtBlock(P(self.clone()))))] } } impl ToTokens for ast::Generics { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { - vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtGenerics(self.clone())))] + vec![TokenTree::Token(DUMMY_SP, 
token::Interpolated(token::NtGenerics(self.clone())))] } } impl ToTokens for ast::WhereClause { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { - vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtWhereClause(self.clone())))] + vec![TokenTree::Token(DUMMY_SP, + token::Interpolated(token::NtWhereClause(self.clone())))] } } impl ToTokens for P<ast::Item> { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { - vec![ast::TtToken(self.span, token::Interpolated(token::NtItem(self.clone())))] + vec![TokenTree::Token(self.span, token::Interpolated(token::NtItem(self.clone())))] } } impl ToTokens for P<ast::ImplItem> { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { - vec![ast::TtToken(self.span, token::Interpolated(token::NtImplItem(self.clone())))] + vec![TokenTree::Token(self.span, token::Interpolated(token::NtImplItem(self.clone())))] } } impl ToTokens for P<ast::TraitItem> { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { - vec![ast::TtToken(self.span, token::Interpolated(token::NtTraitItem(self.clone())))] + vec![TokenTree::Token(self.span, token::Interpolated(token::NtTraitItem(self.clone())))] } } impl ToTokens for P<ast::Stmt> { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let mut tts = vec![ - ast::TtToken(self.span, token::Interpolated(token::NtStmt(self.clone()))) + TokenTree::Token(self.span, token::Interpolated(token::NtStmt(self.clone()))) ]; // Some statements require a trailing semicolon. 
if classify::stmt_ends_with_semi(&self.node) { - tts.push(ast::TtToken(self.span, token::Semi)); + tts.push(TokenTree::Token(self.span, token::Semi)); } tts @@ -140,19 +142,19 @@ pub mod rt { impl ToTokens for P<ast::Expr> { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { - vec![ast::TtToken(self.span, token::Interpolated(token::NtExpr(self.clone())))] + vec![TokenTree::Token(self.span, token::Interpolated(token::NtExpr(self.clone())))] } } impl ToTokens for P<ast::Pat> { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { - vec![ast::TtToken(self.span, token::Interpolated(token::NtPat(self.clone())))] + vec![TokenTree::Token(self.span, token::Interpolated(token::NtPat(self.clone())))] } } impl ToTokens for ast::Arm { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { - vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtArm(self.clone())))] + vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtArm(self.clone())))] } } @@ -173,12 +175,12 @@ pub mod rt { }; } - impl_to_tokens_slice! { ast::Ty, [ast::TtToken(DUMMY_SP, token::Comma)] } + impl_to_tokens_slice! { ast::Ty, [TokenTree::Token(DUMMY_SP, token::Comma)] } impl_to_tokens_slice! 
{ P<ast::Item>, [] } impl ToTokens for P<ast::MetaItem> { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { - vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtMeta(self.clone())))] + vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtMeta(self.clone())))] } } @@ -186,11 +188,11 @@ pub mod rt { fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { let mut r = vec![]; // FIXME: The spans could be better - r.push(ast::TtToken(self.span, token::Pound)); + r.push(TokenTree::Token(self.span, token::Pound)); if self.node.style == ast::AttrStyle::Inner { - r.push(ast::TtToken(self.span, token::Not)); + r.push(TokenTree::Token(self.span, token::Not)); } - r.push(ast::TtDelimited(self.span, Rc::new(ast::Delimited { + r.push(TokenTree::Delimited(self.span, Rc::new(ast::Delimited { delim: token::Bracket, open_span: self.span, tts: self.node.value.to_tokens(cx), @@ -210,7 +212,7 @@ pub mod rt { impl ToTokens for () { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { - vec![ast::TtDelimited(DUMMY_SP, Rc::new(ast::Delimited { + vec![TokenTree::Delimited(DUMMY_SP, Rc::new(ast::Delimited { delim: token::Paren, open_span: DUMMY_SP, tts: vec![], @@ -278,7 +280,7 @@ pub mod rt { fn parse_item(&self, s: String) -> P<ast::Item>; fn parse_expr(&self, s: String) -> P<ast::Expr>; fn parse_stmt(&self, s: String) -> P<ast::Stmt>; - fn parse_tts(&self, s: String) -> Vec<ast::TokenTree>; + fn parse_tts(&self, s: String) -> Vec<TokenTree>; } impl<'a> ExtParseUtils for ExtCtxt<'a> { @@ -305,7 +307,7 @@ pub mod rt { self.parse_sess()) } - fn parse_tts(&self, s: String) -> Vec<ast::TokenTree> { + fn parse_tts(&self, s: String) -> Vec<TokenTree> { parse::parse_tts_from_source_str("<quote expansion>".to_string(), s, self.cfg(), @@ -316,7 +318,7 @@ pub mod rt { pub fn expand_quote_tokens<'cx>(cx: &'cx mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box<base::MacResult+'cx> { let (cx_expr, expr) = expand_tts(cx, sp, tts); let expanded = 
expand_wrapper(cx, sp, cx_expr, expr, &[&["syntax", "ext", "quote", "rt"]]); @@ -325,7 +327,7 @@ pub fn expand_quote_tokens<'cx>(cx: &'cx mut ExtCtxt, pub fn expand_quote_expr<'cx>(cx: &'cx mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box<base::MacResult+'cx> { let expanded = expand_parse_call(cx, sp, "parse_expr_panic", vec!(), tts); base::MacEager::expr(expanded) @@ -333,7 +335,7 @@ pub fn expand_quote_expr<'cx>(cx: &'cx mut ExtCtxt, pub fn expand_quote_item<'cx>(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box<base::MacResult+'cx> { let expanded = expand_parse_call(cx, sp, "parse_item_panic", vec!(), tts); base::MacEager::expr(expanded) @@ -341,7 +343,7 @@ pub fn expand_quote_item<'cx>(cx: &mut ExtCtxt, pub fn expand_quote_pat<'cx>(cx: &'cx mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box<base::MacResult+'cx> { let expanded = expand_parse_call(cx, sp, "parse_pat_panic", vec!(), tts); base::MacEager::expr(expanded) @@ -349,7 +351,7 @@ pub fn expand_quote_pat<'cx>(cx: &'cx mut ExtCtxt, pub fn expand_quote_arm(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box<base::MacResult+'static> { let expanded = expand_parse_call(cx, sp, "parse_arm_panic", vec!(), tts); base::MacEager::expr(expanded) @@ -357,7 +359,7 @@ pub fn expand_quote_arm(cx: &mut ExtCtxt, pub fn expand_quote_ty(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box<base::MacResult+'static> { let expanded = expand_parse_call(cx, sp, "parse_ty_panic", vec!(), tts); base::MacEager::expr(expanded) @@ -365,7 +367,7 @@ pub fn expand_quote_ty(cx: &mut ExtCtxt, pub fn expand_quote_stmt(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box<base::MacResult+'static> { let expanded = expand_parse_call(cx, sp, "parse_stmt_panic", vec!(), tts); base::MacEager::expr(expanded) @@ -373,7 +375,7 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt, pub fn 
expand_quote_attr(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box<base::MacResult+'static> { let expanded = expand_parse_call(cx, sp, "parse_attribute_panic", vec!(cx.expr_bool(sp, true)), tts); @@ -383,7 +385,7 @@ pub fn expand_quote_attr(cx: &mut ExtCtxt, pub fn expand_quote_matcher(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box<base::MacResult+'static> { let (cx_expr, tts) = parse_arguments_to_quote(cx, tts); let mut vector = mk_stmts_let(cx, sp); @@ -423,6 +425,11 @@ fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> { vec!(e_str)) } +fn mk_tt_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> { + let idents = vec!(id_ext("syntax"), id_ext("ast"), id_ext("TokenTree"), id_ext(name)); + cx.expr_path(cx.path_global(sp, idents)) +} + fn mk_ast_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> { let idents = vec!(id_ext("syntax"), id_ext("ast"), id_ext(name)); cx.expr_path(cx.path_global(sp, idents)) @@ -591,9 +598,9 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> { mk_token_path(cx, sp, name) } -fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec<P<ast::Stmt>> { +fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<P<ast::Stmt>> { match *tt { - ast::TtToken(sp, SubstNt(ident, _)) => { + TokenTree::Token(sp, SubstNt(ident, _)) => { // tt.extend($ident.to_tokens(ext_cx)) let e_to_toks = @@ -612,17 +619,17 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec<P<a vec!(cx.stmt_expr(e_push)) } - ref tt @ ast::TtToken(_, MatchNt(..)) if !matcher => { + ref tt @ TokenTree::Token(_, MatchNt(..)) if !matcher => { let mut seq = vec![]; for i in 0..tt.len() { seq.push(tt.get_tt(i)); } statements_mk_tts(cx, &seq[..], matcher) } - ast::TtToken(sp, ref tok) => { + TokenTree::Token(sp, ref tok) => { let e_sp = cx.expr_ident(sp, id_ext("_sp")); let e_tok = cx.expr_call(sp, - 
mk_ast_path(cx, sp, "TtToken"), + mk_tt_path(cx, sp, "Token"), vec!(e_sp, expr_mk_token(cx, sp, tok))); let e_push = cx.expr_method_call(sp, @@ -631,16 +638,16 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec<P<a vec!(e_tok)); vec!(cx.stmt_expr(e_push)) }, - ast::TtDelimited(_, ref delimed) => { + TokenTree::Delimited(_, ref delimed) => { statements_mk_tt(cx, &delimed.open_tt(), matcher).into_iter() .chain(delimed.tts.iter() .flat_map(|tt| statements_mk_tt(cx, tt, matcher))) .chain(statements_mk_tt(cx, &delimed.close_tt(), matcher)) .collect() }, - ast::TtSequence(sp, ref seq) => { + TokenTree::Sequence(sp, ref seq) => { if !matcher { - panic!("TtSequence in quote!"); + panic!("TokenTree::Sequence in quote!"); } let e_sp = cx.expr_ident(sp, id_ext("_sp")); @@ -671,7 +678,7 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec<P<a id_ext("new")], vec![e_seq_struct]); let e_tok = cx.expr_call(sp, - mk_ast_path(cx, sp, "TtSequence"), + mk_tt_path(cx, sp, "Sequence"), vec!(e_sp, e_rc_new)); let e_push = cx.expr_method_call(sp, @@ -683,8 +690,8 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec<P<a } } -fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[ast::TokenTree]) - -> (P<ast::Expr>, Vec<ast::TokenTree>) { +fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[TokenTree]) + -> (P<ast::Expr>, Vec<TokenTree>) { // NB: It appears that the main parser loses its mind if we consider // $foo as a SubstNt during the main parse, so we have to re-parse // under quote_depth > 0. 
This is silly and should go away; the _guess_ is @@ -746,7 +753,7 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<P<ast::Stmt>> { vec!(stmt_let_sp, stmt_let_tt) } -fn statements_mk_tts(cx: &ExtCtxt, tts: &[ast::TokenTree], matcher: bool) -> Vec<P<ast::Stmt>> { +fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree], matcher: bool) -> Vec<P<ast::Stmt>> { let mut ss = Vec::new(); for tt in tts { ss.extend(statements_mk_tt(cx, tt, matcher)); @@ -754,7 +761,7 @@ fn statements_mk_tts(cx: &ExtCtxt, tts: &[ast::TokenTree], matcher: bool) -> Vec ss } -fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) -> (P<ast::Expr>, P<ast::Expr>) { let (cx_expr, tts) = parse_arguments_to_quote(cx, tts); @@ -790,7 +797,7 @@ fn expand_parse_call(cx: &ExtCtxt, sp: Span, parse_method: &str, arg_exprs: Vec<P<ast::Expr>> , - tts: &[ast::TokenTree]) -> P<ast::Expr> { + tts: &[TokenTree]) -> P<ast::Expr> { let (cx_expr, tts_expr) = expand_tts(cx, sp, tts); let cfg_call = || cx.expr_method_call( diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs index ab34f41d932d5..628b88d13537a 100644 --- a/src/libsyntax/ext/trace_macros.rs +++ b/src/libsyntax/ext/trace_macros.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use ast; +use ast::TokenTree; use codemap::Span; use ext::base::ExtCtxt; use ext::base; @@ -18,7 +18,7 @@ use parse::token::keywords; pub fn expand_trace_macros(cx: &mut ExtCtxt, sp: Span, - tt: &[ast::TokenTree]) + tt: &[TokenTree]) -> Box<base::MacResult+'static> { if !cx.ecfg.enable_trace_macros() { feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic, @@ -30,10 +30,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt, } match (tt.len(), tt.first()) { - (1, Some(&ast::TtToken(_, ref tok))) if tok.is_keyword(keywords::True) => { + (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(keywords::True) => { cx.set_trace_macros(true); } - (1, Some(&ast::TtToken(_, ref tok))) if tok.is_keyword(keywords::False) => { + (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(keywords::False) => { cx.set_trace_macros(false); } _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"), diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index a4c99018bb9cc..0e69edd7ad14e 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -80,7 +80,6 @@ use self::TokenTreeOrTokenTreeVec::*; use ast; use ast::{TokenTree, Name}; -use ast::{TtDelimited, TtSequence, TtToken}; use codemap::{BytePos, mk_sp, Span}; use codemap; use parse::lexer::*; //resolve bug? 
@@ -146,16 +145,16 @@ pub struct MatcherPos { pub fn count_names(ms: &[TokenTree]) -> usize { ms.iter().fold(0, |count, elt| { count + match elt { - &TtSequence(_, ref seq) => { + &TokenTree::Sequence(_, ref seq) => { seq.num_captures } - &TtDelimited(_, ref delim) => { + &TokenTree::Delimited(_, ref delim) => { count_names(&delim.tts) } - &TtToken(_, MatchNt(..)) => { + &TokenTree::Token(_, MatchNt(..)) => { 1 } - &TtToken(_, _) => 0, + &TokenTree::Token(_, _) => 0, } }) } @@ -205,17 +204,17 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>]) fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>], ret_val: &mut HashMap<Name, Rc<NamedMatch>>, idx: &mut usize) { match m { - &TtSequence(_, ref seq) => { + &TokenTree::Sequence(_, ref seq) => { for next_m in &seq.tts { n_rec(p_s, next_m, res, ret_val, idx) } } - &TtDelimited(_, ref delim) => { + &TokenTree::Delimited(_, ref delim) => { for next_m in &delim.tts { n_rec(p_s, next_m, res, ret_val, idx) } } - &TtToken(sp, MatchNt(bind_name, _, _, _)) => { + &TokenTree::Token(sp, MatchNt(bind_name, _, _, _)) => { match ret_val.entry(bind_name.name) { Vacant(spot) => { spot.insert(res[*idx].clone()); @@ -229,8 +228,8 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>]) } } } - &TtToken(_, SubstNt(..)) => panic!("Cannot fill in a NT"), - &TtToken(_, _) => (), + &TokenTree::Token(_, SubstNt(..)) => panic!("Cannot fill in a NT"), + &TokenTree::Token(_, _) => (), } } let mut ret_val = HashMap::new(); @@ -362,7 +361,7 @@ pub fn parse(sess: &ParseSess, } else { match ei.top_elts.get_tt(idx) { /* need to descend into sequence */ - TtSequence(sp, seq) => { + TokenTree::Sequence(sp, seq) => { if seq.op == ast::ZeroOrMore { let mut new_ei = ei.clone(); new_ei.match_cur += seq.num_captures; @@ -388,10 +387,10 @@ pub fn parse(sess: &ParseSess, match_hi: ei_t.match_cur + seq.num_captures, up: Some(ei_t), sp_lo: sp.lo, - top_elts: Tt(TtSequence(sp, seq)), + top_elts: 
Tt(TokenTree::Sequence(sp, seq)), })); } - TtToken(_, MatchNt(..)) => { + TokenTree::Token(_, MatchNt(..)) => { // Built-in nonterminals never start with these tokens, // so we can eliminate them from consideration. match tok { @@ -399,10 +398,10 @@ pub fn parse(sess: &ParseSess, _ => bb_eis.push(ei), } } - TtToken(sp, SubstNt(..)) => { + TokenTree::Token(sp, SubstNt(..)) => { return Error(sp, "missing fragment specifier".to_string()) } - seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => { + seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => { let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq)); let idx = ei.idx; ei.stack.push(MatcherTtFrame { @@ -412,7 +411,7 @@ pub fn parse(sess: &ParseSess, ei.idx = 0; cur_eis.push(ei); } - TtToken(_, ref t) => { + TokenTree::Token(_, ref t) => { let mut ei_t = ei.clone(); if token_name_eq(t,&tok) { ei_t.idx += 1; @@ -440,7 +439,7 @@ pub fn parse(sess: &ParseSess, if (!bb_eis.is_empty() && !next_eis.is_empty()) || bb_eis.len() > 1 { let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) { - TtToken(_, MatchNt(bind, name, _, _)) => { + TokenTree::Token(_, MatchNt(bind, name, _, _)) => { format!("{} ('{}')", name, bind) } _ => panic!() @@ -468,7 +467,7 @@ pub fn parse(sess: &ParseSess, let mut ei = bb_eis.pop().unwrap(); match ei.top_elts.get_tt(ei.idx) { - TtToken(span, MatchNt(_, ident, _, _)) => { + TokenTree::Token(span, MatchNt(_, ident, _, _)) => { let match_cur = ei.match_cur; (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal( parse_nt(&mut rust_parser, span, &ident.name.as_str())))); diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index a98c001dc0e60..4e5825d182905 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use ast::{self, TokenTree, TtDelimited, TtSequence, TtToken}; +use ast::{self, TokenTree}; use codemap::{Span, DUMMY_SP}; use ext::base::{ExtCtxt, MacResult, SyntaxExtension}; use ext::base::{NormalTT, TTMacroExpander}; @@ -26,6 +26,7 @@ use util::small_vector::SmallVector; use std::cell::RefCell; use std::rc::Rc; +use std::iter::once; struct ParserAnyMacro<'a> { parser: RefCell<Parser<'a>>, @@ -171,7 +172,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, match **lhs { MatchedNonterminal(NtTT(ref lhs_tt)) => { let lhs_tt = match **lhs_tt { - TtDelimited(_, ref delim) => &delim.tts[..], + TokenTree::Delimited(_, ref delim) => &delim.tts[..], _ => panic!(cx.span_fatal(sp, "malformed macro lhs")) }; @@ -182,7 +183,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, MatchedNonterminal(NtTT(ref tt)) => { match **tt { // ignore delimiters - TtDelimited(_, ref delimed) => delimed.tts.clone(), + TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(), _ => panic!(cx.span_fatal(sp, "macro rhs must be delimited")), } }, @@ -243,21 +244,21 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt, let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt, token::Plain, token::Plain); let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain); let argument_gram = vec!( - TtSequence(DUMMY_SP, + TokenTree::Sequence(DUMMY_SP, Rc::new(ast::SequenceRepetition { tts: vec![ - TtToken(DUMMY_SP, match_lhs_tok), - TtToken(DUMMY_SP, token::FatArrow), - TtToken(DUMMY_SP, match_rhs_tok)], + TokenTree::Token(DUMMY_SP, match_lhs_tok), + TokenTree::Token(DUMMY_SP, token::FatArrow), + TokenTree::Token(DUMMY_SP, match_rhs_tok)], separator: Some(token::Semi), op: ast::OneOrMore, num_captures: 2 })), //to phase into semicolon-termination instead of //semicolon-separation - TtSequence(DUMMY_SP, + TokenTree::Sequence(DUMMY_SP, Rc::new(ast::SequenceRepetition { - tts: vec![TtToken(DUMMY_SP, token::Semi)], + tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)], separator: None, op: 
ast::ZeroOrMore, num_captures: 0 @@ -307,14 +308,14 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt, } fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &NamedMatch, sp: Span) { - // lhs is going to be like MatchedNonterminal(NtTT(TtDelimited(...))), where the entire lhs is - // those tts. Or, it can be a "bare sequence", not wrapped in parens. + // lhs is going to be like MatchedNonterminal(NtTT(TokenTree::Delimited(...))), where the + // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens. match lhs { &MatchedNonterminal(NtTT(ref inner)) => match &**inner { - &TtDelimited(_, ref tts) => { + &TokenTree::Delimited(_, ref tts) => { check_matcher(cx, tts.tts.iter(), &Eof); }, - tt @ &TtSequence(..) => { + tt @ &TokenTree::Sequence(..) => { check_matcher(cx, Some(tt).into_iter(), &Eof); }, _ => cx.span_err(sp, "Invalid macro matcher; matchers must be contained \ @@ -327,7 +328,7 @@ fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &NamedMatch, sp: Span) { // after parsing/expansion. we can report every error in every macro this way. } -// returns the last token that was checked, for TtSequence. this gets used later on. +// returns the last token that was checked, for TokenTree::Sequence. this gets used later on. fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) -> Option<(Span, Token)> where I: Iterator<Item=&'a TokenTree> { use print::pprust::token_to_string; @@ -338,7 +339,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) let mut tokens = matcher.peekable(); while let Some(token) = tokens.next() { last = match *token { - TtToken(sp, MatchNt(ref name, ref frag_spec, _, _)) => { + TokenTree::Token(sp, MatchNt(ref name, ref frag_spec, _, _)) => { // ii. If T is a simple NT, look ahead to the next token T' in // M. If T' is in the set FOLLOW(NT), continue. Else; reject. 
if can_be_followed_by_any(&frag_spec.name.as_str()) { @@ -346,9 +347,9 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) } else { let next_token = match tokens.peek() { // If T' closes a complex NT, replace T' with F - Some(&&TtToken(_, CloseDelim(_))) => follow.clone(), - Some(&&TtToken(_, ref tok)) => tok.clone(), - Some(&&TtSequence(sp, _)) => { + Some(&&TokenTree::Token(_, CloseDelim(_))) => follow.clone(), + Some(&&TokenTree::Token(_, ref tok)) => tok.clone(), + Some(&&TokenTree::Sequence(sp, _)) => { // Be conservative around sequences: to be // more specific, we would need to // consider FIRST sets, but also the @@ -366,12 +367,16 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) Eof }, // die next iteration - Some(&&TtDelimited(_, ref delim)) => delim.close_token(), + Some(&&TokenTree::Delimited(_, ref delim)) => delim.close_token(), // else, we're at the end of the macro or sequence None => follow.clone() }; - let tok = if let TtToken(_, ref tok) = *token { tok } else { unreachable!() }; + let tok = if let TokenTree::Token(_, ref tok) = *token { + tok + } else { + unreachable!() + }; // If T' is in the set FOLLOW(NT), continue. Else, reject. match (&next_token, is_in_follow(cx, &next_token, &frag_spec.name.as_str())) { @@ -391,7 +396,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) } } }, - TtSequence(sp, ref seq) => { + TokenTree::Sequence(sp, ref seq) => { // iii. Else, T is a complex NT. match seq.separator { // If T has the form $(...)U+ or $(...)U* for some token U, @@ -408,8 +413,9 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) // but conservatively correct. 
Some((span, tok)) => { let fol = match tokens.peek() { - Some(&&TtToken(_, ref tok)) => tok.clone(), - Some(&&TtDelimited(_, ref delim)) => delim.close_token(), + Some(&&TokenTree::Token(_, ref tok)) => tok.clone(), + Some(&&TokenTree::Delimited(_, ref delim)) => + delim.close_token(), Some(_) => { cx.span_err(sp, "sequence repetition followed by \ another sequence repetition, which is not allowed"); @@ -417,7 +423,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) }, None => Eof }; - check_matcher(cx, Some(&TtToken(span, tok.clone())).into_iter(), + check_matcher(cx, once(&TokenTree::Token(span, tok.clone())), &fol) }, None => last, @@ -428,8 +434,8 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) // sequence. If it accepts, continue, else, reject. None => { let fol = match tokens.peek() { - Some(&&TtToken(_, ref tok)) => tok.clone(), - Some(&&TtDelimited(_, ref delim)) => delim.close_token(), + Some(&&TokenTree::Token(_, ref tok)) => tok.clone(), + Some(&&TokenTree::Delimited(_, ref delim)) => delim.close_token(), Some(_) => { cx.span_err(sp, "sequence repetition followed by another \ sequence repetition, which is not allowed"); @@ -441,11 +447,11 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) } } }, - TtToken(..) => { + TokenTree::Token(..) => { // i. If T is not an NT, continue. 
continue }, - TtDelimited(_, ref tts) => { + TokenTree::Delimited(_, ref tts) => { // if we don't pass in that close delimiter, we'll incorrectly consider the matcher // `{ $foo:ty }` as having a follow that isn't `RBrace` check_matcher(cx, tts.tts.iter(), &tts.close_token()) diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index d1e48eda4ffd2..0fc31f3fd08af 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -10,7 +10,7 @@ use self::LockstepIterSize::*; use ast; -use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident, Name}; +use ast::{TokenTree, Ident, Name}; use codemap::{Span, DUMMY_SP}; use diagnostic::SpanHandler; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; @@ -53,7 +53,7 @@ pub struct TtReader<'a> { } /// This can do Macro-By-Example transcription. On the other hand, if -/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can +/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can /// (and should) be None. pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler, interp: Option<HashMap<Name, Rc<NamedMatch>>>, @@ -67,7 +67,7 @@ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler, /// like any other attribute which consists of `meta` and surrounding #[ ] tokens. /// /// This can do Macro-By-Example transcription. On the other hand, if -/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can +/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can /// (and should) be None. 
pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler, interp: Option<HashMap<Name, Rc<NamedMatch>>>, @@ -78,7 +78,7 @@ pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler, let mut r = TtReader { sp_diag: sp_diag, stack: vec!(TtFrame { - forest: TtSequence(DUMMY_SP, Rc::new(ast::SequenceRepetition { + forest: TokenTree::Sequence(DUMMY_SP, Rc::new(ast::SequenceRepetition { tts: src, // doesn't matter. This merely holds the root unzipping. separator: None, op: ast::ZeroOrMore, num_captures: 0 @@ -151,17 +151,17 @@ impl Add for LockstepIterSize { fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize { match *t { - TtDelimited(_, ref delimed) => { + TokenTree::Delimited(_, ref delimed) => { delimed.tts.iter().fold(LisUnconstrained, |size, tt| { size + lockstep_iter_size(tt, r) }) }, - TtSequence(_, ref seq) => { + TokenTree::Sequence(_, ref seq) => { seq.tts.iter().fold(LisUnconstrained, |size, tt| { size + lockstep_iter_size(tt, r) }) }, - TtToken(_, SubstNt(name, _)) | TtToken(_, MatchNt(name, _, _, _)) => + TokenTree::Token(_, SubstNt(name, _)) | TokenTree::Token(_, MatchNt(name, _, _, _)) => match lookup_cur_matched(r, name) { Some(matched) => match *matched { MatchedNonterminal(_) => LisUnconstrained, @@ -169,7 +169,7 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize { }, _ => LisUnconstrained }, - TtToken(..) => LisUnconstrained, + TokenTree::Token(..) => LisUnconstrained, } } @@ -232,17 +232,17 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } } } - loop { /* because it's easiest, this handles `TtDelimited` not starting - with a `TtToken`, even though it won't happen */ + loop { /* because it's easiest, this handles `TokenTree::Delimited` not starting + with a `TokenTree::Token`, even though it won't happen */ let t = { let frame = r.stack.last().unwrap(); // FIXME(pcwalton): Bad copy. 
frame.forest.get_tt(frame.idx) }; match t { - TtSequence(sp, seq) => { + TokenTree::Sequence(sp, seq) => { // FIXME(pcwalton): Bad copy. - match lockstep_iter_size(&TtSequence(sp, seq.clone()), + match lockstep_iter_size(&TokenTree::Sequence(sp, seq.clone()), r) { LisUnconstrained => { panic!(r.sp_diag.span_fatal( @@ -272,20 +272,20 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { idx: 0, dotdotdoted: true, sep: seq.separator.clone(), - forest: TtSequence(sp, seq), + forest: TokenTree::Sequence(sp, seq), }); } } } // FIXME #2887: think about span stuff here - TtToken(sp, SubstNt(ident, namep)) => { + TokenTree::Token(sp, SubstNt(ident, namep)) => { r.stack.last_mut().unwrap().idx += 1; match lookup_cur_matched(r, ident) { None => { r.cur_span = sp; r.cur_tok = SubstNt(ident, namep); return ret_val; - // this can't be 0 length, just like TtDelimited + // this can't be 0 length, just like TokenTree::Delimited } Some(cur_matched) => { match *cur_matched { @@ -313,8 +313,8 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } } } - // TtDelimited or any token that can be unzipped - seq @ TtDelimited(..) | seq @ TtToken(_, MatchNt(..)) => { + // TokenTree::Delimited or any token that can be unzipped + seq @ TokenTree::Delimited(..) 
| seq @ TokenTree::Token(_, MatchNt(..)) => { // do not advance the idx yet r.stack.push(TtFrame { forest: seq, @@ -324,15 +324,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { }); // if this could be 0-length, we'd need to potentially recur here } - TtToken(sp, DocComment(name)) if r.desugar_doc_comments => { + TokenTree::Token(sp, DocComment(name)) if r.desugar_doc_comments => { r.stack.push(TtFrame { - forest: TtToken(sp, DocComment(name)), + forest: TokenTree::Token(sp, DocComment(name)), idx: 0, dotdotdoted: false, sep: None }); } - TtToken(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => { + TokenTree::Token(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => { r.stack.last_mut().unwrap().idx += 1; if r.imported_from.is_some() { @@ -344,7 +344,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { // otherwise emit nothing and proceed to the next token } - TtToken(sp, tok) => { + TokenTree::Token(sp, tok) => { r.cur_span = sp; r.cur_tok = tok; r.stack.last_mut().unwrap().idx += 1; diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index d637343de04e8..cb16c95f9a3f4 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -586,10 +586,10 @@ pub fn noop_fold_arg<T: Folder>(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg { pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree { match *tt { - TtToken(span, ref tok) => - TtToken(span, fld.fold_token(tok.clone())), - TtDelimited(span, ref delimed) => { - TtDelimited(span, Rc::new( + TokenTree::Token(span, ref tok) => + TokenTree::Token(span, fld.fold_token(tok.clone())), + TokenTree::Delimited(span, ref delimed) => { + TokenTree::Delimited(span, Rc::new( Delimited { delim: delimed.delim, open_span: delimed.open_span, @@ -598,8 +598,8 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree { } )) }, - TtSequence(span, ref seq) => - TtSequence(span, + TokenTree::Sequence(span, ref seq) => + TokenTree::Sequence(span, 
Rc::new(SequenceRepetition { tts: fld.fold_tts(&seq.tts), separator: seq.separator.clone().map(|tok| fld.fold_token(tok)), diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index a468f0d1d98f1..5c0ffb770b77b 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -669,7 +669,7 @@ mod tests { use std::rc::Rc; use codemap::{Span, BytePos, Pos, Spanned, NO_EXPANSION}; use owned_slice::OwnedSlice; - use ast; + use ast::{self, TokenTree}; use abi; use attr::{first_attr_value_str_by_name, AttrMetaMethods}; use parse; @@ -739,10 +739,10 @@ mod tests { match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) { ( 4, - Some(&ast::TtToken(_, token::Ident(name_macro_rules, token::Plain))), - Some(&ast::TtToken(_, token::Not)), - Some(&ast::TtToken(_, token::Ident(name_zip, token::Plain))), - Some(&ast::TtDelimited(_, ref macro_delimed)), + Some(&TokenTree::Token(_, token::Ident(name_macro_rules, token::Plain))), + Some(&TokenTree::Token(_, token::Not)), + Some(&TokenTree::Token(_, token::Ident(name_zip, token::Plain))), + Some(&TokenTree::Delimited(_, ref macro_delimed)), ) if name_macro_rules.name.as_str() == "macro_rules" && name_zip.name.as_str() == "zip" => { @@ -750,17 +750,17 @@ mod tests { match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { ( 3, - Some(&ast::TtDelimited(_, ref first_delimed)), - Some(&ast::TtToken(_, token::FatArrow)), - Some(&ast::TtDelimited(_, ref second_delimed)), + Some(&TokenTree::Delimited(_, ref first_delimed)), + Some(&TokenTree::Token(_, token::FatArrow)), + Some(&TokenTree::Delimited(_, ref second_delimed)), ) if macro_delimed.delim == token::Paren => { let tts = &first_delimed.tts[..]; match (tts.len(), tts.get(0), tts.get(1)) { ( 2, - Some(&ast::TtToken(_, token::Dollar)), - Some(&ast::TtToken(_, token::Ident(ident, token::Plain))), + Some(&TokenTree::Token(_, token::Dollar)), + Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))), ) if first_delimed.delim == token::Paren && 
ident.name.as_str() == "a" => {}, @@ -770,8 +770,8 @@ mod tests { match (tts.len(), tts.get(0), tts.get(1)) { ( 2, - Some(&ast::TtToken(_, token::Dollar)), - Some(&ast::TtToken(_, token::Ident(ident, token::Plain))), + Some(&TokenTree::Token(_, token::Dollar)), + Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))), ) if second_delimed.delim == token::Paren && ident.name.as_str() == "a" => {}, @@ -790,39 +790,39 @@ mod tests { let tts = string_to_tts("fn a (b : i32) { b; }".to_string()); let expected = vec![ - ast::TtToken(sp(0, 2), + TokenTree::Token(sp(0, 2), token::Ident(str_to_ident("fn"), token::IdentStyle::Plain)), - ast::TtToken(sp(3, 4), + TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"), token::IdentStyle::Plain)), - ast::TtDelimited( + TokenTree::Delimited( sp(5, 14), Rc::new(ast::Delimited { delim: token::DelimToken::Paren, open_span: sp(5, 6), tts: vec![ - ast::TtToken(sp(6, 7), + TokenTree::Token(sp(6, 7), token::Ident(str_to_ident("b"), token::IdentStyle::Plain)), - ast::TtToken(sp(8, 9), + TokenTree::Token(sp(8, 9), token::Colon), - ast::TtToken(sp(10, 13), + TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"), token::IdentStyle::Plain)), ], close_span: sp(13, 14), })), - ast::TtDelimited( + TokenTree::Delimited( sp(15, 21), Rc::new(ast::Delimited { delim: token::DelimToken::Brace, open_span: sp(15, 16), tts: vec![ - ast::TtToken(sp(17, 18), + TokenTree::Token(sp(17, 18), token::Ident(str_to_ident("b"), token::IdentStyle::Plain)), - ast::TtToken(sp(18, 19), + TokenTree::Token(sp(18, 19), token::Semi) ], close_span: sp(20, 21), diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 2401f6be78f9c..907197eb4dfb2 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -48,7 +48,6 @@ use ast::{StmtExpr, StmtSemi, StmtMac, VariantData, StructField}; use ast::{BiSub, StrStyle}; use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue}; use ast::{Delimited, SequenceRepetition, 
TokenTree, TraitItem, TraitRef}; -use ast::{TtDelimited, TtSequence, TtToken}; use ast::{Ty, Ty_, TypeBinding, TyMac}; use ast::{TyFixedLengthVec, TyBareFn, TyTypeof, TyInfer}; use ast::{TyParam, TyParamBound, TyParen, TyPath, TyPolyTraitRef, TyPtr}; @@ -2428,7 +2427,7 @@ impl<'a> Parser<'a> { )); let (sep, repeat) = try!(self.parse_sep_and_kleene_op()); let name_num = macro_parser::count_names(&seq); - return Ok(TtSequence(mk_sp(sp.lo, seq_span.hi), + return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi), Rc::new(SequenceRepetition { tts: seq, separator: sep, @@ -2437,7 +2436,7 @@ impl<'a> Parser<'a> { }))); } else if self.token.is_keyword_allow_following_colon(keywords::Crate) { try!(self.bump()); - return Ok(TtToken(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar))); + return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar))); } else { sp = mk_sp(sp.lo, self.span.hi); let namep = match self.token { token::Ident(_, p) => p, _ => token::Plain }; @@ -2459,9 +2458,9 @@ impl<'a> Parser<'a> { sp = mk_sp(sp.lo, self.span.hi); let kindp = match self.token { token::Ident(_, p) => p, _ => token::Plain }; let nt_kind = try!(self.parse_ident()); - Ok(TtToken(sp, MatchNt(name, nt_kind, namep, kindp))) + Ok(TokenTree::Token(sp, MatchNt(name, nt_kind, namep, kindp))) } else { - Ok(TtToken(sp, SubstNt(name, namep))) + Ok(TokenTree::Token(sp, SubstNt(name, namep))) } } @@ -2509,7 +2508,7 @@ impl<'a> Parser<'a> { /// parse a single token tree from the input. pub fn parse_token_tree(&mut self) -> PResult<TokenTree> { // FIXME #6994: currently, this is too eager. It - // parses token trees but also identifies TtSequence's + // parses token trees but also identifies TokenTree::Sequence's // and token::SubstNt's; it's too early to know yet // whether something will be a nonterminal or a seq // yet. 
@@ -2540,7 +2539,7 @@ impl<'a> Parser<'a> { p.parse_unquoted() } _ => { - Ok(TtToken(p.span, try!(p.bump_and_get()))) + Ok(TokenTree::Token(p.span, try!(p.bump_and_get()))) } } } @@ -2579,7 +2578,7 @@ impl<'a> Parser<'a> { // Expand to cover the entire delimited token tree let span = Span { hi: close_span.hi, ..pre_span }; - Ok(TtDelimited(span, Rc::new(Delimited { + Ok(TokenTree::Delimited(span, Rc::new(Delimited { delim: delim, open_span: open_span, tts: tts, diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 2ab94b41c74a2..fad0b7869f02d 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -11,7 +11,7 @@ pub use self::AnnNode::*; use abi; -use ast; +use ast::{self, TokenTree}; use ast::{RegionTyParamBound, TraitTyParamBound, TraitBoundModifier}; use ast_util; use util::parser::AssocOp; @@ -1452,7 +1452,7 @@ impl<'a> State<'a> { /// expression arguments as expressions). It can be done! I think. pub fn print_tt(&mut self, tt: &ast::TokenTree) -> io::Result<()> { match *tt { - ast::TtToken(_, ref tk) => { + TokenTree::Token(_, ref tk) => { try!(word(&mut self.s, &token_to_string(tk))); match *tk { parse::token::DocComment(..) 
=> { @@ -1461,14 +1461,14 @@ impl<'a> State<'a> { _ => Ok(()) } } - ast::TtDelimited(_, ref delimed) => { + TokenTree::Delimited(_, ref delimed) => { try!(word(&mut self.s, &token_to_string(&delimed.open_token()))); try!(space(&mut self.s)); try!(self.print_tts(&delimed.tts)); try!(space(&mut self.s)); word(&mut self.s, &token_to_string(&delimed.close_token())) }, - ast::TtSequence(_, ref seq) => { + TokenTree::Sequence(_, ref seq) => { try!(word(&mut self.s, "$(")); for tt_elt in &seq.tts { try!(self.print_tt(tt_elt)); @@ -1499,9 +1499,9 @@ impl<'a> State<'a> { // There should be no space between the module name and the following `::` in paths, // otherwise imported macros get re-parsed from crate metadata incorrectly (#20701) suppress_space = match tt { - &ast::TtToken(_, token::Ident(_, token::ModName)) | - &ast::TtToken(_, token::MatchNt(_, _, _, token::ModName)) | - &ast::TtToken(_, token::SubstNt(_, token::ModName)) => true, + &TokenTree::Token(_, token::Ident(_, token::ModName)) | + &TokenTree::Token(_, token::MatchNt(_, _, _, token::ModName)) | + &TokenTree::Token(_, token::SubstNt(_, token::ModName)) => true, _ => false } } diff --git a/src/test/auxiliary/procedural_mbe_matching.rs b/src/test/auxiliary/procedural_mbe_matching.rs index 296d1e431f4ca..a92361b8106d7 100644 --- a/src/test/auxiliary/procedural_mbe_matching.rs +++ b/src/test/auxiliary/procedural_mbe_matching.rs @@ -18,7 +18,7 @@ extern crate rustc; use syntax::codemap::Span; use syntax::parse::token::{self, str_to_ident, NtExpr, NtPat}; -use syntax::ast::{TokenTree, TtToken, Pat}; +use syntax::ast::{TokenTree, Pat}; use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager}; use syntax::ext::build::AstBuilder; use syntax::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; diff --git a/src/test/auxiliary/roman_numerals.rs b/src/test/auxiliary/roman_numerals.rs index e6f375354aa3a..3abc6f4a9f5d8 100644 --- a/src/test/auxiliary/roman_numerals.rs +++ 
b/src/test/auxiliary/roman_numerals.rs @@ -18,7 +18,7 @@ extern crate syntax; extern crate rustc; use syntax::codemap::Span; -use syntax::ast::{TokenTree, TtToken}; +use syntax::ast::TokenTree; use syntax::parse::token; use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager}; use syntax::ext::build::AstBuilder; // trait for expr_usize @@ -40,7 +40,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree]) ("I", 1)]; let text = match args { - [TtToken(_, token::Ident(s, _))] => s.to_string(), + [TokenTree::Token(_, token::Ident(s, _))] => s.to_string(), _ => { cx.span_err(sp, "argument should be a single identifier"); return DummyResult::any(sp);