Improve TokenTree variants #18229

Merged (7 commits) on Oct 28, 2014
4 changes: 2 additions & 2 deletions src/doc/guide-plugin.md
@@ -56,7 +56,7 @@ extern crate rustc;

use syntax::codemap::Span;
use syntax::parse::token::{IDENT, get_ident};
use syntax::ast::{TokenTree, TTTok};
use syntax::ast::{TokenTree, TtToken};
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr};
use syntax::ext::build::AstBuilder; // trait for expr_uint
use rustc::plugin::Registry;
@@ -71,7 +71,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
("I", 1)];

let text = match args {
[TTTok(_, IDENT(s, _))] => get_ident(s).to_string(),
[TtToken(_, IDENT(s, _))] => get_ident(s).to_string(),
_ => {
cx.span_err(sp, "argument should be a single identifier");
return DummyResult::any(sp);
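For context, the guide's `expand_rn` plugin still expects its argument to be a single identifier; only the variant name changes from `TTTok` to `TtToken`. A minimal usage sketch, under two assumptions that do not appear in this hunk (that the plugin is registered as `rn` and lives in a crate named `roman_numerals`):

#![feature(phase)]

#[phase(plugin)]
extern crate roman_numerals; // hypothetical crate name for the guide's plugin

fn main() {
    // The single identifier reaches expand_rn as one TtToken(_, IDENT(..)).
    assert_eq!(rn!(MMXIV), 2014u);
}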
62 changes: 47 additions & 15 deletions src/libsyntax/ast.rs
@@ -24,6 +24,9 @@ use std::fmt::Show;
use std::rc::Rc;
use serialize::{Encodable, Decodable, Encoder, Decoder};

#[cfg(stage0)]
pub use self::TtToken as TTTok;

// FIXME #6993: in librustc, uses of "ident" should be replaced
// by just "Name".

@@ -592,6 +595,28 @@ pub enum CaptureClause {
CaptureByRef,
}

/// A token that delimits a sequence of token trees
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
pub struct Delimiter {
pub span: Span,
pub token: ::parse::token::Token,
}

impl Delimiter {
/// Convert the delimiter to a `TtToken`
pub fn to_tt(&self) -> TokenTree {
TtToken(self.span, self.token.clone())
}
}

/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
/// for token sequences.
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
pub enum KleeneOp {
ZeroOrMore,
OneOrMore,
}

/// When the main rust parser encounters a syntax-extension invocation, it
/// parses the arguments to the invocation as a token-tree. This is a very
/// loose structure, such that all sorts of different AST-fragments can
@@ -600,32 +625,39 @@ pub enum CaptureClause,
/// If the syntax extension is an MBE macro, it will attempt to match its
/// LHS "matchers" against the provided token tree, and if it finds a
/// match, will transcribe the RHS token tree, splicing in any captured
/// macro_parser::matched_nonterminals into the TTNonterminals it finds.
/// `macro_parser::matched_nonterminals` into the `TtNonterminal`s it finds.
///
/// The RHS of an MBE macro is the only place a TTNonterminal or TTSeq
/// The RHS of an MBE macro is the only place a `TtNonterminal` or `TtSequence`
/// makes any real sense. You could write them elsewhere but nothing
/// else knows what to do with them, so you'll probably get a syntax
/// error.
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
#[doc="For macro invocations; parsing is delegated to the macro"]
pub enum TokenTree {
/// A single token
TTTok(Span, ::parse::token::Token),
/// A delimited sequence (the delimiters appear as the first
/// and last elements of the vector)
// FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
TTDelim(Rc<Vec<TokenTree>>),
TtToken(Span, ::parse::token::Token),
/// A delimited sequence of token trees
TtDelimited(Span, Rc<(Delimiter, Vec<TokenTree>, Delimiter)>),

// These only make sense for right-hand-sides of MBE macros:

/// A kleene-style repetition sequence with a span, a TTForest,
/// an optional separator, and a boolean where true indicates
/// zero or more (..), and false indicates one or more (+).
/// A Kleene-style repetition sequence with an optional separator.
// FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
TTSeq(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, bool),

TtSequence(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, KleeneOp),
/// A syntactic variable that will be filled in by macro expansion.
TTNonterminal(Span, Ident)
TtNonterminal(Span, Ident)
}

impl TokenTree {
/// Returns the `Span` corresponding to this token tree.
pub fn get_span(&self) -> Span {
match *self {
TtToken(span, _) => span,
TtDelimited(span, _) => span,
TtSequence(span, _, _, _) => span,
TtNonterminal(span, _) => span,
}
}
}

// Matchers are nodes defined-by and recognized-by the main rust parser and
@@ -684,9 +716,9 @@ pub type Matcher = Spanned<Matcher_>;
pub enum Matcher_ {
/// Match one token
MatchTok(::parse::token::Token),
/// Match repetitions of a sequence: body, separator, zero ok?,
/// Match repetitions of a sequence: body, separator, Kleene operator,
/// lo, hi position-in-match-array used:
MatchSeq(Vec<Matcher> , Option<::parse::token::Token>, bool, uint, uint),
MatchSeq(Vec<Matcher> , Option<::parse::token::Token>, KleeneOp, uint, uint),
/// Parse a Rust NT: name to bind, name of NT, position in match array:
MatchNonterminal(Ident, Ident, uint)
}
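To make the new shape concrete: with these definitions, a delimited argument list such as `(a, b)` is a single `TtDelimited` whose open and close `Delimiter`s carry their own spans, instead of the old `TTDelim` vector that stored the delimiters as its first and last elements. A small plugin-side sketch of consuming it (hypothetical helper, not part of this PR; it uses only the variants defined above):

use syntax::ast;
use syntax::parse::token;

// Collect the identifiers inside a delimited token tree such as `(a, b)`.
fn idents_in_delimited(tt: &ast::TokenTree) -> Vec<ast::Ident> {
    match *tt {
        ast::TtDelimited(_, ref delimed) => {
            // The tuple is (open delimiter, inner trees, close delimiter);
            // the delimiters no longer live inside the token-tree vector.
            let (_, ref tts, _) = **delimed;
            tts.iter().filter_map(|t| match *t {
                ast::TtToken(_, token::IDENT(id, _)) => Some(id),
                _ => None,
            }).collect()
        }
        _ => Vec::new(),
    }
}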
12 changes: 6 additions & 6 deletions src/libsyntax/diagnostics/plugin.rs
@@ -50,7 +50,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let code = match token_tree {
[ast::TTTok(_, token::IDENT(code, _))] => code,
[ast::TtToken(_, token::IDENT(code, _))] => code,
_ => unreachable!()
};
with_registered_diagnostics(|diagnostics| {
@@ -82,12 +82,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let (code, description) = match token_tree {
[ast::TTTok(_, token::IDENT(ref code, _))] => {
[ast::TtToken(_, token::IDENT(ref code, _))] => {
(code, None)
},
[ast::TTTok(_, token::IDENT(ref code, _)),
ast::TTTok(_, token::COMMA),
ast::TTTok(_, token::LIT_STR_RAW(description, _))] => {
[ast::TtToken(_, token::IDENT(ref code, _)),
ast::TtToken(_, token::COMMA),
ast::TtToken(_, token::LIT_STR_RAW(description, _))] => {
(code, Some(description))
}
_ => unreachable!()
@@ -110,7 +110,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let name = match token_tree {
[ast::TTTok(_, token::IDENT(ref name, _))] => name,
[ast::TtToken(_, token::IDENT(ref name, _))] => name,
_ => unreachable!()
};

4 changes: 2 additions & 2 deletions src/libsyntax/ext/base.rs
@@ -684,8 +684,8 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt,
cx.span_err(sp, format!("{} takes 1 argument.", name).as_slice());
} else {
match tts[0] {
ast::TTTok(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())),
ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => {
ast::TtToken(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())),
ast::TtToken(_, token::LIT_STR_RAW(ident, _)) => {
return Some(parse::raw_str_lit(ident.as_str()))
}
_ => {
4 changes: 2 additions & 2 deletions src/libsyntax/ext/concat_idents.rs
@@ -23,15 +23,15 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
for (i, e) in tts.iter().enumerate() {
if i & 1 == 1 {
match *e {
ast::TTTok(_, token::COMMA) => (),
ast::TtToken(_, token::COMMA) => (),
_ => {
cx.span_err(sp, "concat_idents! expecting comma.");
return DummyResult::expr(sp);
}
}
} else {
match *e {
ast::TTTok(_, token::IDENT(ident,_)) => {
ast::TtToken(_, token::IDENT(ident,_)) => {
res_str.push_str(token::get_ident(ident).get())
}
_ => {
8 changes: 3 additions & 5 deletions src/libsyntax/ext/log_syntax.rs
@@ -13,16 +13,14 @@ use codemap;
use ext::base;
use print;

use std::rc::Rc;

pub fn expand_syntax_ext<'cx>(cx: &'cx mut base::ExtCtxt,
sp: codemap::Span,
tt: &[ast::TokenTree])
tts: &[ast::TokenTree])
-> Box<base::MacResult+'cx> {

cx.print_backtrace();
println!("{}", print::pprust::tt_to_string(&ast::TTDelim(
Rc::new(tt.iter().map(|x| (*x).clone()).collect()))));

println!("{}", print::pprust::tts_to_string(tts));

// any so that `log_syntax` can be invoked as an expression and item.
base::DummyResult::any(sp)
29 changes: 16 additions & 13 deletions src/libsyntax/ext/quote.rs
@@ -23,7 +23,7 @@ use ptr::P;
*
* This is registered as a set of expression syntax extension called quote!
* that lifts its argument token-tree to an AST representing the
* construction of the same token tree, with ast::TTNonterminal nodes
* construction of the same token tree, with ast::TtNonterminal nodes
* interpreted as antiquotes (splices).
*
*/
@@ -637,26 +637,29 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
}


fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
match *tt {
ast::TTTok(sp, ref tok) => {
ast::TtToken(sp, ref tok) => {
let e_sp = cx.expr_ident(sp, id_ext("_sp"));
let e_tok = cx.expr_call(sp,
mk_ast_path(cx, sp, "TTTok"),
mk_ast_path(cx, sp, "TtToken"),
vec!(e_sp, mk_token(cx, sp, tok)));
let e_push =
cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext("tt")),
id_ext("push"),
vec!(e_tok));
vec!(cx.stmt_expr(e_push))
}

ast::TTDelim(ref tts) => mk_tts(cx, sp, tts.as_slice()),
ast::TTSeq(..) => fail!("TTSeq in quote!"),

ast::TTNonterminal(sp, ident) => {

},
ast::TtDelimited(sp, ref delimed) => {
let (ref open, ref tts, ref close) = **delimed;
mk_tt(cx, sp, &open.to_tt()).into_iter()
.chain(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter()))
.chain(mk_tt(cx, sp, &close.to_tt()).into_iter())
.collect()
},
ast::TtSequence(..) => fail!("TtSequence in quote!"),
ast::TtNonterminal(sp, ident) => {
// tt.extend($ident.to_tokens(ext_cx).into_iter())

let e_to_toks =
@@ -674,7 +677,7 @@ fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
vec!(e_to_toks));

vec!(cx.stmt_expr(e_push))
}
},
}
}

@@ -690,7 +693,7 @@ fn mk_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> (P<ast::Expr>, P<ast::Expr>) {
// NB: It appears that the main parser loses its mind if we consider
// $foo as a TTNonterminal during the main parse, so we have to re-parse
// $foo as a TtNonterminal during the main parse, so we have to re-parse
// under quote_depth > 0. This is silly and should go away; the _guess_ is
// it has to do with transition away from supporting old-style macros, so
// try removing it when enough of them are gone.
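As a reminder of the antiquote behaviour described in the module doc above, `$e` inside a quote is parsed as a `TtNonterminal` and spliced back in through its `ToTokens` implementation when the quote expands. A sketch, assuming a syntax-extension crate with `#![feature(quote)]` enabled and `extern crate syntax` linked:

use syntax::ast;
use syntax::ext::base::ExtCtxt;
use syntax::ptr::P;

// Build the expression `$e + 1u`; the `$e` antiquote is filled in from `e`.
fn plus_one(cx: &mut ExtCtxt, e: P<ast::Expr>) -> P<ast::Expr> {
    quote_expr!(cx, $e + 1u)
}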
4 changes: 2 additions & 2 deletions src/libsyntax/ext/trace_macros.rs
@@ -20,10 +20,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt,
tt: &[ast::TokenTree])
-> Box<base::MacResult+'static> {
match tt {
[ast::TTTok(_, ref tok)] if is_keyword(keywords::True, tok) => {
[ast::TtToken(_, ref tok)] if is_keyword(keywords::True, tok) => {
cx.set_trace_macros(true);
}
[ast::TTTok(_, ref tok)] if is_keyword(keywords::False, tok) => {
[ast::TtToken(_, ref tok)] if is_keyword(keywords::False, tok) => {
cx.set_trace_macros(false);
}
_ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),
4 changes: 2 additions & 2 deletions src/libsyntax/ext/tt/macro_parser.rs
@@ -323,9 +323,9 @@ pub fn parse(sess: &ParseSess,
} else {
match ei.elts[idx].node.clone() {
/* need to descend into sequence */
MatchSeq(ref matchers, ref sep, zero_ok,
MatchSeq(ref matchers, ref sep, kleene_op,
match_idx_lo, match_idx_hi) => {
if zero_ok {
if kleene_op == ast::ZeroOrMore {
let mut new_ei = ei.clone();
new_ei.idx += 1u;
//we specifically matched zero repeats.
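The `kleene_op == ast::ZeroOrMore` test above replaces the old `zero_ok` boolean; as the previous ast.rs comment put it, `true` meant zero-or-more and `false` meant one-or-more. Spelled out as a hypothetical helper (not in the diff), the correspondence is:

use syntax::ast;

// Map the pre-PR boolean flag onto the new KleeneOp enum.
fn kleene_from_bool(zero_ok: bool) -> ast::KleeneOp {
    if zero_ok { ast::ZeroOrMore } else { ast::OneOrMore }
}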
30 changes: 12 additions & 18 deletions src/libsyntax/ext/tt/macro_rules.rs
@@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TTDelim};
use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TtDelimited};
use ast;
use codemap::{Span, Spanned, DUMMY_SP};
use ext::base::{ExtCtxt, MacResult, MacroDef};
@@ -147,13 +147,9 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
rhses: &[Rc<NamedMatch>])
-> Box<MacResult+'cx> {
if cx.trace_macros() {
println!("{}! {} {} {}",
println!("{}! {{ {} }}",
token::get_ident(name),
"{",
print::pprust::tt_to_string(&TTDelim(Rc::new(arg.iter()
.map(|x| (*x).clone())
.collect()))),
"}");
print::pprust::tts_to_string(arg));
}

// Which arm's failure should we report? (the one furthest along)
@@ -175,15 +171,12 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
// okay, what's your transcriber?
MatchedNonterminal(NtTT(ref tt)) => {
match **tt {
// cut off delimiters; don't parse 'em
TTDelim(ref tts) => {
(*tts).slice(1u,(*tts).len()-1u)
.iter()
.map(|x| (*x).clone())
.collect()
}
_ => cx.span_fatal(
sp, "macro rhs must be delimited")
// ignore delimiters
TtDelimited(_, ref delimed) => {
let (_, ref tts, _) = **delimed;
tts.clone()
},
_ => cx.span_fatal(sp, "macro rhs must be delimited"),
}
},
_ => cx.span_bug(sp, "bad thing in rhs")
@@ -239,10 +232,11 @@ pub fn add_new_extension<'cx>(cx: &'cx mut ExtCtxt,
ms(MatchSeq(vec!(
ms(MatchNonterminal(lhs_nm, special_idents::matchers, 0u)),
ms(MatchTok(FAT_ARROW)),
ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))), Some(SEMI), false, 0u, 2u)),
ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))), Some(SEMI),
ast::OneOrMore, 0u, 2u)),
//to phase into semicolon-termination instead of
//semicolon-separation
ms(MatchSeq(vec!(ms(MatchTok(SEMI))), None, true, 2u, 2u)));
ms(MatchSeq(vec!(ms(MatchTok(SEMI))), None, ast::ZeroOrMore, 2u, 2u)));


// Parse the macro_rules! invocation (`none` is for no interpolations):
Expand Down