diff --git a/Cargo.toml b/Cargo.toml
index 4b863833..d1853ca2 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "proc-macro2"
-version = "0.3.8" # remember to update html_root_url
+version = "0.4.0" # remember to update html_root_url
 authors = ["Alex Crichton <alexcrichton@gmail.com>"]
 license = "MIT/Apache-2.0"
 readme = "README.md"
diff --git a/README.md b/README.md
index 52fe7fbf..de265776 100644
--- a/README.md
+++ b/README.md
@@ -27,7 +27,7 @@ itself. Usage is done via:
 
 ```toml
 [dependencies]
-proc-macro2 = "0.3"
+proc-macro2 = "0.4"
 ```
 
 followed by
@@ -57,7 +57,7 @@ You can enable this feature via:
 
 ```toml
 [dependencies]
-proc-macro2 = { version = "0.3", features = ["nightly"] }
+proc-macro2 = { version = "0.4", features = ["nightly"] }
 ```
 
diff --git a/src/lib.rs b/src/lib.rs
index b7fbb14c..286bf3a0 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -43,14 +43,16 @@
 //! [ts]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
 
 // Proc-macro2 types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/proc-macro2/0.3.8")]
+#![doc(html_root_url = "https://docs.rs/proc-macro2/0.4.0")]
 #![cfg_attr(feature = "nightly", feature(proc_macro))]
 
 #[cfg(feature = "proc-macro")]
 extern crate proc_macro;
 extern crate unicode_xid;
 
+use std::cmp::Ordering;
 use std::fmt;
+use std::hash::{Hash, Hasher};
 use std::iter::FromIterator;
 use std::marker;
 use std::rc::Rc;
@@ -127,6 +129,12 @@ impl From<TokenStream> for proc_macro::TokenStream {
     }
 }
 
+impl Extend<TokenTree> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
+        self.inner.extend(streams)
+    }
+}
+
 impl FromIterator<TokenTree> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
         TokenStream::_new(streams.into_iter().collect())
@@ -284,8 +292,8 @@ impl fmt::Debug for Span {
 #[derive(Clone)]
 pub enum TokenTree {
     Group(Group),
-    Term(Term),
-    Op(Op),
+    Ident(Ident),
+    Punct(Punct),
     Literal(Literal),
 }
 
@@ -293,8 +301,8 @@ impl TokenTree {
     pub fn span(&self) -> Span {
         match *self {
             TokenTree::Group(ref t) => t.span(),
-            TokenTree::Term(ref t) => t.span(),
-            TokenTree::Op(ref t) => t.span(),
+            TokenTree::Ident(ref t) => t.span(),
+            TokenTree::Punct(ref t) => t.span(),
             TokenTree::Literal(ref t) => t.span(),
         }
     }
@@ -302,8 +310,8 @@ pub fn set_span(&mut self, span: Span) {
         match *self {
             TokenTree::Group(ref mut t) => t.set_span(span),
-            TokenTree::Term(ref mut t) => t.set_span(span),
-            TokenTree::Op(ref mut t) => t.set_span(span),
+            TokenTree::Ident(ref mut t) => t.set_span(span),
+            TokenTree::Punct(ref mut t) => t.set_span(span),
             TokenTree::Literal(ref mut t) => t.set_span(span),
         }
     }
@@ -315,15 +323,15 @@ impl From<Group> for TokenTree {
     }
 }
 
-impl From<Term> for TokenTree {
-    fn from(g: Term) -> TokenTree {
-        TokenTree::Term(g)
+impl From<Ident> for TokenTree {
+    fn from(g: Ident) -> TokenTree {
+        TokenTree::Ident(g)
     }
 }
 
-impl From<Op> for TokenTree {
-    fn from(g: Op) -> TokenTree {
-        TokenTree::Op(g)
+impl From<Punct> for TokenTree {
+    fn from(g: Punct) -> TokenTree {
+        TokenTree::Punct(g)
     }
 }
 
@@ -337,8 +345,8 @@ impl fmt::Display for TokenTree {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match *self {
             TokenTree::Group(ref t) => t.fmt(f),
-            TokenTree::Term(ref t) => t.fmt(f),
-            TokenTree::Op(ref t) => t.fmt(f),
+            TokenTree::Ident(ref t) => t.fmt(f),
+            TokenTree::Punct(ref t) => t.fmt(f),
             TokenTree::Literal(ref t) => t.fmt(f),
         }
     }
@@ -350,8 +358,8 @@ impl fmt::Debug for TokenTree {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         // so don't bother with an extra layer of indirection
         match *self {
             TokenTree::Group(ref t) => t.fmt(f),
-            TokenTree::Term(ref t) => t.fmt(f),
-            TokenTree::Op(ref t) => t.fmt(f),
+            TokenTree::Ident(ref t) => t.fmt(f),
+            TokenTree::Punct(ref t) => t.fmt(f),
             TokenTree::Literal(ref t) => t.fmt(f),
         }
     }
@@ -415,8 +423,8 @@ impl fmt::Debug for Group {
     }
 }
 
-#[derive(Copy, Clone)]
-pub struct Op {
+#[derive(Clone)]
+pub struct Punct {
     op: char,
     spacing: Spacing,
     span: Span,
@@ -428,16 +436,16 @@ pub enum Spacing {
     Joint,
 }
 
-impl Op {
-    pub fn new(op: char, spacing: Spacing) -> Op {
-        Op {
+impl Punct {
+    pub fn new(op: char, spacing: Spacing) -> Punct {
+        Punct {
             op: op,
             spacing: spacing,
             span: Span::call_site(),
         }
     }
 
-    pub fn op(&self) -> char {
+    pub fn as_char(&self) -> char {
         self.op
     }
@@ -454,15 +462,15 @@ impl Op {
     }
 }
 
-impl fmt::Display for Op {
+impl fmt::Display for Punct {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.op.fmt(f)
     }
 }
 
-impl fmt::Debug for Op {
+impl fmt::Debug for Punct {
     fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
-        let mut debug = fmt.debug_struct("Op");
+        let mut debug = fmt.debug_struct("Punct");
         debug.field("op", &self.op);
         debug.field("spacing", &self.spacing);
         #[cfg(procmacro2_semver_exempt)]
@@ -471,26 +479,31 @@ impl fmt::Debug for Op {
     }
 }
 
-#[derive(Copy, Clone)]
-pub struct Term {
-    inner: imp::Term,
+#[derive(Clone)]
+pub struct Ident {
+    inner: imp::Ident,
     _marker: marker::PhantomData<Rc<()>>,
 }
 
-impl Term {
-    fn _new(inner: imp::Term) -> Term {
-        Term {
+impl Ident {
+    fn _new(inner: imp::Ident) -> Ident {
+        Ident {
             inner: inner,
             _marker: marker::PhantomData,
         }
     }
 
-    pub fn new(string: &str, span: Span) -> Term {
-        Term::_new(imp::Term::new(string, span.inner))
+    pub fn new(string: &str, span: Span) -> Ident {
+        Ident::_new(imp::Ident::new(string, span.inner))
     }
 
-    pub fn as_str(&self) -> &str {
-        self.inner.as_str()
+    #[cfg(procmacro2_semver_exempt)]
+    pub fn new_raw(string: &str, span: Span) -> Ident {
+        Ident::_new_raw(string, span)
+    }
+
+    fn _new_raw(string: &str, span: Span) -> Ident {
+        Ident::_new(imp::Ident::new_raw(string, span.inner))
     }
 
     pub fn span(&self) -> Span {
@@ -502,13 +515,40 @@ impl Term {
     }
 }
 
-impl fmt::Display for Term {
+impl PartialEq for Ident {
+    fn eq(&self, other: &Ident) -> bool {
+        self.to_string() == other.to_string()
+    }
+}
+
+impl Eq for Ident {
+}
+
+impl PartialOrd for Ident {
+    fn partial_cmp(&self, other: &Ident) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for Ident {
+    fn cmp(&self, other: &Ident) -> Ordering {
+        self.to_string().cmp(&other.to_string())
+    }
+}
+
+impl Hash for Ident {
+    fn hash<H: Hasher>(&self, hasher: &mut H) {
+        self.to_string().hash(hasher)
+    }
+}
+
+impl fmt::Display for Ident {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.as_str().fmt(f)
+        self.inner.fmt(f)
     }
 }
 
-impl fmt::Debug for Term {
+impl fmt::Debug for Ident {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.inner.fmt(f)
     }
diff --git a/src/stable.rs b/src/stable.rs
index 92227931..a718db23 100644
--- a/src/stable.rs
+++ b/src/stable.rs
@@ -14,7 +14,7 @@ use std::vec;
 use strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
 use unicode_xid::UnicodeXID;
 
-use {Delimiter, Group, Op, Spacing, TokenTree};
+use {Delimiter, Group, Punct, Spacing, TokenTree};
 
 #[derive(Clone)]
 pub struct TokenStream {
@@ -95,9 +95,9 @@ impl fmt::Display for TokenStream {
                         write!(f, "{} {} {}", start, tt.stream(), end)?
                     }
                 }
-                TokenTree::Term(ref tt) => write!(f, "{}", tt.as_str())?,
-                TokenTree::Op(ref tt) => {
-                    write!(f, "{}", tt.op())?;
+                TokenTree::Ident(ref tt) => write!(f, "{}", tt)?,
+                TokenTree::Punct(ref tt) => {
+                    write!(f, "{}", tt.as_char())?;
                     match tt.spacing() {
                         Spacing::Alone => {}
                         Spacing::Joint => joint = true,
@@ -156,6 +156,12 @@ impl iter::FromIterator<TokenTree> for TokenStream {
     }
 }
 
+impl Extend<TokenTree> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
+        self.inner.extend(streams);
+    }
+}
+
 pub type TokenTreeIter = vec::IntoIter<TokenTree>;
 
 impl IntoIterator for TokenStream {
@@ -170,6 +176,7 @@ impl IntoIterator for TokenStream {
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct FileName(String);
 
+#[allow(dead_code)]
 pub fn file_name(s: String) -> FileName {
     FileName(s)
 }
@@ -407,24 +414,34 @@ impl fmt::Debug for Span {
     }
 }
 
-#[derive(Copy, Clone)]
-pub struct Term {
+#[derive(Clone)]
+pub struct Ident {
     intern: usize,
     span: Span,
+    raw: bool,
 }
 
 thread_local!(static SYMBOLS: RefCell<Interner> = RefCell::new(Interner::new()));
 
-impl Term {
-    pub fn new(string: &str, span: Span) -> Term {
+impl Ident {
+    fn _new(string: &str, raw: bool, span: Span) -> Ident {
         validate_term(string);
-        Term {
+        Ident {
             intern: SYMBOLS.with(|s| s.borrow_mut().intern(string)),
             span: span,
+            raw: raw,
         }
     }
 
+    pub fn new(string: &str, span: Span) -> Ident {
+        Ident::_new(string, false, span)
+    }
+
+    pub fn new_raw(string: &str, span: Span) -> Ident {
+        Ident::_new(string, true, span)
+    }
+
     pub fn as_str(&self) -> &str {
         SYMBOLS.with(|interner| {
             let interner = interner.borrow();
@@ -443,20 +460,13 @@ impl Term {
 }
 
 fn validate_term(string: &str) {
-    let validate = if string.starts_with('\'') {
-        &string[1..]
-    } else if string.starts_with("r#") {
-        &string[2..]
-    } else {
-        string
-    };
-
+    let validate = string;
     if validate.is_empty() {
-        panic!("Term is not allowed to be empty; use Option<Term>");
+        panic!("Ident is not allowed to be empty; use Option<Ident>");
    }
 
     if validate.bytes().all(|digit| digit >= b'0' && digit <= b'9') {
-        panic!("Term cannot be a number; use Literal instead");
+        panic!("Ident cannot be a number; use Literal instead");
     }
 
     fn xid_ok(string: &str) -> bool {
@@ -474,16 +484,26 @@ fn validate_term(string: &str) {
     }
 
     if !xid_ok(validate) {
-        panic!("{:?} is not a valid Term", string);
+        panic!("{:?} is not a valid Ident", string);
     }
 }
 
-impl fmt::Debug for Term {
+impl fmt::Display for Ident {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        let mut debug = f.debug_struct("Term");
+        if self.raw {
+            "r#".fmt(f)?;
+        }
+        self.as_str().fmt(f)
+    }
+}
+
+impl fmt::Debug for Ident {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let mut debug = f.debug_struct("Ident");
         debug.field("sym", &format_args!("{}", self.as_str()));
         #[cfg(procmacro2_semver_exempt)]
         debug.field("span", &self.span);
+        debug.field("raw", &self.raw);
         debug.finish()
     }
 }
@@ -713,9 +733,9 @@ named!(token_kind -> TokenTree, alt!(
     |
     map!(literal, |l| TokenTree::Literal(::Literal::_new_stable(l))) // must be before symbol
     |
-    symbol
+    map!(op, TokenTree::Punct)
     |
-    map!(op, TokenTree::Op)
+    symbol_leading_ws
 ));
 
 named!(group -> Group, alt!(
@@ -738,17 +758,14 @@ named!(group -> Group, alt!(
     ) => { |ts| Group::new(Delimiter::Brace, ::TokenStream::_new_stable(ts)) }
 ));
 
-fn symbol(mut input: Cursor) -> PResult<TokenTree> {
-    input = skip_whitespace(input);
+fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
+    symbol(skip_whitespace(input))
+}
 
+fn symbol(input: Cursor) -> PResult<TokenTree> {
     let mut chars = input.char_indices();
 
-    let lifetime = input.starts_with("'");
-    if lifetime {
-        chars.next();
-    }
-
-    let raw = !lifetime && input.starts_with("r#");
+    let raw = input.starts_with("r#");
     if raw {
         chars.next();
         chars.next();
@@ -768,27 +785,20 @@ fn symbol(mut input: Cursor) -> PResult<TokenTree> {
     }
 
     let a = &input.rest[..end];
-    if a == "r#_" || lifetime && a != "'static" && KEYWORDS.contains(&&a[1..]) {
+    if a == "r#_" {
         Err(LexError)
     } else if a == "_" {
-        Ok((input.advance(end), Op::new('_', Spacing::Alone).into()))
+        Ok((input.advance(end), Punct::new('_', Spacing::Alone).into()))
     } else {
-        Ok((
-            input.advance(end),
-            ::Term::new(a, ::Span::call_site()).into(),
-        ))
+        let ident = if raw {
+            ::Ident::_new_raw(&a[2..], ::Span::call_site())
+        } else {
+            ::Ident::new(a, ::Span::call_site())
+        };
+        Ok((input.advance(end), ident.into()))
     }
 }
 
-// From https://github.com/rust-lang/rust/blob/master/src/libsyntax_pos/symbol.rs
-static KEYWORDS: &'static [&'static str] = &[
-    "abstract", "alignof", "as", "become", "box", "break", "const", "continue", "crate", "do",
-    "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", "let", "loop",
-    "macro", "match", "mod", "move", "mut", "offsetof", "override", "priv", "proc", "pub", "pure",
-    "ref", "return", "self", "Self", "sizeof", "static", "struct", "super", "trait", "true",
-    "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield",
-];
-
 fn literal(input: Cursor) -> PResult<Literal> {
     let input_no_ws = skip_whitespace(input);
@@ -1211,15 +1221,19 @@ fn digits(mut input: Cursor) -> PResult<()> {
     }
 }
 
-fn op(input: Cursor) -> PResult<Op> {
+fn op(input: Cursor) -> PResult<Punct> {
     let input = skip_whitespace(input);
     match op_char(input) {
+        Ok((rest, '\'')) => {
+            symbol(rest)?;
+            Ok((rest, Punct::new('\'', Spacing::Joint)))
+        }
         Ok((rest, ch)) => {
            let kind = match op_char(rest) {
                 Ok(_) => Spacing::Joint,
                 Err(LexError) => Spacing::Alone,
             };
-            Ok((rest, Op::new(ch, kind)))
+            Ok((rest, Punct::new(ch, kind)))
         }
         Err(LexError) => Err(LexError),
     }
@@ -1238,7 +1252,7 @@ fn op_char(input: Cursor) -> PResult<char> {
             return Err(LexError);
         }
     };
-    let recognized = "~!@#$%^&*-=+|;:,<.>/?";
+    let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
     if recognized.contains(first) {
         Ok((input.advance(first.len_utf8()), first))
     } else {
@@ -1249,13 +1263,13 @@ fn op_char(input: Cursor) -> PResult<char> {
 fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
     let mut trees = Vec::new();
     let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
-    trees.push(TokenTree::Op(Op::new('#', Spacing::Alone)));
+    trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
     if inner {
-        trees.push(Op::new('!', Spacing::Alone).into());
+        trees.push(Punct::new('!', Spacing::Alone).into());
     }
     let mut stream = vec![
-        TokenTree::Term(::Term::new("doc", span)),
-        TokenTree::Op(Op::new('=', Spacing::Alone)),
+        TokenTree::Ident(::Ident::new("doc", span)),
+        TokenTree::Punct(Punct::new('=', Spacing::Alone)),
         TokenTree::Literal(::Literal::string(comment)),
     ];
     for tt in stream.iter_mut() {
diff --git a/src/unstable.rs b/src/unstable.rs
index d709f889..8026fdad 100644
--- a/src/unstable.rs
+++ b/src/unstable.rs
@@ -8,7 +8,7 @@ use std::panic;
 use proc_macro;
 
 use stable;
-use {Delimiter, Group, Op, Spacing, TokenTree};
+use {Delimiter, Group, Punct, Spacing, TokenTree};
 
 #[derive(Clone)]
 pub enum TokenStream {
@@ -123,16 +123,16 @@ impl From<TokenTree> for TokenStream {
                 group.set_span(span.inner.unwrap_nightly());
                 group.into()
             }
-            TokenTree::Op(tt) => {
+            TokenTree::Punct(tt) => {
                 let spacing = match tt.spacing() {
                     Spacing::Joint => proc_macro::Spacing::Joint,
                     Spacing::Alone => proc_macro::Spacing::Alone,
                 };
-                let mut op = proc_macro::Op::new(tt.op(), spacing);
+                let mut op = proc_macro::Punct::new(tt.as_char(), spacing);
                 op.set_span(tt.span().inner.unwrap_nightly());
                 op.into()
             }
-            TokenTree::Term(tt) => tt.inner.unwrap_nightly().into(),
+            TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
             TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
         };
         TokenStream::Nightly(tt.into())
@@ -157,6 +157,29 @@ impl iter::FromIterator<TokenTree> for TokenStream {
     }
 }
 
+impl Extend<TokenTree> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
+        match self {
+            TokenStream::Nightly(tts) => {
+                *tts = tts.clone()
+                    .into_iter()
+                    .chain(
+                        streams.into_iter()
+                            .map(TokenStream::from)
+                            .flat_map(|t| {
+                                match t {
+                                    TokenStream::Nightly(tts) => tts.into_iter(),
+                                    _ => panic!()
+                                }
+                            })
+                    )
+                    .collect();
+            }
+            TokenStream::Stable(tts) => tts.extend(streams),
+        }
+    }
+}
+
 impl fmt::Debug for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
@@ -225,16 +248,16 @@ impl Iterator for TokenTreeIter {
                 g.set_span(::Span::_new(Span::Nightly(tt.span())));
                 g.into()
             }
-            proc_macro::TokenTree::Op(tt) => {
+            proc_macro::TokenTree::Punct(tt) => {
                 let spacing = match tt.spacing() {
                     proc_macro::Spacing::Joint => Spacing::Joint,
                     proc_macro::Spacing::Alone => Spacing::Alone,
                 };
-                let mut o = Op::new(tt.op(), spacing);
+                let mut o = Punct::new(tt.as_char(), spacing);
                 o.set_span(::Span::_new(Span::Nightly(tt.span())));
                 o.into()
             }
-            proc_macro::TokenTree::Term(s) => ::Term::_new(Term::Nightly(s)).into(),
+            proc_macro::TokenTree::Ident(s) => ::Ident::_new(Ident::Nightly(s)).into(),
             proc_macro::TokenTree::Literal(l) => ::Literal::_new(Literal::Nightly(l)).into(),
         })
     }
@@ -434,55 +457,64 @@ impl fmt::Debug for Span {
     }
 }
 
-#[derive(Copy, Clone)]
-pub enum Term {
-    Nightly(proc_macro::Term),
-    Stable(stable::Term),
+#[derive(Clone)]
+pub enum Ident {
+    Nightly(proc_macro::Ident),
+    Stable(stable::Ident),
 }
 
-impl Term {
-    pub fn new(string: &str, span: Span) -> Term {
+impl Ident {
+    pub fn new(string: &str, span: Span) -> Ident {
         match span {
-            Span::Nightly(s) => Term::Nightly(proc_macro::Term::new(string, s)),
-            Span::Stable(s) => Term::Stable(stable::Term::new(string, s)),
+            Span::Nightly(s) => Ident::Nightly(proc_macro::Ident::new(string, s)),
+            Span::Stable(s) => Ident::Stable(stable::Ident::new(string, s)),
         }
     }
 
-    pub fn as_str(&self) -> &str {
-        match self {
-            Term::Nightly(t) => t.as_str(),
-            Term::Stable(t) => t.as_str(),
+    pub fn new_raw(string: &str, span: Span) -> Ident {
+        match span {
+            Span::Nightly(s) => Ident::Nightly(proc_macro::Ident::new_raw(string, s)),
+            Span::Stable(s) => Ident::Stable(stable::Ident::new_raw(string, s)),
         }
     }
 
     pub fn span(&self) -> Span {
         match self {
-            Term::Nightly(t) => Span::Nightly(t.span()),
-            Term::Stable(t) => Span::Stable(t.span()),
+            Ident::Nightly(t) => Span::Nightly(t.span()),
+            Ident::Stable(t) => Span::Stable(t.span()),
         }
     }
 
     pub fn set_span(&mut self, span: Span) {
         match (self, span) {
-            (Term::Nightly(t), Span::Nightly(s)) => t.set_span(s),
-            (Term::Stable(t), Span::Stable(s)) => t.set_span(s),
+            (Ident::Nightly(t), Span::Nightly(s)) => t.set_span(s),
+            (Ident::Stable(t), Span::Stable(s)) => t.set_span(s),
             _ => mismatch(),
         }
     }
 
-    fn unwrap_nightly(self) -> proc_macro::Term {
+    fn unwrap_nightly(self) -> proc_macro::Ident {
+        match self {
+            Ident::Nightly(s) => s,
+            Ident::Stable(_) => mismatch(),
+        }
+    }
+}
+
+impl fmt::Display for Ident {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
-            Term::Nightly(s) => s,
-            Term::Stable(_) => mismatch(),
+            Ident::Nightly(t) => t.fmt(f),
+            Ident::Stable(t) => t.fmt(f),
         }
     }
 }
 
-impl fmt::Debug for Term {
+impl fmt::Debug for Ident {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
-            Term::Nightly(t) => t.fmt(f),
-            Term::Stable(t) => t.fmt(f),
+            Ident::Nightly(t) => t.fmt(f),
+            Ident::Stable(t) => t.fmt(f),
         }
     }
 }
diff --git a/tests/test.rs b/tests/test.rs
index dde66d4f..a3c53ec9 100644
--- a/tests/test.rs
+++ b/tests/test.rs
@@ -2,84 +2,78 @@ extern crate proc_macro2;
 
 use std::str::{self, FromStr};
 
-use proc_macro2::{Literal, Spacing, Span, Term, TokenStream, TokenTree};
+use proc_macro2::{Literal, Spacing, Span, Ident, TokenStream, TokenTree};
 
 #[test]
 fn terms() {
-    assert_eq!(Term::new("String", Span::call_site()).as_str(), "String");
-    assert_eq!(Term::new("fn", Span::call_site()).as_str(), "fn");
-    assert_eq!(Term::new("_", Span::call_site()).as_str(), "_");
+    assert_eq!(Ident::new("String", Span::call_site()).to_string(), "String");
+    assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn");
+    assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_");
 }
 
 #[test]
+#[cfg(procmacro2_semver_exempt)]
 fn raw_terms() {
     assert_eq!(
-        Term::new("r#String", Span::call_site()).as_str(),
+        Ident::new_raw("String", Span::call_site()).to_string(),
         "r#String"
     );
-    assert_eq!(Term::new("r#fn", Span::call_site()).as_str(), "r#fn");
-    assert_eq!(Term::new("r#_", Span::call_site()).as_str(), "r#_");
+    assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn");
+    assert_eq!(Ident::new_raw("_", Span::call_site()).to_string(), "r#_");
 }
 
 #[test]
-fn lifetimes() {
-    assert_eq!(Term::new("'a", Span::call_site()).as_str(), "'a");
-    assert_eq!(Term::new("'static", Span::call_site()).as_str(), "'static");
-    assert_eq!(Term::new("'_", Span::call_site()).as_str(), "'_");
-}
-
-#[test]
-#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
+#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
 fn term_empty() {
-    Term::new("", Span::call_site());
+    Ident::new("", Span::call_site());
 }
 
 #[test]
-#[should_panic(expected = "Term cannot be a number; use Literal instead")]
+#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
 fn term_number() {
-    Term::new("255", Span::call_site());
+    Ident::new("255", Span::call_site());
 }
 
 #[test]
-#[should_panic(expected = "\"a#\" is not a valid Term")]
+#[should_panic(expected = "\"a#\" is not a valid Ident")]
 fn term_invalid() {
-    Term::new("a#", Span::call_site());
+    Ident::new("a#", Span::call_site());
 }
 
 #[test]
-#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
+#[should_panic(expected = "not a valid Ident")]
 fn raw_term_empty() {
-    Term::new("r#", Span::call_site());
+    Ident::new("r#", Span::call_site());
 }
 
 #[test]
-#[should_panic(expected = "Term cannot be a number; use Literal instead")]
+#[should_panic(expected = "not a valid Ident")]
 fn raw_term_number() {
-    Term::new("r#255", Span::call_site());
+    Ident::new("r#255", Span::call_site());
 }
 
 #[test]
-#[should_panic(expected = "\"r#a#\" is not a valid Term")]
+#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
 fn raw_term_invalid() {
-    Term::new("r#a#", Span::call_site());
+    Ident::new("r#a#", Span::call_site());
 }
 
 #[test]
-#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
+#[should_panic(expected = "not a valid Ident")]
 fn lifetime_empty() {
-    Term::new("'", Span::call_site());
+    Ident::new("'", Span::call_site());
 }
 
 #[test]
-#[should_panic(expected = "Term cannot be a number; use Literal instead")]
+#[should_panic(expected = "not a valid Ident")]
 fn lifetime_number() {
-    Term::new("'255", Span::call_site());
+    Ident::new("'255", Span::call_site());
 }
 
 #[test]
-#[should_panic(expected = r#""\'a#" is not a valid Term"#)]
+#[should_panic(expected = r#""\'a#" is not a valid Ident"#)]
 fn lifetime_invalid() {
-    Term::new("'a#", Span::call_site());
+    Ident::new("'a#", Span::call_site());
 }
 
 #[test]
@@ -127,15 +121,14 @@ fn roundtrip() {
 #[test]
 fn fail() {
     fn fail(p: &str) {
-        if p.parse::<TokenStream>().is_ok() {
-            panic!("should have failed to parse: {}", p);
+        if let Ok(s) = p.parse::<TokenStream>() {
+            panic!("should have failed to parse: {}\n{:#?}", p, s);
         }
     }
     fail("1x");
     fail("1u80");
     fail("1f320");
    fail("' static");
-    fail("'mut");
    fail("r#1");
    fail("r#_");
 }
@@ -264,7 +257,7 @@ fn tricky_doc_comment() {
     let tokens = stream.into_iter().collect::<Vec<_>>();
     assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
     match tokens[0] {
-        proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '#'),
+        proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
         _ => panic!("wrong token {:?}", tokens[0]),
     }
     let mut tokens = match tokens[1] {
@@ -276,11 +269,11 @@
     };
 
     match tokens.next().unwrap() {
-        proc_macro2::TokenTree::Term(ref tt) => assert_eq!(tt.as_str(), "doc"),
+        proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
         t => panic!("wrong token {:?}", t),
     }
     match tokens.next().unwrap() {
-        proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '='),
+        proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
         t => panic!("wrong token {:?}", t),
     }
     match tokens.next().unwrap() {
@@ -300,8 +293,8 @@ fn op_before_comment() {
     let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
     match tts.next().unwrap() {
-        TokenTree::Op(tt) => {
-            assert_eq!(tt.op(), '~');
+        TokenTree::Punct(tt) => {
+            assert_eq!(tt.as_char(), '~');
             assert_eq!(tt.spacing(), Spacing::Alone);
         }
         wrong => panic!("wrong token {:?}", wrong),
     }
@@ -312,7 +305,7 @@
 fn raw_identifier() {
     let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
     match tts.next().unwrap() {
-        TokenTree::Term(raw) => assert_eq!("r#dyn", raw.as_str()),
+        TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()),
         wrong => panic!("wrong token {:?}", wrong),
     }
     assert!(tts.next().is_none());
@@ -328,10 +321,11 @@ TokenStream [
     Group {
         delimiter: Bracket,
         stream: TokenStream [
-            Term {
-                sym: a
+            Ident {
+                sym: a,
+                raw: false
             },
-            Op {
+            Punct {
                 op: '+',
                 spacing: Alone
             },
@@ -349,11 +343,12 @@ TokenStream [
     Group {
         delimiter: Bracket,
         stream: TokenStream [
-            Term {
+            Ident {
                 sym: a,
-                span: bytes(2..3)
+                span: bytes(2..3),
+                raw: false
             },
-            Op {
+            Punct {
                 op: '+',
                 spacing: Alone,
                 span: bytes(4..5)