Skip to content

Commit 182bcdb

Browse files
authored Jul 8, 2016
Auto merge of #34575 - cgswords:tstream, r=nrc
Introducing TokenStreams and TokenSlices for procedural macros This pull request introduces TokenStreams and TokenSlices into the compiler in preparation for usage as part of RFC 1566 (procedural macros). r? @nrc
·
1.89.01.12.0
2 parents 2ad5ed0 + 7547596 commit 182bcdb

File tree

1 file changed

+1121
-39
lines changed

1 file changed

+1121
-39
lines changed
 

‎src/libsyntax/tokenstream.rs

Lines changed: 1121 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -8,18 +8,34 @@
88
// option. This file may not be copied, modified, or distributed
99
// except according to those terms.
1010

11-
//! # Token Trees
12-
//! TokenTrees are syntactic forms for dealing with tokens. The description below is
13-
//! more complete; in short a TokenTree is a single token, a delimited sequence of token
14-
//! trees, or a sequence with repetition for list splicing as part of macro expansion.
11+
//! # Token Streams
12+
//!
13+
//! TokenStreams represent syntactic objects before they are converted into ASTs.
14+
//! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s,
15+
//! which are themselves either a single Token, a Delimited subsequence of tokens,
16+
//! or a SequenceRepetition specifier (for the purpose of sequence generation during macro
17+
//! expansion).
18+
//!
19+
//! A TokenStream also has a slice view, `TokenSlice`, that is analogous to `str` for
20+
//! `String`: it allows the programmer to divvy up, explore, and otherwise partition a
21+
//! TokenStream as borrowed subsequences.
1522
16-
use ast::{AttrStyle};
17-
use codemap::{Span};
23+
use ast::{self, AttrStyle, LitKind};
24+
use syntax_pos::{Span, DUMMY_SP, NO_EXPANSION};
25+
use codemap::Spanned;
1826
use ext::base;
1927
use ext::tt::macro_parser;
2028
use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
2129
use parse::lexer;
22-
use parse::token;
30+
use parse;
31+
use parse::token::{self, Token, Lit, InternedString, Nonterminal};
32+
use parse::token::Lit as TokLit;
33+
34+
use std::fmt;
35+
use std::mem;
36+
use std::ops::Index;
37+
use std::ops;
38+
use std::iter::*;
2339

2440
use std::rc::Rc;
2541

@@ -56,6 +72,11 @@ impl Delimited {
5672
pub fn close_tt(&self) -> TokenTree {
5773
TokenTree::Token(self.close_span, self.close_token())
5874
}
75+
76+
/// Returns the token trees inside the delimiters.
77+
pub fn subtrees(&self) -> &[TokenTree] {
78+
&self.tts
79+
}
5980
}
6081

6182
/// A sequence of token trees
@@ -91,17 +112,15 @@ pub enum KleeneOp {
91112
///
92113
/// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
93114
/// Nothing special happens to misnamed or misplaced `SubstNt`s.
94-
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
115+
#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
95116
pub enum TokenTree {
96117
/// A single token
97118
Token(Span, token::Token),
98119
/// A delimited sequence of token trees
99120
Delimited(Span, Rc<Delimited>),
100121

101122
// This only makes sense in MBE macros.
102-
103123
/// A kleene-style repetition sequence with a span
104-
// FIXME(eddyb) #12938 Use DST.
105124
Sequence(Span, Rc<SequenceRepetition>),
106125
}
107126

@@ -111,45 +130,42 @@ impl TokenTree {
111130
TokenTree::Token(_, token::DocComment(name)) => {
112131
match doc_comment_style(&name.as_str()) {
113132
AttrStyle::Outer => 2,
114-
AttrStyle::Inner => 3
133+
AttrStyle::Inner => 3,
115134
}
116135
}
117136
TokenTree::Token(_, token::SpecialVarNt(..)) => 2,
118137
TokenTree::Token(_, token::MatchNt(..)) => 3,
119-
TokenTree::Delimited(_, ref delimed) => {
120-
delimed.tts.len() + 2
121-
}
122-
TokenTree::Sequence(_, ref seq) => {
123-
seq.tts.len()
124-
}
125-
TokenTree::Token(..) => 0
138+
TokenTree::Delimited(_, ref delimed) => delimed.tts.len() + 2,
139+
TokenTree::Sequence(_, ref seq) => seq.tts.len(),
140+
TokenTree::Token(..) => 0,
126141
}
127142
}
128143

129144
pub fn get_tt(&self, index: usize) -> TokenTree {
130145
match (self, index) {
131-
(&TokenTree::Token(sp, token::DocComment(_)), 0) => {
132-
TokenTree::Token(sp, token::Pound)
133-
}
146+
(&TokenTree::Token(sp, token::DocComment(_)), 0) => TokenTree::Token(sp, token::Pound),
134147
(&TokenTree::Token(sp, token::DocComment(name)), 1)
135-
if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
148+
if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
136149
TokenTree::Token(sp, token::Not)
137150
}
138151
(&TokenTree::Token(sp, token::DocComment(name)), _) => {
139152
let stripped = strip_doc_comment_decoration(&name.as_str());
140153

141154
// Searches for the occurrences of `"#*` and returns the minimum number of `#`s
142155
// required to wrap the text.
143-
let num_of_hashes = stripped.chars().scan(0, |cnt, x| {
144-
*cnt = if x == '"' {
145-
1
146-
} else if *cnt != 0 && x == '#' {
147-
*cnt + 1
148-
} else {
149-
0
150-
};
151-
Some(*cnt)
152-
}).max().unwrap_or(0);
156+
let num_of_hashes = stripped.chars()
157+
.scan(0, |cnt, x| {
158+
*cnt = if x == '"' {
159+
1
160+
} else if *cnt != 0 && x == '#' {
161+
*cnt + 1
162+
} else {
163+
0
164+
};
165+
Some(*cnt)
166+
})
167+
.max()
168+
.unwrap_or(0);
153169

154170
TokenTree::Delimited(sp, Rc::new(Delimited {
155171
delim: token::Bracket,
@@ -181,24 +197,24 @@ impl TokenTree {
181197
TokenTree::Token(sp, token::Ident(kind))];
182198
v[index].clone()
183199
}
184-
(&TokenTree::Sequence(_, ref seq), _) => {
185-
seq.tts[index].clone()
186-
}
187-
_ => panic!("Cannot expand a token tree")
200+
(&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
201+
_ => panic!("Cannot expand a token tree"),
188202
}
189203
}
190204

191205
/// Returns the `Span` corresponding to this token tree.
192206
pub fn get_span(&self) -> Span {
193207
match *self {
194-
TokenTree::Token(span, _) => span,
208+
TokenTree::Token(span, _) => span,
195209
TokenTree::Delimited(span, _) => span,
196-
TokenTree::Sequence(span, _) => span,
210+
TokenTree::Sequence(span, _) => span,
197211
}
198212
}
199213

200214
/// Use this token tree as a matcher to parse given tts.
201-
pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
215+
pub fn parse(cx: &base::ExtCtxt,
216+
mtch: &[TokenTree],
217+
tts: &[TokenTree])
202218
-> macro_parser::NamedParseResult {
203219
// `None` is because we're not interpolating
204220
let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
@@ -208,5 +224,1071 @@ impl TokenTree {
208224
true);
209225
macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch)
210226
}
227+
228+
/// Check if this TokenTree is equal to the other, regardless of span information.
229+
pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
230+
match (self, other) {
231+
(&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
232+
(&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => {
233+
(*dl).delim == (*dl2).delim && dl.tts.len() == dl2.tts.len() &&
234+
{
235+
for (tt1, tt2) in dl.tts.iter().zip(dl2.tts.iter()) {
236+
if !tt1.eq_unspanned(tt2) {
237+
return false;
238+
}
239+
}
240+
true
241+
}
242+
}
243+
(_, _) => false,
244+
}
245+
}
246+
247+
/// Retrieve the TokenTree's span.
248+
pub fn span(&self) -> Span {
249+
match *self {
250+
TokenTree::Token(sp, _) |
251+
TokenTree::Delimited(sp, _) |
252+
TokenTree::Sequence(sp, _) => sp,
253+
}
254+
}
255+
256+
/// Indicates if the stream is a token that is equal to the provided token.
257+
pub fn eq_token(&self, t: Token) -> bool {
258+
match *self {
259+
TokenTree::Token(_, ref tk) => *tk == t,
260+
_ => false,
261+
}
262+
}
263+
264+
/// Indicates if the token is an identifier.
265+
pub fn is_ident(&self) -> bool {
266+
self.maybe_ident().is_some()
267+
}
268+
269+
/// Returns an identifier.
270+
pub fn maybe_ident(&self) -> Option<ast::Ident> {
271+
match *self {
272+
TokenTree::Token(_, Token::Ident(t)) => Some(t.clone()),
273+
TokenTree::Delimited(_, ref dl) => {
274+
let tts = dl.subtrees();
275+
if tts.len() != 1 {
276+
return None;
277+
}
278+
tts[0].maybe_ident()
279+
}
280+
_ => None,
281+
}
282+
}
283+
284+
/// Returns a Token literal.
285+
pub fn maybe_lit(&self) -> Option<token::Lit> {
286+
match *self {
287+
TokenTree::Token(_, Token::Literal(l, _)) => Some(l.clone()),
288+
TokenTree::Delimited(_, ref dl) => {
289+
let tts = dl.subtrees();
290+
if tts.len() != 1 {
291+
return None;
292+
}
293+
tts[0].maybe_lit()
294+
}
295+
_ => None,
296+
}
297+
}
298+
299+
/// Returns an AST string literal.
300+
pub fn maybe_str(&self) -> Option<ast::Lit> {
301+
match *self {
302+
TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => {
303+
let l = LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())),
304+
ast::StrStyle::Cooked);
305+
Some(Spanned {
306+
node: l,
307+
span: sp,
308+
})
309+
}
310+
TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => {
311+
let l = LitKind::Str(token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
312+
ast::StrStyle::Raw(n));
313+
Some(Spanned {
314+
node: l,
315+
span: sp,
316+
})
317+
}
318+
_ => None,
319+
}
320+
}
321+
}
322+
323+
/// # Token Streams
324+
///
325+
/// TokenStreams are a syntactic abstraction over TokenTrees. The goal is for procedural
326+
/// macros to work over TokenStreams instead of arbitrary syntax. For now, however, we
327+
/// are going to cut a few corners (i.e., use some of the AST structure) when we need to
328+
/// for backwards compatibility.
329+
330+
/// TokenStreams are collections of TokenTrees that represent a syntactic structure. The
331+
/// struct itself shouldn't be directly manipulated; the internal structure is not stable,
332+
/// and may be changed at any time in the future. The operators will not, however (except
333+
/// for signatures, later on).
334+
#[derive(Eq,Clone,Hash,RustcEncodable,RustcDecodable)]
335+
pub struct TokenStream {
336+
pub span: Span,
337+
pub tts: Vec<TokenTree>,
338+
}
339+
340+
impl fmt::Debug for TokenStream {
341+
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
342+
if self.tts.len() == 0 {
343+
write!(f, "([empty")?;
344+
} else {
345+
write!(f, "([")?;
346+
write!(f, "{:?}", self.tts[0])?;
347+
348+
for tt in self.tts.iter().skip(1) {
349+
write!(f, ",{:?}", tt)?;
350+
}
351+
}
352+
write!(f, "|")?;
353+
self.span.fmt(f)?;
354+
write!(f, "])")
355+
}
356+
}
357+
358+
/// Checks if two TokenStreams are equivalent (including spans). For unspanned
359+
/// equality, see `eq_unspanned`.
360+
impl PartialEq<TokenStream> for TokenStream {
361+
fn eq(&self, other: &TokenStream) -> bool {
362+
self.tts == other.tts
363+
}
364+
}
365+
366+
// NB this will disregard gaps. if we have [a|{2,5} , b|{11,13}], the resultant span
367+
// will be at {2,13}. Without finer-grained span structures, however, this seems to be
368+
// our only recourse.
369+
// FIXME Do something smarter to compute the expansion id.
370+
fn covering_span(trees: &[TokenTree]) -> Span {
371+
// disregard any dummy spans we have
372+
let trees = trees.iter().filter(|t| t.span() != DUMMY_SP).collect::<Vec<&TokenTree>>();
373+
374+
// if we're out of spans, stop
375+
if trees.len() < 1 {
376+
return DUMMY_SP;
377+
}
378+
379+
// set up the initial values
380+
let fst_span = trees[0].span();
381+
382+
let mut lo_span = fst_span.lo;
383+
let mut hi_span = fst_span.hi;
384+
let mut expn_id = fst_span.expn_id;
385+
386+
// compute the spans iteratively
387+
for t in trees.iter().skip(1) {
388+
let sp = t.span();
389+
if sp.lo < lo_span {
390+
lo_span = sp.lo;
391+
}
392+
if hi_span < sp.hi {
393+
hi_span = sp.hi;
394+
}
395+
if expn_id != sp.expn_id {
396+
expn_id = NO_EXPANSION;
397+
}
398+
}
399+
400+
Span {
401+
lo: lo_span,
402+
hi: hi_span,
403+
expn_id: expn_id,
404+
}
405+
}
406+
407+
/// TokenStream operators include basic destructuring, boolean operations, `maybe_...`
408+
/// operations, and `maybe_..._prefix` operations. Boolean operations are straightforward,
409+
/// indicating information about the structure of the stream. The `maybe_...` operations
410+
/// return `Some<...>` if the tokenstream contains the appropriate item.
411+
///
412+
/// Similarly, the `maybe_..._prefix` operations potentially return a
413+
/// partially-destructured stream as a pair where the first element is the expected item
414+
/// and the second is the remainder of the stream. As an example,
415+
///
416+
/// `maybe_path_prefix("a::b::c(a,b,c).foo()") -> (a::b::c, "(a,b,c).foo()")`
417+
impl TokenStream {
418+
/// Convert a vector of `TokenTree`s into a `TokenStream`.
419+
pub fn from_tts(trees: Vec<TokenTree>) -> TokenStream {
420+
let span = covering_span(&trees);
421+
TokenStream {
422+
tts: trees,
423+
span: span,
424+
}
425+
}
426+
427+
/// Copies all of the TokenTrees from the TokenSlice, appending them to the stream.
428+
pub fn append_stream(mut self, ts2: &TokenSlice) {
429+
for tt in ts2.iter() {
430+
self.tts.push(tt.clone());
431+
}
432+
self.span = covering_span(&self.tts[..]);
433+
}
434+
435+
/// Manually change a TokenStream's span.
436+
pub fn respan(self, span: Span) -> TokenStream {
437+
TokenStream {
438+
tts: self.tts,
439+
span: span,
440+
}
441+
}
442+
443+
/// Construct a TokenStream from an ast literal.
444+
pub fn from_ast_lit_str(lit: ast::Lit) -> Option<TokenStream> {
445+
match lit.node {
446+
LitKind::Str(val, _) => {
447+
let val = TokLit::Str_(token::intern(&val));
448+
Some(TokenStream::from_tts(vec![TokenTree::Token(lit.span,
449+
Token::Literal(val, None))]))
450+
}
451+
_ => None,
452+
}
453+
454+
}
455+
456+
/// Convert a vector of TokenTrees into a parentheses-delimited TokenStream.
457+
pub fn as_paren_delimited_stream(tts: Vec<TokenTree>) -> TokenStream {
458+
let new_sp = covering_span(&tts);
459+
460+
let new_delim = Rc::new(Delimited {
461+
delim: token::DelimToken::Paren,
462+
open_span: DUMMY_SP,
463+
tts: tts,
464+
close_span: DUMMY_SP,
465+
});
466+
467+
TokenStream::from_tts(vec![TokenTree::Delimited(new_sp, new_delim)])
468+
}
469+
470+
/// Convert an interned string into a one-element TokenStream.
471+
pub fn from_interned_string_as_ident(s: InternedString) -> TokenStream {
472+
TokenStream::from_tts(vec![TokenTree::Token(DUMMY_SP,
473+
Token::Ident(token::str_to_ident(&s[..])))])
474+
}
211475
}
212476

477+
/// TokenSlices are 'views' of `TokenStream's; they fit the same role as `str`s do for
478+
/// `String`s. In general, most TokenStream manipulations will be refocusing their internal
479+
/// contents by taking a TokenSlice and then using indexing and the provided operators.
480+
#[derive(PartialEq, Eq, Debug)]
481+
pub struct TokenSlice([TokenTree]);
482+
483+
impl ops::Deref for TokenStream {
484+
type Target = TokenSlice;
485+
486+
fn deref(&self) -> &TokenSlice {
487+
let tts: &[TokenTree] = &*self.tts;
488+
unsafe { mem::transmute(tts) }
489+
}
490+
}
491+
492+
impl TokenSlice {
493+
/// Convert a borrowed TokenTree slice into a borrowed TokenSlice.
494+
fn from_tts(tts: &[TokenTree]) -> &TokenSlice {
495+
unsafe { mem::transmute(tts) }
496+
}
497+
498+
/// Indicates whether the `TokenStream` is empty.
499+
pub fn is_empty(&self) -> bool {
500+
self.len() == 0
501+
}
502+
503+
/// Return the `TokenSlice`'s length.
504+
pub fn len(&self) -> usize {
505+
self.0.len()
506+
}
507+
508+
/// Check equality versus another TokenStream, ignoring span information.
509+
pub fn eq_unspanned(&self, other: &TokenSlice) -> bool {
510+
if self.len() != other.len() {
511+
return false;
512+
}
513+
for (tt1, tt2) in self.iter().zip(other.iter()) {
514+
if !tt1.eq_unspanned(tt2) {
515+
return false;
516+
}
517+
}
518+
true
519+
}
520+
521+
/// Compute a span that covers the entire TokenSlice (eg, one wide enough to include
522+
/// the entire slice). If the inputs share expansion identification, it is preserved.
523+
/// If they do not, it is discarded.
524+
pub fn covering_span(&self) -> Span {
525+
covering_span(&self.0)
526+
}
527+
528+
/// Indicates whether the stream is of the form `= <ts>`, where `<ts>` is a continued
529+
/// `TokenStream`.
530+
pub fn is_assignment(&self) -> bool {
531+
self.maybe_assignment().is_some()
532+
}
533+
534+
/// Returns the RHS of an assigment.
535+
pub fn maybe_assignment(&self) -> Option<&TokenSlice> {
536+
if !(self.len() > 1) {
537+
return None;
538+
}
539+
540+
Some(&self[1..])
541+
}
542+
543+
/// Indicates whether the stream is a single, delimited expression (e.g., `(a,b,c)` or
544+
/// `{a,b,c}`).
545+
pub fn is_delimited(&self) -> bool {
546+
self.maybe_delimited().is_some()
547+
}
548+
549+
/// Returns the inside of the delimited term as a new TokenStream.
550+
pub fn maybe_delimited(&self) -> Option<&TokenSlice> {
551+
if !(self.len() == 1) {
552+
return None;
553+
}
554+
555+
match self[0] {
556+
TokenTree::Delimited(_, ref rc) => Some(TokenSlice::from_tts(&*rc.tts)),
557+
_ => None,
558+
}
559+
}
560+
561+
/// Returns a list of `TokenSlice`s if the stream is a delimited list, breaking the
562+
/// stream on commas.
563+
pub fn maybe_comma_list(&self) -> Option<Vec<&TokenSlice>> {
564+
let maybe_tts = self.maybe_delimited();
565+
566+
let ts: &TokenSlice;
567+
match maybe_tts {
568+
Some(t) => {
569+
ts = t;
570+
}
571+
None => {
572+
return None;
573+
}
574+
}
575+
576+
let splits: Vec<&TokenSlice> = ts.split(|x| match *x {
577+
TokenTree::Token(_, Token::Comma) => true,
578+
_ => false,
579+
})
580+
.filter(|x| x.len() > 0)
581+
.collect();
582+
583+
Some(splits)
584+
}
585+
586+
/// Returns a Nonterminal if it is Interpolated.
587+
pub fn maybe_interpolated_nonterminal(&self) -> Option<Nonterminal> {
588+
if !(self.len() == 1) {
589+
return None;
590+
}
591+
592+
match self[0] {
593+
TokenTree::Token(_, Token::Interpolated(ref nt)) => Some(nt.clone()),
594+
_ => None,
595+
}
596+
}
597+
598+
/// Indicates if the stream is exactly one identifier.
599+
pub fn is_ident(&self) -> bool {
600+
self.maybe_ident().is_some()
601+
}
602+
603+
/// Returns an identifier
604+
pub fn maybe_ident(&self) -> Option<ast::Ident> {
605+
if !(self.len() == 1) {
606+
return None;
607+
}
608+
609+
let tok = if let Some(tts) = self.maybe_delimited() {
610+
if tts.len() != 1 {
611+
return None;
612+
}
613+
&tts[0]
614+
} else {
615+
&self[0]
616+
};
617+
618+
match *tok {
619+
TokenTree::Token(_, Token::Ident(t)) => Some(t),
620+
_ => None,
621+
}
622+
}
623+
624+
/// Indicates if the stream is exactly one literal
625+
pub fn is_lit(&self) -> bool {
626+
self.maybe_lit().is_some()
627+
}
628+
629+
/// Returns a literal
630+
pub fn maybe_lit(&self) -> Option<token::Lit> {
631+
if !(self.len() == 1) {
632+
return None;
633+
}
634+
635+
let tok = if let Some(tts) = self.maybe_delimited() {
636+
if tts.len() != 1 {
637+
return None;
638+
}
639+
&tts[0]
640+
} else {
641+
&self[0]
642+
};
643+
644+
match *tok {
645+
TokenTree::Token(_, Token::Literal(l, _)) => Some(l),
646+
_ => None,
647+
}
648+
}
649+
650+
/// Returns an AST string literal if the TokenStream is either a normal ('cooked') or
651+
/// raw string literal.
652+
pub fn maybe_str(&self) -> Option<ast::Lit> {
653+
if !(self.len() == 1) {
654+
return None;
655+
}
656+
657+
match self[0] {
658+
TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => {
659+
let l = LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())),
660+
ast::StrStyle::Cooked);
661+
Some(Spanned {
662+
node: l,
663+
span: sp,
664+
})
665+
}
666+
TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => {
667+
let l = LitKind::Str(token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
668+
ast::StrStyle::Raw(n));
669+
Some(Spanned {
670+
node: l,
671+
span: sp,
672+
})
673+
}
674+
_ => None,
675+
}
676+
}
677+
678+
/// This operation extracts the path prefix , returning an AST path struct and the remainder
679+
/// of the stream (if it finds one). To be more specific, a tokenstream that has a valid,
680+
/// non-global path as a prefix (eg `foo(bar, baz)`, `foo::bar(bar)`, but *not*
681+
/// `::foo::bar(baz)`) will yield the path and the remaining tokens (as a slice). The previous
682+
/// examples will yield
683+
/// `Some((Path { segments = vec![foo], ... }, [(bar, baz)]))`,
684+
/// `Some((Path { segments = vec![foo, bar] }, [(baz)]))`,
685+
/// and `None`, respectively.
686+
pub fn maybe_path_prefix(&self) -> Option<(ast::Path, &TokenSlice)> {
687+
let mut segments: Vec<ast::PathSegment> = Vec::new();
688+
689+
let path: Vec<&TokenTree> = self.iter()
690+
.take_while(|x| x.is_ident() || x.eq_token(Token::ModSep))
691+
.collect::<Vec<&TokenTree>>();
692+
693+
let path_size = path.len();
694+
if path_size == 0 {
695+
return None;
696+
}
697+
698+
let cov_span = self[..path_size].covering_span();
699+
let rst = &self[path_size..];
700+
701+
let fst_id = path[0];
702+
703+
if let Some(id) = fst_id.maybe_ident() {
704+
segments.push(ast::PathSegment {
705+
identifier: id,
706+
parameters: ast::PathParameters::none(),
707+
});
708+
} else {
709+
return None;
710+
}
711+
712+
// Let's use a state machine to parse out the rest.
713+
enum State {
714+
Mod, // Expect a `::`, or return None otherwise.
715+
Ident, // Expect an ident, or return None otherwise.
716+
}
717+
let mut state = State::Mod;
718+
719+
for p in &path[1..] {
720+
match state {
721+
State::Mod => {
722+
// State 0: ['::' -> state 1, else return None]
723+
if p.eq_token(Token::ModSep) {
724+
state = State::Ident;
725+
} else {
726+
return None;
727+
}
728+
}
729+
State::Ident => {
730+
// State 1: [ident -> state 0, else return None]
731+
if let Some(id) = p.maybe_ident() {
732+
segments.push(ast::PathSegment {
733+
identifier: id,
734+
parameters: ast::PathParameters::none(),
735+
});
736+
state = State::Mod;
737+
} else {
738+
return None;
739+
}
740+
}
741+
}
742+
}
743+
744+
let path = ast::Path {
745+
span: cov_span,
746+
global: false,
747+
segments: segments,
748+
};
749+
Some((path, rst))
750+
}
751+
752+
/// Returns an iterator over a TokenSlice (as a sequence of TokenStreams).
753+
fn iter(&self) -> Iter {
754+
Iter { vs: self }
755+
}
756+
757+
/// Splits a TokenSlice based on the provided `&TokenTree -> bool` predicate.
758+
fn split<P>(&self, pred: P) -> Split<P>
759+
where P: FnMut(&TokenTree) -> bool
760+
{
761+
Split {
762+
vs: self,
763+
pred: pred,
764+
finished: false,
765+
}
766+
}
767+
}
768+
769+
pub struct Iter<'a> {
770+
vs: &'a TokenSlice,
771+
}
772+
773+
impl<'a> Iterator for Iter<'a> {
774+
type Item = &'a TokenTree;
775+
776+
fn next(&mut self) -> Option<&'a TokenTree> {
777+
if self.vs.is_empty() {
778+
return None;
779+
}
780+
781+
let ret = Some(&self.vs[0]);
782+
self.vs = &self.vs[1..];
783+
ret
784+
}
785+
}
786+
787+
pub struct Split<'a, P>
788+
where P: FnMut(&TokenTree) -> bool
789+
{
790+
vs: &'a TokenSlice,
791+
pred: P,
792+
finished: bool,
793+
}
794+
795+
impl<'a, P> Iterator for Split<'a, P>
796+
where P: FnMut(&TokenTree) -> bool
797+
{
798+
type Item = &'a TokenSlice;
799+
800+
fn next(&mut self) -> Option<&'a TokenSlice> {
801+
if self.finished {
802+
return None;
803+
}
804+
805+
match self.vs.iter().position(|x| (self.pred)(x)) {
806+
None => {
807+
self.finished = true;
808+
Some(&self.vs[..])
809+
}
810+
Some(idx) => {
811+
let ret = Some(&self.vs[..idx]);
812+
self.vs = &self.vs[idx + 1..];
813+
ret
814+
}
815+
}
816+
}
817+
}
818+
819+
impl Index<usize> for TokenStream {
820+
type Output = TokenTree;
821+
822+
fn index(&self, index: usize) -> &TokenTree {
823+
Index::index(&**self, index)
824+
}
825+
}
826+
827+
impl ops::Index<ops::Range<usize>> for TokenStream {
828+
type Output = TokenSlice;
829+
830+
fn index(&self, index: ops::Range<usize>) -> &TokenSlice {
831+
Index::index(&**self, index)
832+
}
833+
}
834+
835+
impl ops::Index<ops::RangeTo<usize>> for TokenStream {
836+
type Output = TokenSlice;
837+
838+
fn index(&self, index: ops::RangeTo<usize>) -> &TokenSlice {
839+
Index::index(&**self, index)
840+
}
841+
}
842+
843+
impl ops::Index<ops::RangeFrom<usize>> for TokenStream {
844+
type Output = TokenSlice;
845+
846+
fn index(&self, index: ops::RangeFrom<usize>) -> &TokenSlice {
847+
Index::index(&**self, index)
848+
}
849+
}
850+
851+
impl ops::Index<ops::RangeFull> for TokenStream {
852+
type Output = TokenSlice;
853+
854+
fn index(&self, _index: ops::RangeFull) -> &TokenSlice {
855+
Index::index(&**self, _index)
856+
}
857+
}
858+
859+
impl Index<usize> for TokenSlice {
860+
type Output = TokenTree;
861+
862+
fn index(&self, index: usize) -> &TokenTree {
863+
&self.0[index]
864+
}
865+
}
866+
867+
impl ops::Index<ops::Range<usize>> for TokenSlice {
868+
type Output = TokenSlice;
869+
870+
fn index(&self, index: ops::Range<usize>) -> &TokenSlice {
871+
TokenSlice::from_tts(&self.0[index])
872+
}
873+
}
874+
875+
impl ops::Index<ops::RangeTo<usize>> for TokenSlice {
876+
type Output = TokenSlice;
877+
878+
fn index(&self, index: ops::RangeTo<usize>) -> &TokenSlice {
879+
TokenSlice::from_tts(&self.0[index])
880+
}
881+
}
882+
883+
impl ops::Index<ops::RangeFrom<usize>> for TokenSlice {
884+
type Output = TokenSlice;
885+
886+
fn index(&self, index: ops::RangeFrom<usize>) -> &TokenSlice {
887+
TokenSlice::from_tts(&self.0[index])
888+
}
889+
}
890+
891+
impl ops::Index<ops::RangeFull> for TokenSlice {
892+
type Output = TokenSlice;
893+
894+
fn index(&self, _index: ops::RangeFull) -> &TokenSlice {
895+
TokenSlice::from_tts(&self.0[_index])
896+
}
897+
}
898+
899+
900+
#[cfg(test)]
901+
mod tests {
902+
use super::*;
903+
use ast;
904+
use syntax_pos::{Span, BytePos, NO_EXPANSION, DUMMY_SP};
905+
use parse::token::{self, str_to_ident, Token, Lit};
906+
use util::parser_testing::string_to_tts;
907+
use std::rc::Rc;
908+
909+
fn sp(a: u32, b: u32) -> Span {
910+
Span {
911+
lo: BytePos(a),
912+
hi: BytePos(b),
913+
expn_id: NO_EXPANSION,
914+
}
915+
}
916+
917+
#[test]
918+
fn test_is_empty() {
919+
let test0 = TokenStream::from_tts(Vec::new());
920+
let test1 = TokenStream::from_tts(vec![TokenTree::Token(sp(0, 1),
921+
Token::Ident(str_to_ident("a")))]);
922+
let test2 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
923+
924+
assert_eq!(test0.is_empty(), true);
925+
assert_eq!(test1.is_empty(), false);
926+
assert_eq!(test2.is_empty(), false);
927+
}
928+
929+
#[test]
930+
fn test_is_delimited() {
931+
let test0 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
932+
let test1 = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
933+
let test2 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
934+
let test3 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab,oof)".to_string()));
935+
let test4 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
936+
let test5 = TokenStream::from_tts(string_to_tts("".to_string()));
937+
938+
assert_eq!(test0.is_delimited(), false);
939+
assert_eq!(test1.is_delimited(), true);
940+
assert_eq!(test2.is_delimited(), true);
941+
assert_eq!(test3.is_delimited(), false);
942+
assert_eq!(test4.is_delimited(), false);
943+
assert_eq!(test5.is_delimited(), false);
944+
}
945+
946+
#[test]
947+
fn test_is_assign() {
948+
let test0 = TokenStream::from_tts(string_to_tts("= bar::baz".to_string()));
949+
let test1 = TokenStream::from_tts(string_to_tts("= \"5\"".to_string()));
950+
let test2 = TokenStream::from_tts(string_to_tts("= 5".to_string()));
951+
let test3 = TokenStream::from_tts(string_to_tts("(foo = 10)".to_string()));
952+
let test4 = TokenStream::from_tts(string_to_tts("= (foo,bar,baz)".to_string()));
953+
let test5 = TokenStream::from_tts(string_to_tts("".to_string()));
954+
955+
assert_eq!(test0.is_assignment(), true);
956+
assert_eq!(test1.is_assignment(), true);
957+
assert_eq!(test2.is_assignment(), true);
958+
assert_eq!(test3.is_assignment(), false);
959+
assert_eq!(test4.is_assignment(), true);
960+
assert_eq!(test5.is_assignment(), false);
961+
}
962+
963+
#[test]
964+
fn test_is_lit() {
965+
let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string()));
966+
let test1 = TokenStream::from_tts(string_to_tts("5".to_string()));
967+
let test2 = TokenStream::from_tts(string_to_tts("foo".to_string()));
968+
let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
969+
let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string()));
970+
971+
assert_eq!(test0.is_lit(), true);
972+
assert_eq!(test1.is_lit(), true);
973+
assert_eq!(test2.is_lit(), false);
974+
assert_eq!(test3.is_lit(), false);
975+
assert_eq!(test4.is_lit(), false);
976+
}
977+
978+
#[test]
979+
fn test_is_ident() {
980+
let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string()));
981+
let test1 = TokenStream::from_tts(string_to_tts("5".to_string()));
982+
let test2 = TokenStream::from_tts(string_to_tts("foo".to_string()));
983+
let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
984+
let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string()));
985+
986+
assert_eq!(test0.is_ident(), false);
987+
assert_eq!(test1.is_ident(), false);
988+
assert_eq!(test2.is_ident(), true);
989+
assert_eq!(test3.is_ident(), false);
990+
assert_eq!(test4.is_ident(), false);
991+
}
992+
993+
#[test]
994+
fn test_maybe_assignment() {
995+
let test0_input = TokenStream::from_tts(string_to_tts("= bar::baz".to_string()));
996+
let test1_input = TokenStream::from_tts(string_to_tts("= \"5\"".to_string()));
997+
let test2_input = TokenStream::from_tts(string_to_tts("= 5".to_string()));
998+
let test3_input = TokenStream::from_tts(string_to_tts("(foo = 10)".to_string()));
999+
let test4_input = TokenStream::from_tts(string_to_tts("= (foo,bar,baz)".to_string()));
1000+
let test5_input = TokenStream::from_tts(string_to_tts("".to_string()));
1001+
1002+
let test0 = test0_input.maybe_assignment();
1003+
let test1 = test1_input.maybe_assignment();
1004+
let test2 = test2_input.maybe_assignment();
1005+
let test3 = test3_input.maybe_assignment();
1006+
let test4 = test4_input.maybe_assignment();
1007+
let test5 = test5_input.maybe_assignment();
1008+
1009+
let test0_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(2, 5),
1010+
token::Ident(str_to_ident("bar"))),
1011+
TokenTree::Token(sp(5, 7), token::ModSep),
1012+
TokenTree::Token(sp(7, 10),
1013+
token::Ident(str_to_ident("baz")))]);
1014+
assert_eq!(test0, Some(&test0_expected[..]));
1015+
1016+
let test1_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(2, 5),
1017+
token::Literal(Lit::Str_(token::intern("5")), None))]);
1018+
assert_eq!(test1, Some(&test1_expected[..]));
1019+
1020+
let test2_expected = TokenStream::from_tts(vec![TokenTree::Token( sp(2,3)
1021+
, token::Literal(
1022+
Lit::Integer(
1023+
token::intern(&(5.to_string()))),
1024+
None))]);
1025+
assert_eq!(test2, Some(&test2_expected[..]));
1026+
1027+
assert_eq!(test3, None);
1028+
1029+
1030+
let test4_tts = vec![TokenTree::Token(sp(3, 6), token::Ident(str_to_ident("foo"))),
1031+
TokenTree::Token(sp(6, 7), token::Comma),
1032+
TokenTree::Token(sp(7, 10), token::Ident(str_to_ident("bar"))),
1033+
TokenTree::Token(sp(10, 11), token::Comma),
1034+
TokenTree::Token(sp(11, 14), token::Ident(str_to_ident("baz")))];
1035+
1036+
let test4_expected = TokenStream::from_tts(vec![TokenTree::Delimited(sp(2, 15),
1037+
Rc::new(Delimited {
1038+
delim: token::DelimToken::Paren,
1039+
open_span: sp(2, 3),
1040+
tts: test4_tts,
1041+
close_span: sp(14, 15),
1042+
}))]);
1043+
assert_eq!(test4, Some(&test4_expected[..]));
1044+
1045+
assert_eq!(test5, None);
1046+
1047+
}
1048+
1049+
/// `maybe_delimited` succeeds only when the whole stream is exactly one
/// delimited group, yielding the tokens *inside* the delimiters.
#[test]
fn test_maybe_delimited() {
    let test0_input = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
    let test1_input = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
    let test2_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
    let test3_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab)"
        .to_string()));
    let test4_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
    let test5_input = TokenStream::from_tts(string_to_tts("".to_string()));

    let test0 = test0_input.maybe_delimited();
    let test1 = test1_input.maybe_delimited();
    let test2 = test2_input.maybe_delimited();
    let test3 = test3_input.maybe_delimited();
    let test4 = test4_input.maybe_delimited();
    let test5 = test5_input.maybe_delimited();

    // A leading identifier before the group means the stream is not a
    // single delimited group.
    assert_eq!(test0, None);

    // `(bar::baz)` -> the inner `bar::baz` tokens.
    let test1_expected = TokenStream::from_tts(vec![
        TokenTree::Token(sp(1, 4), token::Ident(str_to_ident("bar"))),
        TokenTree::Token(sp(4, 6), token::ModSep),
        TokenTree::Token(sp(6, 9), token::Ident(str_to_ident("baz"))),
    ]);
    assert_eq!(test1, Some(&test1_expected[..]));

    // `(foo,bar,baz)` -> the inner comma-separated tokens, commas included.
    let test2_expected = TokenStream::from_tts(vec![
        TokenTree::Token(sp(1, 4), token::Ident(str_to_ident("foo"))),
        TokenTree::Token(sp(4, 5), token::Comma),
        TokenTree::Token(sp(5, 8), token::Ident(str_to_ident("bar"))),
        TokenTree::Token(sp(8, 9), token::Comma),
        TokenTree::Token(sp(9, 12), token::Ident(str_to_ident("baz"))),
    ]);
    assert_eq!(test2, Some(&test2_expected[..]));

    // Two adjacent groups, a trailing identifier, and the empty stream
    // all fail to be a single delimited group.
    assert_eq!(test3, None);

    assert_eq!(test4, None);

    assert_eq!(test5, None);
}
1091+
1092+
/// `maybe_comma_list` splits a single parenthesized group into a vector of
/// comma-separated sub-slices; anything that is not exactly one group yields
/// `None`. A trailing comma is accepted.
#[test]
fn test_maybe_comma_list() {
    let test0_input = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
    let test1_input = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
    let test2_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
    let test3_input = TokenStream::from_tts(string_to_tts("(foo::bar,bar,baz)".to_string()));
    let test4_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab)"
        .to_string()));
    let test5_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
    let test6_input = TokenStream::from_tts(string_to_tts("".to_string()));
    // The following is supported behavior!
    let test7_input = TokenStream::from_tts(string_to_tts("(foo,bar,)".to_string()));

    let test0 = test0_input.maybe_comma_list();
    let test1 = test1_input.maybe_comma_list();
    let test2 = test2_input.maybe_comma_list();
    let test3 = test3_input.maybe_comma_list();
    let test4 = test4_input.maybe_comma_list();
    let test5 = test5_input.maybe_comma_list();
    let test6 = test6_input.maybe_comma_list();
    let test7 = test7_input.maybe_comma_list();

    // Leading identifier before the group: not a comma list.
    assert_eq!(test0, None);

    // `(bar::baz)` is a one-element list whose element is the path tokens.
    let test1_stream = TokenStream::from_tts(vec![
        TokenTree::Token(sp(1, 4), token::Ident(str_to_ident("bar"))),
        TokenTree::Token(sp(4, 6), token::ModSep),
        TokenTree::Token(sp(6, 9), token::Ident(str_to_ident("baz"))),
    ]);

    let test1_expected: Vec<&TokenSlice> = vec![&test1_stream[..]];
    assert_eq!(test1, Some(test1_expected));

    // `(foo,bar,baz)` -> three single-identifier slices (commas dropped).
    let test2_foo = TokenStream::from_tts(vec![
        TokenTree::Token(sp(1, 4), token::Ident(str_to_ident("foo"))),
    ]);
    let test2_bar = TokenStream::from_tts(vec![
        TokenTree::Token(sp(5, 8), token::Ident(str_to_ident("bar"))),
    ]);
    let test2_baz = TokenStream::from_tts(vec![
        TokenTree::Token(sp(9, 12), token::Ident(str_to_ident("baz"))),
    ]);
    let test2_expected: Vec<&TokenSlice> = vec![&test2_foo[..], &test2_bar[..], &test2_baz[..]];
    assert_eq!(test2, Some(test2_expected));

    // A multi-token element (`foo::bar`) stays together as one slice.
    let test3_path = TokenStream::from_tts(vec![
        TokenTree::Token(sp(1, 4), token::Ident(str_to_ident("foo"))),
        TokenTree::Token(sp(4, 6), token::ModSep),
        TokenTree::Token(sp(6, 9), token::Ident(str_to_ident("bar"))),
    ]);
    let test3_bar = TokenStream::from_tts(vec![
        TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("bar"))),
    ]);
    let test3_baz = TokenStream::from_tts(vec![
        TokenTree::Token(sp(14, 17), token::Ident(str_to_ident("baz"))),
    ]);
    let test3_expected: Vec<&TokenSlice> =
        vec![&test3_path[..], &test3_bar[..], &test3_baz[..]];
    assert_eq!(test3, Some(test3_expected));

    // Two groups, group + trailing ident, and the empty stream all fail.
    assert_eq!(test4, None);

    assert_eq!(test5, None);

    assert_eq!(test6, None);

    // Trailing comma: same elements, no empty slice at the end.
    let test7_expected: Vec<&TokenSlice> = vec![&test2_foo[..], &test2_bar[..]];
    assert_eq!(test7, Some(test7_expected));
}
1157+
1158+
// pub fn maybe_ident(&self) -> Option<ast::Ident>
/// `maybe_ident` succeeds only when the stream is exactly one identifier
/// token — literals, paths, and ident-plus-group streams all yield `None`.
#[test]
fn test_maybe_ident() {
    let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string())).maybe_ident();
    let test1 = TokenStream::from_tts(string_to_tts("5".to_string())).maybe_ident();
    let test2 = TokenStream::from_tts(string_to_tts("foo".to_string())).maybe_ident();
    let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string())).maybe_ident();
    let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string())).maybe_ident();

    assert_eq!(test0, None);                          // string literal
    assert_eq!(test1, None);                          // integer literal
    assert_eq!(test2, Some(str_to_ident("foo")));     // lone identifier
    assert_eq!(test3, None);                          // path, not a single ident
    assert_eq!(test4, None);                          // ident followed by a group
}
1173+
1174+
// pub fn maybe_lit(&self) -> Option<token::Lit>
/// `maybe_lit` is the dual of `maybe_ident`: it succeeds only when the
/// stream is exactly one literal token.
#[test]
fn test_maybe_lit() {
    let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string())).maybe_lit();
    let test1 = TokenStream::from_tts(string_to_tts("5".to_string())).maybe_lit();
    let test2 = TokenStream::from_tts(string_to_tts("foo".to_string())).maybe_lit();
    let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string())).maybe_lit();
    let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string())).maybe_lit();

    assert_eq!(test0, Some(Lit::Str_(token::intern("foo"))));
    assert_eq!(test1, Some(Lit::Integer(token::intern(&(5.to_string())))));
    assert_eq!(test2, None);    // identifier, not a literal
    assert_eq!(test3, None);    // path
    assert_eq!(test4, None);    // ident followed by a group
}
1189+
1190+
/// `maybe_path_prefix` splits a stream of the form `path ( ... )` into the
/// leading `ast::Path` and the remaining delimited stream; streams that do
/// not start with a path fail.
#[test]
fn test_maybe_path_prefix() {
    let test0_input = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
    let test1_input = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
    let test2_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
    let test3_input = TokenStream::from_tts(string_to_tts("foo::bar(bar,baz)".to_string()));

    let test0 = test0_input.maybe_path_prefix();
    let test1 = test1_input.maybe_path_prefix();
    let test2 = test2_input.maybe_path_prefix();
    let test3 = test3_input.maybe_path_prefix();

    // `foo(bar::baz)` -> path `foo` plus the `(bar::baz)` group.
    let test0_tts = vec![
        TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("bar"))),
        TokenTree::Token(sp(7, 9), token::ModSep),
        TokenTree::Token(sp(9, 12), token::Ident(str_to_ident("baz"))),
    ];

    let test0_stream = TokenStream::from_tts(vec![
        TokenTree::Delimited(sp(3, 13),
                             Rc::new(Delimited {
                                 delim: token::DelimToken::Paren,
                                 open_span: sp(3, 4),
                                 tts: test0_tts,
                                 close_span: sp(12, 13),
                             })),
    ]);

    let test0_expected = Some((ast::Path::from_ident(sp(0, 3), str_to_ident("foo")),
                               &test0_stream[..]));
    assert_eq!(test0, test0_expected);

    // No path in front of the group: fail.
    assert_eq!(test1, None);
    assert_eq!(test2, None);

    // `foo::bar(bar,baz)` -> two-segment path plus the `(bar,baz)` group.
    let test3_path = ast::Path {
        span: sp(0, 8),
        global: false,
        segments: vec![ast::PathSegment {
                           identifier: str_to_ident("foo"),
                           parameters: ast::PathParameters::none(),
                       },
                       ast::PathSegment {
                           identifier: str_to_ident("bar"),
                           parameters: ast::PathParameters::none(),
                       }],
    };

    let test3_tts = vec![
        TokenTree::Token(sp(9, 12), token::Ident(str_to_ident("bar"))),
        TokenTree::Token(sp(12, 13), token::Comma),
        TokenTree::Token(sp(13, 16), token::Ident(str_to_ident("baz"))),
    ];

    let test3_stream = TokenStream::from_tts(vec![
        TokenTree::Delimited(sp(8, 17),
                             Rc::new(Delimited {
                                 delim: token::DelimToken::Paren,
                                 open_span: sp(8, 9),
                                 tts: test3_tts,
                                 close_span: sp(16, 17),
                             })),
    ]);
    let test3_expected = Some((test3_path, &test3_stream[..]));
    assert_eq!(test3, test3_expected);
}
1248+
1249+
/// `as_paren_delimited_stream` wraps an arbitrary token-tree sequence in a
/// synthetic parenthesized group; the synthesized delimiters carry
/// `DUMMY_SP` since they do not exist in the source text.
#[test]
fn test_as_paren_delimited_stream() {
    let test0 = TokenStream::as_paren_delimited_stream(string_to_tts("foo,bar,".to_string()));
    let test1 = TokenStream::as_paren_delimited_stream(string_to_tts("baz(foo,bar)"
        .to_string()));

    // `foo,bar,` -> `(foo,bar,)` with dummy spans on the added parens.
    let test0_tts = vec![
        TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("foo"))),
        TokenTree::Token(sp(3, 4), token::Comma),
        TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("bar"))),
        TokenTree::Token(sp(7, 8), token::Comma),
    ];
    let test0_stream = TokenStream::from_tts(vec![
        TokenTree::Delimited(sp(0, 8),
                             Rc::new(Delimited {
                                 delim: token::DelimToken::Paren,
                                 open_span: DUMMY_SP,
                                 tts: test0_tts,
                                 close_span: DUMMY_SP,
                             })),
    ]);

    assert_eq!(test0, test0_stream);

    // `baz(foo,bar)` -> `(baz(foo,bar))`: the existing group is preserved
    // (real spans) inside the new synthetic outer group (dummy spans).
    let test1_tts = vec![
        TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("foo"))),
        TokenTree::Token(sp(7, 8), token::Comma),
        TokenTree::Token(sp(8, 11), token::Ident(str_to_ident("bar"))),
    ];

    let test1_parse = vec![
        TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("baz"))),
        TokenTree::Delimited(sp(3, 12),
                             Rc::new(Delimited {
                                 delim: token::DelimToken::Paren,
                                 open_span: sp(3, 4),
                                 tts: test1_tts,
                                 close_span: sp(11, 12),
                             })),
    ];

    let test1_stream = TokenStream::from_tts(vec![
        TokenTree::Delimited(sp(0, 12),
                             Rc::new(Delimited {
                                 delim: token::DelimToken::Paren,
                                 open_span: DUMMY_SP,
                                 tts: test1_parse,
                                 close_span: DUMMY_SP,
                             })),
    ]);

    assert_eq!(test1, test1_stream);
}
1293+
1294+
}

0 commit comments

Comments
 (0)
Please sign in to comment.