Skip to content
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit e80a930

Browse files
committed Jan 8, 2019
Make TokenStream less recursive.
`TokenStream` is currently recursive in *two* ways:

- the `TokenTree` variant contains a `ThinTokenStream`, which can contain a `TokenStream`;
- the `TokenStream` variant contains a `Vec<TokenStream>`.

The latter is not necessary and causes significant complexity. This commit replaces it with the simpler `Vec<(TokenTree, IsJoint)>`. This reduces complexity significantly. In particular, `StreamCursor` is eliminated, and `Cursor` becomes much simpler, consisting now of just a `TokenStream` and an index.

The commit also removes the `Extend` impl for `TokenStream`, because it is only used in tests. (The commit also removes those tests.)

Overall, the commit reduces the number of lines of code by almost 200.
1 parent b92552d commit e80a930

File tree

7 files changed

+148
-341
lines changed

7 files changed

+148
-341
lines changed
 

‎src/libsyntax/attr/mod.rs

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -472,7 +472,7 @@ impl MetaItem {
472472
Token::from_ast_ident(segment.ident)).into());
473473
last_pos = segment.ident.span.hi();
474474
}
475-
idents.push(self.node.tokens(self.span));
475+
self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);
476476
TokenStream::new(idents)
477477
}
478478

@@ -529,15 +529,17 @@ impl MetaItemKind {
529529
match *self {
530530
MetaItemKind::Word => TokenStream::empty(),
531531
MetaItemKind::NameValue(ref lit) => {
532-
TokenStream::new(vec![TokenTree::Token(span, Token::Eq).into(), lit.tokens()])
532+
let mut vec = vec![TokenTree::Token(span, Token::Eq).into()];
533+
lit.tokens().append_to_tree_and_joint_vec(&mut vec);
534+
TokenStream::new(vec)
533535
}
534536
MetaItemKind::List(ref list) => {
535537
let mut tokens = Vec::new();
536538
for (i, item) in list.iter().enumerate() {
537539
if i > 0 {
538540
tokens.push(TokenTree::Token(span, Token::Comma).into());
539541
}
540-
tokens.push(item.node.tokens());
542+
item.node.tokens().append_to_tree_and_joint_vec(&mut tokens);
541543
}
542544
TokenTree::Delimited(
543545
DelimSpan::from_single(span),

‎src/libsyntax/ext/quote.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -233,7 +233,7 @@ pub mod rt {
233233
self.span, token::Token::from_ast_ident(segment.ident)
234234
).into());
235235
}
236-
inner.push(self.tokens.clone());
236+
self.tokens.clone().append_to_tree_and_joint_vec(&mut inner);
237237

238238
let delim_span = DelimSpan::from_single(self.span);
239239
r.push(TokenTree::Delimited(

‎src/libsyntax/ext/tt/transcribe.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ use fold::noop_fold_tt;
77
use parse::token::{self, Token, NtTT};
88
use smallvec::SmallVec;
99
use syntax_pos::DUMMY_SP;
10-
use tokenstream::{TokenStream, TokenTree, DelimSpan};
10+
use tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
1111

1212
use rustc_data_structures::fx::FxHashMap;
1313
use rustc_data_structures::sync::Lrc;
@@ -63,7 +63,7 @@ pub fn transcribe(cx: &ExtCtxt,
6363
let mut stack: SmallVec<[Frame; 1]> = smallvec![Frame::new(src)];
6464
let interpolations = interp.unwrap_or_else(FxHashMap::default); /* just a convenience */
6565
let mut repeats = Vec::new();
66-
let mut result: Vec<TokenStream> = Vec::new();
66+
let mut result: Vec<TreeAndJoint> = Vec::new();
6767
let mut result_stack = Vec::new();
6868

6969
loop {
@@ -78,7 +78,7 @@ pub fn transcribe(cx: &ExtCtxt,
7878
if let Some(sep) = sep.clone() {
7979
// repeat same span, I guess
8080
let prev_span = match result.last() {
81-
Some(stream) => stream.trees().next().unwrap().span(),
81+
Some((tt, _)) => tt.span(),
8282
None => DUMMY_SP,
8383
};
8484
result.push(TokenTree::Token(prev_span, sep).into());

‎src/libsyntax/parse/lexer/tokentrees.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
use print::pprust::token_to_string;
22
use parse::lexer::StringReader;
33
use parse::{token, PResult};
4-
use tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree};
4+
use tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
55

66
impl<'a> StringReader<'a> {
77
// Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
@@ -33,7 +33,7 @@ impl<'a> StringReader<'a> {
3333
}
3434
}
3535

36-
fn parse_token_tree(&mut self) -> PResult<'a, TokenStream> {
36+
fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
3737
let sm = self.sess.source_map();
3838
match self.token {
3939
token::Eof => {
@@ -156,7 +156,7 @@ impl<'a> StringReader<'a> {
156156
Ok(TokenTree::Delimited(
157157
delim_span,
158158
delim,
159-
tts.into(),
159+
tts.into()
160160
).into())
161161
},
162162
token::CloseDelim(_) => {
@@ -176,7 +176,7 @@ impl<'a> StringReader<'a> {
176176
let raw = self.span_src_raw;
177177
self.real_token();
178178
let is_joint = raw.hi() == self.span_src_raw.lo() && token::is_op(&self.token);
179-
Ok(TokenStream::Tree(tt, if is_joint { Joint } else { NonJoint }))
179+
Ok((tt, if is_joint { Joint } else { NonJoint }))
180180
}
181181
}
182182
}

‎src/libsyntax/parse/parser.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2914,7 +2914,7 @@ impl<'a> Parser<'a> {
29142914
TokenTree::Delimited(
29152915
frame.span,
29162916
frame.delim,
2917-
frame.tree_cursor.original_stream().into(),
2917+
frame.tree_cursor.stream.into(),
29182918
)
29192919
},
29202920
token::CloseDelim(_) | token::Eof => unreachable!(),

‎src/libsyntax/tokenstream.rs

Lines changed: 127 additions & 323 deletions
Large diffs are not rendered by default.

‎src/libsyntax_ext/proc_macro_server.rs

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ use syntax::ast;
1111
use syntax::ext::base::ExtCtxt;
1212
use syntax::parse::lexer::comments;
1313
use syntax::parse::{self, token, ParseSess};
14-
use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream};
14+
use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
1515
use syntax_pos::hygiene::{SyntaxContext, Transparency};
1616
use syntax_pos::symbol::{keywords, Symbol};
1717
use syntax_pos::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};
@@ -46,13 +46,14 @@ impl ToInternal<token::DelimToken> for Delimiter {
4646
}
4747
}
4848

49-
impl FromInternal<(TokenStream, &'_ ParseSess, &'_ mut Vec<Self>)>
49+
impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
5050
for TokenTree<Group, Punct, Ident, Literal>
5151
{
52-
fn from_internal((stream, sess, stack): (TokenStream, &ParseSess, &mut Vec<Self>)) -> Self {
52+
fn from_internal(((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mut Vec<Self>))
53+
-> Self {
5354
use syntax::parse::token::*;
5455

55-
let (tree, joint) = stream.as_tree();
56+
let joint = is_joint == Joint;
5657
let (span, token) = match tree {
5758
tokenstream::TokenTree::Delimited(span, delim, tts) => {
5859
let delimiter = Delimiter::from_internal(delim);
@@ -450,7 +451,7 @@ impl server::TokenStreamIter for Rustc<'_> {
450451
) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
451452
loop {
452453
let tree = iter.stack.pop().or_else(|| {
453-
let next = iter.cursor.next_as_stream()?;
454+
let next = iter.cursor.next_with_joint()?;
454455
Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
455456
})?;
456457
// HACK: The condition "dummy span + group with empty delimiter" represents an AST
@@ -461,7 +462,7 @@ impl server::TokenStreamIter for Rustc<'_> {
461462
// and not doing the roundtrip through AST.
462463
if let TokenTree::Group(ref group) = tree {
463464
if group.delimiter == Delimiter::None && group.span.entire().is_dummy() {
464-
iter.cursor.insert(group.stream.clone());
465+
iter.cursor.append(group.stream.clone());
465466
continue;
466467
}
467468
}

0 commit comments

Comments (0)
Please sign in to comment.