Commit bd981cc
Auto merge of #57004 - nnethercote:TS-change-Stream, r=petrochenkov
Make `TokenStream` less recursive.

`TokenStream` is currently recursive in *two* ways:

- the `TokenTree` variant contains a `ThinTokenStream`, which can contain a `TokenStream`;
- the `TokenStream` variant contains a `Vec<TokenStream>`.

The latter is not necessary and causes significant complexity. This commit replaces it with the simpler `Vec<(TokenTree, IsJoint)>`, which reduces complexity significantly. In particular, `StreamCursor` is eliminated, and `Cursor` becomes much simpler, consisting now of just a `TokenStream` and an index.

The commit also removes the `Extend` impl for `TokenStream`, because it is only used in tests. (The commit also removes those tests.)

Overall, the commit reduces the number of lines of code by almost 200.
2 parents 167ceff + e80a930 commit bd981cc
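For orientation, here is a minimal, self-contained sketch of the shape change the commit message describes: a stream that can nest other streams versus a flat list of `(TokenTree, IsJoint)` pairs, with a cursor that is just a stream plus an index. The types below are toy stand-ins, not rustc's actual definitions; only the `Vec<TokenStream>` → `Vec<(TokenTree, IsJoint)>` change and the `TreeAndJoint` name are taken from the diffs below.

```rust
// Toy stand-ins; not the real libsyntax types.
#[derive(Clone, Copy, Debug, PartialEq)]
enum IsJoint { Joint, NonJoint }

#[derive(Clone, Debug)]
struct TokenTree(&'static str); // stand-in for a real token tree

// Old shape (simplified): the `Stream` variant nests more streams, so
// walking it needs a stack of sub-cursors (the `StreamCursor` removed here).
#[allow(dead_code)]
enum OldTokenStream {
    Tree(TokenTree, IsJoint),
    Stream(Vec<OldTokenStream>),
}

// New shape (simplified): one flat vector of (tree, jointness) pairs.
type TreeAndJoint = (TokenTree, IsJoint);
struct TokenStream(Vec<TreeAndJoint>);

// With a flat stream, a cursor is just the stream plus an index.
struct Cursor { stream: TokenStream, index: usize }

impl Iterator for Cursor {
    type Item = TreeAndJoint;
    fn next(&mut self) -> Option<TreeAndJoint> {
        let item = self.stream.0.get(self.index).cloned();
        if item.is_some() { self.index += 1; }
        item
    }
}

fn main() {
    let stream = TokenStream(vec![
        (TokenTree("a"), IsJoint::NonJoint),
        (TokenTree("+"), IsJoint::Joint),
        (TokenTree("="), IsJoint::NonJoint),
    ]);
    for (tt, joint) in (Cursor { stream, index: 0 }) {
        println!("{:?} {:?}", tt, joint);
    }
}
```

The lexer change in `src/libsyntax/parse/lexer/tokentrees.rs` below is what produces these `(tt, Joint/NonJoint)` pairs in the real patch.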

File tree: 7 files changed (+148, -341 lines)

src/libsyntax/attr/mod.rs (+5, -3)

@@ -472,7 +472,7 @@ impl MetaItem {
                                          Token::from_ast_ident(segment.ident)).into());
             last_pos = segment.ident.span.hi();
         }
-        idents.push(self.node.tokens(self.span));
+        self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);
         TokenStream::new(idents)
     }
 
@@ -529,15 +529,17 @@ impl MetaItemKind {
         match *self {
             MetaItemKind::Word => TokenStream::empty(),
             MetaItemKind::NameValue(ref lit) => {
-                TokenStream::new(vec![TokenTree::Token(span, Token::Eq).into(), lit.tokens()])
+                let mut vec = vec![TokenTree::Token(span, Token::Eq).into()];
+                lit.tokens().append_to_tree_and_joint_vec(&mut vec);
+                TokenStream::new(vec)
             }
             MetaItemKind::List(ref list) => {
                 let mut tokens = Vec::new();
                 for (i, item) in list.iter().enumerate() {
                     if i > 0 {
                         tokens.push(TokenTree::Token(span, Token::Comma).into());
                     }
-                    tokens.push(item.node.tokens());
+                    item.node.tokens().append_to_tree_and_joint_vec(&mut tokens);
                 }
                 TokenTree::Delimited(
                     DelimSpan::from_single(span),
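The new `append_to_tree_and_joint_vec` helper is called at these sites but not defined in this excerpt. A plausible reading, sketched below with toy stand-in types, is that it simply flattens a stream's `(TokenTree, IsJoint)` pairs onto the end of an existing vector, which is what lets these call sites stop pushing nested streams.

```rust
// Hypothetical sketch with toy types; the real helper lives in
// src/libsyntax/tokenstream.rs and is not shown in this diff.
#[derive(Clone, Copy, Debug, PartialEq)]
enum IsJoint { Joint, NonJoint }

#[derive(Clone, Debug)]
struct TokenTree(&'static str);

type TreeAndJoint = (TokenTree, IsJoint);
struct TokenStream(Vec<TreeAndJoint>);

impl TokenStream {
    // Append this stream's pairs onto `vec`; with the flat representation
    // there are no nested streams left to splice in.
    fn append_to_tree_and_joint_vec(self, vec: &mut Vec<TreeAndJoint>) {
        vec.extend(self.0);
    }
}

fn main() {
    // Mirrors the MetaItemKind::NameValue case above: start with `=`,
    // then append the literal's tokens.
    let mut vec: Vec<TreeAndJoint> = vec![(TokenTree("="), IsJoint::NonJoint)];
    TokenStream(vec![(TokenTree("\"lit\""), IsJoint::NonJoint)])
        .append_to_tree_and_joint_vec(&mut vec);
    assert_eq!(vec.len(), 2);
}
```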

src/libsyntax/ext/quote.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -233,7 +233,7 @@ pub mod rt {
233233
self.span, token::Token::from_ast_ident(segment.ident)
234234
).into());
235235
}
236-
inner.push(self.tokens.clone());
236+
self.tokens.clone().append_to_tree_and_joint_vec(&mut inner);
237237

238238
let delim_span = DelimSpan::from_single(self.span);
239239
r.push(TokenTree::Delimited(

src/libsyntax/ext/tt/transcribe.rs (+3, -3)

@@ -7,7 +7,7 @@ use fold::noop_fold_tt;
 use parse::token::{self, Token, NtTT};
 use smallvec::SmallVec;
 use syntax_pos::DUMMY_SP;
-use tokenstream::{TokenStream, TokenTree, DelimSpan};
+use tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
 
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;

@@ -63,7 +63,7 @@ pub fn transcribe(cx: &ExtCtxt,
     let mut stack: SmallVec<[Frame; 1]> = smallvec![Frame::new(src)];
     let interpolations = interp.unwrap_or_else(FxHashMap::default); /* just a convenience */
     let mut repeats = Vec::new();
-    let mut result: Vec<TokenStream> = Vec::new();
+    let mut result: Vec<TreeAndJoint> = Vec::new();
     let mut result_stack = Vec::new();
 
     loop {

@@ -78,7 +78,7 @@ pub fn transcribe(cx: &ExtCtxt,
                 if let Some(sep) = sep.clone() {
                     // repeat same span, I guess
                     let prev_span = match result.last() {
-                        Some(stream) => stream.trees().next().unwrap().span(),
+                        Some((tt, _)) => tt.span(),
                         None => DUMMY_SP,
                     };
                     result.push(TokenTree::Token(prev_span, sep).into());

src/libsyntax/parse/lexer/tokentrees.rs

+4-4
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
use print::pprust::token_to_string;
22
use parse::lexer::StringReader;
33
use parse::{token, PResult};
4-
use tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree};
4+
use tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
55

66
impl<'a> StringReader<'a> {
77
// Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
@@ -33,7 +33,7 @@ impl<'a> StringReader<'a> {
3333
}
3434
}
3535

36-
fn parse_token_tree(&mut self) -> PResult<'a, TokenStream> {
36+
fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
3737
let sm = self.sess.source_map();
3838
match self.token {
3939
token::Eof => {
@@ -156,7 +156,7 @@ impl<'a> StringReader<'a> {
156156
Ok(TokenTree::Delimited(
157157
delim_span,
158158
delim,
159-
tts.into(),
159+
tts.into()
160160
).into())
161161
},
162162
token::CloseDelim(_) => {
@@ -176,7 +176,7 @@ impl<'a> StringReader<'a> {
176176
let raw = self.span_src_raw;
177177
self.real_token();
178178
let is_joint = raw.hi() == self.span_src_raw.lo() && token::is_op(&self.token);
179-
Ok(TokenStream::Tree(tt, if is_joint { Joint } else { NonJoint }))
179+
Ok((tt, if is_joint { Joint } else { NonJoint }))
180180
}
181181
}
182182
}
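As a side note on the last hunk: whether a pair is `Joint` depends only on whether the token's raw span touches the next token's raw span and the next token is an operator. A minimal illustration of that rule, with a toy `Span` and a toy `is_op` standing in for `token::is_op` (not the lexer's real types):

```rust
// Illustration only: the jointness rule from the hunk above.
#[derive(Clone, Copy, Debug, PartialEq)]
enum IsJoint { Joint, NonJoint }

#[derive(Clone, Copy)]
struct Span { lo: u32, hi: u32 }

fn is_op(tok: char) -> bool {
    match tok {
        '+' | '-' | '=' | '<' | '>' | '!' | '&' | '|' => true,
        _ => false,
    }
}

// A token is Joint with its successor when their raw spans touch
// (no whitespace between them) and the successor is an operator.
fn jointness(current: Span, next: Span, next_tok: char) -> IsJoint {
    if current.hi == next.lo && is_op(next_tok) {
        IsJoint::Joint
    } else {
        IsJoint::NonJoint
    }
}

fn main() {
    // "+=": '+' occupies 0..1 and '=' occupies 1..2, so the '+' is Joint.
    assert_eq!(jointness(Span { lo: 0, hi: 1 }, Span { lo: 1, hi: 2 }, '='),
               IsJoint::Joint);
    // "+ =": a space separates the spans, so the '+' is NonJoint.
    assert_eq!(jointness(Span { lo: 0, hi: 1 }, Span { lo: 2, hi: 3 }, '='),
               IsJoint::NonJoint);
}
```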

src/libsyntax/parse/parser.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -2914,7 +2914,7 @@ impl<'a> Parser<'a> {
29142914
TokenTree::Delimited(
29152915
frame.span,
29162916
frame.delim,
2917-
frame.tree_cursor.original_stream().into(),
2917+
frame.tree_cursor.stream.into(),
29182918
)
29192919
},
29202920
token::CloseDelim(_) | token::Eof => unreachable!(),
