Skip to content

Commit 7ce1fbe

Browse files
committed
Auto merge of #39419 - jseyfried:simplify_tokentree, r=nrc
Simplify `TokenTree` and fix `macro_rules!` bugs This PR - fixes #39390, fixes #39403, and fixes #39404 (each is a [breaking-change], see issues for examples), - fixes #39889, - simplifies and optimizes macro invocation parsing, - cleans up `ext::tt::transcribe`, - removes `tokenstream::TokenTree::Sequence` and `Token::MatchNt`, - instead, adds a new type `ext::tt::quoted::TokenTree` for use by `macro_rules!` (`ext::tt`) - removes `parser.quote_depth` and `parser.parsing_token_tree`, and - removes `quote_matcher!`. - Instead, use `quote_tokens!` and `ext::tt::quoted::parse` the result with `expect_matchers=true`. - I found no outside uses of `quote_matcher!` when searching Rust code on Github. r? @nrc
2 parents 2f52386 + 839398a commit 7ce1fbe

File tree

36 files changed

+762
-766
lines changed

36 files changed

+762
-766
lines changed

src/libproc_macro/lib.rs

+1-2
Original file line numberDiff line numberDiff line change
@@ -173,8 +173,7 @@ impl FromStr for TokenStream {
173173
__internal::with_parse_sess(|sess| {
174174
let src = src.to_string();
175175
let name = "<proc-macro source code>".to_string();
176-
let tts = try!(parse::parse_tts_from_source_str(name, src, sess)
177-
.map_err(parse_to_lex_err));
176+
let tts = parse::parse_tts_from_source_str(name, src, sess);
178177

179178
Ok(__internal::token_stream_wrap(tts.into_iter().collect()))
180179
})

src/libproc_macro_plugin/qquote.rs

-1
Original file line numberDiff line numberDiff line change
@@ -119,7 +119,6 @@ impl Quote for TokenTree {
119119
::syntax::tokenstream::TokenTree::Delimited(::syntax::ext::quote::rt::DUMMY_SP,
120120
(quote delimited))
121121
},
122-
_ => panic!("unexpected `TokenTree::Sequence` in `qquote`"),
123122
}
124123
}
125124
}

src/librustc/lint/builtin.rs

+7
Original file line numberDiff line numberDiff line change
@@ -236,6 +236,12 @@ declare_lint! {
236236
"detects use of struct constructors that would be invisible with new visibility rules"
237237
}
238238

239+
declare_lint! {
240+
pub MISSING_FRAGMENT_SPECIFIER,
241+
Warn,
242+
"detects missing fragment specifiers in unused `macro_rules!` patterns"
243+
}
244+
239245
declare_lint! {
240246
pub DEPRECATED,
241247
Warn,
@@ -286,6 +292,7 @@ impl LintPass for HardwiredLints {
286292
LEGACY_DIRECTORY_OWNERSHIP,
287293
LEGACY_IMPORTS,
288294
LEGACY_CONSTRUCTOR_VISIBILITY,
295+
MISSING_FRAGMENT_SPECIFIER,
289296
DEPRECATED
290297
)
291298
}

src/librustc_driver/driver.rs

+8
Original file line numberDiff line numberDiff line change
@@ -688,6 +688,14 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,
688688

689689
let krate = ecx.monotonic_expander().expand_crate(krate);
690690

691+
let mut missing_fragment_specifiers: Vec<_> =
692+
ecx.parse_sess.missing_fragment_specifiers.borrow().iter().cloned().collect();
693+
missing_fragment_specifiers.sort();
694+
for span in missing_fragment_specifiers {
695+
let lint = lint::builtin::MISSING_FRAGMENT_SPECIFIER;
696+
let msg = "missing fragment specifier".to_string();
697+
sess.add_lint(lint, ast::CRATE_NODE_ID, span, msg);
698+
}
691699
if ecx.parse_sess.span_diagnostic.err_count() - ecx.resolve_err_count > err_count {
692700
ecx.parse_sess.span_diagnostic.abort_if_errors();
693701
}

src/librustc_incremental/calculate_svh/svh_visitor.rs

-24
Original file line numberDiff line numberDiff line change
@@ -1044,26 +1044,6 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
10441044
self.hash_token_tree(sub_tt);
10451045
}
10461046
}
1047-
tokenstream::TokenTree::Sequence(span, ref sequence_repetition) => {
1048-
hash_span!(self, span);
1049-
let tokenstream::SequenceRepetition {
1050-
ref tts,
1051-
ref separator,
1052-
op,
1053-
num_captures,
1054-
} = **sequence_repetition;
1055-
1056-
tts.len().hash(self.st);
1057-
for sub_tt in tts {
1058-
self.hash_token_tree(sub_tt);
1059-
}
1060-
self.hash_discriminant(separator);
1061-
if let Some(ref separator) = *separator {
1062-
self.hash_token(separator, span);
1063-
}
1064-
op.hash(self.st);
1065-
num_captures.hash(self.st);
1066-
}
10671047
}
10681048
}
10691049

@@ -1129,10 +1109,6 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
11291109
token::Token::Ident(ident) |
11301110
token::Token::Lifetime(ident) |
11311111
token::Token::SubstNt(ident) => ident.name.as_str().hash(self.st),
1132-
token::Token::MatchNt(ident1, ident2) => {
1133-
ident1.name.as_str().hash(self.st);
1134-
ident2.name.as_str().hash(self.st);
1135-
}
11361112

11371113
token::Token::Interpolated(ref non_terminal) => {
11381114
// FIXME(mw): This could be implemented properly. It's just a

src/librustc_lint/lib.rs

+4
Original file line numberDiff line numberDiff line change
@@ -247,6 +247,10 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) {
247247
id: LintId::of(LEGACY_CONSTRUCTOR_VISIBILITY),
248248
reference: "issue #39207 <https://github.com/rust-lang/rust/issues/39207>",
249249
},
250+
FutureIncompatibleInfo {
251+
id: LintId::of(MISSING_FRAGMENT_SPECIFIER),
252+
reference: "issue #40107 <https://github.com/rust-lang/rust/issues/40107>",
253+
},
250254
]);
251255

252256
// Register renamed and removed lints

src/librustc_save_analysis/span_utils.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -287,7 +287,7 @@ impl<'a> SpanUtils<'a> {
287287
let mut toks = toks.parse_all_token_trees().unwrap().into_iter();
288288
let mut prev = toks.next().unwrap();
289289

290-
let first_span = prev.get_span();
290+
let first_span = prev.span();
291291
let mut angle_count = 0;
292292
for tok in toks {
293293
if let TokenTree::Token(_, ref tok) = prev {
@@ -305,10 +305,10 @@ impl<'a> SpanUtils<'a> {
305305
continue;
306306
}
307307
if let TokenTree::Token(_, token::Semi) = tok {
308-
return self.snippet(mk_sp(first_span.lo, prev.get_span().hi));
308+
return self.snippet(mk_sp(first_span.lo, prev.span().hi));
309309
} else if let TokenTree::Delimited(_, ref d) = tok {
310310
if d.delim == token::Brace {
311-
return self.snippet(mk_sp(first_span.lo, prev.get_span().hi));
311+
return self.snippet(mk_sp(first_span.lo, prev.span().hi));
312312
}
313313
}
314314
prev = tok;

src/librustdoc/html/highlight.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -315,7 +315,7 @@ impl<'a> Classifier<'a> {
315315
token::Lifetime(..) => Class::Lifetime,
316316

317317
token::Underscore | token::Eof | token::Interpolated(..) |
318-
token::MatchNt(..) | token::SubstNt(..) | token::Tilde | token::At => Class::None,
318+
token::SubstNt(..) | token::Tilde | token::At => Class::None,
319319
};
320320

321321
// Anything that didn't return above is the simple case where we the

src/librustdoc/visit_ast.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -211,7 +211,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
211211
};
212212

213213
// FIXME(jseyfried) merge with `self.visit_macro()`
214-
let matchers = def.body.chunks(4).map(|arm| arm[0].get_span()).collect();
214+
let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
215215
om.macros.push(Macro {
216216
def_id: def_id,
217217
attrs: def.attrs.clone().into(),
@@ -521,7 +521,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
521521
// convert each exported_macro into a doc item
522522
fn visit_local_macro(&self, def: &hir::MacroDef) -> Macro {
523523
// Extract the spans of all matchers. They represent the "interface" of the macro.
524-
let matchers = def.body.chunks(4).map(|arm| arm[0].get_span()).collect();
524+
let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
525525

526526
Macro {
527527
def_id: self.cx.tcx.hir.local_def_id(def.id),

src/libsyntax/ext/quote.rs

+45-105
Original file line numberDiff line numberDiff line change
@@ -14,10 +14,9 @@ use ext::base::ExtCtxt;
1414
use ext::base;
1515
use ext::build::AstBuilder;
1616
use parse::parser::{Parser, PathStyle};
17-
use parse::token::*;
1817
use parse::token;
1918
use ptr::P;
20-
use tokenstream::{self, TokenTree};
19+
use tokenstream::TokenTree;
2120

2221

2322
/// Quasiquoting works via token trees.
@@ -356,14 +355,35 @@ pub mod rt {
356355
}
357356

358357
fn parse_tts(&self, s: String) -> Vec<TokenTree> {
359-
panictry!(parse::parse_tts_from_source_str(
360-
"<quote expansion>".to_string(),
361-
s,
362-
self.parse_sess()))
358+
parse::parse_tts_from_source_str("<quote expansion>".to_string(), s, self.parse_sess())
363359
}
364360
}
365361
}
366362

363+
// Replaces `Token::OpenDelim .. Token::CloseDelim` with `TokenTree::Delimited(..)`.
364+
pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
365+
use std::rc::Rc;
366+
use tokenstream::Delimited;
367+
368+
let mut results = Vec::new();
369+
let mut result = Vec::new();
370+
for tree in tts {
371+
match tree {
372+
TokenTree::Token(_, token::OpenDelim(..)) => {
373+
results.push(::std::mem::replace(&mut result, Vec::new()));
374+
}
375+
TokenTree::Token(span, token::CloseDelim(delim)) => {
376+
let tree =
377+
TokenTree::Delimited(span, Rc::new(Delimited { delim: delim, tts: result }));
378+
result = results.pop().unwrap();
379+
result.push(tree);
380+
}
381+
tree @ _ => result.push(tree),
382+
}
383+
}
384+
result
385+
}
386+
367387
// These panicking parsing functions are used by the quote_*!() syntax extensions,
368388
// but shouldn't be used otherwise.
369389
pub fn parse_expr_panic(parser: &mut Parser) -> P<Expr> {
@@ -510,20 +530,6 @@ pub fn expand_quote_path(cx: &mut ExtCtxt,
510530
base::MacEager::expr(expanded)
511531
}
512532

513-
pub fn expand_quote_matcher(cx: &mut ExtCtxt,
514-
sp: Span,
515-
tts: &[TokenTree])
516-
-> Box<base::MacResult+'static> {
517-
let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
518-
let mut vector = mk_stmts_let(cx, sp);
519-
vector.extend(statements_mk_tts(cx, &tts[..], true));
520-
vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
521-
let block = cx.expr_block(cx.block(sp, vector));
522-
523-
let expanded = expand_wrapper(cx, sp, cx_expr, block, &[&["syntax", "ext", "quote", "rt"]]);
524-
base::MacEager::expr(expanded)
525-
}
526-
527533
fn ids_ext(strs: Vec<String>) -> Vec<ast::Ident> {
528534
strs.iter().map(|s| ast::Ident::from_str(s)).collect()
529535
}
@@ -669,12 +675,6 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
669675
vec![mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))]);
670676
}
671677

672-
token::MatchNt(name, kind) => {
673-
return cx.expr_call(sp,
674-
mk_token_path(cx, sp, "MatchNt"),
675-
vec![mk_ident(cx, sp, name), mk_ident(cx, sp, kind)]);
676-
}
677-
678678
token::Interpolated(_) => panic!("quote! with interpolated token"),
679679

680680
_ => ()
@@ -712,9 +712,9 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
712712
mk_token_path(cx, sp, name)
713713
}
714714

715-
fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stmt> {
715+
fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt> {
716716
match *tt {
717-
TokenTree::Token(sp, SubstNt(ident)) => {
717+
TokenTree::Token(sp, token::Ident(ident)) if quoted => {
718718
// tt.extend($ident.to_tokens(ext_cx))
719719

720720
let e_to_toks =
@@ -733,13 +733,6 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm
733733

734734
vec![cx.stmt_expr(e_push)]
735735
}
736-
ref tt @ TokenTree::Token(_, MatchNt(..)) if !matcher => {
737-
let mut seq = vec![];
738-
for i in 0..tt.len() {
739-
seq.push(tt.get_tt(i));
740-
}
741-
statements_mk_tts(cx, &seq[..], matcher)
742-
}
743736
TokenTree::Token(sp, ref tok) => {
744737
let e_sp = cx.expr_ident(sp, id_ext("_sp"));
745738
let e_tok = cx.expr_call(sp,
@@ -753,77 +746,17 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm
753746
vec![cx.stmt_expr(e_push)]
754747
},
755748
TokenTree::Delimited(span, ref delimed) => {
756-
statements_mk_tt(cx, &delimed.open_tt(span), matcher).into_iter()
757-
.chain(delimed.tts.iter()
758-
.flat_map(|tt| statements_mk_tt(cx, tt, matcher)))
759-
.chain(statements_mk_tt(cx, &delimed.close_tt(span), matcher))
760-
.collect()
761-
},
762-
TokenTree::Sequence(sp, ref seq) => {
763-
if !matcher {
764-
panic!("TokenTree::Sequence in quote!");
765-
}
766-
767-
let e_sp = cx.expr_ident(sp, id_ext("_sp"));
768-
769-
let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
770-
let mut tts_stmts = vec![stmt_let_tt];
771-
tts_stmts.extend(statements_mk_tts(cx, &seq.tts[..], matcher));
772-
tts_stmts.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
773-
let e_tts = cx.expr_block(cx.block(sp, tts_stmts));
774-
775-
let e_separator = match seq.separator {
776-
Some(ref sep) => cx.expr_some(sp, expr_mk_token(cx, sp, sep)),
777-
None => cx.expr_none(sp),
778-
};
779-
let e_op = match seq.op {
780-
tokenstream::KleeneOp::ZeroOrMore => "ZeroOrMore",
781-
tokenstream::KleeneOp::OneOrMore => "OneOrMore",
782-
};
783-
let e_op_idents = vec![
784-
id_ext("syntax"),
785-
id_ext("tokenstream"),
786-
id_ext("KleeneOp"),
787-
id_ext(e_op),
788-
];
789-
let e_op = cx.expr_path(cx.path_global(sp, e_op_idents));
790-
let fields = vec![cx.field_imm(sp, id_ext("tts"), e_tts),
791-
cx.field_imm(sp, id_ext("separator"), e_separator),
792-
cx.field_imm(sp, id_ext("op"), e_op),
793-
cx.field_imm(sp, id_ext("num_captures"),
794-
cx.expr_usize(sp, seq.num_captures))];
795-
let seq_path = vec![id_ext("syntax"),
796-
id_ext("tokenstream"),
797-
id_ext("SequenceRepetition")];
798-
let e_seq_struct = cx.expr_struct(sp, cx.path_global(sp, seq_path), fields);
799-
let e_rc_new = cx.expr_call_global(sp, vec![id_ext("std"),
800-
id_ext("rc"),
801-
id_ext("Rc"),
802-
id_ext("new")],
803-
vec![e_seq_struct]);
804-
let e_tok = cx.expr_call(sp,
805-
mk_tt_path(cx, sp, "Sequence"),
806-
vec![e_sp, e_rc_new]);
807-
let e_push =
808-
cx.expr_method_call(sp,
809-
cx.expr_ident(sp, id_ext("tt")),
810-
id_ext("push"),
811-
vec![e_tok]);
812-
vec![cx.stmt_expr(e_push)]
749+
let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false);
750+
stmts.extend(statements_mk_tts(cx, &delimed.tts));
751+
stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false));
752+
stmts
813753
}
814754
}
815755
}
816756

817757
fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[TokenTree])
818758
-> (P<ast::Expr>, Vec<TokenTree>) {
819-
// NB: It appears that the main parser loses its mind if we consider
820-
// $foo as a SubstNt during the main parse, so we have to re-parse
821-
// under quote_depth > 0. This is silly and should go away; the _guess_ is
822-
// it has to do with transition away from supporting old-style macros, so
823-
// try removing it when enough of them are gone.
824-
825759
let mut p = cx.new_parser_from_tts(tts);
826-
p.quote_depth += 1;
827760

828761
let cx_expr = panictry!(p.parse_expr());
829762
if !p.eat(&token::Comma) {
@@ -877,24 +810,31 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<ast::Stmt> {
877810
vec![stmt_let_sp, stmt_let_tt]
878811
}
879812

880-
fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree], matcher: bool) -> Vec<ast::Stmt> {
813+
fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree]) -> Vec<ast::Stmt> {
881814
let mut ss = Vec::new();
815+
let mut quoted = false;
882816
for tt in tts {
883-
ss.extend(statements_mk_tt(cx, tt, matcher));
817+
quoted = match *tt {
818+
TokenTree::Token(_, token::Dollar) if !quoted => true,
819+
_ => {
820+
ss.extend(statements_mk_tt(cx, tt, quoted));
821+
false
822+
}
823+
}
884824
}
885825
ss
886826
}
887827

888-
fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree])
889-
-> (P<ast::Expr>, P<ast::Expr>) {
828+
fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) -> (P<ast::Expr>, P<ast::Expr>) {
890829
let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
891830

892831
let mut vector = mk_stmts_let(cx, sp);
893-
vector.extend(statements_mk_tts(cx, &tts[..], false));
832+
vector.extend(statements_mk_tts(cx, &tts[..]));
894833
vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
895834
let block = cx.expr_block(cx.block(sp, vector));
835+
let unflatten = vec![id_ext("syntax"), id_ext("ext"), id_ext("quote"), id_ext("unflatten")];
896836

897-
(cx_expr, block)
837+
(cx_expr, cx.expr_call_global(sp, unflatten, vec![block]))
898838
}
899839

900840
fn expand_wrapper(cx: &ExtCtxt,

0 commit comments

Comments (0)