Simplify TokenTree and fix macro_rules! bugs #39419

Merged 10 commits on Mar 1, 2017

src/libproc_macro/lib.rs (1 addition, 2 deletions)

@@ -173,8 +173,7 @@ impl FromStr for TokenStream {
         __internal::with_parse_sess(|sess| {
             let src = src.to_string();
             let name = "<proc-macro source code>".to_string();
-            let tts = try!(parse::parse_tts_from_source_str(name, src, sess)
-                .map_err(parse_to_lex_err));
+            let tts = parse::parse_tts_from_source_str(name, src, sess);

             Ok(__internal::token_stream_wrap(tts.into_iter().collect()))
         })
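
For context (not part of the diff): the hunk above touches the FromStr impl that backs `TokenStream::from_str`. A minimal sketch of a proc-macro crate that reaches this code path; the derive name `Dummy` and the emitted item are made up for illustration, and the crate would need `proc-macro = true` in its manifest.

    // Hypothetical proc-macro crate; `Dummy` and the generated item are
    // illustrative only. `TokenStream::from_str` runs the parse changed above.
    extern crate proc_macro;

    use std::str::FromStr;
    use proc_macro::TokenStream;

    #[proc_macro_derive(Dummy)]
    pub fn dummy(_input: TokenStream) -> TokenStream {
        TokenStream::from_str("fn answer() -> u32 { 42 }").unwrap()
    }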

src/libproc_macro_plugin/qquote.rs (0 additions, 1 deletion)

@@ -119,7 +119,6 @@ impl Quote for TokenTree {
                 ::syntax::tokenstream::TokenTree::Delimited(::syntax::ext::quote::rt::DUMMY_SP,
                                                             (quote delimited))
             },
-            _ => panic!("unexpected `TokenTree::Sequence` in `qquote`"),
         }
     }
 }

src/librustc/lint/builtin.rs (7 additions, 0 deletions)

@@ -236,6 +236,12 @@ declare_lint! {
     "detects use of struct constructors that would be invisible with new visibility rules"
 }

+declare_lint! {
+    pub MISSING_FRAGMENT_SPECIFIER,
+    Warn,
+    "detects missing fragment specifiers in unused `macro_rules!` patterns"
+}
+
 declare_lint! {
     pub DEPRECATED,
     Warn,
@@ -286,6 +292,7 @@ impl LintPass for HardwiredLints {
             LEGACY_DIRECTORY_OWNERSHIP,
             LEGACY_IMPORTS,
             LEGACY_CONSTRUCTOR_VISIBILITY,
+            MISSING_FRAGMENT_SPECIFIER,
             DEPRECATED
         )
     }
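
For illustration (not part of the diff), a minimal example of what the new lint catches: a matcher metavariable with no fragment specifier, in a macro arm that is never expanded.

    // `$e` lacks a fragment specifier such as `:expr` or `:ty`. Older
    // compilers accepted this silently as long as the arm went unused;
    // it now triggers the MISSING_FRAGMENT_SPECIFIER warning.
    macro_rules! m {
        ($e) => {};
    }

    fn main() {}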

src/librustc_driver/driver.rs (8 additions, 0 deletions)

@@ -688,6 +688,14 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,

     let krate = ecx.monotonic_expander().expand_crate(krate);

+    let mut missing_fragment_specifiers: Vec<_> =
+        ecx.parse_sess.missing_fragment_specifiers.borrow().iter().cloned().collect();
+    missing_fragment_specifiers.sort();
+    for span in missing_fragment_specifiers {
+        let lint = lint::builtin::MISSING_FRAGMENT_SPECIFIER;
+        let msg = "missing fragment specifier".to_string();
+        sess.add_lint(lint, ast::CRATE_NODE_ID, span, msg);
+    }
     if ecx.parse_sess.span_diagnostic.err_count() - ecx.resolve_err_count > err_count {
         ecx.parse_sess.span_diagnostic.abort_if_errors();
     }
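
Aside (not part of the diff): a standalone sketch of the reporting pattern above, using simplified stand-in types rather than compiler internals. The spans are drained from a shared set and sorted so diagnostics come out in a deterministic order.

    use std::cell::RefCell;
    use std::collections::HashSet;

    // Simplified stand-in for a source span: (lo, hi) byte offsets.
    type Span = (u32, u32);

    fn report_missing_fragment_specifiers(missing: &RefCell<HashSet<Span>>) {
        // HashSet iteration order is unstable, so sort before reporting.
        let mut spans: Vec<Span> = missing.borrow().iter().cloned().collect();
        spans.sort();
        for (lo, hi) in spans {
            println!("warning: missing fragment specifier at bytes {}..{}", lo, hi);
        }
    }

    fn main() {
        let missing: RefCell<HashSet<Span>> =
            RefCell::new([(10, 12), (3, 5)].iter().cloned().collect());
        report_missing_fragment_specifiers(&missing);
    }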

src/librustc_incremental/calculate_svh/svh_visitor.rs (0 additions, 24 deletions)

@@ -1044,26 +1044,6 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
                     self.hash_token_tree(sub_tt);
                 }
             }
-            tokenstream::TokenTree::Sequence(span, ref sequence_repetition) => {
-                hash_span!(self, span);
-                let tokenstream::SequenceRepetition {
-                    ref tts,
-                    ref separator,
-                    op,
-                    num_captures,
-                } = **sequence_repetition;
-
-                tts.len().hash(self.st);
-                for sub_tt in tts {
-                    self.hash_token_tree(sub_tt);
-                }
-                self.hash_discriminant(separator);
-                if let Some(ref separator) = *separator {
-                    self.hash_token(separator, span);
-                }
-                op.hash(self.st);
-                num_captures.hash(self.st);
-            }
         }
     }

@@ -1129,10 +1109,6 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
             token::Token::Ident(ident) |
             token::Token::Lifetime(ident) |
             token::Token::SubstNt(ident) => ident.name.as_str().hash(self.st),
-            token::Token::MatchNt(ident1, ident2) => {
-                ident1.name.as_str().hash(self.st);
-                ident2.name.as_str().hash(self.st);
-            }

             token::Token::Interpolated(ref non_terminal) => {
                 // FIXME(mw): This could be implemented properly. It's just a

src/librustc_lint/lib.rs (4 additions, 0 deletions)

@@ -247,6 +247,10 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) {
             id: LintId::of(LEGACY_CONSTRUCTOR_VISIBILITY),
             reference: "issue #39207 <https://github.com/rust-lang/rust/issues/39207>",
         },
+        FutureIncompatibleInfo {
+            id: LintId::of(MISSING_FRAGMENT_SPECIFIER),
+            reference: "issue #40107 <https://github.com/rust-lang/rust/issues/40107>",
+        },
     ]);

     // Register renamed and removed lints

src/librustc_save_analysis/span_utils.rs (3 additions, 3 deletions)

@@ -287,7 +287,7 @@ impl<'a> SpanUtils<'a> {
         let mut toks = toks.parse_all_token_trees().unwrap().into_iter();
         let mut prev = toks.next().unwrap();

-        let first_span = prev.get_span();
+        let first_span = prev.span();
         let mut angle_count = 0;
         for tok in toks {
             if let TokenTree::Token(_, ref tok) = prev {
@@ -305,10 +305,10 @@ impl<'a> SpanUtils<'a> {
                 continue;
             }
             if let TokenTree::Token(_, token::Semi) = tok {
-                return self.snippet(mk_sp(first_span.lo, prev.get_span().hi));
+                return self.snippet(mk_sp(first_span.lo, prev.span().hi));
             } else if let TokenTree::Delimited(_, ref d) = tok {
                 if d.delim == token::Brace {
-                    return self.snippet(mk_sp(first_span.lo, prev.get_span().hi));
+                    return self.snippet(mk_sp(first_span.lo, prev.span().hi));
                 }
             }
             prev = tok;

src/librustdoc/html/highlight.rs (1 addition, 1 deletion)

@@ -315,7 +315,7 @@ impl<'a> Classifier<'a> {
             token::Lifetime(..) => Class::Lifetime,

             token::Underscore | token::Eof | token::Interpolated(..) |
-            token::MatchNt(..) | token::SubstNt(..) | token::Tilde | token::At => Class::None,
+            token::SubstNt(..) | token::Tilde | token::At => Class::None,
         };

         // Anything that didn't return above is the simple case where we the

src/librustdoc/visit_ast.rs (2 additions, 2 deletions)

@@ -211,7 +211,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
         };

         // FIXME(jseyfried) merge with `self.visit_macro()`
-        let matchers = def.body.chunks(4).map(|arm| arm[0].get_span()).collect();
+        let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
         om.macros.push(Macro {
             def_id: def_id,
             attrs: def.attrs.clone().into(),
@@ -521,7 +521,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
     // convert each exported_macro into a doc item
     fn visit_local_macro(&self, def: &hir::MacroDef) -> Macro {
         // Extract the spans of all matchers. They represent the "interface" of the macro.
-        let matchers = def.body.chunks(4).map(|arm| arm[0].get_span()).collect();
+        let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();

         Macro {
             def_id: self.cx.tcx.hir.local_def_id(def.id),

src/libsyntax/ext/quote.rs (45 additions, 105 deletions)

@@ -14,10 +14,9 @@ use ext::base::ExtCtxt;
 use ext::base;
 use ext::build::AstBuilder;
 use parse::parser::{Parser, PathStyle};
-use parse::token::*;
 use parse::token;
 use ptr::P;
-use tokenstream::{self, TokenTree};
+use tokenstream::TokenTree;


 /// Quasiquoting works via token trees.
@@ -356,14 +355,35 @@ pub mod rt {
         }

         fn parse_tts(&self, s: String) -> Vec<TokenTree> {
-            panictry!(parse::parse_tts_from_source_str(
-                "<quote expansion>".to_string(),
-                s,
-                self.parse_sess()))
+            parse::parse_tts_from_source_str("<quote expansion>".to_string(), s, self.parse_sess())
         }
     }
 }

+// Replaces `Token::OpenDelim .. Token::CloseDelim` with `TokenTree::Delimited(..)`.
+pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
+    use std::rc::Rc;
+    use tokenstream::Delimited;
+
+    let mut results = Vec::new();
+    let mut result = Vec::new();
+    for tree in tts {
+        match tree {
+            TokenTree::Token(_, token::OpenDelim(..)) => {
+                results.push(::std::mem::replace(&mut result, Vec::new()));
+            }
+            TokenTree::Token(span, token::CloseDelim(delim)) => {
+                let tree =
+                    TokenTree::Delimited(span, Rc::new(Delimited { delim: delim, tts: result }));
+                result = results.pop().unwrap();
+                result.push(tree);
+            }
+            tree @ _ => result.push(tree),
+        }
+    }
+    result
+}
+
 // These panicking parsing functions are used by the quote_*!() syntax extensions,
 // but shouldn't be used otherwise.
 pub fn parse_expr_panic(parser: &mut Parser) -> P<Expr> {
@@ -510,20 +530,6 @@ pub fn expand_quote_path(cx: &mut ExtCtxt,
     base::MacEager::expr(expanded)
 }

-pub fn expand_quote_matcher(cx: &mut ExtCtxt,
-                            sp: Span,
-                            tts: &[TokenTree])
-                            -> Box<base::MacResult+'static> {
-    let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
-    let mut vector = mk_stmts_let(cx, sp);
-    vector.extend(statements_mk_tts(cx, &tts[..], true));
-    vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
-    let block = cx.expr_block(cx.block(sp, vector));
-
-    let expanded = expand_wrapper(cx, sp, cx_expr, block, &[&["syntax", "ext", "quote", "rt"]]);
-    base::MacEager::expr(expanded)
-}
-
 fn ids_ext(strs: Vec<String>) -> Vec<ast::Ident> {
     strs.iter().map(|s| ast::Ident::from_str(s)).collect()
 }
@@ -669,12 +675,6 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
                                 vec![mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))]);
         }

-        token::MatchNt(name, kind) => {
-            return cx.expr_call(sp,
-                                mk_token_path(cx, sp, "MatchNt"),
-                                vec![mk_ident(cx, sp, name), mk_ident(cx, sp, kind)]);
-        }
-
         token::Interpolated(_) => panic!("quote! with interpolated token"),

         _ => ()
@@ -712,9 +712,9 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
     mk_token_path(cx, sp, name)
 }

-fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stmt> {
+fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt> {
     match *tt {
-        TokenTree::Token(sp, SubstNt(ident)) => {
+        TokenTree::Token(sp, token::Ident(ident)) if quoted => {
             // tt.extend($ident.to_tokens(ext_cx))

             let e_to_toks =
@@ -733,13 +733,6 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stmt> {

             vec![cx.stmt_expr(e_push)]
         }
-        ref tt @ TokenTree::Token(_, MatchNt(..)) if !matcher => {
-            let mut seq = vec![];
-            for i in 0..tt.len() {
-                seq.push(tt.get_tt(i));
-            }
-            statements_mk_tts(cx, &seq[..], matcher)
-        }
         TokenTree::Token(sp, ref tok) => {
             let e_sp = cx.expr_ident(sp, id_ext("_sp"));
             let e_tok = cx.expr_call(sp,
Expand All @@ -753,77 +746,17 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm
vec![cx.stmt_expr(e_push)]
},
TokenTree::Delimited(span, ref delimed) => {
statements_mk_tt(cx, &delimed.open_tt(span), matcher).into_iter()
.chain(delimed.tts.iter()
.flat_map(|tt| statements_mk_tt(cx, tt, matcher)))
.chain(statements_mk_tt(cx, &delimed.close_tt(span), matcher))
.collect()
},
TokenTree::Sequence(sp, ref seq) => {
if !matcher {
panic!("TokenTree::Sequence in quote!");
}

let e_sp = cx.expr_ident(sp, id_ext("_sp"));

let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
let mut tts_stmts = vec![stmt_let_tt];
tts_stmts.extend(statements_mk_tts(cx, &seq.tts[..], matcher));
tts_stmts.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
let e_tts = cx.expr_block(cx.block(sp, tts_stmts));

let e_separator = match seq.separator {
Some(ref sep) => cx.expr_some(sp, expr_mk_token(cx, sp, sep)),
None => cx.expr_none(sp),
};
let e_op = match seq.op {
tokenstream::KleeneOp::ZeroOrMore => "ZeroOrMore",
tokenstream::KleeneOp::OneOrMore => "OneOrMore",
};
let e_op_idents = vec![
id_ext("syntax"),
id_ext("tokenstream"),
id_ext("KleeneOp"),
id_ext(e_op),
];
let e_op = cx.expr_path(cx.path_global(sp, e_op_idents));
let fields = vec![cx.field_imm(sp, id_ext("tts"), e_tts),
cx.field_imm(sp, id_ext("separator"), e_separator),
cx.field_imm(sp, id_ext("op"), e_op),
cx.field_imm(sp, id_ext("num_captures"),
cx.expr_usize(sp, seq.num_captures))];
let seq_path = vec![id_ext("syntax"),
id_ext("tokenstream"),
id_ext("SequenceRepetition")];
let e_seq_struct = cx.expr_struct(sp, cx.path_global(sp, seq_path), fields);
let e_rc_new = cx.expr_call_global(sp, vec![id_ext("std"),
id_ext("rc"),
id_ext("Rc"),
id_ext("new")],
vec![e_seq_struct]);
let e_tok = cx.expr_call(sp,
mk_tt_path(cx, sp, "Sequence"),
vec![e_sp, e_rc_new]);
let e_push =
cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext("tt")),
id_ext("push"),
vec![e_tok]);
vec![cx.stmt_expr(e_push)]
let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false);
stmts.extend(statements_mk_tts(cx, &delimed.tts));
stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false));
stmts
}
}
}

fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[TokenTree])
-> (P<ast::Expr>, Vec<TokenTree>) {
// NB: It appears that the main parser loses its mind if we consider
// $foo as a SubstNt during the main parse, so we have to re-parse
// under quote_depth > 0. This is silly and should go away; the _guess_ is
// it has to do with transition away from supporting old-style macros, so
// try removing it when enough of them are gone.

let mut p = cx.new_parser_from_tts(tts);
p.quote_depth += 1;

let cx_expr = panictry!(p.parse_expr());
if !p.eat(&token::Comma) {
@@ -877,24 +810,31 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<ast::Stmt> {
     vec![stmt_let_sp, stmt_let_tt]
 }

-fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree], matcher: bool) -> Vec<ast::Stmt> {
+fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree]) -> Vec<ast::Stmt> {
     let mut ss = Vec::new();
+    let mut quoted = false;
     for tt in tts {
-        ss.extend(statements_mk_tt(cx, tt, matcher));
+        quoted = match *tt {
+            TokenTree::Token(_, token::Dollar) if !quoted => true,
+            _ => {
+                ss.extend(statements_mk_tt(cx, tt, quoted));
+                false
+            }
+        };
     }
     ss
 }

-fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree])
-              -> (P<ast::Expr>, P<ast::Expr>) {
+fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) -> (P<ast::Expr>, P<ast::Expr>) {
     let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);

     let mut vector = mk_stmts_let(cx, sp);
-    vector.extend(statements_mk_tts(cx, &tts[..], false));
+    vector.extend(statements_mk_tts(cx, &tts[..]));
     vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
     let block = cx.expr_block(cx.block(sp, vector));
+    let unflatten = vec![id_ext("syntax"), id_ext("ext"), id_ext("quote"), id_ext("unflatten")];

-    (cx_expr, block)
+    (cx_expr, cx.expr_call_global(sp, unflatten, vec![block]))
 }

 fn expand_wrapper(cx: &ExtCtxt,
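
Aside (not part of the diff): the new `unflatten` pass rebuilds nested `Delimited` trees from a flat token sequence using a stack of partial results. Below is a self-contained sketch of the same idea over simplified token and tree types, which stand in for libsyntax's `Token` and `TokenTree`.

    #[derive(Debug)]
    enum Tok {
        Open(char),   // an opening delimiter such as '(' or '{'
        Close(char),  // the matching closing delimiter
        Other(char),  // any non-delimiter token
    }

    #[derive(Debug)]
    enum Tree {
        Token(char),
        Delimited(char, Vec<Tree>),
    }

    // On an open delimiter, stash the trees built so far and start a fresh
    // level; on a close delimiter, wrap the current level in `Delimited`
    // and pop back out to the enclosing level.
    fn unflatten(toks: Vec<Tok>) -> Vec<Tree> {
        let mut stack = Vec::new();
        let mut result = Vec::new();
        for tok in toks {
            match tok {
                Tok::Open(_) => stack.push(std::mem::replace(&mut result, Vec::new())),
                Tok::Close(delim) => {
                    let tree = Tree::Delimited(delim, result);
                    result = stack.pop().expect("unbalanced delimiters");
                    result.push(tree);
                }
                Tok::Other(c) => result.push(Tree::Token(c)),
            }
        }
        result
    }

    fn main() {
        // a ( b c ) d  =>  [Token('a'), Delimited('(', [Token('b'), Token('c')]), Token('d')]
        let toks = vec![
            Tok::Other('a'), Tok::Open('('), Tok::Other('b'),
            Tok::Other('c'), Tok::Close(')'), Tok::Other('d'),
        ];
        println!("{:#?}", unflatten(toks));
    }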