diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 584b9455a93ad..00dc3c7e5eee0 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -677,7 +677,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { IdentTT(ref expander, tt_span, allow_internal_unstable) => { if ident.name == keywords::Invalid.name() { self.cx.span_err(path.span, - &format!("macro {}! expects an ident argument", path)); + &format!("macro {}! expects an ident argument", path)); self.cx.trace_macros_diag(); kind.dummy(span) } else { diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 71634ada89458..0e731ccf3a93d 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -332,7 +332,8 @@ fn nameize>( } } } - TokenTree::MetaVar(..) | TokenTree::Token(..) => (), + TokenTree::MetaVar(..) | + TokenTree::Token(..) => (), } Ok(()) @@ -540,7 +541,7 @@ fn inner_parse_loop( // // At the beginning of the loop, if we reach the end of the delimited submatcher, // we pop the stack to backtrack out of the descent. - seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => { + seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..), _) => { let lower_elts = mem::replace(&mut item.top_elts, Tt(seq)); let idx = item.idx; item.stack.push(MatcherTtFrame { @@ -552,7 +553,7 @@ fn inner_parse_loop( } // We just matched a normal token. We can just advance the parser. - TokenTree::Token(_, ref t) if token_name_eq(t, token) => { + TokenTree::Token(_, ref t, _) if token_name_eq(t, token) => { item.idx += 1; next_items.push(item); } @@ -561,7 +562,8 @@ fn inner_parse_loop( // rules. NOTE that this is not necessarily an error unless _all_ items in // `cur_items` end up doing this. There may still be some other matchers that do // end up working out. - TokenTree::Token(..) | TokenTree::MetaVar(..) => {} + TokenTree::Token(..) | + TokenTree::MetaVar(..) 
=> {} } } } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index ffe68289d5224..a29d1dd0e5f8b 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -121,7 +121,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, }; let rhs_spans = rhs.iter().map(|t| t.span()).collect::>(); - // rhs has holes ( `$id` and `$(...)` that need filled) + // rhs has holes ( `$id` and `$(...)` which need to be filled) let mut tts = transcribe(cx, Some(named_matches), rhs); // Replace all the tokens for the corresponding positions in the macro, to maintain @@ -202,7 +202,7 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item) -> Syntax quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition { tts: vec![ quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")), - quoted::TokenTree::Token(DUMMY_SP, token::FatArrow), + quoted::TokenTree::Token(DUMMY_SP, token::FatArrow, quoted::TokenHygiene::DefSite), quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")), ], separator: Some(if body.legacy { token::Semi } else { token::Comma }), @@ -211,7 +211,8 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item) -> Syntax })), // to phase into semicolon-termination instead of semicolon-separation quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition { - tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)], + tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi, + quoted::TokenHygiene::DefSite)], separator: None, op: quoted::KleeneOp::ZeroOrMore, num_captures: 0 @@ -237,7 +238,12 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item) -> Syntax s.iter().map(|m| { if let MatchedNonterminal(ref nt) = *m { if let NtTT(ref tt) = **nt { - let tt = quoted::parse(tt.clone().into(), true, sess, features, &def.attrs) + let tt = quoted::parse(tt.clone().into(), + false, + true, + sess, + features, + &def.attrs) .pop().unwrap(); valid &= check_lhs_nt_follows(sess, features, &def.attrs, &tt); return tt; @@ -254,7 +260,12 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item) -> Syntax s.iter().map(|m| { if let MatchedNonterminal(ref nt) = *m { if let NtTT(ref tt) = **nt { - return quoted::parse(tt.clone().into(), false, sess, features, &def.attrs) + return quoted::parse(tt.clone().into(), + !body.legacy && features.macro_hygiene_optout, + false, + sess, + features, + &def.attrs) .pop().unwrap(); } } @@ -328,7 +339,9 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool { use self::quoted::TokenTree; for tt in tts { match *tt { - TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => (), + TokenTree::Token(..) | + TokenTree::MetaVar(..) | + TokenTree::MetaVarDecl(..) => (), TokenTree::Delimited(_, ref del) => if !check_lhs_no_empty_seq(sess, &del.tts) { return false; }, @@ -410,7 +423,9 @@ impl FirstSets { let mut first = TokenSet::empty(); for tt in tts.iter().rev() { match *tt { - TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => { + TokenTree::Token(..) | + TokenTree::MetaVar(..) | + TokenTree::MetaVarDecl(..) 
=> { first.replace_with(tt.clone()); } TokenTree::Delimited(span, ref delimited) => { @@ -440,7 +455,8 @@ impl FirstSets { if let (Some(ref sep), true) = (seq_rep.separator.clone(), subfirst.maybe_empty) { - first.add_one_maybe(TokenTree::Token(sp, sep.clone())); + first.add_one_maybe(TokenTree::Token(sp, sep.clone(), + quoted::TokenHygiene::DefSite)); } // Reverse scan: Sequence comes before `first`. @@ -470,7 +486,9 @@ impl FirstSets { for tt in tts.iter() { assert!(first.maybe_empty); match *tt { - TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => { + TokenTree::Token(..) | + TokenTree::MetaVar(..) | + TokenTree::MetaVarDecl(..) => { first.add_one(tt.clone()); return first; } @@ -487,7 +505,8 @@ impl FirstSets { if let (Some(ref sep), true) = (seq_rep.separator.clone(), subfirst.maybe_empty) { - first.add_one_maybe(TokenTree::Token(sp, sep.clone())); + first.add_one_maybe(TokenTree::Token(sp, sep.clone(), + quoted::TokenHygiene::DefSite)); } assert!(first.maybe_empty); @@ -641,7 +660,9 @@ fn check_matcher_core(sess: &ParseSess, // First, update `last` so that it corresponds to the set // of NT tokens that might end the sequence `... token`. match *token { - TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => { + TokenTree::Token(..) | + TokenTree::MetaVar(..) | + TokenTree::MetaVarDecl(..) => { let can_be_followed_by_any; if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, attrs, token) { let msg = format!("invalid fragment specifier `{}`", bad_frag); @@ -692,7 +713,8 @@ fn check_matcher_core(sess: &ParseSess, let mut new; let my_suffix = if let Some(ref u) = seq_rep.separator { new = suffix_first.clone(); - new.add_one_maybe(TokenTree::Token(sp, u.clone())); + new.add_one_maybe(TokenTree::Token(sp, u.clone(), + quoted::TokenHygiene::DefSite)); &new } else { &suffix_first @@ -805,7 +827,7 @@ fn frag_can_be_followed_by_any(frag: &str) -> bool { fn is_in_follow(tok: "ed::TokenTree, frag: &str) -> Result { use self::quoted::TokenTree; - if let TokenTree::Token(_, token::CloseDelim(_)) = *tok { + if let TokenTree::Token(_, token::CloseDelim(_), _) = *tok { // closing a token tree can never be matched by any fragment; // iow, we always require that `(` and `)` match, etc. Ok(true) @@ -822,14 +844,14 @@ fn is_in_follow(tok: "ed::TokenTree, frag: &str) -> Result match *tok { - TokenTree::Token(_, ref tok) => match *tok { + TokenTree::Token(_, ref tok, _) => match *tok { FatArrow | Comma | Semi => Ok(true), _ => Ok(false) }, _ => Ok(false), }, "pat" => match *tok { - TokenTree::Token(_, ref tok) => match *tok { + TokenTree::Token(_, ref tok, _) => match *tok { FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true), Ident(i, false) if i.name == "if" || i.name == "in" => Ok(true), _ => Ok(false) @@ -837,7 +859,7 @@ fn is_in_follow(tok: "ed::TokenTree, frag: &str) -> Result Ok(false), }, "path" | "ty" => match *tok { - TokenTree::Token(_, ref tok) => match *tok { + TokenTree::Token(_, ref tok, _) => match *tok { OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) | Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true), Ident(i, false) if i.name == "as" || i.name == "where" => Ok(true), @@ -858,7 +880,7 @@ fn is_in_follow(tok: "ed::TokenTree, frag: &str) -> Result { // Explicitly disallow `priv`, on the off chance it comes back. 
match *tok { - TokenTree::Token(_, ref tok) => match *tok { + TokenTree::Token(_, ref tok, _) => match *tok { Comma => Ok(true), Ident(i, is_raw) if is_raw || i.name != "priv" => Ok(true), ref tok => Ok(tok.can_begin_type()) @@ -931,8 +953,8 @@ fn is_legal_fragment_specifier(sess: &ParseSess, fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String { match *tt { - quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok), - quoted::TokenTree::MetaVar(_, name) => format!("${}", name), + quoted::TokenTree::Token(_, ref tok, _) => ::print::pprust::token_to_string(tok), + quoted::TokenTree::MetaVar(_, name, _) => format!("${}", name), quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind), _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \ in follow set checker"), diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index 77c6afa1c64a6..147c7e2f8af96 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -46,7 +46,7 @@ impl Delimited { } else { span.with_lo(span.lo() + BytePos(self.delim.len() as u32)) }; - TokenTree::Token(open_span, self.open_token()) + TokenTree::Token(open_span, self.open_token(), TokenHygiene::DefSite) } /// Return a `self::TokenTree` with a `Span` corresponding to the closing delimiter. @@ -56,7 +56,7 @@ impl Delimited { } else { span.with_lo(span.hi() - BytePos(self.delim.len() as u32)) }; - TokenTree::Token(close_span, self.close_token()) + TokenTree::Token(close_span, self.close_token(), TokenHygiene::DefSite) } } @@ -80,6 +80,7 @@ pub enum KleeneOp { ZeroOrMore, /// Kleene plus (`+`) for one or more repetitions OneOrMore, + /// Question mark (`?`) for zero or one repetitions ZeroOrOne, } @@ -87,12 +88,12 @@ pub enum KleeneOp { /// are "first-class" token trees. Useful for parsing macros. #[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub enum TokenTree { - Token(Span, token::Token), + Token(Span, token::Token, TokenHygiene), Delimited(Span, Lrc<Delimited>), - /// A kleene-style repetition sequence + /// A Kleene-style repetition sequence Sequence(Span, Lrc<SequenceRepetition>), /// E.g. `$var` - MetaVar(Span, ast::Ident), + MetaVar(Span, ast::Ident, TokenHygiene), /// E.g. `$var:expr`. This is only used in the left hand side of MBE macros. MetaVarDecl( Span, @@ -150,15 +151,22 @@ impl TokenTree { /// Retrieve the `TokenTree`'s span. pub fn span(&self) -> Span { match *self { - TokenTree::Token(sp, _) - | TokenTree::MetaVar(sp, _) - | TokenTree::MetaVarDecl(sp, _, _) - | TokenTree::Delimited(sp, _) - | TokenTree::Sequence(sp, _) => sp, + TokenTree::Token(sp, _, _) | + TokenTree::MetaVar(sp, _, _) | + TokenTree::MetaVarDecl(sp, _, _) | + TokenTree::Delimited(sp, _) | + TokenTree::Sequence(sp, _) => sp, } } } +/// Syntax context to apply to a token when invoking a macro. +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] +pub enum TokenHygiene { + DefSite, + CallSite, +} + /// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this /// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a /// collection of `TokenTree` for use in parsing a macro. @@ -180,6 +188,7 @@ impl TokenTree { /// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
pub fn parse( input: tokenstream::TokenStream, + hygiene_optout: bool, expect_matchers: bool, sess: &ParseSess, features: &Features, @@ -194,9 +203,15 @@ pub fn parse( while let Some(tree) = trees.next() { // Given the parsed tree, if there is a metavar and we are expecting matchers, actually // parse out the matcher (i.e. in `$id:ident` this would parse the `:` and `ident`). - let tree = parse_tree(tree, &mut trees, expect_matchers, sess, features, attrs); + let tree = parse_tree(tree, + &mut trees, + hygiene_optout, + expect_matchers, + sess, + features, + attrs); match tree { - TokenTree::MetaVar(start_sp, ident) if expect_matchers => { + TokenTree::MetaVar(start_sp, ident, _) if expect_matchers => { let span = match trees.next() { Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() { Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() { @@ -248,6 +263,7 @@ pub fn parse( fn parse_tree( tree: tokenstream::TokenTree, trees: &mut Peekable, + hygiene_optout: bool, expect_matchers: bool, sess: &ParseSess, features: &Features, @@ -258,63 +274,58 @@ where { // Depending on what `tree` is, we could be parsing different parts of a macro match tree { - // `tree` is a `$` token. Look at the next token in `trees` - tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() { - // `tree` is followed by a delimited set of token trees. This indicates the beginning - // of a repetition sequence in the macro (e.g. `$(pat)*`). - Some(tokenstream::TokenTree::Delimited(span, delimited)) => { - // Must have `(` not `{` or `[` - if delimited.delim != token::Paren { - let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim)); - let msg = format!("expected `(`, found `{}`", tok); - sess.span_diagnostic.span_err(span, &msg); + // `tree` is `#` token and hygiene opt-out syntax is on. Look at the next token in `trees`. + tokenstream::TokenTree::Token(span, token::Pound) if hygiene_optout => match trees.peek() { + Some(tokenstream::TokenTree::Token(_, token::Dollar)) => { + if let tokenstream::TokenTree::Token(span, token::Dollar) = trees.next().unwrap() { + parse_meta_var(span, + TokenHygiene::CallSite, + trees, + hygiene_optout, + expect_matchers, + sess, + features, + attrs) + } else { + unreachable!(); } - // Parse the contents of the sequence itself - let sequence = parse(delimited.tts.into(), expect_matchers, sess, features, attrs); - // Get the Kleene operator and optional separator - let (separator, op) = parse_sep_and_kleene_op(trees, span, sess, features, attrs); - // Count the number of captured "names" (i.e. named metavars) - let name_captures = macro_parser::count_names(&sequence); - TokenTree::Sequence( - span, - Lrc::new(SequenceRepetition { - tts: sequence, - separator, - op, - num_captures: name_captures, - }), - ) } - // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special - // metavariable that names the crate of the invokation. 
- Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => { - let (ident, is_raw) = token.ident().unwrap(); - let span = ident_span.with_lo(span.lo()); - if ident.name == keywords::Crate.name() && !is_raw { - let ident = ast::Ident::new(keywords::DollarCrate.name(), ident.span); - TokenTree::Token(span, token::Ident(ident, is_raw)) + Some(tokenstream::TokenTree::Token(_, token::Ident(..))) => { + if let tokenstream::TokenTree::Token(span, tok @ token::Ident(..)) = + trees.next().unwrap() { + TokenTree::Token(span, tok, TokenHygiene::CallSite) } else { - TokenTree::MetaVar(span, ident) + unreachable!(); } } - // `tree` is followed by a random token. This is an error. - Some(tokenstream::TokenTree::Token(span, tok)) => { - let msg = format!( - "expected identifier, found `{}`", - pprust::token_to_string(&tok) - ); - sess.span_diagnostic.span_err(span, &msg); - TokenTree::MetaVar(span, keywords::Invalid.ident()) + Some(tokenstream::TokenTree::Token(_, token::Lifetime(..))) => { + if let tokenstream::TokenTree::Token(span, tok @ token::Lifetime(..)) = + trees.next().unwrap() { + TokenTree::Token(span, tok, TokenHygiene::CallSite) + } else { + unreachable!(); + } } - // There are no more tokens. Just return the `$` we already have. - None => TokenTree::Token(span, token::Dollar), - }, + _ => TokenTree::Token(span, token::Pound, TokenHygiene::DefSite), + } + + // `tree` is a `$` token. Look at the next token in `trees`. + tokenstream::TokenTree::Token(span, token::Dollar) => + parse_meta_var(span, + TokenHygiene::DefSite, + trees, + hygiene_optout, + expect_matchers, + sess, + features, + attrs), // `tree` is an arbitrary token. Keep it. - tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok), + tokenstream::TokenTree::Token(span, tok) => + TokenTree::Token(span, tok, TokenHygiene::DefSite), // `tree` is the beginning of a delimited set of tokens (e.g. `(` or `{`). We need to // descend into the delimited set and further parse it. @@ -322,12 +333,93 @@ where span, Lrc::new(Delimited { delim: delimited.delim, - tts: parse(delimited.tts.into(), expect_matchers, sess, features, attrs), + tts: parse(delimited.tts.into(), + hygiene_optout, + expect_matchers, + sess, + features, + attrs), }), ), } } +/// Attempt to parse a single meta variable or meta variable sequence. +fn parse_meta_var( + span: Span, + token_hygiene: TokenHygiene, + trees: &mut Peekable, + hygiene_optout: bool, + expect_matchers: bool, + sess: &ParseSess, + features: &Features, + attrs: &[ast::Attribute], +) -> TokenTree +where + I: Iterator, +{ + match trees.next() { + // `tree` is followed by a delimited set of token trees. This indicates the beginning + // of a repetition sequence in the macro (e.g. `$(pat)*`). + Some(tokenstream::TokenTree::Delimited(span, delimited)) => { + // Must have `(` not `{` or `[` + if delimited.delim != token::Paren { + let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim)); + let msg = format!("expected `(`, found `{}`", tok); + sess.span_diagnostic.span_err(span, &msg); + } + // Parse the contents of the sequence itself + let sequence = parse( + delimited.tts.into(), + hygiene_optout, + expect_matchers, + sess, + features, + attrs + ); + // Get the Kleene operator and optional separator + let (separator, op) = parse_sep_and_kleene_op(trees, span, sess, features, attrs); + // Count the number of captured "names" (i.e. 
named metavars) + let name_captures = macro_parser::count_names(&sequence); + TokenTree::Sequence( + span, + Lrc::new(SequenceRepetition { + tts: sequence, + separator, + op, + num_captures: name_captures, + }), + ) + } + + // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special + // metavariable that names the crate of the invokation. + Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => { + let (ident, is_raw) = token.ident().unwrap(); + let span = ident_span.with_lo(span.lo()); + if ident.name == keywords::Crate.name() && !is_raw { + let ident = ast::Ident::new(keywords::DollarCrate.name(), ident.span); + TokenTree::Token(span, token::Ident(ident, is_raw), token_hygiene) + } else { + TokenTree::MetaVar(span, ident, token_hygiene) + } + } + + // `tree` is followed by an arbitrary token. This is an error. + Some(tokenstream::TokenTree::Token(span, tok)) => { + let msg = format!( + "expected identifier, found `{}`", + pprust::token_to_string(&tok) + ); + sess.span_diagnostic.span_err(span, &msg); + TokenTree::MetaVar(span, keywords::Invalid.ident(), token_hygiene) + } + + // There are no more tokens. Just return the `$` we already have. + None => TokenTree::Token(span, token::Dollar, TokenHygiene::DefSite), + } +} + /// Takes a token and returns `Some(KleeneOp)` if the token is `+` `*` or `?`. Otherwise, return /// `None`. fn kleene_op(token: &token::Token) -> Option { diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 1cdb6b0e5c902..1672333609f69 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -13,9 +13,9 @@ use ext::base::ExtCtxt; use ext::expand::Marker; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use ext::tt::quoted; -use fold::noop_fold_tt; +use fold::{Folder, noop_fold_tt}; use parse::token::{self, Token, NtTT}; -use syntax_pos::{Span, DUMMY_SP}; +use syntax_pos::{Span, SyntaxContext, DUMMY_SP}; use tokenstream::{TokenStream, TokenTree, Delimited}; use util::small_vector::SmallVector; @@ -116,7 +116,7 @@ pub fn transcribe(cx: &ExtCtxt, continue }; - match tree { + match tree.clone() { quoted::TokenTree::Sequence(sp, seq) => { // FIXME(pcwalton): Bad copy. match lockstep_iter_size("ed::TokenTree::Sequence(sp, seq.clone()), @@ -150,7 +150,7 @@ pub fn transcribe(cx: &ExtCtxt, } } // FIXME #2887: think about span stuff here - quoted::TokenTree::MetaVar(mut sp, ident) => { + quoted::TokenTree::MetaVar(mut sp, ident, _escape_hygiene) => { if let Some(cur_matched) = lookup_cur_matched(ident, &interpolations, &repeats) { if let MatchedNonterminal(ref nt) = *cur_matched { if let NtTT(ref tt) = **nt { @@ -177,9 +177,15 @@ pub fn transcribe(cx: &ExtCtxt, stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span }); result_stack.push(mem::replace(&mut result, Vec::new())); } - quoted::TokenTree::Token(sp, tok) => { - let mut marker = Marker(cx.current_expansion.mark); - result.push(noop_fold_tt(TokenTree::Token(sp, tok), &mut marker).into()) + quoted::TokenTree::Token(sp, tok, hygiene) => { + let mut new_tok = match hygiene { + quoted::TokenHygiene::DefSite => noop_fold_tt(TokenTree::Token(sp, tok), + &mut Marker(cx.current_expansion.mark)), + quoted::TokenHygiene::CallSite => noop_fold_tt(TokenTree::Token(sp, tok), + &mut Escaper(cx.call_site().ctxt())), + }; + + result.push(new_tok.into()); } quoted::TokenTree::MetaVarDecl(..) 
=> panic!("unexpected `TokenTree::MetaVarDecl"), } @@ -249,7 +255,7 @@ fn lockstep_iter_size(tree: "ed::TokenTree, size + lockstep_iter_size(tt, interpolations, repeats) }) }, - TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) => + TokenTree::MetaVar(_, name, _) | TokenTree::MetaVarDecl(_, name, _) => match lookup_cur_matched(name, interpolations, repeats) { Some(matched) => match *matched { MatchedNonterminal(_) => LockstepIterSize::Unconstrained, @@ -260,3 +266,18 @@ fn lockstep_iter_size(tree: "ed::TokenTree, TokenTree::Token(..) => LockstepIterSize::Unconstrained, } } + +// An Escaper escapes the syntax context with the given syntax context. +#[derive(Debug)] +pub struct Escaper(pub SyntaxContext); + +impl Folder for Escaper { + fn fold_ident(&mut self, mut ident: Ident) -> Ident { + ident.span = ident.span.with_ctxt(self.0); + ident + } + + fn new_span(&mut self, span: Span) -> Span { + span.with_ctxt(self.0) + } +} diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index d8db76a95ff38..e54877a636ae0 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -305,6 +305,9 @@ declare_features! ( // Declarative macros 2.0 (`macro`). (active, decl_macro, "1.17.0", Some(39412), None), + // Hygiene opt-out (escaping) for macros 2.0 using `#ident` syntax. + (active, macro_hygiene_optout, "1.27.0", None, None), + // Allows #[link(kind="static-nobundle"...] (active, static_nobundle, "1.16.0", Some(37403), None), diff --git a/src/test/compile-fail/feature-gate-macro-hygiene-optout.rs b/src/test/compile-fail/feature-gate-macro-hygiene-optout.rs new file mode 100644 index 0000000000000..69184a7121be5 --- /dev/null +++ b/src/test/compile-fail/feature-gate-macro-hygiene-optout.rs @@ -0,0 +1,22 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(decl_macro)] + +macro m() { + pub mod #foo { + //~^ ERROR expected identifier, found `#` + pub const #BAR: u32 = 123; + } +} + +fn main() { + m!(); +} diff --git a/src/test/compile-fail/hygiene-optout-1.rs b/src/test/compile-fail/hygiene-optout-1.rs new file mode 100644 index 0000000000000..8fdc9161ec552 --- /dev/null +++ b/src/test/compile-fail/hygiene-optout-1.rs @@ -0,0 +1,24 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(decl_macro)] +#![feature(macro_hygiene_optout)] + +macro m($mod_name:ident) { + pub mod $mod_name { + pub const BAR: u32 = 123; + } +} + +fn main() { + m!(foo); + let _ = foo::BAR; + //~^ ERROR cannot find value `BAR` in module `foo` +} diff --git a/src/test/compile-fail/hygiene-optout-2.rs b/src/test/compile-fail/hygiene-optout-2.rs new file mode 100644 index 0000000000000..ff348ce4c1b66 --- /dev/null +++ b/src/test/compile-fail/hygiene-optout-2.rs @@ -0,0 +1,24 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(decl_macro)] +#![feature(macro_hygiene_optout)] + +macro m() { + pub mod #foo { + pub const BAR: u32 = 123; + } +} + +fn main() { + m!(); + let _ = foo::BAR; + //~^ ERROR cannot find value `BAR` in module `foo` +} diff --git a/src/test/compile-fail/hygiene-optout-3.rs b/src/test/compile-fail/hygiene-optout-3.rs new file mode 100644 index 0000000000000..b2d3860682d75 --- /dev/null +++ b/src/test/compile-fail/hygiene-optout-3.rs @@ -0,0 +1,27 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(decl_macro)] +#![feature(macro_hygiene_optout)] + +macro m_helper() { + struct #S; +} + +macro m() { + m_helper!(); + let s = S; +} + +fn main() { + m!(); + let s = S; + //~^ ERROR cannot find value `S` in this scope +} diff --git a/src/test/compile-fail/hygiene-optout-4.rs b/src/test/compile-fail/hygiene-optout-4.rs new file mode 100644 index 0000000000000..e494f95163869 --- /dev/null +++ b/src/test/compile-fail/hygiene-optout-4.rs @@ -0,0 +1,25 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(decl_macro)] +#![feature(macro_hygiene_optout)] + +macro m() { + struct #S; + + impl S { + //~^ ERROR cannot find type `S` in this scope + fn f() {} + } +} + +fn main() { + m!(); +} diff --git a/src/test/compile-fail/hygiene-optout-5.rs b/src/test/compile-fail/hygiene-optout-5.rs new file mode 100644 index 0000000000000..10d3f9858c0be --- /dev/null +++ b/src/test/compile-fail/hygiene-optout-5.rs @@ -0,0 +1,25 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![feature(decl_macro)] +#![feature(macro_hygiene_optout)] + +macro m($mod_name:ident) { + pub mod $#mod_name { + //~^ ERROR expected identifier, found `#` + //~| ERROR unknown macro variable `` + //~| ERROR expected identifier, found reserved identifier `` + //~| ERROR expected one of `;` or `{`, found `mod_name` + } +} + +fn main() { + m!(foo); +} diff --git a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs index fd8f7b9e384f3..012c5f09340bd 100644 --- a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs +++ b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs @@ -39,6 +39,7 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree]) let mbe_matcher = quote_tokens!(cx, $$matched:expr, $$($$pat:pat)|+); let mbe_matcher = quoted::parse(mbe_matcher.into_iter().collect(), + false, true, cx.parse_sess, &Features::new(), diff --git a/src/test/run-pass/hygiene-optout-1.rs b/src/test/run-pass/hygiene-optout-1.rs new file mode 100644 index 0000000000000..ddadc9e569f89 --- /dev/null +++ b/src/test/run-pass/hygiene-optout-1.rs @@ -0,0 +1,23 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(decl_macro)] +#![feature(macro_hygiene_optout)] + +macro m() { + pub mod #foo { + pub const #BAR: u32 = 123; + } +} + +fn main() { + m!(); + assert_eq!(123, foo::BAR); +} diff --git a/src/test/run-pass/hygiene-optout-2.rs b/src/test/run-pass/hygiene-optout-2.rs new file mode 100644 index 0000000000000..a46cfca2bf008 --- /dev/null +++ b/src/test/run-pass/hygiene-optout-2.rs @@ -0,0 +1,23 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(decl_macro)] +#![feature(macro_hygiene_optout)] + +macro m($mod_name:ident) { + pub mod #$mod_name { + pub const #BAR: u32 = 123; + } +} + +fn main() { + m!(foo); + assert_eq!(123, foo::BAR); +} diff --git a/src/test/run-pass/hygiene-optout-lifetimes.rs b/src/test/run-pass/hygiene-optout-lifetimes.rs new file mode 100644 index 0000000000000..55f4dfb5e6b9b --- /dev/null +++ b/src/test/run-pass/hygiene-optout-lifetimes.rs @@ -0,0 +1,23 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(decl_macro)] +#![feature(macro_hygiene_optout)] + +use std::marker::PhantomData; + +macro m($lifetime:tt) { + pub struct #Foo<$lifetime>(PhantomData<&#'a ()>); +} + +fn main() { + m!('a); + let _ = Foo(Default::default()); +}
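
The patch, in summary: `quoted::TokenTree::Token` and `quoted::TokenTree::MetaVar` now carry a `TokenHygiene` tag (`DefSite` or `CallSite`), `quoted::parse` takes a `hygiene_optout` flag (enabled only for non-legacy `macro` items under the `macro_hygiene_optout` feature gate) so that `#ident`, `#'lifetime`, and `#$metavar` are tagged `CallSite`, and `transcribe` folds `CallSite` tokens with the new `Escaper` so they take on the call site's syntax context instead of the definition site's. A minimal sketch of the resulting surface syntax, mirroring the new run-pass tests (no names beyond those already in the patch):

#![feature(decl_macro)]
#![feature(macro_hygiene_optout)]

macro m($mod_name:ident) {
    // `#$mod_name` and `#BAR` opt out of definition-site hygiene, so the
    // expanded module and constant are nameable at the call site.
    pub mod #$mod_name {
        pub const #BAR: u32 = 123;
    }
}

fn main() {
    m!(foo);
    // Without the `#` prefix these paths would not resolve here, as the
    // compile-fail tests hygiene-optout-1.rs and hygiene-optout-2.rs check.
    assert_eq!(123, foo::BAR);
}

Tokens written without `#` are unchanged: they are still folded with `Marker(cx.current_expansion.mark)` and remain hygienic at the definition site.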