Hygiene opt-out for idents in expansion of declarative macros #47992

Closed
wants to merge 10 commits
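For context: the proposal here lets a declarative macro 2.0 (`macro`) body prefix an identifier with `#` to opt it out of definition-site hygiene and give it call-site hygiene instead, so the name becomes visible where the macro is invoked. Below is a minimal usage sketch of the proposed syntax, based on the test added in this PR — the PR was closed and the syntax never landed, so this is illustrative only and will not compile on any released toolchain (`define_point` and `Point` are made-up names):

```rust
#![feature(decl_macro)]

macro define_point() {
    // `#Point` opts the identifier out of def-site hygiene, so the type is
    // introduced under the name `Point` at the macro's call site.
    struct #Point {
        x: i32,
        y: i32,
    }
}

define_point!();

fn main() {
    let p = Point { x: 1, y: 2 };
    println!("{}, {}", p.x, p.y);
}
```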
14 changes: 9 additions & 5 deletions src/libsyntax/ext/tt/macro_rules.rs
@@ -202,7 +202,7 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item) -> Syntax
quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition {
tts: vec![
quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
quoted::TokenTree::Token(DUMMY_SP, token::FatArrow, false),
quoted::TokenTree::Token(DUMMY_SP, token::FatArrow, quoted::TokenHygiene::DefSite),
quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
],
separator: Some(if body.legacy { token::Semi } else { token::Comma }),
@@ -211,7 +211,8 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item) -> Syntax
})),
// to phase into semicolon-termination instead of semicolon-separation
quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition {
tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi, false)],
tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi,
quoted::TokenHygiene::DefSite)],
separator: None,
op: quoted::KleeneOp::ZeroOrMore,
num_captures: 0
@@ -454,7 +455,8 @@ impl FirstSets

if let (Some(ref sep), true) = (seq_rep.separator.clone(),
subfirst.maybe_empty) {
first.add_one_maybe(TokenTree::Token(sp, sep.clone(), false));
first.add_one_maybe(TokenTree::Token(sp, sep.clone(),
quoted::TokenHygiene::DefSite));
}

// Reverse scan: Sequence comes before `first`.
@@ -503,7 +505,8 @@ impl FirstSets

if let (Some(ref sep), true) = (seq_rep.separator.clone(),
subfirst.maybe_empty) {
first.add_one_maybe(TokenTree::Token(sp, sep.clone(), false));
first.add_one_maybe(TokenTree::Token(sp, sep.clone(),
quoted::TokenHygiene::DefSite));
}

assert!(first.maybe_empty);
@@ -710,7 +713,8 @@ fn check_matcher_core(sess: &ParseSess,
let mut new;
let my_suffix = if let Some(ref u) = seq_rep.separator {
new = suffix_first.clone();
new.add_one_maybe(TokenTree::Token(sp, u.clone(), false));
new.add_one_maybe(TokenTree::Token(sp, u.clone(),
quoted::TokenHygiene::DefSite));
&new
} else {
&suffix_first
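Every change in macro_rules.rs above is mechanical: call sites that previously passed a bare `false` for the old "escape hygiene" flag now pass `quoted::TokenHygiene::DefSite`. A standalone sketch of the same boolean-to-enum refactor, with made-up names rather than the real libsyntax signatures:

```rust
/// Hypothetical stand-in for the old API: a bare `bool` whose meaning is
/// invisible at the call site.
fn make_token_old(_escape_hygiene: bool) {}

/// The two-variant enum introduced by this PR makes the intent explicit.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum TokenHygiene {
    DefSite,
    CallSite,
}

/// Hypothetical stand-in for the new API.
fn make_token_new(_hygiene: TokenHygiene) {}

fn main() {
    make_token_old(false);                  // what does `false` mean here?
    make_token_new(TokenHygiene::DefSite);  // self-describing at the call site
    make_token_new(TokenHygiene::CallSite);
}
```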
50 changes: 25 additions & 25 deletions src/libsyntax/ext/tt/quoted.rs
@@ -46,7 +46,7 @@ impl Delimited {
} else {
span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
};
TokenTree::Token(open_span, self.open_token(), false)
TokenTree::Token(open_span, self.open_token(), TokenHygiene::DefSite)
}

/// Return a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
@@ -56,7 +56,7 @@ impl Delimited {
} else {
span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
};
TokenTree::Token(close_span, self.close_token(), false)
TokenTree::Token(close_span, self.close_token(), TokenHygiene::DefSite)
}
}

@@ -88,20 +88,12 @@ pub enum KleeneOp {
/// are "first-class" token trees. Useful for parsing macros.
#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub enum TokenTree {
Token(
Span,
token::Token,
bool, /* escape hygiene */
),
Token(Span, token::Token, TokenHygiene),
Delimited(Span, Lrc<Delimited>),
/// A Kleene-style repetition sequence
Sequence(Span, Lrc<SequenceRepetition>),
/// E.g. `$var`
MetaVar(
Span,
ast::Ident,
bool, /* escape hygiene */
),
MetaVar(Span, ast::Ident, TokenHygiene),
/// E.g. `$var:expr`. This is only used in the left hand side of MBE macros.
MetaVarDecl(
Span,
@@ -168,6 +160,13 @@ impl TokenTree {
}
}

/// Syntax context to apply to a token when invoking a macro.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum TokenHygiene {
DefSite,
CallSite,
}

/// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
/// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a
/// collection of `TokenTree` for use in parsing a macro.
@@ -279,8 +278,8 @@ where
tokenstream::TokenTree::Token(span, token::Pound) if hygiene_optout => match trees.peek() {
Some(tokenstream::TokenTree::Token(_, token::Dollar)) => {
if let tokenstream::TokenTree::Token(span, token::Dollar) = trees.next().unwrap() {
parse_meta_var(true,
span,
parse_meta_var(span,
TokenHygiene::CallSite,
trees,
hygiene_optout,
expect_matchers,
@@ -295,7 +294,7 @@ where
Some(tokenstream::TokenTree::Token(_, token::Ident(..))) => {
if let tokenstream::TokenTree::Token(span, tok @ token::Ident(..)) =
trees.next().unwrap() {
TokenTree::Token(span, tok, true)
TokenTree::Token(span, tok, TokenHygiene::CallSite)
} else {
unreachable!();
}
@@ -304,19 +303,19 @@ where
Some(tokenstream::TokenTree::Token(_, token::Lifetime(..))) => {
if let tokenstream::TokenTree::Token(span, tok @ token::Lifetime(..)) =
trees.next().unwrap() {
TokenTree::Token(span, tok, true)
TokenTree::Token(span, tok, TokenHygiene::CallSite)
} else {
unreachable!();
}
}

_ => TokenTree::Token(span, token::Pound, false),
_ => TokenTree::Token(span, token::Pound, TokenHygiene::DefSite),
}

// `tree` is a `$` token. Look at the next token in `trees`.
tokenstream::TokenTree::Token(span, token::Dollar) =>
parse_meta_var(false,
span,
parse_meta_var(span,
TokenHygiene::DefSite,
trees,
hygiene_optout,
expect_matchers,
@@ -325,7 +324,8 @@
attrs),

// `tree` is an arbitrary token. Keep it.
tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok, false),
tokenstream::TokenTree::Token(span, tok) =>
TokenTree::Token(span, tok, TokenHygiene::DefSite),

// `tree` is the beginning of a delimited set of tokens (e.g. `(` or `{`). We need to
// descend into the delimited set and further parse it.
@@ -346,8 +346,8 @@

/// Attempt to parse a single meta variable or meta variable sequence.
fn parse_meta_var<I>(
escape_hygiene: bool,
span: Span,
token_hygiene: TokenHygiene,
trees: &mut Peekable<I>,
hygiene_optout: bool,
expect_matchers: bool,
@@ -399,9 +399,9 @@ where
let span = ident_span.with_lo(span.lo());
if ident.name == keywords::Crate.name() && !is_raw {
let ident = ast::Ident::new(keywords::DollarCrate.name(), ident.span);
TokenTree::Token(span, token::Ident(ident, is_raw), escape_hygiene)
TokenTree::Token(span, token::Ident(ident, is_raw), token_hygiene)
} else {
TokenTree::MetaVar(span, ident, escape_hygiene)
TokenTree::MetaVar(span, ident, token_hygiene)
}
}

@@ -412,11 +412,11 @@ where
pprust::token_to_string(&tok)
);
sess.span_diagnostic.span_err(span, &msg);
TokenTree::MetaVar(span, keywords::Invalid.ident(), escape_hygiene)
TokenTree::MetaVar(span, keywords::Invalid.ident(), token_hygiene)
}

// There are no more tokens. Just return the `$` we already have.
None => TokenTree::Token(span, token::Dollar, false),
None => TokenTree::Token(span, token::Dollar, TokenHygiene::DefSite),
}
}

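The quoted.rs changes above teach the macro-definition parser to look one token ahead after a `#`: if the next token is `$`, an identifier, or a lifetime, that token is recorded with `TokenHygiene::CallSite`; otherwise the `#` is kept as an ordinary `DefSite` token. A simplified, self-contained sketch of that dispatch — the token and hygiene types here are stand-ins, not the real libsyntax ones:

```rust
use std::iter::Peekable;

#[derive(Debug, Clone, PartialEq)]
enum Tok {
    Pound,
    Dollar,
    Ident(String),
    Lifetime(String),
}

#[derive(Debug, Clone, Copy, PartialEq)]
enum Hygiene {
    DefSite,
    CallSite,
}

/// Mirrors the lookahead in `parse_tree`: a `#` only opts the *next* token out
/// of def-site hygiene when that token is `$`, an identifier, or a lifetime,
/// and only when the opt-out feature is enabled.
fn parse_one<I: Iterator<Item = Tok>>(
    tok: Tok,
    rest: &mut Peekable<I>,
    hygiene_optout: bool,
) -> (Tok, Hygiene) {
    match tok {
        Tok::Pound if hygiene_optout => match rest.peek() {
            Some(Tok::Dollar) | Some(Tok::Ident(_)) | Some(Tok::Lifetime(_)) => {
                (rest.next().unwrap(), Hygiene::CallSite)
            }
            // A lone `#` stays an ordinary def-site token.
            _ => (Tok::Pound, Hygiene::DefSite),
        },
        other => (other, Hygiene::DefSite),
    }
}

fn main() {
    // `#S` -> the identifier `S` with call-site hygiene.
    let mut toks = vec![Tok::Ident("S".into())].into_iter().peekable();
    assert_eq!(
        parse_one(Tok::Pound, &mut toks, true),
        (Tok::Ident("S".into()), Hygiene::CallSite)
    );

    // `#'a` -> the lifetime with call-site hygiene.
    let mut toks = vec![Tok::Lifetime("'a".into())].into_iter().peekable();
    assert_eq!(
        parse_one(Tok::Pound, &mut toks, true),
        (Tok::Lifetime("'a".into()), Hygiene::CallSite)
    );

    // With the feature disabled, `#` is left untouched and nothing is consumed.
    let mut toks = vec![Tok::Dollar].into_iter().peekable();
    assert_eq!(parse_one(Tok::Pound, &mut toks, false), (Tok::Pound, Hygiene::DefSite));
}
```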
49 changes: 24 additions & 25 deletions src/libsyntax/ext/tt/transcribe.rs
@@ -13,9 +13,9 @@ use ext::base::ExtCtxt;
use ext::expand::Marker;
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
use ext::tt::quoted;
use fold::noop_fold_tt;
use fold::{Folder, noop_fold_tt};
use parse::token::{self, Token, NtTT};
use syntax_pos::{Span, DUMMY_SP};
use syntax_pos::{Span, SyntaxContext, DUMMY_SP};
use tokenstream::{TokenStream, TokenTree, Delimited};
use util::small_vector::SmallVector;

@@ -177,31 +177,15 @@ pub fn transcribe(cx: &ExtCtxt,
stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
result_stack.push(mem::replace(&mut result, Vec::new()));
}
quoted::TokenTree::Token(sp, tok, escape_hygiene) => {
let sp_ctxt = if escape_hygiene { cx.call_site() } else { sp }.ctxt();
let sp = sp.with_ctxt(sp_ctxt.apply_mark(cx.current_expansion.mark));

let update_ident_ctxt = |ident: Ident| {
let new_span = if escape_hygiene {
cx.call_site()
} else {
ident.span.apply_mark(cx.current_expansion.mark)
};
Ident::new(ident.name, new_span)
};

let result_tok = match tok {
token::Ident(ident, is_raw) =>
TokenTree::Token(sp, token::Ident(update_ident_ctxt(ident), is_raw)),
token::Lifetime(ident) =>
TokenTree::Token(sp, token::Lifetime(update_ident_ctxt(ident))),
_ => {
let mut marker = Marker(cx.current_expansion.mark);
noop_fold_tt(TokenTree::Token(sp, tok), &mut marker)
}
quoted::TokenTree::Token(sp, tok, hygiene) => {
let mut new_tok = match hygiene {
quoted::TokenHygiene::DefSite => noop_fold_tt(TokenTree::Token(sp, tok),
&mut Marker(cx.current_expansion.mark)),
quoted::TokenHygiene::CallSite => noop_fold_tt(TokenTree::Token(sp, tok),
&mut Escaper(cx.call_site().ctxt())),
};

result.push(result_tok.into());
result.push(new_tok.into());
}
quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
}
@@ -282,3 +266,18 @@ fn lockstep_iter_size(tree: &quoted::TokenTree,
TokenTree::Token(..) => LockstepIterSize::Unconstrained,
}
}

// An `Escaper` replaces the syntax context of every span it folds with the given syntax context.
#[derive(Debug)]
pub struct Escaper(pub SyntaxContext);

impl Folder for Escaper {
fn fold_ident(&mut self, mut ident: Ident) -> Ident {
[Review thread on the `fold_ident` override]
Contributor: Overriding `fold_ident` is not necessary; the overridden `new_span` applies to identifiers as well.
Contributor (Author): Ah, I see. Why is it necessary in the `Folder` implementation for `Marker` though?
Contributor: It isn't necessary there either, it's just something I forgot to remove in #49154.
Contributor (Author): Okay. :-)

ident.span = ident.span.with_ctxt(self.0);
ident
}

fn new_span(&mut self, span: Span) -> Span {
span.with_ctxt(self.0)
}
}
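The transcription change above reduces to a choice between two folders: for `TokenHygiene::DefSite` tokens, `Marker` applies the current expansion mark to the span's syntax context (normal macro hygiene), while for `TokenHygiene::CallSite` tokens the new `Escaper` overwrites the context with the call site's context. As the review thread notes, overriding `new_span` alone would have sufficed, since the default `fold_ident` already routes identifier spans through it. A rough sketch of the two behaviours over a toy span type — the context arithmetic is a stand-in, not the real `SyntaxContext::apply_mark`:

```rust
/// Toy span: nothing but a syntax-context id.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span {
    ctxt: u32,
}

#[derive(Debug, Clone, Copy, PartialEq)]
enum TokenHygiene {
    DefSite,
    CallSite,
}

/// `Marker`-like behaviour: keep the def-site context, add the expansion mark.
/// (Stand-in for `apply_mark`; the real operation is not simple addition.)
fn mark(span: Span, expansion_mark: u32) -> Span {
    Span { ctxt: span.ctxt + expansion_mark }
}

/// `Escaper`-like behaviour: discard the token's own context and adopt the
/// call site's context wholesale.
fn escape(_span: Span, call_site_ctxt: u32) -> Span {
    Span { ctxt: call_site_ctxt }
}

fn transcribe_span(span: Span, hygiene: TokenHygiene, mark_id: u32, call_ctxt: u32) -> Span {
    match hygiene {
        TokenHygiene::DefSite => mark(span, mark_id),
        TokenHygiene::CallSite => escape(span, call_ctxt),
    }
}

fn main() {
    let def_site = Span { ctxt: 10 };
    assert_eq!(transcribe_span(def_site, TokenHygiene::DefSite, 1, 99).ctxt, 11);
    assert_eq!(transcribe_span(def_site, TokenHygiene::CallSite, 1, 99).ctxt, 99);
}
```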
24 changes: 24 additions & 0 deletions src/test/compile-fail/hygiene-optout-4.rs
@@ -0,0 +1,24 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![feature(decl_macro)]

macro m() {
struct #S;

impl S {
//~^ ERROR cannot find type `S` in this scope
fn f() {}
}
}

fn main() {
m!();
}
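The test expects an error because `struct #S;` introduces `S` with call-site hygiene, while the bare `S` in `impl S` keeps def-site hygiene, so the two names live in different syntax contexts and do not resolve to one another. Under the same proposed (never-landed) syntax, one would expect the variant that opts out in both positions to resolve — shown here purely as a hypothetical counterpart, not a test from the PR:

```rust
#![feature(decl_macro)]

macro m() {
    struct #S;

    // `#S` in the impl header carries call-site hygiene as well, so it names
    // the same `S` that `struct #S;` introduced above.
    impl #S {
        fn f() {}
    }
}

fn main() {
    m!();
}
```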