Commit 94fcccc
Auto merge of rust-lang#72529 - RalfJung:rollup-ydthv90, r=RalfJung
Rollup of 3 pull requests

Successful merges:

 - rust-lang#72284 (Remove `macro_defs` map)
 - rust-lang#72393 (Rewrite `Parser::collect_tokens`)
 - rust-lang#72528 (Fix typo in doc comment.)

Failed merges:

r? @ghost
bors committed May 24, 2020
2 parents 7726070 + e2e3aac commit 94fcccc
Showing 11 changed files with 160 additions and 95 deletions.
2 changes: 1 addition & 1 deletion src/librustc_ast_lowering/lib.rs
@@ -688,7 +688,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
) -> Span {
span.fresh_expansion(ExpnData {
allow_internal_unstable,
..ExpnData::default(ExpnKind::Desugaring(reason), span, self.sess.edition())
..ExpnData::default(ExpnKind::Desugaring(reason), span, self.sess.edition(), None)
})
}

10 changes: 9 additions & 1 deletion src/librustc_expand/base.rs
@@ -13,6 +13,7 @@ use rustc_data_structures::sync::{self, Lrc};
use rustc_errors::{DiagnosticBuilder, ErrorReported};
use rustc_parse::{self, parser, MACRO_ARGUMENTS};
use rustc_session::parse::ParseSess;
use rustc_span::def_id::DefId;
use rustc_span::edition::Edition;
use rustc_span::hygiene::{AstPass, ExpnData, ExpnId, ExpnKind};
use rustc_span::source_map::SourceMap;
@@ -857,7 +858,13 @@ impl SyntaxExtension {
SyntaxExtension::default(SyntaxExtensionKind::NonMacroAttr { mark_used }, edition)
}

pub fn expn_data(&self, parent: ExpnId, call_site: Span, descr: Symbol) -> ExpnData {
pub fn expn_data(
&self,
parent: ExpnId,
call_site: Span,
descr: Symbol,
macro_def_id: Option<DefId>,
) -> ExpnData {
ExpnData {
kind: ExpnKind::Macro(self.macro_kind(), descr),
parent,
@@ -867,6 +874,7 @@
allow_internal_unsafe: self.allow_internal_unsafe,
local_inner_macros: self.local_inner_macros,
edition: self.edition,
macro_def_id,
}
}
}
1 change: 1 addition & 0 deletions src/librustc_expand/expand.rs
@@ -988,6 +988,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
ExpnKind::Macro(MacroKind::Attr, sym::derive),
item.span(),
self.cx.parse_sess.edition,
None,
)
}),
_ => None,
186 changes: 113 additions & 73 deletions src/librustc_parse/parser/mod.rs
@@ -118,6 +118,8 @@ impl<'a> Drop for Parser<'a> {
struct TokenCursor {
frame: TokenCursorFrame,
stack: Vec<TokenCursorFrame>,
cur_token: Option<TreeAndJoint>,
collecting: Option<Collecting>,
}

#[derive(Clone)]
@@ -127,30 +129,24 @@ struct TokenCursorFrame {
open_delim: bool,
tree_cursor: tokenstream::Cursor,
close_delim: bool,
last_token: LastToken,
}

/// This is used in `TokenCursorFrame` above to track tokens that are consumed
/// by the parser, and then that's transitively used to record the tokens that
/// each parse AST item is created with.
///
/// Right now this has two states: either we're collecting tokens or we're not.
/// If we're collecting tokens, we simply save everything into a local `Vec`.
/// Eventually, though, this should save tokens from the original token stream
/// and use token-stream slicing to avoid creating a whole new vector.
///
/// In the second state we're passively not recording tokens, but the last
/// token is still tracked so that when we start recording we can ensure that
/// this first token is included in the output.
///
/// You can find some more example usage of this in the `collect_tokens` method
/// on the parser.
#[derive(Clone)]
enum LastToken {
Collecting(Vec<TreeAndJoint>),
Was(Option<TreeAndJoint>),
/// Used to track additional state needed by `collect_tokens`
#[derive(Clone, Debug)]
struct Collecting {
/// Holds the tokens captured during the most
/// recent call to `collect_tokens`.
buf: Vec<TreeAndJoint>,
/// The depth of the `TokenCursor` stack at the time
/// collection was started. When we encounter a `TokenTree::Delimited`,
/// we want to record the `TokenTree::Delimited` itself,
/// but *not* any of the inner tokens while we are inside
/// the new frame (this would cause us to record duplicate tokens).
///
/// This `depth` field tracks the stack depth at which we are recording
/// tokens. Only tokens encountered at this depth will be recorded. See
/// `TokenCursor::next` for more details.
depth: usize,
}
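// (Illustration, not part of this commit: with `depth == 0`, walking
// `( a b ) c` records the `( a b )` group once as a single
// `TokenTree::Delimited` while the cursor is still at depth 0; `a` and `b`
// are then yielded at depth 1 and skipped, and `c` is recorded when the
// cursor returns to depth 0.)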

impl TokenCursorFrame {
@@ -161,7 +157,6 @@ impl TokenCursorFrame {
open_delim: delim == token::NoDelim,
tree_cursor: tts.clone().into_trees(),
close_delim: delim == token::NoDelim,
last_token: LastToken::Was(None),
}
}
}
@@ -171,25 +166,38 @@ impl TokenCursor {
loop {
let tree = if !self.frame.open_delim {
self.frame.open_delim = true;
TokenTree::open_tt(self.frame.span, self.frame.delim)
} else if let Some(tree) = self.frame.tree_cursor.next() {
TokenTree::open_tt(self.frame.span, self.frame.delim).into()
} else if let Some(tree) = self.frame.tree_cursor.next_with_joint() {
tree
} else if !self.frame.close_delim {
self.frame.close_delim = true;
TokenTree::close_tt(self.frame.span, self.frame.delim)
TokenTree::close_tt(self.frame.span, self.frame.delim).into()
} else if let Some(frame) = self.stack.pop() {
self.frame = frame;
continue;
} else {
return Token::new(token::Eof, DUMMY_SP);
};

match self.frame.last_token {
LastToken::Collecting(ref mut v) => v.push(tree.clone().into()),
LastToken::Was(ref mut t) => *t = Some(tree.clone().into()),
// Don't set an open delimiter as our current token - we want
// to leave it as the full `TokenTree::Delimited` from the previous
// iteration of this loop.
if !matches!(tree.0, TokenTree::Token(Token { kind: TokenKind::OpenDelim(_), .. })) {
self.cur_token = Some(tree.clone());
}

if let Some(collecting) = &mut self.collecting {
if collecting.depth == self.stack.len() {
debug!(
"TokenCursor::next(): collected {:?} at depth {:?}",
tree,
self.stack.len()
);
collecting.buf.push(tree.clone().into())
}
}

match tree {
match tree.0 {
TokenTree::Token(token) => return token,
TokenTree::Delimited(sp, delim, tts) => {
let frame = TokenCursorFrame::new(sp, delim, &tts);
@@ -350,6 +358,8 @@ impl<'a> Parser<'a> {
token_cursor: TokenCursor {
frame: TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, &tokens),
stack: Vec::new(),
cur_token: None,
collecting: None,
},
desugar_doc_comments,
unmatched_angle_bracket_count: 0,
@@ -1105,65 +1115,95 @@ impl<'a> Parser<'a> {
}
}

/// Records all tokens consumed by the provided callback,
/// including the current token. These tokens are collected
/// into a `TokenStream`, and returned along with the result
/// of the callback.
///
/// Note: If your callback consumes an opening delimiter
/// (including the case where you call `collect_tokens`
/// when the current token is an opening delimiter),
/// you must also consume the corresponding closing delimiter.
///
/// That is, you can consume
/// `something ([{ }])` or `([{}])`, but not `([{}]`
///
/// This restriction shouldn't be an issue in practice,
/// since this function is used to record the tokens for
/// a parsed AST item, which always has matching delimiters.
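///
/// A usage sketch (editor's illustration; any parsing callback works,
/// `parse_expr` here is just an example):
///
/// ```ignore
/// let (expr, tokens) = self.collect_tokens(|this| this.parse_expr())?;
/// ```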
fn collect_tokens<R>(
&mut self,
f: impl FnOnce(&mut Self) -> PResult<'a, R>,
) -> PResult<'a, (R, TokenStream)> {
// Record all tokens we parse when parsing this item.
let mut tokens = Vec::new();
let prev_collecting = match self.token_cursor.frame.last_token {
LastToken::Collecting(ref mut list) => Some(mem::take(list)),
LastToken::Was(ref mut last) => {
tokens.extend(last.take());
None
}
};
self.token_cursor.frame.last_token = LastToken::Collecting(tokens);
let prev = self.token_cursor.stack.len();
let tokens: Vec<TreeAndJoint> = self.token_cursor.cur_token.clone().into_iter().collect();
debug!("collect_tokens: starting with {:?}", tokens);

// We need special handling for the case where `collect_tokens` is called
// on an opening delimiter (e.g. '('). At this point, we have already pushed
// a new frame - however, we want to record the original `TokenTree::Delimited`,
// for consistency with the case where we start recording one token earlier.
// See `TokenCursor::next` to see how `cur_token` is set up.
let prev_depth =
if matches!(self.token_cursor.cur_token, Some((TokenTree::Delimited(..), _))) {
if self.token_cursor.stack.is_empty() {
// There is nothing below us in the stack that
// the function could consume, so the only thing it can legally
// capture is the entire contents of the current frame.
return Ok((f(self)?, TokenStream::new(tokens)));
}
// We have already recorded the full `TokenTree::Delimited` when we created
// our `tokens` vector at the start of this function. We are now inside
// a new frame corresponding to the `TokenTree::Delimited` we already recorded.
// We don't want to record any of the tokens inside this frame, since they
// will be duplicates of the tokens nested inside the `TokenTree::Delimited`.
// Therefore, we set our recording depth to the *previous* frame. This allows
// us to record a sequence like `(foo).bar()`: the `(foo)` will be recorded
// as our initial `cur_token`, while the `.bar()` will be recorded after we
// pop the `(foo)` frame.
self.token_cursor.stack.len() - 1
} else {
self.token_cursor.stack.len()
};
let prev_collecting =
self.token_cursor.collecting.replace(Collecting { buf: tokens, depth: prev_depth });

let ret = f(self);
let last_token = if self.token_cursor.stack.len() == prev {
&mut self.token_cursor.frame.last_token
} else if self.token_cursor.stack.get(prev).is_none() {
// This can happen due to a bad interaction of two unrelated recovery mechanisms with
// mismatched delimiters *and* recovery lookahead on the likely typo `pub ident(`
// (#62881).
return Ok((ret?, TokenStream::default()));

let mut collected_tokens = if let Some(collecting) = self.token_cursor.collecting.take() {
collecting.buf
} else {
&mut self.token_cursor.stack[prev].last_token
let msg = format!("our vector went away?");
debug!("collect_tokens: {}", msg);
self.sess.span_diagnostic.delay_span_bug(self.token.span, &msg);
// This can happen due to a bad interaction of two unrelated recovery mechanisms
// with mismatched delimiters *and* recovery lookahead on the likely typo
// `pub ident(` (#62895, different but similar to the case above).
return Ok((ret?, TokenStream::default()));
};

// Pull out the tokens that we've collected from the call to `f` above.
let mut collected_tokens = match *last_token {
LastToken::Collecting(ref mut v) => mem::take(v),
LastToken::Was(ref was) => {
let msg = format!("our vector went away? - found Was({:?})", was);
debug!("collect_tokens: {}", msg);
self.sess.span_diagnostic.delay_span_bug(self.token.span, &msg);
// This can happen due to a bad interaction of two unrelated recovery mechanisms
// with mismatched delimiters *and* recovery lookahead on the likely typo
// `pub ident(` (#62895, different but similar to the case above).
return Ok((ret?, TokenStream::default()));
}
};
debug!("collect_tokens: got raw tokens {:?}", collected_tokens);

// If we're not at EOF our current token wasn't actually consumed by
// `f`, but it'll still be in our list that we pulled out. In that case
// put it back.
let extra_token = if self.token != token::Eof { collected_tokens.pop() } else { None };

// If we were previously collecting tokens, then this was a recursive
// call. In that case we need to record all the tokens we collected in
// our parent list as well. To do that we push a clone of our stream
// onto the previous list.
match prev_collecting {
Some(mut list) => {
list.extend(collected_tokens.iter().cloned());
list.extend(extra_token);
*last_token = LastToken::Collecting(list);
}
None => {
*last_token = LastToken::Was(extra_token);
if let Some(mut collecting) = prev_collecting {
// If we were previously collecting at the same depth,
// then the previous call to `collect_tokens` needs to see
// the tokens we just recorded.
//
// If we were previously recording at a lower `depth`,
// then the previous `collect_tokens` call already recorded
// this entire frame in the form of a `TokenTree::Delimited`,
// so there is nothing else for us to do.
if collecting.depth == prev_depth {
collecting.buf.extend(collected_tokens.iter().cloned());
collecting.buf.extend(extra_token);
debug!("collect_tokens: updating previous buf to {:?}", collecting);
}
self.token_cursor.collecting = Some(collecting)
}

Ok((ret?, TokenStream::new(collected_tokens)))
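The depth check above is the heart of the `collect_tokens` rewrite. Below is a minimal, self-contained sketch of the same idea using toy types; `Tree`, `Cursor`, and every other name in it are illustrative stand-ins, not rustc's:

```rust
#[derive(Clone, Debug)]
enum Tree {
    Token(char),
    Group(Vec<Tree>),
}

struct Cursor {
    stack: Vec<std::vec::IntoIter<Tree>>,
    collecting_depth: Option<usize>,
    buf: Vec<Tree>,
}

impl Cursor {
    fn new(trees: Vec<Tree>) -> Self {
        Cursor { stack: vec![trees.into_iter()], collecting_depth: None, buf: Vec::new() }
    }

    fn next(&mut self) -> Option<Tree> {
        loop {
            let tree = match self.stack.last_mut()?.next() {
                Some(t) => t,
                None => {
                    if self.stack.len() == 1 {
                        return None; // outermost frame exhausted
                    }
                    self.stack.pop(); // leave the finished inner frame
                    continue;
                }
            };
            // Record only at the depth where collection started: a `Group` is
            // captured here as one tree; its contents are visited one frame
            // deeper and therefore skipped by this check.
            if self.collecting_depth == Some(self.stack.len()) {
                self.buf.push(tree.clone());
            }
            if let Tree::Group(inner) = &tree {
                self.stack.push(inner.clone().into_iter());
            }
            return Some(tree);
        }
    }
}

fn main() {
    // `( a b ) c`: the group is recorded once; `a` and `b` are not duplicated.
    let input = vec![
        Tree::Group(vec![Tree::Token('a'), Tree::Token('b')]),
        Tree::Token('c'),
    ];
    let mut cursor = Cursor::new(input);
    cursor.collecting_depth = Some(cursor.stack.len());
    while cursor.next().is_some() {}
    assert_eq!(cursor.buf.len(), 2); // [Group([a, b]), Token('c')]
    println!("collected: {:?}", cursor.buf);
}
```

The nested group lands in `buf` once, as a whole, mirroring how the parser records a single `TokenTree::Delimited` instead of re-recording the tokens inside the new frame.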
4 changes: 2 additions & 2 deletions src/librustc_resolve/build_reduced_graph.rs
@@ -126,8 +126,8 @@ impl<'a> Resolver<'a> {
}

crate fn macro_def_scope(&mut self, expn_id: ExpnId) -> Module<'a> {
let def_id = match self.macro_defs.get(&expn_id) {
Some(def_id) => *def_id,
let def_id = match expn_id.expn_data().macro_def_id {
Some(def_id) => def_id,
None => return self.ast_transform_scopes.get(&expn_id).unwrap_or(&self.graph_root),
};
if let Some(id) = self.definitions.as_local_node_id(def_id) {
11 changes: 3 additions & 8 deletions src/librustc_resolve/lib.rs
@@ -922,7 +922,6 @@ pub struct Resolver<'a> {
dummy_ext_bang: Lrc<SyntaxExtension>,
dummy_ext_derive: Lrc<SyntaxExtension>,
non_macro_attrs: [Lrc<SyntaxExtension>; 2],
macro_defs: FxHashMap<ExpnId, DefId>,
local_macro_def_scopes: FxHashMap<NodeId, Module<'a>>,
ast_transform_scopes: FxHashMap<ExpnId, Module<'a>>,
unused_macros: NodeMap<Span>,
@@ -1152,9 +1151,6 @@ impl<'a> Resolver<'a> {
let mut invocation_parent_scopes = FxHashMap::default();
invocation_parent_scopes.insert(ExpnId::root(), ParentScope::module(graph_root));

let mut macro_defs = FxHashMap::default();
macro_defs.insert(ExpnId::root(), root_def_id);

let features = session.features_untracked();
let non_macro_attr =
|mark_used| Lrc::new(SyntaxExtension::non_macro_attr(mark_used, session.edition()));
@@ -1229,7 +1225,6 @@ impl<'a> Resolver<'a> {
invocation_parent_scopes,
output_macro_rules_scopes: Default::default(),
helper_attrs: Default::default(),
macro_defs,
local_macro_def_scopes: FxHashMap::default(),
name_already_seen: FxHashMap::default(),
potentially_unused_imports: Vec::new(),
@@ -1335,8 +1330,8 @@ impl<'a> Resolver<'a> {

fn macro_def(&self, mut ctxt: SyntaxContext) -> DefId {
loop {
match self.macro_defs.get(&ctxt.outer_expn()) {
Some(&def_id) => return def_id,
match ctxt.outer_expn().expn_data().macro_def_id {
Some(def_id) => return def_id,
None => ctxt.remove_mark(),
};
}
@@ -1820,7 +1815,7 @@ impl<'a> Resolver<'a> {
&& module.expansion.is_descendant_of(parent.expansion)
{
// The macro is a proc macro derive
if let Some(&def_id) = self.macro_defs.get(&module.expansion) {
if let Some(def_id) = module.expansion.expn_data().macro_def_id {
if let Some(ext) = self.get_macro_by_def_id(def_id) {
if !ext.is_builtin && ext.macro_kind() == MacroKind::Derive {
if parent.expansion.outer_expn_is_descendant_of(span.ctxt()) {
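Taken together, the resolver hunks replace a hand-maintained side table with a field carried by the expansion data itself (the `macro_def_id` added to `ExpnData` above). A self-contained sketch of the before/after shape, with simplified stand-ins for rustc's types (`ExpnId`, `DefId`, `ExpnData`, and `Resolver` below are toys, not the real definitions):

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct ExpnId(u32);
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct DefId(u32);

// The field added by rust-lang#72284: expansion data now remembers which
// macro definition produced it.
struct ExpnData {
    macro_def_id: Option<DefId>,
}

struct Resolver {
    expn_data: HashMap<ExpnId, ExpnData>,
    // Before this commit there was also:
    //     macro_defs: HashMap<ExpnId, DefId>,
    // kept in sync by hand; it is now gone.
}

impl Resolver {
    fn macro_def_id(&self, expn: ExpnId) -> Option<DefId> {
        // After: a single lookup through the expansion data itself,
        // mirroring `expn_id.expn_data().macro_def_id` in the diff.
        self.expn_data.get(&expn)?.macro_def_id
    }
}

fn main() {
    let mut resolver = Resolver { expn_data: HashMap::new() };
    resolver.expn_data.insert(ExpnId(1), ExpnData { macro_def_id: Some(DefId(7)) });
    assert_eq!(resolver.macro_def_id(ExpnId(1)), Some(DefId(7)));
    println!("{:?}", resolver.macro_def_id(ExpnId(1)));
}
```

Dropping the separate map removes the need to keep two structures in sync; the def-id now travels wherever the `ExpnData` goes.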
13 changes: 9 additions & 4 deletions src/librustc_resolve/macros.rs
@@ -186,6 +186,7 @@ impl<'a> base::Resolver for Resolver<'a> {
call_site,
self.session.edition(),
features.into(),
None,
)));

let parent_scope = if let Some(module_id) = parent_module_id {
@@ -290,13 +291,17 @@ impl<'a> base::Resolver for Resolver<'a> {
let (ext, res) = self.smart_resolve_macro_path(path, kind, parent_scope, force)?;

let span = invoc.span();
invoc_id.set_expn_data(ext.expn_data(parent_scope.expansion, span, fast_print_path(path)));

if let Res::Def(_, def_id) = res {
invoc_id.set_expn_data(ext.expn_data(
parent_scope.expansion,
span,
fast_print_path(path),
res.opt_def_id(),
));

if let Res::Def(_, _) = res {
if after_derive {
self.session.span_err(span, "macro attributes must be placed before `#[derive]`");
}
self.macro_defs.insert(invoc_id, def_id);
let normal_module_def_id = self.macro_def_scope(invoc_id).normal_ancestor_id;
self.definitions.add_parent_module_of_macro_def(invoc_id, normal_module_def_id);
}
