Attribute cleanups #127308

Merged: 9 commits, Jul 7, 2024
33 changes: 0 additions & 33 deletions compiler/rustc_ast/src/ast_traits.rs
@@ -10,8 +10,6 @@ use crate::{AssocItem, Expr, ForeignItem, Item, NodeId};
use crate::{AttrItem, AttrKind, Block, Pat, Path, Ty, Visibility};
use crate::{AttrVec, Attribute, Stmt, StmtKind};

use rustc_span::Span;

use std::fmt;
use std::marker::PhantomData;

@@ -91,37 +89,6 @@ impl<T: AstDeref<Target: HasNodeId>> HasNodeId for T {
}
}

/// A trait for AST nodes having a span.
pub trait HasSpan {
fn span(&self) -> Span;
}

macro_rules! impl_has_span {
($($T:ty),+ $(,)?) => {
$(
impl HasSpan for $T {
fn span(&self) -> Span {
self.span
}
}
)+
};
}

impl_has_span!(AssocItem, Block, Expr, ForeignItem, Item, Pat, Path, Stmt, Ty, Visibility);

impl<T: AstDeref<Target: HasSpan>> HasSpan for T {
fn span(&self) -> Span {
self.ast_deref().span()
}
}

impl HasSpan for AttrItem {
fn span(&self) -> Span {
self.span()
}
}

/// A trait for AST nodes having (or not having) collected tokens.
pub trait HasTokens {
fn tokens(&self) -> Option<&LazyAttrTokenStream>;
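Note on the removed `HasSpan` machinery above: `impl_has_span!` generated an identical trait impl for every listed type, with a blanket impl for `AstDeref` wrappers and a manual impl for `AttrItem` on top. The following is a standalone sketch of that macro pattern, using hypothetical stand-in types rather than the compiler's:

```rust
// Hypothetical stand-ins; not rustc_span's or rustc_ast's real types.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span(u32, u32);

trait HasSpan {
    fn span(&self) -> Span;
}

struct Expr { span: Span }
struct Item { span: Span }

// Same shape as the removed `impl_has_span!`: one arm generates the same
// impl for each type in the list, all of which expose a `span` field.
macro_rules! impl_has_span {
    ($($T:ty),+ $(,)?) => {
        $(
            impl HasSpan for $T {
                fn span(&self) -> Span {
                    self.span
                }
            }
        )+
    };
}

impl_has_span!(Expr, Item);

fn main() {
    let e = Expr { span: Span(0, 5) };
    let i = Item { span: Span(6, 10) };
    assert_eq!(e.span(), Span(0, 5));
    assert_eq!(i.span(), Span(6, 10));
}
```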
3 changes: 2 additions & 1 deletion compiler/rustc_ast/src/attr/mod.rs
@@ -202,7 +202,8 @@ impl Attribute {
}
}

pub fn tokens(&self) -> TokenStream {
// Named `get_tokens` to distinguish it from the `<Attribute as HasTokens>::tokens` method.
pub fn get_tokens(&self) -> TokenStream {
match &self.kind {
AttrKind::Normal(normal) => TokenStream::new(
normal
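Note on the rename above: per the new comment, `Attribute` also has a `<Attribute as HasTokens>::tokens` method (returning `Option<&LazyAttrTokenStream>`), so the inherent method that rebuilds a flat `TokenStream` is renamed to `get_tokens`. Below is a standalone sketch of that naming collision, with simplified stand-in types rather than the compiler's definitions:

```rust
// Hypothetical stand-in types; not rustc_ast's real `Attribute` or streams.
#[derive(Clone, Debug)]
struct TokenStream(Vec<String>);

#[derive(Clone, Debug)]
struct LazyTokenStream(Vec<String>);

trait HasTokens {
    fn tokens(&self) -> Option<&LazyTokenStream>;
}

#[derive(Debug)]
struct Attribute {
    lazy: Option<LazyTokenStream>,
}

impl Attribute {
    // Inherent method, renamed to `get_tokens` in this PR: it reconstructs a
    // flat stream rather than returning the captured lazy one.
    fn get_tokens(&self) -> TokenStream {
        TokenStream(self.lazy.iter().flat_map(|l| l.0.clone()).collect())
    }
}

impl HasTokens for Attribute {
    fn tokens(&self) -> Option<&LazyTokenStream> {
        self.lazy.as_ref()
    }
}

fn main() {
    let attr = Attribute {
        lazy: Some(LazyTokenStream(vec!["#".into(), "[".into(), "test".into(), "]".into()])),
    };
    // Before the rename, the inherent method shadowed the trait method in
    // method-call syntax, so the trait version had to be spelled
    // `<Attribute as HasTokens>::tokens(&attr)`.
    let flat: TokenStream = attr.get_tokens();
    let lazy: Option<&LazyTokenStream> = attr.tokens();
    assert_eq!(flat.0.len(), 4);
    println!("{flat:?} {lazy:?}");
}
```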
2 changes: 1 addition & 1 deletion compiler/rustc_ast/src/lib.rs
@@ -44,7 +44,7 @@ pub mod tokenstream;
pub mod visit;

pub use self::ast::*;
pub use self::ast_traits::{AstDeref, AstNodeWrapper, HasAttrs, HasNodeId, HasSpan, HasTokens};
pub use self::ast_traits::{AstDeref, AstNodeWrapper, HasAttrs, HasNodeId, HasTokens};

use rustc_data_structures::stable_hasher::{HashStable, StableHasher};

2 changes: 1 addition & 1 deletion compiler/rustc_ast/src/mut_visit.rs
@@ -704,7 +704,7 @@ fn visit_attr_tt<T: MutVisitor>(tt: &mut AttrTokenTree, vis: &mut T) {
visit_attr_tts(tts, vis);
visit_delim_span(dspan, vis);
}
AttrTokenTree::Attributes(AttributesData { attrs, tokens }) => {
AttrTokenTree::AttrsTarget(AttrsTarget { attrs, tokens }) => {
visit_attrs(attrs, vis);
visit_lazy_tts_opt_mut(Some(tokens), vis);
}
33 changes: 17 additions & 16 deletions compiler/rustc_ast/src/tokenstream.rs
@@ -14,7 +14,7 @@
//! ownership of the original.

use crate::ast::{AttrStyle, StmtKind};
use crate::ast_traits::{HasAttrs, HasSpan, HasTokens};
use crate::ast_traits::{HasAttrs, HasTokens};
use crate::token::{self, Delimiter, Nonterminal, Token, TokenKind};
use crate::AttrVec;

@@ -170,8 +170,8 @@ pub enum AttrTokenTree {
Delimited(DelimSpan, DelimSpacing, Delimiter, AttrTokenStream),
/// Stores the attributes for an attribute target,
/// along with the tokens for that attribute target.
/// See `AttributesData` for more information
Attributes(AttributesData),
/// See `AttrsTarget` for more information
AttrsTarget(AttrsTarget),
}

impl AttrTokenStream {
@@ -180,7 +180,7 @@ impl AttrTokenStream {
}

/// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`.
/// During conversion, `AttrTokenTree::Attributes` get 'flattened'
/// During conversion, `AttrTokenTree::AttrsTarget` get 'flattened'
/// back to a `TokenStream` of the form `outer_attr attr_target`.
/// If there are inner attributes, they are inserted into the proper
/// place in the attribute target tokens.
@@ -199,13 +199,13 @@
TokenStream::new(stream.to_token_trees()),
))
}
AttrTokenTree::Attributes(data) => {
let idx = data
AttrTokenTree::AttrsTarget(target) => {
let idx = target
.attrs
.partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer));
let (outer_attrs, inner_attrs) = data.attrs.split_at(idx);
let (outer_attrs, inner_attrs) = target.attrs.split_at(idx);

let mut target_tokens = data.tokens.to_attr_token_stream().to_token_trees();
let mut target_tokens = target.tokens.to_attr_token_stream().to_token_trees();
if !inner_attrs.is_empty() {
let mut found = false;
// Check the last two trees (to account for a trailing semi)
@@ -227,7 +227,7 @@

let mut stream = TokenStream::default();
for inner_attr in inner_attrs {
stream.push_stream(inner_attr.tokens());
stream.push_stream(inner_attr.get_tokens());
}
stream.push_stream(delim_tokens.clone());
*tree = TokenTree::Delimited(*span, *spacing, *delim, stream);
@@ -242,7 +242,7 @@
);
}
for attr in outer_attrs {
res.extend(attr.tokens().0.iter().cloned());
res.extend(attr.get_tokens().0.iter().cloned());
}
res.extend(target_tokens);
}
@@ -262,7 +262,7 @@
/// have an `attrs` field containing the `#[cfg(FALSE)]` attr,
/// and a `tokens` field storing the (unparsed) tokens `struct Foo {}`
#[derive(Clone, Debug, Encodable, Decodable)]
pub struct AttributesData {
pub struct AttrsTarget {
/// Attributes, both outer and inner.
/// These are stored in the original order that they were parsed in.
pub attrs: AttrVec,
@@ -436,17 +436,17 @@ impl TokenStream {
TokenStream::new(vec![TokenTree::token_alone(kind, span)])
}

pub fn from_ast(node: &(impl HasAttrs + HasSpan + HasTokens + fmt::Debug)) -> TokenStream {
pub fn from_ast(node: &(impl HasAttrs + HasTokens + fmt::Debug)) -> TokenStream {
let Some(tokens) = node.tokens() else {
panic!("missing tokens for node at {:?}: {:?}", node.span(), node);
panic!("missing tokens for node: {:?}", node);
};
let attrs = node.attrs();
let attr_stream = if attrs.is_empty() {
tokens.to_attr_token_stream()
} else {
let attr_data =
AttributesData { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
AttrTokenStream::new(vec![AttrTokenTree::Attributes(attr_data)])
let target =
AttrsTarget { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
AttrTokenStream::new(vec![AttrTokenTree::AttrsTarget(target)])
};
TokenStream::new(attr_stream.to_token_trees())
}
@@ -765,6 +765,7 @@ mod size_asserts {
static_assert_size!(AttrTokenStream, 8);
static_assert_size!(AttrTokenTree, 32);
static_assert_size!(LazyAttrTokenStream, 8);
static_assert_size!(Option<LazyAttrTokenStream>, 8); // must be small, used in many AST nodes
static_assert_size!(TokenStream, 8);
static_assert_size!(TokenTree, 32);
// tidy-alphabetical-end
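Note on the `tokenstream.rs` changes: `AttributesData` becomes `AttrsTarget`, the matching `AttrTokenTree::Attributes` variant becomes `AttrTokenTree::AttrsTarget`, and `to_token_trees` flattens each target back into `outer_attrs attr_target` tokens. The following is a simplified, self-contained mirror of that shape (hypothetical `String`-based tokens; the inner-attribute splicing done by the real code is omitted):

```rust
// Simplified mirror of the renamed types. `String` stands in for real tokens
// and `Vec<String>` for `AttrVec`/`LazyAttrTokenStream`; these are not the
// compiler's definitions.
#[derive(Clone, Debug)]
struct AttrsTarget {
    /// Attributes, both outer and inner, in the order they were parsed.
    attrs: Vec<String>,
    /// The (still unparsed) tokens of the attribute target.
    tokens: Vec<String>,
}

#[derive(Clone, Debug)]
enum AttrTokenTree {
    Token(String),
    Delimited(Vec<AttrTokenTree>),
    /// Renamed from `Attributes(AttributesData)` in this PR.
    AttrsTarget(AttrsTarget),
}

/// Flattening: an `AttrsTarget` becomes `outer_attrs ... target_tokens`.
/// (The inner-attribute splicing done by the real `to_token_trees` is omitted.)
fn to_flat_tokens(trees: &[AttrTokenTree], out: &mut Vec<String>) {
    for tree in trees {
        match tree {
            AttrTokenTree::Token(t) => out.push(t.clone()),
            AttrTokenTree::Delimited(inner) => {
                out.push("{".into());
                to_flat_tokens(inner, out);
                out.push("}".into());
            }
            AttrTokenTree::AttrsTarget(target) => {
                out.extend(target.attrs.iter().cloned());
                out.extend(target.tokens.iter().cloned());
            }
        }
    }
}

fn main() {
    // Mirrors the doc-comment example: a `#[cfg(FALSE)]` attribute whose
    // target is the unparsed tokens `struct Foo {}`.
    let target = AttrsTarget {
        attrs: vec!["#[cfg(FALSE)]".into()],
        tokens: vec!["struct".into(), "Foo".into(), "{".into(), "}".into()],
    };
    let stream = vec![
        AttrTokenTree::Token("mod".into()),
        AttrTokenTree::Token("demo".into()),
        AttrTokenTree::Delimited(vec![AttrTokenTree::AttrsTarget(target)]),
    ];
    let mut out = Vec::new();
    to_flat_tokens(&stream, &mut out);
    assert_eq!(out.join(" "), "mod demo { #[cfg(FALSE)] struct Foo { } }");
}
```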
2 changes: 1 addition & 1 deletion compiler/rustc_builtin_macros/src/cfg_eval.rs
@@ -193,7 +193,7 @@ impl CfgEval<'_> {

// Re-parse the tokens, setting the `capture_cfg` flag to save extra information
// to the captured `AttrTokenStream` (specifically, we capture
// `AttrTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
// `AttrTokenTree::AttrsTarget` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
let mut parser = Parser::new(&self.0.sess.psess, orig_tokens, None);
parser.capture_cfg = true;
match parse_annotatable_with(&mut parser) {
29 changes: 13 additions & 16 deletions compiler/rustc_expand/src/config.rs
@@ -172,7 +172,7 @@ impl<'a> StripUnconfigured<'a> {
fn configure_tokens(&self, stream: &AttrTokenStream) -> AttrTokenStream {
fn can_skip(stream: &AttrTokenStream) -> bool {
stream.0.iter().all(|tree| match tree {
AttrTokenTree::Attributes(_) => false,
AttrTokenTree::AttrsTarget(_) => false,
AttrTokenTree::Token(..) => true,
AttrTokenTree::Delimited(.., inner) => can_skip(inner),
})
@@ -185,22 +185,22 @@ impl<'a> StripUnconfigured<'a> {
let trees: Vec<_> = stream
.0
.iter()
.flat_map(|tree| match tree.clone() {
AttrTokenTree::Attributes(mut data) => {
data.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr));
.filter_map(|tree| match tree.clone() {
AttrTokenTree::AttrsTarget(mut target) => {
target.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr));

if self.in_cfg(&data.attrs) {
data.tokens = LazyAttrTokenStream::new(
self.configure_tokens(&data.tokens.to_attr_token_stream()),
if self.in_cfg(&target.attrs) {
target.tokens = LazyAttrTokenStream::new(
self.configure_tokens(&target.tokens.to_attr_token_stream()),
);
Some(AttrTokenTree::Attributes(data)).into_iter()
Some(AttrTokenTree::AttrsTarget(target))
} else {
None.into_iter()
None
}
}
AttrTokenTree::Delimited(sp, spacing, delim, mut inner) => {
inner = self.configure_tokens(&inner);
Some(AttrTokenTree::Delimited(sp, spacing, delim, inner)).into_iter()
Some(AttrTokenTree::Delimited(sp, spacing, delim, inner))
}
AttrTokenTree::Token(
Token {
@@ -220,9 +220,7 @@
) => {
panic!("Should be `AttrTokenTree::Delimited`, not delim tokens: {:?}", tree);
}
AttrTokenTree::Token(token, spacing) => {
Some(AttrTokenTree::Token(token, spacing)).into_iter()
}
AttrTokenTree::Token(token, spacing) => Some(AttrTokenTree::Token(token, spacing)),
})
.collect();
AttrTokenStream::new(trees)
@@ -294,7 +292,7 @@ impl<'a> StripUnconfigured<'a> {
attr: &Attribute,
(item, item_span): (ast::AttrItem, Span),
) -> Attribute {
let orig_tokens = attr.tokens();
let orig_tokens = attr.get_tokens();

// We are taking an attribute of the form `#[cfg_attr(pred, attr)]`
// and producing an attribute of the form `#[attr]`. We
@@ -310,12 +308,11 @@
else {
panic!("Bad tokens for attribute {attr:?}");
};
let pound_span = pound_token.span;

// We don't really have a good span to use for the synthesized `[]`
// in `#[attr]`, so just use the span of the `#` token.
let bracket_group = AttrTokenTree::Delimited(
DelimSpan::from_single(pound_span),
DelimSpan::from_single(pound_token.span),
DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
Delimiter::Bracket,
item.tokens
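Note on the `configure_tokens` change above: each tree now maps to at most one output tree, so the `flat_map` over `Option::into_iter()` becomes a plain `filter_map`. A minimal standalone illustration of the same cleanup (not rustc code):

```rust
// Each element produces at most one output, so `filter_map` over an `Option`
// replaces `flat_map` + `Option::into_iter()`.
fn keep_even_before(xs: &[u32]) -> Vec<u32> {
    xs.iter()
        .flat_map(|&x| {
            if x % 2 == 0 {
                Some(x).into_iter() // old style: wrap the Option in an iterator
            } else {
                None.into_iter()
            }
        })
        .collect()
}

fn keep_even_after(xs: &[u32]) -> Vec<u32> {
    xs.iter()
        .filter_map(|&x| if x % 2 == 0 { Some(x) } else { None })
        .collect()
}

fn main() {
    let xs = [1, 2, 3, 4, 5];
    assert_eq!(keep_even_before(&xs), keep_even_after(&xs));
    println!("{:?}", keep_even_after(&xs));
}
```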
6 changes: 3 additions & 3 deletions compiler/rustc_parse/src/parser/attr.rs
@@ -282,7 +282,7 @@ impl<'a> Parser<'a> {
pub fn parse_inner_attributes(&mut self) -> PResult<'a, ast::AttrVec> {
let mut attrs = ast::AttrVec::new();
loop {
let start_pos: u32 = self.num_bump_calls.try_into().unwrap();
let start_pos = self.num_bump_calls;
// Only try to parse if it is an inner attribute (has `!`).
let attr = if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
@@ -303,7 +303,7 @@ impl<'a> Parser<'a> {
None
};
if let Some(attr) = attr {
let end_pos: u32 = self.num_bump_calls.try_into().unwrap();
let end_pos = self.num_bump_calls;
// If we are currently capturing tokens, mark the location of this inner attribute.
// If capturing ends up creating a `LazyAttrTokenStream`, we will include
// this replace range with it, removing the inner attribute from the final
@@ -313,7 +313,7 @@
// corresponding macro).
let range = start_pos..end_pos;
if let Capturing::Yes = self.capture_state.capturing {
self.capture_state.inner_attr_ranges.insert(attr.id, (range, vec![]));
self.capture_state.inner_attr_ranges.insert(attr.id, (range, None));
}
attrs.push(attr);
} else {
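Note on the parser change above: the inner-attribute positions stay as plain bump counts (the `u32` conversions via `try_into().unwrap()` are dropped), and the recorded replacement becomes `None` instead of an empty `Vec`. Below is a loose sketch of the replace-range bookkeeping described in the comment, using hypothetical stand-in types (the real map stores parser-internal data):

```rust
use std::collections::HashMap;
use std::ops::Range;

type AttrId = u32;                      // hypothetical stand-in
type Replacement = Option<Vec<String>>; // `None`: drop the tokens in the range

fn main() {
    // Map from inner-attribute id to the bump-count range it occupied.
    let mut inner_attr_ranges: HashMap<AttrId, (Range<usize>, Replacement)> = HashMap::new();

    // Suppose an inner attribute such as `#![allow(dead_code)]` covered
    // token positions 3..9 while it was being parsed.
    inner_attr_ranges.insert(0, (3..9, None));

    // When a lazy token stream is later assembled, a range with a `None`
    // replacement is simply removed from the captured tokens.
    let captured: Vec<usize> = (0..12).collect();
    let (range, replacement) = &inner_attr_ranges[&0];
    let remaining: Vec<usize> = captured.into_iter().filter(|pos| !range.contains(pos)).collect();

    assert!(replacement.is_none());
    assert_eq!(remaining, vec![0, 1, 2, 9, 10, 11]);
}
```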