Auto merge of #17078 - Veykril:diags-perf, r=Veykril
internal: Improve diagnostics performance
bors committed Apr 15, 2024
2 parents b223860 + 531a270 commit 90cfa80
Showing 18 changed files with 176 additions and 158 deletions.
2 changes: 1 addition & 1 deletion crates/base-db/src/lib.rs
@@ -45,7 +45,7 @@ pub trait Upcast<T: ?Sized> {

pub const DEFAULT_FILE_TEXT_LRU_CAP: usize = 16;
pub const DEFAULT_PARSE_LRU_CAP: usize = 128;
pub const DEFAULT_BORROWCK_LRU_CAP: usize = 1024;
pub const DEFAULT_BORROWCK_LRU_CAP: usize = 2024;

pub trait FileLoader {
/// Text of the file.
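
The borrow-check query is one of the most expensive per-body computations the IDE caches, and this constant is the LRU capacity salsa keeps for its results. As a conceptual illustration only (rust-analyzer's real cache is salsa's built-in per-query LRU, not a hand-rolled map; every name below is made up for the sketch), a capacity-bounded memo cache behaves roughly like this:

```rust
use std::collections::{HashMap, VecDeque};

/// Illustrative bounded cache: once `cap` entries are stored, the least
/// recently used one is evicted to make room for the next computation.
struct BoundedCache<K, V> {
    cap: usize,
    map: HashMap<K, V>,
    recency: VecDeque<K>, // front = least recently used
}

impl<K: std::hash::Hash + Eq + Clone, V> BoundedCache<K, V> {
    fn new(cap: usize) -> Self {
        Self { cap, map: HashMap::new(), recency: VecDeque::new() }
    }

    fn get_or_compute(&mut self, key: K, compute: impl FnOnce() -> V) -> &V {
        if self.map.contains_key(&key) {
            // Cache hit: refresh this key's recency.
            self.recency.retain(|k| k != &key);
        } else {
            // Cache miss: evict the least recently used entry if we are full.
            if self.map.len() >= self.cap {
                if let Some(evicted) = self.recency.pop_front() {
                    self.map.remove(&evicted);
                }
            }
            self.map.insert(key.clone(), compute());
        }
        self.recency.push_back(key.clone());
        &self.map[&key]
    }
}

fn main() {
    let mut borrowck_results = BoundedCache::new(2);
    for body in ["a", "b", "a", "c", "b"] {
        // Recomputed only on a miss; a larger cap means fewer misses.
        borrowck_results.get_or_compute(body, || format!("borrowck({body})"));
    }
}
```

With a capacity of 1024, a large workspace can evict borrow-check results that diagnostics will need again moments later; raising the cap trades some memory for fewer recomputations.
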
2 changes: 2 additions & 0 deletions crates/hir-def/src/data.rs
@@ -510,6 +510,7 @@ pub struct ConstData {
pub type_ref: Interned<TypeRef>,
pub visibility: RawVisibility,
pub rustc_allow_incoherent_impl: bool,
pub has_body: bool,
}

impl ConstData {
@@ -533,6 +534,7 @@ impl ConstData {
type_ref: konst.type_ref.clone(),
visibility,
rustc_allow_incoherent_impl,
has_body: konst.has_body,
})
}
}
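
Recording `has_body` on `ConstData` lets later passes decide whether a const has anything worth checking without forcing its body to be lowered. A hypothetical consumer (not part of this hunk; the real call sites live elsewhere in the change) might look like the following sketch, which assumes the usual `const_data` query shown in `hir-def`:

```rust
use hir_def::{db::DefDatabase, ConstId};

// Hypothetical sketch: skip body-based diagnostics for consts that have no
// body at all, e.g. `const FOO: u32;` in an extern block or a trait
// declaration, without touching the much heavier body/inference queries.
fn check_const_body(db: &dyn DefDatabase, id: ConstId) {
    let data = db.const_data(id);
    if !data.has_body {
        return;
    }
    // ... lower and analyze the body only when one actually exists ...
}
```
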
11 changes: 10 additions & 1 deletion crates/hir-def/src/data/adt.rs
@@ -26,7 +26,7 @@ use crate::{
tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree},
type_ref::TypeRef,
visibility::RawVisibility,
EnumId, EnumVariantId, LocalFieldId, LocalModuleId, Lookup, StructId, UnionId,
EnumId, EnumVariantId, LocalFieldId, LocalModuleId, Lookup, StructId, UnionId, VariantId,
};

/// Note that we use `StructData` for unions as well!
@@ -378,6 +378,15 @@ impl VariantData {
VariantData::Unit => StructKind::Unit,
}
}

#[allow(clippy::self_named_constructors)]
pub(crate) fn variant_data(db: &dyn DefDatabase, id: VariantId) -> Arc<VariantData> {
match id {
VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
VariantId::EnumVariantId(it) => db.enum_variant_data(it).variant_data.clone(),
VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
}
}
}

#[derive(Debug, Copy, Clone, PartialEq, Eq)]
5 changes: 4 additions & 1 deletion crates/hir-def/src/db.rs
@@ -12,7 +12,7 @@ use crate::{
attr::{Attrs, AttrsWithOwner},
body::{scope::ExprScopes, Body, BodySourceMap},
data::{
adt::{EnumData, EnumVariantData, StructData},
adt::{EnumData, EnumVariantData, StructData, VariantData},
ConstData, ExternCrateDeclData, FunctionData, ImplData, Macro2Data, MacroRulesData,
ProcMacroData, StaticData, TraitAliasData, TraitData, TypeAliasData,
},
@@ -127,6 +127,9 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
id: EnumVariantId,
) -> (Arc<EnumVariantData>, DefDiagnostics);

#[salsa::transparent]
#[salsa::invoke(VariantData::variant_data)]
fn variant_data(&self, id: VariantId) -> Arc<VariantData>;
#[salsa::transparent]
#[salsa::invoke(ImplData::impl_data_query)]
fn impl_data(&self, e: ImplId) -> Arc<ImplData>;
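
The new `variant_data` query is `#[salsa::transparent]`: it is not memoized itself, it simply forwards to the already-cached struct, enum-variant, or union data queries and clones the `Arc`. That gives callers one uniform entry point for any `VariantId`. An assumed caller (not part of this diff, and assuming the usual `fields()` accessor on `VariantData`) could use it like this:

```rust
use hir_def::{db::DefDatabase, VariantId};

// Illustrative only: fetch the fields of any struct, enum variant, or union
// through the single transparent query instead of matching on the three ADT
// kinds at every call site.
fn field_count(db: &dyn DefDatabase, id: VariantId) -> usize {
    db.variant_data(id).fields().len()
}
```
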
1 change: 1 addition & 0 deletions crates/hir-def/src/item_tree.rs
@@ -716,6 +716,7 @@ pub struct Const {
pub visibility: RawVisibilityId,
pub type_ref: Interned<TypeRef>,
pub ast_id: FileAstId<ast::Const>,
pub has_body: bool,
}

#[derive(Debug, Clone, Eq, PartialEq)]
2 changes: 1 addition & 1 deletion crates/hir-def/src/item_tree/lower.rs
@@ -446,7 +446,7 @@ impl<'a> Ctx<'a> {
let type_ref = self.lower_type_ref_opt(konst.ty());
let visibility = self.lower_visibility(konst);
let ast_id = self.source_ast_id_map.ast_id(konst);
let res = Const { name, visibility, type_ref, ast_id };
let res = Const { name, visibility, type_ref, ast_id, has_body: konst.body().is_some() };
id(self.data().consts.alloc(res))
}

2 changes: 1 addition & 1 deletion crates/hir-def/src/item_tree/pretty.rs
@@ -357,7 +357,7 @@ impl Printer<'_> {
wln!(self, "}}");
}
ModItem::Const(it) => {
let Const { name, visibility, type_ref, ast_id } = &self.tree[it];
let Const { name, visibility, type_ref, ast_id, has_body: _ } = &self.tree[it];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "const ");
2 changes: 1 addition & 1 deletion crates/hir-ty/src/diagnostics/decl_check.rs
@@ -43,7 +43,7 @@ mod allow {
}

pub fn incorrect_case(db: &dyn HirDatabase, owner: ModuleDefId) -> Vec<IncorrectCase> {
let _p = tracing::span!(tracing::Level::INFO, "validate_module_item").entered();
let _p = tracing::span!(tracing::Level::INFO, "incorrect_case").entered();
let mut validator = DeclValidator::new(db);
validator.validate_item(owner);
validator.sink
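
The span here is the usual `tracing` profiling guard; renaming it to match the function keeps rust-analyzer's profile output aligned with the code. A minimal sketch of the pattern (real `tracing` API; the surrounding function is illustrative):

```rust
fn incorrect_case_like_pass() {
    // Everything until `_guard` is dropped is attributed to this span when a
    // profiling subscriber is active.
    let _guard = tracing::span!(tracing::Level::INFO, "incorrect_case").entered();
    // ... validation work ...
}
```
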
93 changes: 52 additions & 41 deletions crates/hir-ty/src/diagnostics/expr.rs
@@ -11,6 +11,7 @@ use hir_def::{ItemContainerId, Lookup};
use hir_expand::name;
use itertools::Itertools;
use rustc_hash::FxHashSet;
use rustc_pattern_analysis::constructor::Constructor;
use syntax::{ast, AstNode};
use tracing::debug;
use triomphe::Arc;
@@ -190,45 +191,45 @@ impl ExprValidator
let pattern_arena = Arena::new();
let mut m_arms = Vec::with_capacity(arms.len());
let mut has_lowering_errors = false;
// Note: Skipping the entire diagnostic rather than just not including a faulty match arm is
// preferred to avoid the chance of false positives.
for arm in arms {
if let Some(pat_ty) = self.infer.type_of_pat.get(arm.pat) {
// We only include patterns whose type matches the type
// of the scrutinee expression. If we had an InvalidMatchArmPattern
// diagnostic or similar we could raise that in an else
// block here.
//
// When comparing the types, we also have to consider that rustc
// will automatically de-reference the scrutinee expression type if
// necessary.
//
// FIXME we should use the type checker for this.
if (pat_ty == scrut_ty
|| scrut_ty
.as_reference()
.map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
.unwrap_or(false))
&& types_of_subpatterns_do_match(arm.pat, &self.body, &self.infer)
{
// If we had a NotUsefulMatchArm diagnostic, we could
// check the usefulness of each pattern as we added it
// to the matrix here.
let pat = self.lower_pattern(&cx, arm.pat, db, &mut has_lowering_errors);
let m_arm = pat_analysis::MatchArm {
pat: pattern_arena.alloc(pat),
has_guard: arm.guard.is_some(),
arm_data: (),
};
m_arms.push(m_arm);
if !has_lowering_errors {
continue;
}
let Some(pat_ty) = self.infer.type_of_pat.get(arm.pat) else {
return;
};

// We only include patterns whose type matches the type
// of the scrutinee expression. If we had an InvalidMatchArmPattern
// diagnostic or similar we could raise that in an else
// block here.
//
// When comparing the types, we also have to consider that rustc
// will automatically de-reference the scrutinee expression type if
// necessary.
//
// FIXME we should use the type checker for this.
if (pat_ty == scrut_ty
|| scrut_ty
.as_reference()
.map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
.unwrap_or(false))
&& types_of_subpatterns_do_match(arm.pat, &self.body, &self.infer)
{
// If we had a NotUsefulMatchArm diagnostic, we could
// check the usefulness of each pattern as we added it
// to the matrix here.
let pat = self.lower_pattern(&cx, arm.pat, db, &mut has_lowering_errors);
let m_arm = pat_analysis::MatchArm {
pat: pattern_arena.alloc(pat),
has_guard: arm.guard.is_some(),
arm_data: (),
};
m_arms.push(m_arm);
if !has_lowering_errors {
continue;
}
}

// If we can't resolve the type of a pattern, or the pattern type doesn't
// fit the match expression, we skip this diagnostic. Skipping the entire
// diagnostic rather than just not including this match arm is preferred
// to avoid the chance of false positives.
// If the pattern type doesn't fit the match expression, we skip this diagnostic.
cov_mark::hit!(validate_match_bailed_out);
return;
}
@@ -266,15 +267,17 @@ impl ExprValidator {

let mut have_errors = false;
let deconstructed_pat = self.lower_pattern(&cx, pat, db, &mut have_errors);

// optimization: a wildcard pattern trivially holds
if have_errors || matches!(deconstructed_pat.ctor(), Constructor::Wildcard) {
continue;
}

let match_arm = rustc_pattern_analysis::MatchArm {
pat: pattern_arena.alloc(deconstructed_pat),
has_guard: false,
arm_data: (),
};
if have_errors {
continue;
}

let report = match cx.compute_match_usefulness(&[match_arm], ty.clone()) {
Ok(v) => v,
Err(e) => {
@@ -531,8 +534,16 @@ fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResul
fn walk(pat: PatId, body: &Body, infer: &InferenceResult, has_type_mismatches: &mut bool) {
match infer.type_mismatch_for_pat(pat) {
Some(_) => *has_type_mismatches = true,
None if *has_type_mismatches => (),
None => {
body[pat].walk_child_pats(|subpat| walk(subpat, body, infer, has_type_mismatches))
let pat = &body[pat];
if let Pat::ConstBlock(expr) | Pat::Lit(expr) = *pat {
*has_type_mismatches |= infer.type_mismatch_for_expr(expr).is_some();
if *has_type_mismatches {
return;
}
}
pat.walk_child_pats(|subpat| walk(subpat, body, infer, has_type_mismatches))
}
}
}
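
One of the performance wins in this file is the early `Constructor::Wildcard` check: a `let` pattern that is a plain binding or `_` matches anything, so the full match-usefulness analysis can be skipped for it. A simplified, self-contained sketch of that fast path (stand-in types, not the `rustc_pattern_analysis` API):

```rust
enum Ctor {
    Wildcard,
    Refutable, // e.g. `Some(_)`, a struct constructor, a literal, ...
}

// Mirrors `matches!(deconstructed_pat.ctor(), Constructor::Wildcard)` above:
// only refutable top-level constructors need the expensive usefulness check.
fn needs_usefulness_check(top_level: &Ctor) -> bool {
    !matches!(top_level, Ctor::Wildcard)
}

fn main() {
    assert!(!needs_usefulness_check(&Ctor::Wildcard)); // `let x = ...;`, `let _ = ...;`
    assert!(needs_usefulness_check(&Ctor::Refutable)); // `let Some(x) = ...;`
}
```
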
53 changes: 23 additions & 30 deletions crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
@@ -1,9 +1,9 @@
//! Interface with `rustc_pattern_analysis`.
use std::fmt;
use tracing::debug;

use hir_def::{DefWithBodyId, EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
use once_cell::unsync::Lazy;
use rustc_hash::FxHashMap;
use rustc_pattern_analysis::{
constructor::{Constructor, ConstructorSet, VariantVisibility},
@@ -91,20 +91,13 @@ impl<'p> MatchCheckCtx<'p> {
}

fn is_uninhabited(&self, ty: &Ty) -> bool {
is_ty_uninhabited_from(ty, self.module, self.db)
is_ty_uninhabited_from(self.db, ty, self.module)
}

/// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`.
fn is_foreign_non_exhaustive_enum(&self, ty: &Ty) -> bool {
match ty.as_adt() {
Some((adt @ hir_def::AdtId::EnumId(_), _)) => {
let has_non_exhaustive_attr =
self.db.attrs(adt.into()).by_key("non_exhaustive").exists();
let is_local = adt.module(self.db.upcast()).krate() == self.module.krate();
has_non_exhaustive_attr && !is_local
}
_ => false,
}
/// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`.
fn is_foreign_non_exhaustive(&self, adt: hir_def::AdtId) -> bool {
let is_local = adt.krate(self.db.upcast()) == self.module.krate();
!is_local && self.db.attrs(adt.into()).by_key("non_exhaustive").exists()
}

fn variant_id_for_adt(
@@ -376,24 +369,21 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
single(subst_ty)
} else {
let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
let (adt, _) = ty.as_adt().unwrap();

let adt_is_local =
variant.module(self.db.upcast()).krate() == self.module.krate();
// Whether we must not match the fields of this variant exhaustively.
let is_non_exhaustive =
self.db.attrs(variant.into()).by_key("non_exhaustive").exists()
&& !adt_is_local;
let visibilities = self.db.field_visibilities(variant);
let is_non_exhaustive = Lazy::new(|| self.is_foreign_non_exhaustive(adt));
let visibilities = Lazy::new(|| self.db.field_visibilities(variant));

self.list_variant_fields(ty, variant)
.map(move |(fid, ty)| {
let is_visible = matches!(adt, hir_def::AdtId::EnumId(..))
|| visibilities[fid]
.is_visible_from(self.db.upcast(), self.module);
let is_visible = || {
matches!(adt, hir_def::AdtId::EnumId(..))
|| visibilities[fid]
.is_visible_from(self.db.upcast(), self.module)
};
let is_uninhabited = self.is_uninhabited(&ty);
let private_uninhabited =
is_uninhabited && (!is_visible || is_non_exhaustive);
is_uninhabited && (!is_visible() || *is_non_exhaustive);
(ty, PrivateUninhabitedField(private_uninhabited))
})
.collect()
@@ -445,17 +435,20 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
TyKind::Scalar(Scalar::Char) => unhandled(),
TyKind::Scalar(Scalar::Int(..) | Scalar::Uint(..)) => unhandled(),
TyKind::Array(..) | TyKind::Slice(..) => unhandled(),
TyKind::Adt(AdtId(hir_def::AdtId::EnumId(enum_id)), subst) => {
let enum_data = cx.db.enum_data(*enum_id);
let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(ty);
&TyKind::Adt(AdtId(adt @ hir_def::AdtId::EnumId(enum_id)), ref subst) => {
let enum_data = cx.db.enum_data(enum_id);
let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive(adt);

if enum_data.variants.is_empty() && !is_declared_nonexhaustive {
ConstructorSet::NoConstructors
} else {
let mut variants = FxHashMap::default();
let mut variants = FxHashMap::with_capacity_and_hasher(
enum_data.variants.len(),
Default::default(),
);
for (i, &(variant, _)) in enum_data.variants.iter().enumerate() {
let is_uninhabited =
is_enum_variant_uninhabited_from(variant, subst, cx.module, cx.db);
is_enum_variant_uninhabited_from(cx.db, variant, subst, cx.module);
let visibility = if is_uninhabited {
VariantVisibility::Empty
} else {
@@ -506,7 +499,7 @@ impl<'p> PatCx for MatchCheckCtx<'p> {
}

fn bug(&self, fmt: fmt::Arguments<'_>) {
debug!("{}", fmt)
never!("{}", fmt)
}

fn complexity_exceeded(&self) -> Result<(), Self::Error> {