From 34ff35298938f0d0ae477618b17a41d5afc00a7a Mon Sep 17 00:00:00 2001 From: SNCPlay42 Date: Tue, 21 Jul 2020 19:56:21 +0100 Subject: [PATCH 01/19] don't refer to async as 'generators' and give return of async fn a better span --- .../borrow_check/diagnostics/region_name.rs | 30 +++++++++++--- .../issue-74072-lifetime-name-annotations.rs | 29 ++++++++++++++ ...sue-74072-lifetime-name-annotations.stderr | 39 +++++++++++++++++++ 3 files changed, 92 insertions(+), 6 deletions(-) create mode 100644 src/test/ui/async-await/issue-74072-lifetime-name-annotations.rs create mode 100644 src/test/ui/async-await/issue-74072-lifetime-name-annotations.stderr diff --git a/compiler/rustc_mir/src/borrow_check/diagnostics/region_name.rs b/compiler/rustc_mir/src/borrow_check/diagnostics/region_name.rs index 2e5a231fef057..bf51245156833 100644 --- a/compiler/rustc_mir/src/borrow_check/diagnostics/region_name.rs +++ b/compiler/rustc_mir/src/borrow_check/diagnostics/region_name.rs @@ -657,15 +657,33 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> { let (return_span, mir_description) = match tcx.hir().get(self.mir_hir_id()) { hir::Node::Expr(hir::Expr { - kind: hir::ExprKind::Closure(_, return_ty, _, span, gen_move), + kind: hir::ExprKind::Closure(_, return_ty, body_id, span, _), .. - }) => ( - match return_ty.output { + }) => { + let mut span = match return_ty.output { hir::FnRetTy::DefaultReturn(_) => tcx.sess.source_map().end_point(*span), hir::FnRetTy::Return(_) => return_ty.output.span(), - }, - if gen_move.is_some() { " of generator" } else { " of closure" }, - ), + }; + let mir_description = match tcx.hir().body(*body_id).generator_kind { + Some(hir::GeneratorKind::Async(gen)) => match gen { + hir::AsyncGeneratorKind::Block => " of async block", + hir::AsyncGeneratorKind::Closure => " of async closure", + hir::AsyncGeneratorKind::Fn => { + span = tcx + .hir() + .get(tcx.hir().get_parent_item(mir_hir_id)) + .fn_decl() + .expect("generator lowered from async fn should be in fn") + .output + .span(); + " of async function" + } + }, + Some(hir::GeneratorKind::Gen) => " of generator", + None => " of closure", + }; + (span, mir_description) + } hir::Node::ImplItem(hir::ImplItem { kind: hir::ImplItemKind::Fn(method_sig, _), .. 
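For context on why the `AsyncGeneratorKind::Fn` arm walks up to the parent item's `fn_decl()`: an `async fn` is lowered to an ordinary `fn` whose body is an `async move` block (internally a generator), so the `-> &i32` the user wrote lives on the outer function rather than on the body the borrow checker actually sees. A hand-written sketch of that lowering, for illustration only (edition 2018+; not the compiler's exact desugaring):

use std::future::Future;

// The surface form exercised by the test added below:
pub async fn async_fn(x: &mut i32) -> &i32 {
    &*x
}

// Roughly what it lowers to: the written return type stays on the outer
// `fn` signature, which is the span the diagnostic now reports as the
// "return type of async function".
pub fn async_fn_lowered(x: &mut i32) -> impl Future<Output = &i32> {
    async move { &*x }
}

fn main() {}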
diff --git a/src/test/ui/async-await/issue-74072-lifetime-name-annotations.rs b/src/test/ui/async-await/issue-74072-lifetime-name-annotations.rs new file mode 100644 index 0000000000000..f8e116333592f --- /dev/null +++ b/src/test/ui/async-await/issue-74072-lifetime-name-annotations.rs @@ -0,0 +1,29 @@ +// edition:2018 +#![feature(async_closure)] +use std::future::Future; + +// test the quality of annotations giving lifetimes names (`'1`) when async constructs are involved + +pub async fn async_fn(x: &mut i32) -> &i32 { + let y = &*x; + *x += 1; //~ ERROR cannot assign to `*x` because it is borrowed + y +} + +pub fn async_closure(x: &mut i32) -> impl Future { + (async move || { + let y = &*x; + *x += 1; //~ ERROR cannot assign to `*x` because it is borrowed + y + })() +} + +pub fn async_block(x: &mut i32) -> impl Future { + async move { + let y = &*x; + *x += 1; //~ ERROR cannot assign to `*x` because it is borrowed + y + } +} + +fn main() {} diff --git a/src/test/ui/async-await/issue-74072-lifetime-name-annotations.stderr b/src/test/ui/async-await/issue-74072-lifetime-name-annotations.stderr new file mode 100644 index 0000000000000..997e36fe22cfe --- /dev/null +++ b/src/test/ui/async-await/issue-74072-lifetime-name-annotations.stderr @@ -0,0 +1,39 @@ +error[E0506]: cannot assign to `*x` because it is borrowed + --> $DIR/issue-74072-lifetime-name-annotations.rs:9:5 + | +LL | pub async fn async_fn(x: &mut i32) -> &i32 { + | ---- return type of async function is &'1 i32 +LL | let y = &*x; + | --- borrow of `*x` occurs here +LL | *x += 1; + | ^^^^^^^ assignment to borrowed `*x` occurs here +LL | y + | - returning this value requires that `*x` is borrowed for `'1` + +error[E0506]: cannot assign to `*x` because it is borrowed + --> $DIR/issue-74072-lifetime-name-annotations.rs:16:9 + | +LL | let y = &*x; + | --- borrow of `*x` occurs here +LL | *x += 1; + | ^^^^^^^ assignment to borrowed `*x` occurs here +LL | y + | - returning this value requires that `*x` is borrowed for `'1` +LL | })() + | - return type of async closure is &'1 i32 + +error[E0506]: cannot assign to `*x` because it is borrowed + --> $DIR/issue-74072-lifetime-name-annotations.rs:24:9 + | +LL | let y = &*x; + | --- borrow of `*x` occurs here +LL | *x += 1; + | ^^^^^^^ assignment to borrowed `*x` occurs here +LL | y + | - returning this value requires that `*x` is borrowed for `'1` +LL | } + | - return type of async block is &'1 i32 + +error: aborting due to 3 previous errors + +For more information about this error, try `rustc --explain E0506`. From 5042dbdb2e27b141ccc4cffc34efcf06ec24d59f Mon Sep 17 00:00:00 2001 From: SNCPlay42 Date: Tue, 8 Sep 2020 03:06:09 +0100 Subject: [PATCH 02/19] add RegionNameHighlight::Occluded --- .../borrow_check/diagnostics/region_name.rs | 38 +++++++++++++------ 1 file changed, 27 insertions(+), 11 deletions(-) diff --git a/compiler/rustc_mir/src/borrow_check/diagnostics/region_name.rs b/compiler/rustc_mir/src/borrow_check/diagnostics/region_name.rs index bf51245156833..db5c14843b9fc 100644 --- a/compiler/rustc_mir/src/borrow_check/diagnostics/region_name.rs +++ b/compiler/rustc_mir/src/borrow_check/diagnostics/region_name.rs @@ -57,6 +57,10 @@ crate enum RegionNameHighlight { /// The anonymous region corresponds to a region where the type annotation is completely missing /// from the code, e.g. in a closure arguments `|x| { ... }`, where `x` is a reference. 
CannotMatchHirTy(Span, String), + /// The anonymous region corresponds to a region where the type annotation is completely missing + /// from the code, and *even if* we print out the full name of the type, the region name won't + /// be included. This currently occurs for opaque types like `impl Future`. + Occluded(Span, String), } impl RegionName { @@ -87,7 +91,8 @@ impl RegionName { RegionNameSource::AnonRegionFromArgument(ref highlight) => match *highlight { RegionNameHighlight::MatchedHirTy(span) | RegionNameHighlight::MatchedAdtAndSegment(span) - | RegionNameHighlight::CannotMatchHirTy(span, _) => Some(span), + | RegionNameHighlight::CannotMatchHirTy(span, _) + | RegionNameHighlight::Occluded(span, _) => Some(span), }, } } @@ -123,6 +128,15 @@ impl RegionName { ) => { diag.span_label(*span, format!("let's call this `{}`", self)); } + RegionNameSource::AnonRegionFromArgument(RegionNameHighlight::Occluded( + span, + type_name, + )) => { + diag.span_label( + *span, + format!("lifetime `{}` appears in the type {}", self, type_name), + ); + } RegionNameSource::AnonRegionFromUpvar(span, upvar_name) => { diag.span_label( *span, @@ -349,19 +363,21 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> { argument_index, ); - self.get_argument_hir_ty_for_highlighting(argument_index) + let highlight = self + .get_argument_hir_ty_for_highlighting(argument_index) .and_then(|arg_hir_ty| self.highlight_if_we_can_match_hir_ty(fr, arg_ty, arg_hir_ty)) - .or_else(|| { + .unwrap_or_else(|| { // `highlight_if_we_cannot_match_hir_ty` needs to know the number we will give to // the anonymous region. If it succeeds, the `synthesize_region_name` call below // will increment the counter, "reserving" the number we just used. let counter = *self.next_region_name.try_borrow().unwrap(); self.highlight_if_we_cannot_match_hir_ty(fr, arg_ty, span, counter) - }) - .map(|highlight| RegionName { - name: self.synthesize_region_name(), - source: RegionNameSource::AnonRegionFromArgument(highlight), - }) + }); + + Some(RegionName { + name: self.synthesize_region_name(), + source: RegionNameSource::AnonRegionFromArgument(highlight), + }) } fn get_argument_hir_ty_for_highlighting( @@ -399,7 +415,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> { ty: Ty<'tcx>, span: Span, counter: usize, - ) -> Option { + ) -> RegionNameHighlight { let mut highlight = RegionHighlightMode::default(); highlight.highlighting_region_vid(needle_fr, counter); let type_name = @@ -411,9 +427,9 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> { ); if type_name.find(&format!("'{}", counter)).is_some() { // Only add a label if we can confirm that a region was labelled. - Some(RegionNameHighlight::CannotMatchHirTy(span, type_name)) + RegionNameHighlight::CannotMatchHirTy(span, type_name) } else { - None + RegionNameHighlight::Occluded(span, type_name) } } From 301bb123f424954cd3faee11bbbfb0605e5ae0e8 Mon Sep 17 00:00:00 2001 From: Joseph Rafael Ferrer Date: Fri, 30 Oct 2020 18:19:56 +0800 Subject: [PATCH 03/19] Enable LLVM Polly via llvm-args. --- config.toml.example | 3 +++ src/bootstrap/config.rs | 4 ++++ src/bootstrap/native.rs | 4 ++++ 3 files changed, 11 insertions(+) diff --git a/config.toml.example b/config.toml.example index 1edb390e0fef2..1dcc5f13415dc 100644 --- a/config.toml.example +++ b/config.toml.example @@ -138,6 +138,9 @@ changelog-seen = 2 # Whether or not to specify `-DLLVM_TEMPORARILY_ALLOW_OLD_TOOLCHAIN=YES` #allow-old-toolchain = false +# Whether to include the Polly optimizer. 
+#polly = false + # ============================================================================= # General build configuration options # ============================================================================= diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs index 7698ff62880d6..8f6fbaebd2c5a 100644 --- a/src/bootstrap/config.rs +++ b/src/bootstrap/config.rs @@ -99,6 +99,7 @@ pub struct Config { pub llvm_version_suffix: Option, pub llvm_use_linker: Option, pub llvm_allow_old_toolchain: Option, + pub llvm_polly: Option, pub llvm_from_ci: bool, pub use_lld: bool, @@ -418,6 +419,7 @@ struct Llvm { use_libcxx: Option, use_linker: Option, allow_old_toolchain: Option, + polly: Option, download_ci_llvm: Option, } @@ -762,6 +764,7 @@ impl Config { set(&mut config.llvm_use_libcxx, llvm.use_libcxx); config.llvm_use_linker = llvm.use_linker.clone(); config.llvm_allow_old_toolchain = llvm.allow_old_toolchain; + config.llvm_polly = llvm.polly; config.llvm_from_ci = match llvm.download_ci_llvm { Some(StringOrBool::String(s)) => { assert!(s == "if-available", "unknown option `{}` for download-ci-llvm", s); @@ -795,6 +798,7 @@ impl Config { check_ci_llvm!(llvm.use_libcxx); check_ci_llvm!(llvm.use_linker); check_ci_llvm!(llvm.allow_old_toolchain); + check_ci_llvm!(llvm.polly); // CI-built LLVM is shared config.llvm_link_shared = true; diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs index 37d6fab070b8e..6dc83c7d70af6 100644 --- a/src/bootstrap/native.rs +++ b/src/bootstrap/native.rs @@ -257,6 +257,10 @@ impl Step for Llvm { enabled_llvm_projects.push("compiler-rt"); } + if let Some(true) = builder.config.llvm_polly { + enabled_llvm_projects.push("polly"); + } + // We want libxml to be disabled. // See https://github.com/rust-lang/rust/pull/50104 cfg.define("LLVM_ENABLE_LIBXML2", "OFF"); From 1f5c655d0c379754c2c0e5a6b8194bd2ee91e455 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Wed, 28 Oct 2020 19:50:08 +0000 Subject: [PATCH 04/19] Fix query cycle when tracing explicit_item_bounds --- compiler/rustc_middle/src/ty/print/pretty.rs | 2 +- compiler/rustc_typeck/src/collect/item_bounds.rs | 16 ++++++++-------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 9735099a4e1ee..3ea30fcf9f486 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -65,7 +65,7 @@ thread_local! { /// Avoids running any queries during any prints that occur /// during the closure. This may alter the appearance of some /// types (e.g. forcing verbose printing for opaque types). -/// This method is used during some queries (e.g. `predicates_of` +/// This method is used during some queries (e.g. `explicit_item_bounds` /// for opaque types), to ensure that any debug printing that /// occurs during the query computation does not end up recursively /// calling the same query. 
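The cycle being avoided here: computing `explicit_item_bounds` for an opaque type can debug-print that opaque type, and pretty-printing an opaque type normally asks for its item bounds again. `ty::print::with_no_queries` breaks the loop by making the printer fall back to a verbose form while the query is in flight. A toy, self-contained sketch of that guard pattern follows; the names and the printing fallback are illustrative stand-ins, not rustc's real API:

use std::cell::Cell;

thread_local! {
    // Stand-in for the flag behind `ty::print::with_no_queries`.
    static NO_QUERIES: Cell<bool> = Cell::new(false);
}

// Run `f` with query-driven printing disabled, restoring the previous state.
fn with_no_queries<R>(f: impl FnOnce() -> R) -> R {
    NO_QUERIES.with(|flag| {
        let old = flag.replace(true);
        let result = f();
        flag.set(old);
        result
    })
}

// Pretty-printing an opaque type would normally ask for its item bounds.
fn print_opaque_ty() -> String {
    if NO_QUERIES.with(|flag| flag.get()) {
        // Verbose fallback: never run a query while one is already in flight.
        "Opaque(..)".to_string()
    } else {
        format!("impl {}", item_bounds_query())
    }
}

// The query wraps *all* of its work in the guard, so any debug printing it
// triggers takes the fallback path instead of re-entering the query.
fn item_bounds_query() -> String {
    with_no_queries(|| {
        let traced = print_opaque_ty(); // e.g. a debug! trace
        format!("Sized /* traced as {} */", traced)
    })
}

fn main() {
    println!("{}", item_bounds_query());
}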
diff --git a/compiler/rustc_typeck/src/collect/item_bounds.rs b/compiler/rustc_typeck/src/collect/item_bounds.rs index 9c29ceeb593df..e596dd1a396c9 100644 --- a/compiler/rustc_typeck/src/collect/item_bounds.rs +++ b/compiler/rustc_typeck/src/collect/item_bounds.rs @@ -61,23 +61,23 @@ fn opaque_type_bounds<'tcx>( bounds: &'tcx [hir::GenericBound<'tcx>], span: Span, ) -> &'tcx [(ty::Predicate<'tcx>, Span)] { - let item_ty = - tcx.mk_opaque(opaque_def_id, InternalSubsts::identity_for_item(tcx, opaque_def_id)); + ty::print::with_no_queries(|| { + let item_ty = + tcx.mk_opaque(opaque_def_id, InternalSubsts::identity_for_item(tcx, opaque_def_id)); - let bounds = ty::print::with_no_queries(|| { - AstConv::compute_bounds( + let bounds = AstConv::compute_bounds( &ItemCtxt::new(tcx, opaque_def_id), item_ty, bounds, SizedByDefault::Yes, span, ) - }); + .predicates(tcx, item_ty); - let bounds = bounds.predicates(tcx, item_ty); - debug!("opaque_type_bounds({}) = {:?}", tcx.def_path_str(opaque_def_id), bounds); + debug!("opaque_type_bounds({}) = {:?}", tcx.def_path_str(opaque_def_id), bounds); - tcx.arena.alloc_slice(&bounds) + tcx.arena.alloc_slice(&bounds) + }) } pub(super) fn explicit_item_bounds( From 299a65ff71413de0fc26d880219402ceb7fa1bcb Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Wed, 28 Oct 2020 19:57:56 +0000 Subject: [PATCH 05/19] Update chalk 0.32.0 -> 0.35.0 --- Cargo.lock | 16 +- compiler/rustc_middle/Cargo.toml | 2 +- compiler/rustc_middle/src/traits/chalk.rs | 68 ++-- compiler/rustc_traits/Cargo.toml | 6 +- compiler/rustc_traits/src/chalk/db.rs | 76 ++--- compiler/rustc_traits/src/chalk/lowering.rs | 341 ++++++++------------ compiler/rustc_traits/src/chalk/mod.rs | 9 +- 7 files changed, 214 insertions(+), 304 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 65d20190c0db5..6d4a8cc696d7f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -460,9 +460,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chalk-derive" -version = "0.32.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d072b2ba723f0bada7c515d8b3725224bc4f5052d2a92dcbeb0b118ff37084a" +checksum = "bc6d2895e93c0939074a7a0f525fd549b49da8362dea3def555e4aab95ff64cd" dependencies = [ "proc-macro2", "quote", @@ -472,9 +472,9 @@ dependencies = [ [[package]] name = "chalk-engine" -version = "0.32.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb5475f6083d6d6c509e1c335c4f69ad04144ac090faa1afb134a53c3695841" +checksum = "93ed23c35d243ccc2caeae7ba4660a091e74b11c40e441d7849f07d8e71b5cb8" dependencies = [ "chalk-derive", "chalk-ir", @@ -485,9 +485,9 @@ dependencies = [ [[package]] name = "chalk-ir" -version = "0.32.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f60cdb0e18c5455cb6a85e8464aad3622b70476018edfa8845691df66f7e9a05" +checksum = "40d7f6140cccc889117e7372b6f9cfbc8103c86a1a0269ff6ab868f20ab414d6" dependencies = [ "chalk-derive", "lazy_static", @@ -495,9 +495,9 @@ dependencies = [ [[package]] name = "chalk-solve" -version = "0.32.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "981534d499a8476ecc0b520be4d3864757f96211826a75360fbf2cb6fae362ab" +checksum = "fa65b636e64cbfcba31f053da97c32f3e15f2670b3cc620b84231a1656d754ec" dependencies = [ "chalk-derive", "chalk-ir", diff --git a/compiler/rustc_middle/Cargo.toml b/compiler/rustc_middle/Cargo.toml index 
66532ea02f368..03e7f13767258 100644 --- a/compiler/rustc_middle/Cargo.toml +++ b/compiler/rustc_middle/Cargo.toml @@ -26,7 +26,7 @@ rustc_index = { path = "../rustc_index" } rustc_serialize = { path = "../rustc_serialize" } rustc_ast = { path = "../rustc_ast" } rustc_span = { path = "../rustc_span" } -chalk-ir = "0.32.0" +chalk-ir = "0.35.0" smallvec = { version = "1.0", features = ["union", "may_dangle"] } measureme = "9.0.0" rustc_session = { path = "../rustc_session" } diff --git a/compiler/rustc_middle/src/traits/chalk.rs b/compiler/rustc_middle/src/traits/chalk.rs index d8507d08c1bc5..f864ad8ebcd8a 100644 --- a/compiler/rustc_middle/src/traits/chalk.rs +++ b/compiler/rustc_middle/src/traits/chalk.rs @@ -102,48 +102,6 @@ impl<'tcx> chalk_ir::interner::Interner for RustInterner<'tcx> { Some(write()) } - fn debug_application_ty( - application_ty: &chalk_ir::ApplicationTy, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - match application_ty.name { - chalk_ir::TypeName::Ref(mutbl) => { - let data = application_ty.substitution.interned(); - match (&**data[0].interned(), &**data[1].interned()) { - ( - chalk_ir::GenericArgData::Lifetime(lifetime), - chalk_ir::GenericArgData::Ty(ty), - ) => Some(match mutbl { - chalk_ir::Mutability::Not => write!(fmt, "(&{:?} {:?})", lifetime, ty), - chalk_ir::Mutability::Mut => write!(fmt, "(&{:?} mut {:?})", lifetime, ty), - }), - _ => unreachable!(), - } - } - chalk_ir::TypeName::Array => { - let data = application_ty.substitution.interned(); - match (&**data[0].interned(), &**data[1].interned()) { - (chalk_ir::GenericArgData::Ty(ty), chalk_ir::GenericArgData::Const(len)) => { - Some(write!(fmt, "[{:?}; {:?}]", ty, len)) - } - _ => unreachable!(), - } - } - chalk_ir::TypeName::Slice => { - let data = application_ty.substitution.interned(); - let ty = match &**data[0].interned() { - chalk_ir::GenericArgData::Ty(t) => t, - _ => unreachable!(), - }; - Some(write!(fmt, "[{:?}]", ty)) - } - _ => { - let chalk_ir::ApplicationTy { name, substitution } = application_ty; - Some(write!(fmt, "{:?}{:?}", name, chalk_ir::debug::Angle(substitution.interned()))) - } - } - } - fn debug_substitution( substitution: &chalk_ir::Substitution, fmt: &mut fmt::Formatter<'_>, @@ -174,6 +132,32 @@ impl<'tcx> chalk_ir::interner::Interner for RustInterner<'tcx> { Some(write!(fmt, "{:?}", clauses.interned())) } + fn debug_ty(ty: &chalk_ir::Ty, fmt: &mut fmt::Formatter<'_>) -> Option { + match &ty.interned().kind { + chalk_ir::TyKind::Ref(chalk_ir::Mutability::Not, lifetime, ty) => { + Some(write!(fmt, "(&{:?} {:?})", lifetime, ty)) + } + chalk_ir::TyKind::Ref(chalk_ir::Mutability::Mut, lifetime, ty) => { + Some(write!(fmt, "(&{:?} mut {:?})", lifetime, ty)) + } + chalk_ir::TyKind::Array(ty, len) => Some(write!(fmt, "[{:?}; {:?}]", ty, len)), + chalk_ir::TyKind::Slice(ty) => Some(write!(fmt, "[{:?}]", ty)), + chalk_ir::TyKind::Tuple(len, substs) => Some((|| { + write!(fmt, "(")?; + for (idx, substitution) in substs.interned().iter().enumerate() { + if idx == *len && *len != 1 { + // Don't add a trailing comma if the tuple has more than one element + write!(fmt, "{:?}", substitution)?; + } else { + write!(fmt, "{:?},", substitution)?; + } + } + write!(fmt, ")") + })()), + _ => None, + } + } + fn debug_alias( alias_ty: &chalk_ir::AliasTy, fmt: &mut fmt::Formatter<'_>, diff --git a/compiler/rustc_traits/Cargo.toml b/compiler/rustc_traits/Cargo.toml index a54fe08394ef9..4dce3c1728098 100644 --- a/compiler/rustc_traits/Cargo.toml +++ b/compiler/rustc_traits/Cargo.toml @@ -12,9 +12,9 @@ 
rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } rustc_ast = { path = "../rustc_ast" } rustc_span = { path = "../rustc_span" } -chalk-ir = "0.32.0" -chalk-solve = "0.32.0" -chalk-engine = "0.32.0" +chalk-ir = "0.35.0" +chalk-solve = "0.35.0" +chalk-engine = "0.35.0" smallvec = { version = "1.0", features = ["union", "may_dangle"] } rustc_infer = { path = "../rustc_infer" } rustc_trait_selection = { path = "../rustc_trait_selection" } diff --git a/compiler/rustc_traits/src/chalk/db.rs b/compiler/rustc_traits/src/chalk/db.rs index e5ae899a2f356..eba1a525cfeb0 100644 --- a/compiler/rustc_traits/src/chalk/db.rs +++ b/compiler/rustc_traits/src/chalk/db.rs @@ -324,19 +324,19 @@ impl<'tcx> chalk_solve::RustIrDatabase> for RustIrDatabase<'t fn impl_provided_for( &self, auto_trait_id: chalk_ir::TraitId>, - app_ty: &chalk_ir::ApplicationTy>, + chalk_ty: &chalk_ir::TyKind>, ) -> bool { use chalk_ir::Scalar::*; - use chalk_ir::TypeName::*; + use chalk_ir::TyKind::*; let trait_def_id = auto_trait_id.0; let all_impls = self.interner.tcx.all_impls(trait_def_id); for impl_def_id in all_impls { let trait_ref = self.interner.tcx.impl_trait_ref(impl_def_id).unwrap(); let self_ty = trait_ref.self_ty(); - let provides = match (self_ty.kind(), app_ty.name) { - (&ty::Adt(impl_adt_def, ..), Adt(id)) => impl_adt_def.did == id.0.did, - (_, AssociatedType(_ty_id)) => { + let provides = match (self_ty.kind(), chalk_ty) { + (&ty::Adt(impl_adt_def, ..), Adt(id, ..)) => impl_adt_def.did == id.0.did, + (_, AssociatedType(_ty_id, ..)) => { // FIXME(chalk): See https://github.com/rust-lang/rust/pull/77152#discussion_r494484774 false } @@ -365,10 +365,10 @@ impl<'tcx> chalk_solve::RustIrDatabase> for RustIrDatabase<'t (ast::FloatTy::F32, chalk_ir::FloatTy::F32) | (ast::FloatTy::F64, chalk_ir::FloatTy::F64) ), - (&ty::Tuple(..), Tuple(..)) => true, - (&ty::Array(..), Array) => true, - (&ty::Slice(..), Slice) => true, - (&ty::RawPtr(type_and_mut), Raw(mutability)) => { + (&ty::Tuple(substs), Tuple(len, _)) => substs.len() == *len, + (&ty::Array(..), Array(..)) => true, + (&ty::Slice(..), Slice(..)) => true, + (&ty::RawPtr(type_and_mut), Raw(mutability, _)) => { match (type_and_mut.mutbl, mutability) { (ast::Mutability::Mut, chalk_ir::Mutability::Mut) => true, (ast::Mutability::Mut, chalk_ir::Mutability::Not) => false, @@ -376,17 +376,19 @@ impl<'tcx> chalk_solve::RustIrDatabase> for RustIrDatabase<'t (ast::Mutability::Not, chalk_ir::Mutability::Not) => true, } } - (&ty::Ref(.., mutability1), Ref(mutability2)) => match (mutability1, mutability2) { - (ast::Mutability::Mut, chalk_ir::Mutability::Mut) => true, - (ast::Mutability::Mut, chalk_ir::Mutability::Not) => false, - (ast::Mutability::Not, chalk_ir::Mutability::Mut) => false, - (ast::Mutability::Not, chalk_ir::Mutability::Not) => true, - }, - (&ty::Opaque(def_id, ..), OpaqueType(opaque_ty_id)) => def_id == opaque_ty_id.0, - (&ty::FnDef(def_id, ..), FnDef(fn_def_id)) => def_id == fn_def_id.0, + (&ty::Ref(.., mutability1), Ref(mutability2, ..)) => { + match (mutability1, mutability2) { + (ast::Mutability::Mut, chalk_ir::Mutability::Mut) => true, + (ast::Mutability::Mut, chalk_ir::Mutability::Not) => false, + (ast::Mutability::Not, chalk_ir::Mutability::Mut) => false, + (ast::Mutability::Not, chalk_ir::Mutability::Not) => true, + } + } + (&ty::Opaque(def_id, ..), OpaqueType(opaque_ty_id, ..)) => def_id == opaque_ty_id.0, + (&ty::FnDef(def_id, ..), FnDef(fn_def_id, ..)) => def_id == fn_def_id.0, (&ty::Str, Str) => true, (&ty::Never, Never) => 
true, - (&ty::Closure(def_id, ..), Closure(closure_id)) => def_id == closure_id.0, + (&ty::Closure(def_id, ..), Closure(closure_id, _)) => def_id == closure_id.0, (&ty::Foreign(def_id), Foreign(foreign_def_id)) => def_id == foreign_def_id.0, (&ty::Error(..), Error) => false, _ => false, @@ -506,17 +508,11 @@ impl<'tcx> chalk_solve::RustIrDatabase> for RustIrDatabase<'t substs: &chalk_ir::Substitution>, ) -> chalk_solve::rust_ir::ClosureKind { let kind = &substs.as_slice(&self.interner)[substs.len(&self.interner) - 3]; - match kind.assert_ty_ref(&self.interner).data(&self.interner) { - chalk_ir::TyData::Apply(apply) => match apply.name { - chalk_ir::TypeName::Scalar(scalar) => match scalar { - chalk_ir::Scalar::Int(int_ty) => match int_ty { - chalk_ir::IntTy::I8 => chalk_solve::rust_ir::ClosureKind::Fn, - chalk_ir::IntTy::I16 => chalk_solve::rust_ir::ClosureKind::FnMut, - chalk_ir::IntTy::I32 => chalk_solve::rust_ir::ClosureKind::FnOnce, - _ => bug!("bad closure kind"), - }, - _ => bug!("bad closure kind"), - }, + match kind.assert_ty_ref(&self.interner).kind(&self.interner) { + chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Int(int_ty)) => match int_ty { + chalk_ir::IntTy::I8 => chalk_solve::rust_ir::ClosureKind::Fn, + chalk_ir::IntTy::I16 => chalk_solve::rust_ir::ClosureKind::FnMut, + chalk_ir::IntTy::I32 => chalk_solve::rust_ir::ClosureKind::FnOnce, _ => bug!("bad closure kind"), }, _ => bug!("bad closure kind"), @@ -530,23 +526,19 @@ impl<'tcx> chalk_solve::RustIrDatabase> for RustIrDatabase<'t ) -> chalk_ir::Binders>> { let sig = &substs.as_slice(&self.interner)[substs.len(&self.interner) - 2]; - match sig.assert_ty_ref(&self.interner).data(&self.interner) { - chalk_ir::TyData::Function(f) => { + match sig.assert_ty_ref(&self.interner).kind(&self.interner) { + chalk_ir::TyKind::Function(f) => { let substitution = f.substitution.as_slice(&self.interner); let return_type = substitution.last().unwrap().assert_ty_ref(&self.interner).clone(); // Closure arguments are tupled let argument_tuple = substitution[0].assert_ty_ref(&self.interner); - let argument_types = match argument_tuple.data(&self.interner) { - chalk_ir::TyData::Apply(apply) => match apply.name { - chalk_ir::TypeName::Tuple(_) => apply - .substitution - .iter(&self.interner) - .map(|arg| arg.assert_ty_ref(&self.interner)) - .cloned() - .collect(), - _ => bug!("Expecting closure FnSig args to be tupled."), - }, + let argument_types = match argument_tuple.kind(&self.interner) { + chalk_ir::TyKind::Tuple(_len, substitution) => substitution + .iter(&self.interner) + .map(|arg| arg.assert_ty_ref(&self.interner)) + .cloned() + .collect(), _ => bug!("Expecting closure FnSig args to be tupled."), }; @@ -637,7 +629,7 @@ fn binders_for<'tcx>( bound_vars.iter().map(|arg| match arg.unpack() { ty::subst::GenericArgKind::Lifetime(_re) => chalk_ir::VariableKind::Lifetime, ty::subst::GenericArgKind::Type(_ty) => { - chalk_ir::VariableKind::Ty(chalk_ir::TyKind::General) + chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General) } ty::subst::GenericArgKind::Const(c) => { chalk_ir::VariableKind::Const(c.ty.lower_into(interner)) diff --git a/compiler/rustc_traits/src/chalk/lowering.rs b/compiler/rustc_traits/src/chalk/lowering.rs index 5ca0fc0c88b54..3fcbfd6187c8f 100644 --- a/compiler/rustc_traits/src/chalk/lowering.rs +++ b/compiler/rustc_traits/src/chalk/lowering.rs @@ -35,7 +35,7 @@ use rustc_middle::traits::{ChalkEnvironmentAndGoal, ChalkRustInterner as RustInt use rustc_middle::ty::fold::TypeFolder; use 
rustc_middle::ty::subst::{GenericArg, GenericArgKind, SubstsRef}; use rustc_middle::ty::{ - self, Binder, BoundRegion, Region, RegionKind, Ty, TyCtxt, TyKind, TypeFoldable, TypeVisitor, + self, Binder, BoundRegion, Region, RegionKind, Ty, TyCtxt, TypeFoldable, TypeVisitor, }; use rustc_span::def_id::DefId; @@ -239,24 +239,16 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::AliasEq>> impl<'tcx> LowerInto<'tcx, chalk_ir::Ty>> for Ty<'tcx> { fn lower_into(self, interner: &RustInterner<'tcx>) -> chalk_ir::Ty> { - use chalk_ir::TyData; use rustc_ast as ast; - use TyKind::*; - let empty = || chalk_ir::Substitution::empty(interner); - let struct_ty = - |def_id| chalk_ir::TypeName::Adt(chalk_ir::AdtId(interner.tcx.adt_def(def_id))); - let apply = |name, substitution| { - TyData::Apply(chalk_ir::ApplicationTy { name, substitution }).intern(interner) - }; - let int = |i| apply(chalk_ir::TypeName::Scalar(chalk_ir::Scalar::Int(i)), empty()); - let uint = |i| apply(chalk_ir::TypeName::Scalar(chalk_ir::Scalar::Uint(i)), empty()); - let float = |f| apply(chalk_ir::TypeName::Scalar(chalk_ir::Scalar::Float(f)), empty()); + let int = |i| chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Int(i)); + let uint = |i| chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(i)); + let float = |f| chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Float(f)); match *self.kind() { - Bool => apply(chalk_ir::TypeName::Scalar(chalk_ir::Scalar::Bool), empty()), - Char => apply(chalk_ir::TypeName::Scalar(chalk_ir::Scalar::Char), empty()), - Int(ty) => match ty { + ty::Bool => chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Bool), + ty::Char => chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Char), + ty::Int(ty) => match ty { ast::IntTy::Isize => int(chalk_ir::IntTy::Isize), ast::IntTy::I8 => int(chalk_ir::IntTy::I8), ast::IntTy::I16 => int(chalk_ir::IntTy::I16), @@ -264,7 +256,7 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::Ty>> for Ty<'tcx> { ast::IntTy::I64 => int(chalk_ir::IntTy::I64), ast::IntTy::I128 => int(chalk_ir::IntTy::I128), }, - Uint(ty) => match ty { + ty::Uint(ty) => match ty { ast::UintTy::Usize => uint(chalk_ir::UintTy::Usize), ast::UintTy::U8 => uint(chalk_ir::UintTy::U8), ast::UintTy::U16 => uint(chalk_ir::UintTy::U16), @@ -272,80 +264,47 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::Ty>> for Ty<'tcx> { ast::UintTy::U64 => uint(chalk_ir::UintTy::U64), ast::UintTy::U128 => uint(chalk_ir::UintTy::U128), }, - Float(ty) => match ty { + ty::Float(ty) => match ty { ast::FloatTy::F32 => float(chalk_ir::FloatTy::F32), ast::FloatTy::F64 => float(chalk_ir::FloatTy::F64), }, - Adt(def, substs) => apply(struct_ty(def.did), substs.lower_into(interner)), - Foreign(def_id) => apply(chalk_ir::TypeName::Foreign(ForeignDefId(def_id)), empty()), - Str => apply(chalk_ir::TypeName::Str, empty()), - Array(ty, len) => { - let value = match len.val { - ty::ConstKind::Value(val) => { - chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst { interned: val }) - } - ty::ConstKind::Bound(db, bound) => { - chalk_ir::ConstValue::BoundVar(chalk_ir::BoundVar::new( - chalk_ir::DebruijnIndex::new(db.as_u32()), - bound.index(), - )) - } - _ => unimplemented!("Const not implemented. 
{:?}", len.val), - }; - apply( - chalk_ir::TypeName::Array, - chalk_ir::Substitution::from_iter( - interner, - &[ - chalk_ir::GenericArgData::Ty(ty.lower_into(interner)).intern(interner), - chalk_ir::GenericArgData::Const( - chalk_ir::ConstData { ty: len.ty.lower_into(interner), value } - .intern(interner), - ) - .intern(interner), - ], - ), - ) + ty::Adt(def, substs) => { + chalk_ir::TyKind::Adt(chalk_ir::AdtId(def), substs.lower_into(interner)) } - Slice(ty) => apply( - chalk_ir::TypeName::Slice, - chalk_ir::Substitution::from1( - interner, - chalk_ir::GenericArgData::Ty(ty.lower_into(interner)).intern(interner), - ), - ), - RawPtr(ptr) => { - let name = match ptr.mutbl { - ast::Mutability::Mut => chalk_ir::TypeName::Raw(chalk_ir::Mutability::Mut), - ast::Mutability::Not => chalk_ir::TypeName::Raw(chalk_ir::Mutability::Not), - }; - apply(name, chalk_ir::Substitution::from1(interner, ptr.ty.lower_into(interner))) + ty::Foreign(def_id) => chalk_ir::TyKind::Foreign(ForeignDefId(def_id)), + ty::Str => chalk_ir::TyKind::Str, + ty::Array(ty, len) => { + chalk_ir::TyKind::Array(ty.lower_into(interner), len.lower_into(interner)) } - Ref(region, ty, mutability) => { - let name = match mutability { - ast::Mutability::Mut => chalk_ir::TypeName::Ref(chalk_ir::Mutability::Mut), - ast::Mutability::Not => chalk_ir::TypeName::Ref(chalk_ir::Mutability::Not), - }; - apply( - name, - chalk_ir::Substitution::from_iter( - interner, - &[ - chalk_ir::GenericArgData::Lifetime(region.lower_into(interner)) - .intern(interner), - chalk_ir::GenericArgData::Ty(ty.lower_into(interner)).intern(interner), - ], - ), - ) + ty::Slice(ty) => chalk_ir::TyKind::Slice(ty.lower_into(interner)), + + ty::RawPtr(ptr) => match ptr.mutbl { + ast::Mutability::Mut => { + chalk_ir::TyKind::Raw(chalk_ir::Mutability::Mut, ptr.ty.lower_into(interner)) + } + ast::Mutability::Not => { + chalk_ir::TyKind::Raw(chalk_ir::Mutability::Not, ptr.ty.lower_into(interner)) + } + }, + ty::Ref(region, ty, mutability) => match mutability { + ast::Mutability::Mut => chalk_ir::TyKind::Ref( + chalk_ir::Mutability::Mut, + region.lower_into(interner), + ty.lower_into(interner), + ), + ast::Mutability::Not => chalk_ir::TyKind::Ref( + chalk_ir::Mutability::Not, + region.lower_into(interner), + ty.lower_into(interner), + ), + }, + ty::FnDef(def_id, substs) => { + chalk_ir::TyKind::FnDef(chalk_ir::FnDefId(def_id), substs.lower_into(interner)) } - FnDef(def_id, substs) => apply( - chalk_ir::TypeName::FnDef(chalk_ir::FnDefId(def_id)), - substs.lower_into(interner), - ), - FnPtr(sig) => { + ty::FnPtr(sig) => { let (inputs_and_outputs, binders, _named_regions) = collect_bound_vars(interner, interner.tcx, &sig.inputs_and_output()); - TyData::Function(chalk_ir::FnPointer { + chalk_ir::TyKind::Function(chalk_ir::FnPointer { num_binders: binders.len(interner), sig: sig.lower_into(interner), substitution: chalk_ir::Substitution::from_iter( @@ -355,148 +314,122 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::Ty>> for Ty<'tcx> { }), ), }) - .intern(interner) } - Dynamic(predicates, region) => TyData::Dyn(chalk_ir::DynTy { + ty::Dynamic(predicates, region) => chalk_ir::TyKind::Dyn(chalk_ir::DynTy { bounds: predicates.lower_into(interner), lifetime: region.lower_into(interner), - }) - .intern(interner), - Closure(def_id, substs) => apply( - chalk_ir::TypeName::Closure(chalk_ir::ClosureId(def_id)), - substs.lower_into(interner), - ), - Generator(_def_id, _substs, _) => unimplemented!(), - GeneratorWitness(_) => unimplemented!(), - Never => apply(chalk_ir::TypeName::Never, empty()), 
- Tuple(substs) => { - apply(chalk_ir::TypeName::Tuple(substs.len()), substs.lower_into(interner)) + }), + ty::Closure(def_id, substs) => { + chalk_ir::TyKind::Closure(chalk_ir::ClosureId(def_id), substs.lower_into(interner)) } - Projection(proj) => TyData::Alias(proj.lower_into(interner)).intern(interner), - Opaque(def_id, substs) => { - TyData::Alias(chalk_ir::AliasTy::Opaque(chalk_ir::OpaqueTy { + ty::Generator(_def_id, _substs, _) => unimplemented!(), + ty::GeneratorWitness(_) => unimplemented!(), + ty::Never => chalk_ir::TyKind::Never, + ty::Tuple(substs) => chalk_ir::TyKind::Tuple(substs.len(), substs.lower_into(interner)), + ty::Projection(proj) => chalk_ir::TyKind::Alias(proj.lower_into(interner)), + ty::Opaque(def_id, substs) => { + chalk_ir::TyKind::Alias(chalk_ir::AliasTy::Opaque(chalk_ir::OpaqueTy { opaque_ty_id: chalk_ir::OpaqueTyId(def_id), substitution: substs.lower_into(interner), })) - .intern(interner) } // This should have been done eagerly prior to this, and all Params // should have been substituted to placeholders - Param(_) => panic!("Lowering Param when not expected."), - Bound(db, bound) => TyData::BoundVar(chalk_ir::BoundVar::new( + ty::Param(_) => panic!("Lowering Param when not expected."), + ty::Bound(db, bound) => chalk_ir::TyKind::BoundVar(chalk_ir::BoundVar::new( chalk_ir::DebruijnIndex::new(db.as_u32()), bound.var.index(), - )) - .intern(interner), - Placeholder(_placeholder) => TyData::Placeholder(chalk_ir::PlaceholderIndex { - ui: chalk_ir::UniverseIndex { counter: _placeholder.universe.as_usize() }, - idx: _placeholder.name.as_usize(), - }) - .intern(interner), - Infer(_infer) => unimplemented!(), - Error(_) => apply(chalk_ir::TypeName::Error, empty()), + )), + ty::Placeholder(_placeholder) => { + chalk_ir::TyKind::Placeholder(chalk_ir::PlaceholderIndex { + ui: chalk_ir::UniverseIndex { counter: _placeholder.universe.as_usize() }, + idx: _placeholder.name.as_usize(), + }) + } + ty::Infer(_infer) => unimplemented!(), + ty::Error(_) => chalk_ir::TyKind::Error, } + .intern(interner) } } impl<'tcx> LowerInto<'tcx, Ty<'tcx>> for &chalk_ir::Ty> { fn lower_into(self, interner: &RustInterner<'tcx>) -> Ty<'tcx> { - use chalk_ir::TyData; + use chalk_ir::TyKind; use rustc_ast::ast; - let kind = match self.data(interner) { - TyData::Apply(application_ty) => match application_ty.name { - chalk_ir::TypeName::Adt(struct_id) => { - ty::Adt(struct_id.0, application_ty.substitution.lower_into(interner)) - } - chalk_ir::TypeName::Scalar(scalar) => match scalar { - chalk_ir::Scalar::Bool => ty::Bool, - chalk_ir::Scalar::Char => ty::Char, - chalk_ir::Scalar::Int(int_ty) => match int_ty { - chalk_ir::IntTy::Isize => ty::Int(ast::IntTy::Isize), - chalk_ir::IntTy::I8 => ty::Int(ast::IntTy::I8), - chalk_ir::IntTy::I16 => ty::Int(ast::IntTy::I16), - chalk_ir::IntTy::I32 => ty::Int(ast::IntTy::I32), - chalk_ir::IntTy::I64 => ty::Int(ast::IntTy::I64), - chalk_ir::IntTy::I128 => ty::Int(ast::IntTy::I128), - }, - chalk_ir::Scalar::Uint(int_ty) => match int_ty { - chalk_ir::UintTy::Usize => ty::Uint(ast::UintTy::Usize), - chalk_ir::UintTy::U8 => ty::Uint(ast::UintTy::U8), - chalk_ir::UintTy::U16 => ty::Uint(ast::UintTy::U16), - chalk_ir::UintTy::U32 => ty::Uint(ast::UintTy::U32), - chalk_ir::UintTy::U64 => ty::Uint(ast::UintTy::U64), - chalk_ir::UintTy::U128 => ty::Uint(ast::UintTy::U128), - }, - chalk_ir::Scalar::Float(float_ty) => match float_ty { - chalk_ir::FloatTy::F32 => ty::Float(ast::FloatTy::F32), - chalk_ir::FloatTy::F64 => ty::Float(ast::FloatTy::F64), - }, + let kind = 
match self.kind(interner) { + TyKind::Adt(struct_id, substitution) => { + ty::Adt(struct_id.0, substitution.lower_into(interner)) + } + TyKind::Scalar(scalar) => match scalar { + chalk_ir::Scalar::Bool => ty::Bool, + chalk_ir::Scalar::Char => ty::Char, + chalk_ir::Scalar::Int(int_ty) => match int_ty { + chalk_ir::IntTy::Isize => ty::Int(ast::IntTy::Isize), + chalk_ir::IntTy::I8 => ty::Int(ast::IntTy::I8), + chalk_ir::IntTy::I16 => ty::Int(ast::IntTy::I16), + chalk_ir::IntTy::I32 => ty::Int(ast::IntTy::I32), + chalk_ir::IntTy::I64 => ty::Int(ast::IntTy::I64), + chalk_ir::IntTy::I128 => ty::Int(ast::IntTy::I128), + }, + chalk_ir::Scalar::Uint(int_ty) => match int_ty { + chalk_ir::UintTy::Usize => ty::Uint(ast::UintTy::Usize), + chalk_ir::UintTy::U8 => ty::Uint(ast::UintTy::U8), + chalk_ir::UintTy::U16 => ty::Uint(ast::UintTy::U16), + chalk_ir::UintTy::U32 => ty::Uint(ast::UintTy::U32), + chalk_ir::UintTy::U64 => ty::Uint(ast::UintTy::U64), + chalk_ir::UintTy::U128 => ty::Uint(ast::UintTy::U128), + }, + chalk_ir::Scalar::Float(float_ty) => match float_ty { + chalk_ir::FloatTy::F32 => ty::Float(ast::FloatTy::F32), + chalk_ir::FloatTy::F64 => ty::Float(ast::FloatTy::F64), }, - chalk_ir::TypeName::Array => { - let substs = application_ty.substitution.as_slice(interner); - let ty = substs[0].assert_ty_ref(interner).lower_into(interner); - let c = substs[1].assert_const_ref(interner).lower_into(interner); - ty::Array(ty, interner.tcx.mk_const(c)) - } - chalk_ir::TypeName::FnDef(id) => { - ty::FnDef(id.0, application_ty.substitution.lower_into(interner)) - } - chalk_ir::TypeName::Closure(closure) => { - ty::Closure(closure.0, application_ty.substitution.lower_into(interner)) - } - chalk_ir::TypeName::Generator(_) => unimplemented!(), - chalk_ir::TypeName::GeneratorWitness(_) => unimplemented!(), - chalk_ir::TypeName::Never => ty::Never, - chalk_ir::TypeName::Tuple(_size) => { - ty::Tuple(application_ty.substitution.lower_into(interner)) - } - chalk_ir::TypeName::Slice => ty::Slice( - application_ty.substitution.as_slice(interner)[0] - .ty(interner) - .unwrap() - .lower_into(interner), - ), - chalk_ir::TypeName::Raw(mutbl) => ty::RawPtr(ty::TypeAndMut { - ty: application_ty.substitution.as_slice(interner)[0] - .ty(interner) - .unwrap() - .lower_into(interner), - mutbl: match mutbl { - chalk_ir::Mutability::Mut => ast::Mutability::Mut, - chalk_ir::Mutability::Not => ast::Mutability::Not, - }, - }), - chalk_ir::TypeName::Ref(mutbl) => ty::Ref( - application_ty.substitution.as_slice(interner)[0] - .lifetime(interner) - .unwrap() - .lower_into(interner), - application_ty.substitution.as_slice(interner)[1] - .ty(interner) - .unwrap() - .lower_into(interner), - match mutbl { - chalk_ir::Mutability::Mut => ast::Mutability::Mut, - chalk_ir::Mutability::Not => ast::Mutability::Not, - }, - ), - chalk_ir::TypeName::Str => ty::Str, - chalk_ir::TypeName::OpaqueType(opaque_ty) => { - ty::Opaque(opaque_ty.0, application_ty.substitution.lower_into(interner)) - } - chalk_ir::TypeName::AssociatedType(assoc_ty) => ty::Projection(ty::ProjectionTy { - substs: application_ty.substitution.lower_into(interner), - item_def_id: assoc_ty.0, - }), - chalk_ir::TypeName::Foreign(def_id) => ty::Foreign(def_id.0), - chalk_ir::TypeName::Error => unimplemented!(), }, - TyData::Placeholder(placeholder) => ty::Placeholder(ty::Placeholder { + TyKind::Array(ty, c) => { + let ty = ty.lower_into(interner); + let c = c.lower_into(interner); + ty::Array(ty, interner.tcx.mk_const(c)) + } + TyKind::FnDef(id, substitution) => ty::FnDef(id.0, 
substitution.lower_into(interner)), + TyKind::Closure(closure, substitution) => { + ty::Closure(closure.0, substitution.lower_into(interner)) + } + TyKind::Generator(..) => unimplemented!(), + TyKind::GeneratorWitness(..) => unimplemented!(), + TyKind::Never => ty::Never, + TyKind::Tuple(_len, substitution) => ty::Tuple(substitution.lower_into(interner)), + TyKind::Slice(ty) => ty::Slice(ty.lower_into(interner)), + TyKind::Raw(mutbl, ty) => ty::RawPtr(ty::TypeAndMut { + ty: ty.lower_into(interner), + mutbl: match mutbl { + chalk_ir::Mutability::Mut => ast::Mutability::Mut, + chalk_ir::Mutability::Not => ast::Mutability::Not, + }, + }), + TyKind::Ref(mutbl, lifetime, ty) => ty::Ref( + lifetime.lower_into(interner), + ty.lower_into(interner), + match mutbl { + chalk_ir::Mutability::Mut => ast::Mutability::Mut, + chalk_ir::Mutability::Not => ast::Mutability::Not, + }, + ), + TyKind::Str => ty::Str, + TyKind::OpaqueType(opaque_ty, substitution) => { + ty::Opaque(opaque_ty.0, substitution.lower_into(interner)) + } + TyKind::AssociatedType(assoc_ty, substitution) => ty::Projection(ty::ProjectionTy { + substs: substitution.lower_into(interner), + item_def_id: assoc_ty.0, + }), + TyKind::Foreign(def_id) => ty::Foreign(def_id.0), + TyKind::Error => return interner.tcx.ty_error(), + TyKind::Placeholder(placeholder) => ty::Placeholder(ty::Placeholder { universe: ty::UniverseIndex::from_usize(placeholder.ui.counter), name: ty::BoundVar::from_usize(placeholder.idx), }), - chalk_ir::TyData::Alias(alias_ty) => match alias_ty { + TyKind::Alias(alias_ty) => match alias_ty { chalk_ir::AliasTy::Projection(projection) => ty::Projection(ty::ProjectionTy { item_def_id: projection.associated_ty_id.0, substs: projection.substitution.lower_into(interner), @@ -505,16 +438,16 @@ impl<'tcx> LowerInto<'tcx, Ty<'tcx>> for &chalk_ir::Ty> { ty::Opaque(opaque.opaque_ty_id.0, opaque.substitution.lower_into(interner)) } }, - TyData::Function(_quantified_ty) => unimplemented!(), - TyData::BoundVar(_bound) => ty::Bound( + TyKind::Function(_quantified_ty) => unimplemented!(), + TyKind::BoundVar(_bound) => ty::Bound( ty::DebruijnIndex::from_usize(_bound.debruijn.depth() as usize), ty::BoundTy { var: ty::BoundVar::from_usize(_bound.index), kind: ty::BoundTyKind::Anon, }, ), - TyData::InferenceVar(_, _) => unimplemented!(), - TyData::Dyn(_) => unimplemented!(), + TyKind::InferenceVar(_, _) => unimplemented!(), + TyKind::Dyn(_) => unimplemented!(), }; interner.tcx.mk_ty(kind) } @@ -909,7 +842,7 @@ impl<'tcx> TypeVisitor<'tcx> for BoundVarsCollector<'tcx> { ty::Bound(debruijn, bound_ty) if debruijn == self.binder_index => { match self.parameters.entry(bound_ty.var.as_u32()) { Entry::Vacant(entry) => { - entry.insert(chalk_ir::VariableKind::Ty(chalk_ir::TyKind::General)); + entry.insert(chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)); } Entry::Occupied(entry) => match entry.get() { chalk_ir::VariableKind::Ty(_) => {} diff --git a/compiler/rustc_traits/src/chalk/mod.rs b/compiler/rustc_traits/src/chalk/mod.rs index f174a92274ed6..b117e28875e76 100644 --- a/compiler/rustc_traits/src/chalk/mod.rs +++ b/compiler/rustc_traits/src/chalk/mod.rs @@ -69,15 +69,15 @@ crate fn evaluate_goal<'tcx>( CanonicalVarKind::PlaceholderRegion(_ui) => unimplemented!(), CanonicalVarKind::Ty(ty) => match ty { CanonicalTyVarKind::General(ui) => chalk_ir::WithKind::new( - chalk_ir::VariableKind::Ty(chalk_ir::TyKind::General), + chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General), chalk_ir::UniverseIndex { counter: ui.index() }, ), 
CanonicalTyVarKind::Int => chalk_ir::WithKind::new( - chalk_ir::VariableKind::Ty(chalk_ir::TyKind::Integer), + chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::Integer), chalk_ir::UniverseIndex::root(), ), CanonicalTyVarKind::Float => chalk_ir::WithKind::new( - chalk_ir::VariableKind::Ty(chalk_ir::TyKind::Float), + chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::Float), chalk_ir::UniverseIndex::root(), ), }, @@ -97,7 +97,8 @@ crate fn evaluate_goal<'tcx>( use chalk_solve::Solver; let mut solver = chalk_engine::solve::SLGSolver::new(32, None); let db = ChalkRustIrDatabase { interner, reempty_placeholder }; - let solution = chalk_solve::logging::with_tracing_logs(|| solver.solve(&db, &lowered_goal)); + let solution = solver.solve(&db, &lowered_goal); + debug!(?obligation, ?solution, "evaluatate goal"); // Ideally, the code to convert *back* to rustc types would live close to // the code to convert *from* rustc types. Right now though, we don't From acb6a061237053971bc75ca6477b39a8cbf13822 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Wed, 28 Oct 2020 20:04:39 +0000 Subject: [PATCH 06/19] Fix various Chalk lowering bugs - Add more well-known traits - Use the correct binders when lowering trait objects - Use correct substs when lowering trait objects - Use the correct binders for opaque_ty_data - Lower negative impls with the correct polarity - Supply associated type values - Use `predicates_defined_on` for where clauses --- compiler/rustc_traits/src/chalk/db.rs | 130 ++++++++++++++------ compiler/rustc_traits/src/chalk/lowering.rs | 40 +++++- src/test/ui/chalkify/arithmetic.rs | 20 +++ src/test/ui/chalkify/trait-objects.rs | 13 ++ 4 files changed, 161 insertions(+), 42 deletions(-) create mode 100644 src/test/ui/chalkify/arithmetic.rs create mode 100644 src/test/ui/chalkify/trait-objects.rs diff --git a/compiler/rustc_traits/src/chalk/db.rs b/compiler/rustc_traits/src/chalk/db.rs index eba1a525cfeb0..c5a46b1003dfa 100644 --- a/compiler/rustc_traits/src/chalk/db.rs +++ b/compiler/rustc_traits/src/chalk/db.rs @@ -37,7 +37,7 @@ impl<'tcx> RustIrDatabase<'tcx> { def_id: DefId, bound_vars: SubstsRef<'tcx>, ) -> Vec>> { - let predicates = self.interner.tcx.predicates_of(def_id).predicates; + let predicates = self.interner.tcx.predicates_defined_on(def_id).predicates; let mut regions_substitutor = lowering::RegionsSubstitutor::new(self.interner.tcx, self.reempty_placeholder); predicates @@ -118,34 +118,27 @@ impl<'tcx> chalk_solve::RustIrDatabase> for RustIrDatabase<'t .map(|i| chalk_ir::AssocTypeId(i.def_id)) .collect(); - let well_known = if self.interner.tcx.lang_items().sized_trait() == Some(def_id) { + let lang_items = self.interner.tcx.lang_items(); + let well_known = if lang_items.sized_trait() == Some(def_id) { Some(chalk_solve::rust_ir::WellKnownTrait::Sized) - } else if self.interner.tcx.lang_items().copy_trait() == Some(def_id) { + } else if lang_items.copy_trait() == Some(def_id) { Some(chalk_solve::rust_ir::WellKnownTrait::Copy) - } else if self.interner.tcx.lang_items().clone_trait() == Some(def_id) { + } else if lang_items.clone_trait() == Some(def_id) { Some(chalk_solve::rust_ir::WellKnownTrait::Clone) - } else if self.interner.tcx.lang_items().drop_trait() == Some(def_id) { + } else if lang_items.drop_trait() == Some(def_id) { Some(chalk_solve::rust_ir::WellKnownTrait::Drop) - } else if self.interner.tcx.lang_items().fn_trait() == Some(def_id) { + } else if lang_items.fn_trait() == Some(def_id) { Some(chalk_solve::rust_ir::WellKnownTrait::Fn) - } else if self - 
.interner - .tcx - .lang_items() - .fn_once_trait() - .map(|t| def_id == t) - .unwrap_or(false) - { + } else if lang_items.fn_once_trait() == Some(def_id) { Some(chalk_solve::rust_ir::WellKnownTrait::FnOnce) - } else if self - .interner - .tcx - .lang_items() - .fn_mut_trait() - .map(|t| def_id == t) - .unwrap_or(false) - { + } else if lang_items.fn_mut_trait() == Some(def_id) { Some(chalk_solve::rust_ir::WellKnownTrait::FnMut) + } else if lang_items.unsize_trait() == Some(def_id) { + Some(chalk_solve::rust_ir::WellKnownTrait::Unsize) + } else if lang_items.unpin_trait() == Some(def_id) { + Some(chalk_solve::rust_ir::WellKnownTrait::Unpin) + } else if lang_items.coerce_unsized_trait() == Some(def_id) { + Some(chalk_solve::rust_ir::WellKnownTrait::CoerceUnsized) } else { None }; @@ -281,11 +274,20 @@ impl<'tcx> chalk_solve::RustIrDatabase> for RustIrDatabase<'t where_clauses, }; + let associated_ty_value_ids: Vec<_> = self + .interner + .tcx + .associated_items(def_id) + .in_definition_order() + .filter(|i| i.kind == AssocKind::Type) + .map(|i| chalk_solve::rust_ir::AssociatedTyValueId(i.def_id)) + .collect(); + Arc::new(chalk_solve::rust_ir::ImplDatum { - polarity: chalk_solve::rust_ir::Polarity::Positive, + polarity: self.interner.tcx.impl_polarity(def_id).lower_into(&self.interner), binders: chalk_ir::Binders::new(binders, value), impl_type: chalk_solve::rust_ir::ImplType::Local, - associated_ty_value_ids: vec![], + associated_ty_value_ids, }) } @@ -406,24 +408,38 @@ impl<'tcx> chalk_solve::RustIrDatabase> for RustIrDatabase<'t ) -> Arc>> { let def_id = associated_ty_id.0; let assoc_item = self.interner.tcx.associated_item(def_id); - let impl_id = match assoc_item.container { - AssocItemContainer::TraitContainer(def_id) => def_id, - _ => unimplemented!("Not possible??"), + let (impl_id, trait_id) = match assoc_item.container { + AssocItemContainer::TraitContainer(def_id) => (def_id, def_id), + AssocItemContainer::ImplContainer(def_id) => { + (def_id, self.interner.tcx.impl_trait_ref(def_id).unwrap().def_id) + } }; match assoc_item.kind { AssocKind::Type => {} _ => unimplemented!("Not possible??"), } + + let trait_item = self + .interner + .tcx + .associated_items(trait_id) + .find_by_name_and_kind(self.interner.tcx, assoc_item.ident, assoc_item.kind, trait_id) + .unwrap(); let bound_vars = bound_vars_for_item(self.interner.tcx, def_id); let binders = binders_for(&self.interner, bound_vars); - let ty = self.interner.tcx.type_of(def_id); + let ty = self + .interner + .tcx + .type_of(def_id) + .subst(self.interner.tcx, bound_vars) + .lower_into(&self.interner); Arc::new(chalk_solve::rust_ir::AssociatedTyValue { impl_id: chalk_ir::ImplId(impl_id), - associated_ty_id: chalk_ir::AssocTypeId(def_id), + associated_ty_id: chalk_ir::AssocTypeId(trait_item.def_id), value: chalk_ir::Binders::new( binders, - chalk_solve::rust_ir::AssociatedTyValueBound { ty: ty.lower_into(&self.interner) }, + chalk_solve::rust_ir::AssociatedTyValueBound { ty }, ), }) } @@ -443,19 +459,61 @@ impl<'tcx> chalk_solve::RustIrDatabase> for RustIrDatabase<'t &self, opaque_ty_id: chalk_ir::OpaqueTyId>, ) -> Arc>> { - let bound_vars = bound_vars_for_item(self.interner.tcx, opaque_ty_id.0); - let binders = binders_for(&self.interner, bound_vars); + let bound_vars = ty::fold::shift_vars( + self.interner.tcx, + &bound_vars_for_item(self.interner.tcx, opaque_ty_id.0), + 1, + ); let where_clauses = self.where_clauses_for(opaque_ty_id.0, bound_vars); - let bounds = self.bounds_for(opaque_ty_id.0, bound_vars); + + let 
identity_substs = InternalSubsts::identity_for_item(self.interner.tcx, opaque_ty_id.0); + + let bounds = + self.interner + .tcx + .explicit_item_bounds(opaque_ty_id.0) + .iter() + .map(|(bound, _)| bound.subst(self.interner.tcx, &bound_vars)) + .map(|bound| { + bound.fold_with(&mut ty::fold::BottomUpFolder { + tcx: self.interner.tcx, + ty_op: |ty| { + if let ty::Opaque(def_id, substs) = *ty.kind() { + if def_id == opaque_ty_id.0 && substs == identity_substs { + return self.interner.tcx.mk_ty(ty::Bound( + ty::INNERMOST, + ty::BoundTy::from(ty::BoundVar::from_u32(0)), + )); + } + } + ty + }, + lt_op: |lt| lt, + ct_op: |ct| ct, + }) + }) + .filter_map(|bound| { + LowerInto::< + Option>> + >::lower_into(bound, &self.interner) + }) + .collect(); + + // Binder for the bound variable representing the concrete impl Trait type. + let existential_binder = chalk_ir::VariableKinds::from1( + &self.interner, + chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General), + ); let value = chalk_solve::rust_ir::OpaqueTyDatumBound { - bounds: chalk_ir::Binders::new(binders.clone(), bounds), - where_clauses: chalk_ir::Binders::new(binders, where_clauses), + bounds: chalk_ir::Binders::new(existential_binder.clone(), bounds), + where_clauses: chalk_ir::Binders::new(existential_binder, where_clauses), }; + let binders = binders_for(&self.interner, bound_vars); Arc::new(chalk_solve::rust_ir::OpaqueTyDatum { opaque_ty_id, - bound: chalk_ir::Binders::empty(&self.interner, value), + bound: chalk_ir::Binders::new(binders, value), }) } diff --git a/compiler/rustc_traits/src/chalk/lowering.rs b/compiler/rustc_traits/src/chalk/lowering.rs index 3fcbfd6187c8f..81e7fa4aa3082 100644 --- a/compiler/rustc_traits/src/chalk/lowering.rs +++ b/compiler/rustc_traits/src/chalk/lowering.rs @@ -638,8 +638,16 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::Binders, ) -> chalk_ir::Binders>> { + // `Self` has one binder: + // Binder<&'tcx ty::List>> + // The return type has two: + // Binders<&[Binders>]> + // This means that any variables that are escaping `self` need to be + // shifted in by one so that they are still escaping. 
+ let shifted_predicates = ty::fold::shift_vars(interner.tcx, &self, 1); + let (predicates, binders, _named_regions) = - collect_bound_vars(interner, interner.tcx, &self); + collect_bound_vars(interner, interner.tcx, &shifted_predicates); let self_ty = interner.tcx.mk_ty(ty::Bound( // This is going to be wrapped in a binder ty::DebruijnIndex::from_usize(1), @@ -648,7 +656,7 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::Binders { chalk_ir::Binders::new( - chalk_ir::VariableKinds::empty(interner), + binders.clone(), chalk_ir::WhereClause::Implemented(chalk_ir::TraitRef { trait_id: chalk_ir::TraitId(def_id), substitution: interner @@ -659,25 +667,34 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::Binders chalk_ir::Binders::new( - chalk_ir::VariableKinds::empty(interner), + binders.clone(), chalk_ir::WhereClause::AliasEq(chalk_ir::AliasEq { alias: chalk_ir::AliasTy::Projection(chalk_ir::ProjectionTy { associated_ty_id: chalk_ir::AssocTypeId(predicate.item_def_id), - substitution: predicate.substs.lower_into(interner), + substitution: interner + .tcx + .mk_substs_trait(self_ty, predicate.substs) + .lower_into(interner), }), ty: predicate.ty.lower_into(interner), }), ), ty::ExistentialPredicate::AutoTrait(def_id) => chalk_ir::Binders::new( - chalk_ir::VariableKinds::empty(interner), + binders.clone(), chalk_ir::WhereClause::Implemented(chalk_ir::TraitRef { trait_id: chalk_ir::TraitId(def_id), substitution: interner.tcx.mk_substs_trait(self_ty, &[]).lower_into(interner), }), ), }); + + // Binder for the bound variable representing the concrete underlying type. + let existential_binder = chalk_ir::VariableKinds::from1( + interner, + chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General), + ); let value = chalk_ir::QuantifiedWhereClauses::from_iter(interner, where_clauses); - chalk_ir::Binders::new(binders, value) + chalk_ir::Binders::new(existential_binder, value) } } @@ -750,6 +767,17 @@ impl<'tcx> LowerInto<'tcx, chalk_solve::rust_ir::TraitBound>> } } +impl<'tcx> LowerInto<'tcx, chalk_solve::rust_ir::Polarity> for ty::ImplPolarity { + fn lower_into(self, _interner: &RustInterner<'tcx>) -> chalk_solve::rust_ir::Polarity { + match self { + ty::ImplPolarity::Positive => chalk_solve::rust_ir::Polarity::Positive, + ty::ImplPolarity::Negative => chalk_solve::rust_ir::Polarity::Negative, + // FIXME(chalk) reservation impls + ty::ImplPolarity::Reservation => chalk_solve::rust_ir::Polarity::Negative, + } + } +} + impl<'tcx> LowerInto<'tcx, chalk_solve::rust_ir::AliasEqBound>> for ty::ProjectionPredicate<'tcx> { diff --git a/src/test/ui/chalkify/arithmetic.rs b/src/test/ui/chalkify/arithmetic.rs new file mode 100644 index 0000000000000..a20acce4c76b2 --- /dev/null +++ b/src/test/ui/chalkify/arithmetic.rs @@ -0,0 +1,20 @@ +// check-pass +// compile-flags: -Z chalk + +fn main() { + 1 + 2; + 3 * 6; + 2 - 5; + 17 / 6; + 23 % 11; + 4 & 6; + 7 | 15; + 4 << 7; + 123 >> 3; + 1 == 2; + 5 != 5; + 6 < 2; + 7 > 11; + 3 <= 1; + 9 >= 14; +} diff --git a/src/test/ui/chalkify/trait-objects.rs b/src/test/ui/chalkify/trait-objects.rs new file mode 100644 index 0000000000000..13d9e6a657885 --- /dev/null +++ b/src/test/ui/chalkify/trait-objects.rs @@ -0,0 +1,13 @@ +// check-pass +// compile-flags: -Z chalk + +use std::fmt::Display; + +fn main() { + let d: &dyn Display = &mut 3; + // FIXME(chalk) should be able to call d.to_string() as well, but doing so + // requires Chalk to be able to prove trait object well-formed goals. 
+ (&d).to_string(); + let f: &dyn Fn(i32) -> _ = &|x| x + x; + f(2); +} From 4d60a80713533cad5221f8b4aa2faff54b46d21d Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Thu, 29 Oct 2020 22:23:58 +0000 Subject: [PATCH 07/19] Address review comment and update chalk to 0.36.0 --- Cargo.lock | 16 +++--- compiler/rustc_middle/Cargo.toml | 2 +- compiler/rustc_traits/Cargo.toml | 6 +-- compiler/rustc_traits/src/chalk/lowering.rs | 58 ++++++++++----------- 4 files changed, 41 insertions(+), 41 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6d4a8cc696d7f..cd5ba897ae70d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -460,9 +460,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chalk-derive" -version = "0.35.0" +version = "0.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc6d2895e93c0939074a7a0f525fd549b49da8362dea3def555e4aab95ff64cd" +checksum = "9f88ce4deae1dace71e49b7611cfae2d5489de3530d6daba5758043c47ac3a10" dependencies = [ "proc-macro2", "quote", @@ -472,9 +472,9 @@ dependencies = [ [[package]] name = "chalk-engine" -version = "0.35.0" +version = "0.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93ed23c35d243ccc2caeae7ba4660a091e74b11c40e441d7849f07d8e71b5cb8" +checksum = "0e34c9b1b10616782143d7f49490f91ae94afaf2202de3ab0b2835e78b4f0ccc" dependencies = [ "chalk-derive", "chalk-ir", @@ -485,9 +485,9 @@ dependencies = [ [[package]] name = "chalk-ir" -version = "0.35.0" +version = "0.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40d7f6140cccc889117e7372b6f9cfbc8103c86a1a0269ff6ab868f20ab414d6" +checksum = "63362c629c2014ab639b04029070763fb8224df136d1363d30e9ece4c8877da3" dependencies = [ "chalk-derive", "lazy_static", @@ -495,9 +495,9 @@ dependencies = [ [[package]] name = "chalk-solve" -version = "0.35.0" +version = "0.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa65b636e64cbfcba31f053da97c32f3e15f2670b3cc620b84231a1656d754ec" +checksum = "cac338a67af52a7f50bb2f8232e730a3518ce432dbe303246acfe525ddd838c7" dependencies = [ "chalk-derive", "chalk-ir", diff --git a/compiler/rustc_middle/Cargo.toml b/compiler/rustc_middle/Cargo.toml index 03e7f13767258..3250f1830de18 100644 --- a/compiler/rustc_middle/Cargo.toml +++ b/compiler/rustc_middle/Cargo.toml @@ -26,7 +26,7 @@ rustc_index = { path = "../rustc_index" } rustc_serialize = { path = "../rustc_serialize" } rustc_ast = { path = "../rustc_ast" } rustc_span = { path = "../rustc_span" } -chalk-ir = "0.35.0" +chalk-ir = "0.36.0" smallvec = { version = "1.0", features = ["union", "may_dangle"] } measureme = "9.0.0" rustc_session = { path = "../rustc_session" } diff --git a/compiler/rustc_traits/Cargo.toml b/compiler/rustc_traits/Cargo.toml index 4dce3c1728098..8bd9e29629dce 100644 --- a/compiler/rustc_traits/Cargo.toml +++ b/compiler/rustc_traits/Cargo.toml @@ -12,9 +12,9 @@ rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } rustc_ast = { path = "../rustc_ast" } rustc_span = { path = "../rustc_span" } -chalk-ir = "0.35.0" -chalk-solve = "0.35.0" -chalk-engine = "0.35.0" +chalk-ir = "0.36.0" +chalk-solve = "0.36.0" +chalk-engine = "0.36.0" smallvec = { version = "1.0", features = ["union", "may_dangle"] } rustc_infer = { path = "../rustc_infer" } rustc_trait_selection = { path = "../rustc_trait_selection" } diff --git a/compiler/rustc_traits/src/chalk/lowering.rs b/compiler/rustc_traits/src/chalk/lowering.rs index 
81e7fa4aa3082..b89ed6da58b26 100644 --- a/compiler/rustc_traits/src/chalk/lowering.rs +++ b/compiler/rustc_traits/src/chalk/lowering.rs @@ -31,6 +31,7 @@ //! not. To lower anything wrapped in a `Binder`, we first deeply find any bound //! variables from the current `Binder`. +use rustc_ast::ast; use rustc_middle::traits::{ChalkEnvironmentAndGoal, ChalkRustInterner as RustInterner}; use rustc_middle::ty::fold::TypeFolder; use rustc_middle::ty::subst::{GenericArg, GenericArgKind, SubstsRef}; @@ -278,26 +279,14 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::Ty>> for Ty<'tcx> { } ty::Slice(ty) => chalk_ir::TyKind::Slice(ty.lower_into(interner)), - ty::RawPtr(ptr) => match ptr.mutbl { - ast::Mutability::Mut => { - chalk_ir::TyKind::Raw(chalk_ir::Mutability::Mut, ptr.ty.lower_into(interner)) - } - ast::Mutability::Not => { - chalk_ir::TyKind::Raw(chalk_ir::Mutability::Not, ptr.ty.lower_into(interner)) - } - }, - ty::Ref(region, ty, mutability) => match mutability { - ast::Mutability::Mut => chalk_ir::TyKind::Ref( - chalk_ir::Mutability::Mut, - region.lower_into(interner), - ty.lower_into(interner), - ), - ast::Mutability::Not => chalk_ir::TyKind::Ref( - chalk_ir::Mutability::Not, - region.lower_into(interner), - ty.lower_into(interner), - ), - }, + ty::RawPtr(ptr) => { + chalk_ir::TyKind::Raw(ptr.mutbl.lower_into(interner), ptr.ty.lower_into(interner)) + } + ty::Ref(region, ty, mutability) => chalk_ir::TyKind::Ref( + mutability.lower_into(interner), + region.lower_into(interner), + ty.lower_into(interner), + ), ty::FnDef(def_id, substs) => { chalk_ir::TyKind::FnDef(chalk_ir::FnDefId(def_id), substs.lower_into(interner)) } @@ -356,7 +345,6 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::Ty>> for Ty<'tcx> { impl<'tcx> LowerInto<'tcx, Ty<'tcx>> for &chalk_ir::Ty> { fn lower_into(self, interner: &RustInterner<'tcx>) -> Ty<'tcx> { use chalk_ir::TyKind; - use rustc_ast::ast; let kind = match self.kind(interner) { TyKind::Adt(struct_id, substitution) => { @@ -402,18 +390,12 @@ impl<'tcx> LowerInto<'tcx, Ty<'tcx>> for &chalk_ir::Ty> { TyKind::Slice(ty) => ty::Slice(ty.lower_into(interner)), TyKind::Raw(mutbl, ty) => ty::RawPtr(ty::TypeAndMut { ty: ty.lower_into(interner), - mutbl: match mutbl { - chalk_ir::Mutability::Mut => ast::Mutability::Mut, - chalk_ir::Mutability::Not => ast::Mutability::Not, - }, + mutbl: mutbl.lower_into(interner), }), TyKind::Ref(mutbl, lifetime, ty) => ty::Ref( lifetime.lower_into(interner), ty.lower_into(interner), - match mutbl { - chalk_ir::Mutability::Mut => ast::Mutability::Mut, - chalk_ir::Mutability::Not => ast::Mutability::Not, - }, + mutbl.lower_into(interner), ), TyKind::Str => ty::Str, TyKind::OpaqueType(opaque_ty, substitution) => { @@ -767,6 +749,24 @@ impl<'tcx> LowerInto<'tcx, chalk_solve::rust_ir::TraitBound>> } } +impl<'tcx> LowerInto<'tcx, chalk_ir::Mutability> for ast::Mutability { + fn lower_into(self, _interner: &RustInterner<'tcx>) -> chalk_ir::Mutability { + match self { + rustc_ast::Mutability::Mut => chalk_ir::Mutability::Mut, + rustc_ast::Mutability::Not => chalk_ir::Mutability::Not, + } + } +} + +impl<'tcx> LowerInto<'tcx, ast::Mutability> for chalk_ir::Mutability { + fn lower_into(self, _interner: &RustInterner<'tcx>) -> ast::Mutability { + match self { + chalk_ir::Mutability::Mut => ast::Mutability::Mut, + chalk_ir::Mutability::Not => ast::Mutability::Not, + } + } +} + impl<'tcx> LowerInto<'tcx, chalk_solve::rust_ir::Polarity> for ty::ImplPolarity { fn lower_into(self, _interner: &RustInterner<'tcx>) -> chalk_solve::rust_ir::Polarity { match self { From 
3237b3886c8d1bd19b78eda6040e2c55e5332a82 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Tue, 3 Nov 2020 20:26:17 +0300 Subject: [PATCH 08/19] rustc_ast: Do not panic by default when visiting macro calls --- compiler/rustc_ast/src/mut_visit.rs | 7 ++----- compiler/rustc_ast/src/visit.rs | 9 ++------- compiler/rustc_ast_passes/src/node_count.rs | 4 ++-- compiler/rustc_ast_passes/src/show_span.rs | 4 ---- compiler/rustc_builtin_macros/src/proc_macro_harness.rs | 4 ---- compiler/rustc_builtin_macros/src/test_harness.rs | 8 -------- compiler/rustc_expand/src/config.rs | 5 ----- compiler/rustc_expand/src/expand.rs | 2 -- compiler/rustc_expand/src/mbe/transcribe.rs | 4 ---- compiler/rustc_expand/src/mut_visit/tests.rs | 3 --- compiler/rustc_expand/src/placeholders.rs | 4 ---- compiler/rustc_interface/src/util.rs | 6 ------ compiler/rustc_lint/src/early.rs | 8 +------- compiler/rustc_parse/src/parser/pat.rs | 4 ---- compiler/rustc_passes/src/hir_stats.rs | 1 + .../clippy/clippy_lints/src/non_expressive_names.rs | 6 ------ 16 files changed, 8 insertions(+), 71 deletions(-) diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs index fe9ad58c9ac84..d99337ee06e11 100644 --- a/compiler/rustc_ast/src/mut_visit.rs +++ b/compiler/rustc_ast/src/mut_visit.rs @@ -210,11 +210,8 @@ pub trait MutVisitor: Sized { noop_visit_local(l, self); } - fn visit_mac(&mut self, _mac: &mut MacCall) { - panic!("visit_mac disabled by default"); - // N.B., see note about macros above. If you really want a visitor that - // works on macros, use this definition in your trait impl: - // mut_visit::noop_visit_mac(_mac, self); + fn visit_mac(&mut self, mac: &mut MacCall) { + noop_visit_mac(mac, self); } fn visit_macro_def(&mut self, def: &mut MacroDef) { diff --git a/compiler/rustc_ast/src/visit.rs b/compiler/rustc_ast/src/visit.rs index 2ab6667ac3cf1..1b8026bf439f4 100644 --- a/compiler/rustc_ast/src/visit.rs +++ b/compiler/rustc_ast/src/visit.rs @@ -176,13 +176,8 @@ pub trait Visitor<'ast>: Sized { fn visit_lifetime(&mut self, lifetime: &'ast Lifetime) { walk_lifetime(self, lifetime) } - fn visit_mac(&mut self, _mac: &'ast MacCall) { - panic!("visit_mac disabled by default"); - // N.B., see note about macros above. 
- // if you really want a visitor that - // works on macros, use this - // definition in your trait impl: - // visit::walk_mac(self, _mac) + fn visit_mac(&mut self, mac: &'ast MacCall) { + walk_mac(self, mac) } fn visit_mac_def(&mut self, _mac: &'ast MacroDef, _id: NodeId) { // Nothing to do diff --git a/compiler/rustc_ast_passes/src/node_count.rs b/compiler/rustc_ast_passes/src/node_count.rs index 706dca2b7f455..773b81d771ca9 100644 --- a/compiler/rustc_ast_passes/src/node_count.rs +++ b/compiler/rustc_ast_passes/src/node_count.rs @@ -114,9 +114,9 @@ impl<'ast> Visitor<'ast> for NodeCounter { self.count += 1; walk_lifetime(self, lifetime) } - fn visit_mac(&mut self, _mac: &MacCall) { + fn visit_mac(&mut self, mac: &MacCall) { self.count += 1; - walk_mac(self, _mac) + walk_mac(self, mac) } fn visit_path(&mut self, path: &Path, _id: NodeId) { self.count += 1; diff --git a/compiler/rustc_ast_passes/src/show_span.rs b/compiler/rustc_ast_passes/src/show_span.rs index 053aba8622271..6cef26a13e6b0 100644 --- a/compiler/rustc_ast_passes/src/show_span.rs +++ b/compiler/rustc_ast_passes/src/show_span.rs @@ -54,10 +54,6 @@ impl<'a> Visitor<'a> for ShowSpanVisitor<'a> { } visit::walk_ty(self, t); } - - fn visit_mac(&mut self, mac: &'a ast::MacCall) { - visit::walk_mac(self, mac); - } } pub fn run(span_diagnostic: &rustc_errors::Handler, mode: &str, krate: &ast::Crate) { diff --git a/compiler/rustc_builtin_macros/src/proc_macro_harness.rs b/compiler/rustc_builtin_macros/src/proc_macro_harness.rs index c6ab3faf56897..4e91436199a53 100644 --- a/compiler/rustc_builtin_macros/src/proc_macro_harness.rs +++ b/compiler/rustc_builtin_macros/src/proc_macro_harness.rs @@ -344,10 +344,6 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> { visit::walk_item(self, item); self.in_root = prev_in_root; } - - fn visit_mac(&mut self, mac: &'a ast::MacCall) { - visit::walk_mac(self, mac) - } } // Creates a new module which looks like: diff --git a/compiler/rustc_builtin_macros/src/test_harness.rs b/compiler/rustc_builtin_macros/src/test_harness.rs index da74f0aeaa193..e68b626e22d9e 100644 --- a/compiler/rustc_builtin_macros/src/test_harness.rs +++ b/compiler/rustc_builtin_macros/src/test_harness.rs @@ -130,10 +130,6 @@ impl<'a> MutVisitor for TestHarnessGenerator<'a> { } smallvec![P(item)] } - - fn visit_mac(&mut self, _mac: &mut ast::MacCall) { - // Do nothing. - } } // Beware, this is duplicated in librustc_passes/entry.rs (with @@ -201,10 +197,6 @@ impl<'a> MutVisitor for EntryPointCleaner<'a> { smallvec![item] } - - fn visit_mac(&mut self, _mac: &mut ast::MacCall) { - // Do nothing. - } } /// Crawl over the crate, inserting test reexports and the test main function diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs index c124ab6421862..a07dd8ede8bba 100644 --- a/compiler/rustc_expand/src/config.rs +++ b/compiler/rustc_expand/src/config.rs @@ -547,11 +547,6 @@ impl<'a> MutVisitor for StripUnconfigured<'a> { noop_flat_map_assoc_item(configure!(self, item), self) } - fn visit_mac(&mut self, _mac: &mut ast::MacCall) { - // Don't configure interpolated AST (cf. issue #34171). - // Interpolated AST will get configured once the surrounding tokens are parsed. 
- } - fn visit_pat(&mut self, pat: &mut P) { self.configure_pat(pat); noop_visit_pat(pat, self) diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index 3e5762ab992f4..9ac3550d223c9 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -850,8 +850,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> { visit::walk_item(self, item); } - - fn visit_mac(&mut self, _: &'ast ast::MacCall) {} } if !self.cx.ecfg.proc_macro_hygiene() { diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs index 629e0e702b654..70eb7e228358b 100644 --- a/compiler/rustc_expand/src/mbe/transcribe.rs +++ b/compiler/rustc_expand/src/mbe/transcribe.rs @@ -27,10 +27,6 @@ impl MutVisitor for Marker { fn visit_span(&mut self, span: &mut Span) { *span = span.apply_mark(self.0, self.1) } - - fn visit_mac(&mut self, mac: &mut MacCall) { - mut_visit::noop_visit_mac(mac, self) - } } /// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`). diff --git a/compiler/rustc_expand/src/mut_visit/tests.rs b/compiler/rustc_expand/src/mut_visit/tests.rs index 9e65fc2eca739..568618fa8df84 100644 --- a/compiler/rustc_expand/src/mut_visit/tests.rs +++ b/compiler/rustc_expand/src/mut_visit/tests.rs @@ -21,9 +21,6 @@ impl MutVisitor for ToZzIdentMutVisitor { fn visit_ident(&mut self, ident: &mut Ident) { *ident = Ident::from_str("zz"); } - fn visit_mac(&mut self, mac: &mut ast::MacCall) { - mut_visit::noop_visit_mac(mac, self) - } } // Maybe add to `expand.rs`. diff --git a/compiler/rustc_expand/src/placeholders.rs b/compiler/rustc_expand/src/placeholders.rs index 0cffca1727124..552a4c899dd61 100644 --- a/compiler/rustc_expand/src/placeholders.rs +++ b/compiler/rustc_expand/src/placeholders.rs @@ -386,8 +386,4 @@ impl<'a, 'b> MutVisitor for PlaceholderExpander<'a, 'b> { |item| !matches!(item.kind, ast::ItemKind::MacCall(_) if !self.cx.ecfg.keep_macs), ); } - - fn visit_mac(&mut self, _mac: &mut ast::MacCall) { - // Do nothing. - } } diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs index 46a6c5861d59d..f406578f997e5 100644 --- a/compiler/rustc_interface/src/util.rs +++ b/compiler/rustc_interface/src/util.rs @@ -881,12 +881,6 @@ impl<'a> MutVisitor for ReplaceBodyWithLoop<'a, '_> { }) } } - - // in general the pretty printer processes unexpanded code, so - // we override the default `visit_mac` method which panics. - fn visit_mac(&mut self, mac: &mut ast::MacCall) { - noop_visit_mac(mac, self) - } } /// Returns a version string such as "rustc 1.46.0 (04488afe3 2020-08-24)" diff --git a/compiler/rustc_lint/src/early.rs b/compiler/rustc_lint/src/early.rs index 9aeeb6277924e..ba5b60bd53539 100644 --- a/compiler/rustc_lint/src/early.rs +++ b/compiler/rustc_lint/src/early.rs @@ -271,14 +271,8 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T> } fn visit_mac(&mut self, mac: &'a ast::MacCall) { - // FIXME(#54110): So, this setup isn't really right. I think - // that (a) the librustc_ast visitor ought to be doing this as - // part of `walk_mac`, and (b) we should be calling - // `visit_path`, *but* that would require a `NodeId`, and I - // want to get #53686 fixed quickly. 
-nmatsakis - ast_visit::walk_path(self, &mac.path); - run_early_pass!(self, check_mac, mac); + ast_visit::walk_mac(self, mac); } } diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index 196790a0ab323..c06b012d8c01c 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -570,10 +570,6 @@ impl<'a> Parser<'a> { fn make_all_value_bindings_mutable(pat: &mut P) -> bool { struct AddMut(bool); impl MutVisitor for AddMut { - fn visit_mac(&mut self, mac: &mut MacCall) { - noop_visit_mac(mac, self); - } - fn visit_pat(&mut self, pat: &mut P) { if let PatKind::Ident(BindingMode::ByValue(m @ Mutability::Not), ..) = &mut pat.kind { diff --git a/compiler/rustc_passes/src/hir_stats.rs b/compiler/rustc_passes/src/hir_stats.rs index 9537321026e55..697b48718b21f 100644 --- a/compiler/rustc_passes/src/hir_stats.rs +++ b/compiler/rustc_passes/src/hir_stats.rs @@ -338,6 +338,7 @@ impl<'v> ast_visit::Visitor<'v> for StatCollector<'v> { fn visit_mac(&mut self, mac: &'v ast::MacCall) { self.record("MacCall", Id::None, mac); + ast_visit::walk_mac(self, mac) } fn visit_path_segment(&mut self, path_span: Span, path_segment: &'v ast::PathSegment) { diff --git a/src/tools/clippy/clippy_lints/src/non_expressive_names.rs b/src/tools/clippy/clippy_lints/src/non_expressive_names.rs index 603440c0f8376..6b56380edf8c0 100644 --- a/src/tools/clippy/clippy_lints/src/non_expressive_names.rs +++ b/src/tools/clippy/clippy_lints/src/non_expressive_names.rs @@ -149,9 +149,6 @@ impl<'a, 'tcx, 'b> Visitor<'tcx> for SimilarNamesNameVisitor<'a, 'tcx, 'b> { _ => walk_pat(self, pat), } } - fn visit_mac(&mut self, _mac: &MacCall) { - // do not check macs - } } #[must_use] @@ -356,9 +353,6 @@ impl<'a, 'tcx> Visitor<'tcx> for SimilarNamesLocalVisitor<'a, 'tcx> { fn visit_item(&mut self, _: &Item) { // do not recurse into inner items } - fn visit_mac(&mut self, _mac: &MacCall) { - // do not check macs - } } impl EarlyLintPass for NonExpressiveNames { From 90fafc8c8ff680fc631a44230d8352d7911e70f2 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Tue, 3 Nov 2020 20:34:57 +0300 Subject: [PATCH 09/19] rustc_ast: `visit_mac` -> `visit_mac_call` --- compiler/rustc_ast/src/mut_visit.rs | 16 ++++++++-------- compiler/rustc_ast/src/visit.rs | 16 ++++++++-------- compiler/rustc_ast_passes/src/node_count.rs | 2 +- .../src/deriving/generic/mod.rs | 2 +- compiler/rustc_expand/src/mbe/transcribe.rs | 1 - compiler/rustc_expand/src/mut_visit/tests.rs | 2 +- compiler/rustc_lint/src/early.rs | 2 +- compiler/rustc_parse/src/parser/pat.rs | 2 +- compiler/rustc_passes/src/hir_stats.rs | 2 +- .../issue-65122-mac-invoc-in-mut-patterns.rs | 2 +- .../clippy_lints/src/non_expressive_names.rs | 2 +- 11 files changed, 24 insertions(+), 25 deletions(-) diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs index d99337ee06e11..7cafac6ac8b7f 100644 --- a/compiler/rustc_ast/src/mut_visit.rs +++ b/compiler/rustc_ast/src/mut_visit.rs @@ -210,7 +210,7 @@ pub trait MutVisitor: Sized { noop_visit_local(l, self); } - fn visit_mac(&mut self, mac: &mut MacCall) { + fn visit_mac_call(&mut self, mac: &mut MacCall) { noop_visit_mac(mac, self); } @@ -491,7 +491,7 @@ pub fn noop_visit_ty(ty: &mut P, vis: &mut T) { vis.visit_id(id); visit_vec(bounds, |bound| vis.visit_param_bound(bound)); } - TyKind::MacCall(mac) => vis.visit_mac(mac), + TyKind::MacCall(mac) => vis.visit_mac_call(mac), } vis.visit_span(span); } @@ -943,7 +943,7 @@ pub fn 
noop_visit_item_kind(kind: &mut ItemKind, vis: &mut T) { vis.visit_generics(generics); visit_bounds(bounds, vis); } - ItemKind::MacCall(m) => vis.visit_mac(m), + ItemKind::MacCall(m) => vis.visit_mac_call(m), ItemKind::MacroDef(def) => vis.visit_macro_def(def), } } @@ -972,7 +972,7 @@ pub fn noop_flat_map_assoc_item( visit_bounds(bounds, visitor); visit_opt(ty, |ty| visitor.visit_ty(ty)); } - AssocItemKind::MacCall(mac) => visitor.visit_mac(mac), + AssocItemKind::MacCall(mac) => visitor.visit_mac_call(mac), } visitor.visit_span(span); smallvec![item] @@ -1063,7 +1063,7 @@ pub fn noop_flat_map_foreign_item( visit_bounds(bounds, visitor); visit_opt(ty, |ty| visitor.visit_ty(ty)); } - ForeignItemKind::MacCall(mac) => visitor.visit_mac(mac), + ForeignItemKind::MacCall(mac) => visitor.visit_mac_call(mac), } visitor.visit_span(span); smallvec![item] @@ -1102,7 +1102,7 @@ pub fn noop_visit_pat(pat: &mut P, vis: &mut T) { visit_vec(elems, |elem| vis.visit_pat(elem)) } PatKind::Paren(inner) => vis.visit_pat(inner), - PatKind::MacCall(mac) => vis.visit_mac(mac), + PatKind::MacCall(mac) => vis.visit_mac_call(mac), } vis.visit_span(span); } @@ -1267,7 +1267,7 @@ pub fn noop_visit_expr( } visit_vec(inputs, |(_c, expr)| vis.visit_expr(expr)); } - ExprKind::MacCall(mac) => vis.visit_mac(mac), + ExprKind::MacCall(mac) => vis.visit_mac_call(mac), ExprKind::Struct(path, fields, expr) => { vis.visit_path(path); fields.flat_map_in_place(|field| vis.flat_map_field(field)); @@ -1328,7 +1328,7 @@ pub fn noop_flat_map_stmt_kind( StmtKind::Empty => smallvec![StmtKind::Empty], StmtKind::MacCall(mut mac) => { let MacCallStmt { mac: mac_, style: _, attrs } = mac.deref_mut(); - vis.visit_mac(mac_); + vis.visit_mac_call(mac_); visit_thin_attrs(attrs, vis); smallvec![StmtKind::MacCall(mac)] } diff --git a/compiler/rustc_ast/src/visit.rs b/compiler/rustc_ast/src/visit.rs index 1b8026bf439f4..8751f09cfcbbe 100644 --- a/compiler/rustc_ast/src/visit.rs +++ b/compiler/rustc_ast/src/visit.rs @@ -176,7 +176,7 @@ pub trait Visitor<'ast>: Sized { fn visit_lifetime(&mut self, lifetime: &'ast Lifetime) { walk_lifetime(self, lifetime) } - fn visit_mac(&mut self, mac: &'ast MacCall) { + fn visit_mac_call(&mut self, mac: &'ast MacCall) { walk_mac(self, mac) } fn visit_mac_def(&mut self, _mac: &'ast MacroDef, _id: NodeId) { @@ -341,7 +341,7 @@ pub fn walk_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a Item) { visitor.visit_generics(generics); walk_list!(visitor, visit_param_bound, bounds); } - ItemKind::MacCall(ref mac) => visitor.visit_mac(mac), + ItemKind::MacCall(ref mac) => visitor.visit_mac_call(mac), ItemKind::MacroDef(ref ts) => visitor.visit_mac_def(ts, item.id), } walk_list!(visitor, visit_attribute, &item.attrs); @@ -409,7 +409,7 @@ pub fn walk_ty<'a, V: Visitor<'a>>(visitor: &mut V, typ: &'a Ty) { } TyKind::Typeof(ref expression) => visitor.visit_anon_const(expression), TyKind::Infer | TyKind::ImplicitSelf | TyKind::Err => {} - TyKind::MacCall(ref mac) => visitor.visit_mac(mac), + TyKind::MacCall(ref mac) => visitor.visit_mac_call(mac), TyKind::Never | TyKind::CVarArgs => {} } } @@ -527,7 +527,7 @@ pub fn walk_pat<'a, V: Visitor<'a>>(visitor: &mut V, pattern: &'a Pat) { PatKind::Tuple(ref elems) | PatKind::Slice(ref elems) | PatKind::Or(ref elems) => { walk_list!(visitor, visit_pat, elems); } - PatKind::MacCall(ref mac) => visitor.visit_mac(mac), + PatKind::MacCall(ref mac) => visitor.visit_mac_call(mac), } } @@ -552,7 +552,7 @@ pub fn walk_foreign_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a ForeignI 
walk_list!(visitor, visit_ty, ty); } ForeignItemKind::MacCall(mac) => { - visitor.visit_mac(mac); + visitor.visit_mac_call(mac); } } } @@ -657,7 +657,7 @@ pub fn walk_assoc_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a AssocItem, walk_list!(visitor, visit_ty, ty); } AssocItemKind::MacCall(mac) => { - visitor.visit_mac(mac); + visitor.visit_mac_call(mac); } } } @@ -687,7 +687,7 @@ pub fn walk_stmt<'a, V: Visitor<'a>>(visitor: &mut V, statement: &'a Stmt) { StmtKind::Empty => {} StmtKind::MacCall(ref mac) => { let MacCallStmt { ref mac, style: _, ref attrs } = **mac; - visitor.visit_mac(mac); + visitor.visit_mac_call(mac); for attr in attrs.iter() { visitor.visit_attribute(attr); } @@ -818,7 +818,7 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) { ExprKind::Ret(ref optional_expression) => { walk_list!(visitor, visit_expr, optional_expression); } - ExprKind::MacCall(ref mac) => visitor.visit_mac(mac), + ExprKind::MacCall(ref mac) => visitor.visit_mac_call(mac), ExprKind::Paren(ref subexpression) => visitor.visit_expr(subexpression), ExprKind::InlineAsm(ref ia) => { for (op, _) in &ia.operands { diff --git a/compiler/rustc_ast_passes/src/node_count.rs b/compiler/rustc_ast_passes/src/node_count.rs index 773b81d771ca9..6efc78c88427e 100644 --- a/compiler/rustc_ast_passes/src/node_count.rs +++ b/compiler/rustc_ast_passes/src/node_count.rs @@ -114,7 +114,7 @@ impl<'ast> Visitor<'ast> for NodeCounter { self.count += 1; walk_lifetime(self, lifetime) } - fn visit_mac(&mut self, mac: &MacCall) { + fn visit_mac_call(&mut self, mac: &MacCall) { self.count += 1; walk_mac(self, mac) } diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index 2e52d2a39236a..0642edff6b678 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -358,7 +358,7 @@ fn find_type_parameters( visit::walk_ty(self, ty) } - fn visit_mac(&mut self, mac: &ast::MacCall) { + fn visit_mac_call(&mut self, mac: &ast::MacCall) { self.cx.span_err(mac.span(), "`derive` cannot be used on items with type macros"); } } diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs index 70eb7e228358b..dde65d998d81b 100644 --- a/compiler/rustc_expand/src/mbe/transcribe.rs +++ b/compiler/rustc_expand/src/mbe/transcribe.rs @@ -5,7 +5,6 @@ use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch}; use rustc_ast::mut_visit::{self, MutVisitor}; use rustc_ast::token::{self, NtTT, Token}; use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing}; -use rustc_ast::MacCall; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync::Lrc; use rustc_errors::{pluralize, PResult}; diff --git a/compiler/rustc_expand/src/mut_visit/tests.rs b/compiler/rustc_expand/src/mut_visit/tests.rs index 568618fa8df84..be0300bad98bd 100644 --- a/compiler/rustc_expand/src/mut_visit/tests.rs +++ b/compiler/rustc_expand/src/mut_visit/tests.rs @@ -1,7 +1,7 @@ use crate::tests::{matches_codepattern, string_to_crate}; use rustc_ast as ast; -use rustc_ast::mut_visit::{self, MutVisitor}; +use rustc_ast::mut_visit::MutVisitor; use rustc_ast_pretty::pprust; use rustc_span::symbol::Ident; use rustc_span::with_default_session_globals; diff --git a/compiler/rustc_lint/src/early.rs b/compiler/rustc_lint/src/early.rs index ba5b60bd53539..08c147ec3ac3f 100644 --- a/compiler/rustc_lint/src/early.rs +++ 
b/compiler/rustc_lint/src/early.rs @@ -270,7 +270,7 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T> self.check_id(id); } - fn visit_mac(&mut self, mac: &'a ast::MacCall) { + fn visit_mac_call(&mut self, mac: &'a ast::MacCall) { run_early_pass!(self, check_mac, mac); ast_visit::walk_mac(self, mac); } diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index c06b012d8c01c..ee9a6dca5ade9 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -1,6 +1,6 @@ use super::{Parser, PathStyle}; use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole}; -use rustc_ast::mut_visit::{noop_visit_mac, noop_visit_pat, MutVisitor}; +use rustc_ast::mut_visit::{noop_visit_pat, MutVisitor}; use rustc_ast::ptr::P; use rustc_ast::token; use rustc_ast::{self as ast, AttrVec, Attribute, FieldPat, MacCall, Pat, PatKind, RangeEnd}; diff --git a/compiler/rustc_passes/src/hir_stats.rs b/compiler/rustc_passes/src/hir_stats.rs index 697b48718b21f..1d02c9aa6375d 100644 --- a/compiler/rustc_passes/src/hir_stats.rs +++ b/compiler/rustc_passes/src/hir_stats.rs @@ -336,7 +336,7 @@ impl<'v> ast_visit::Visitor<'v> for StatCollector<'v> { ast_visit::walk_lifetime(self, lifetime) } - fn visit_mac(&mut self, mac: &'v ast::MacCall) { + fn visit_mac_call(&mut self, mac: &'v ast::MacCall) { self.record("MacCall", Id::None, mac); ast_visit::walk_mac(self, mac) } diff --git a/src/test/ui/parser/issue-65122-mac-invoc-in-mut-patterns.rs b/src/test/ui/parser/issue-65122-mac-invoc-in-mut-patterns.rs index 97a405b6999c3..30f3781bf7743 100644 --- a/src/test/ui/parser/issue-65122-mac-invoc-in-mut-patterns.rs +++ b/src/test/ui/parser/issue-65122-mac-invoc-in-mut-patterns.rs @@ -1,4 +1,4 @@ -// Regression test; used to ICE with 'visit_mac disabled by default' due to a +// Regression test; used to ICE with 'visit_mac_call disabled by default' due to a // `MutVisitor` in `fn make_all_value_bindings_mutable` (`parse/parser/pat.rs`). macro_rules! mac1 { diff --git a/src/tools/clippy/clippy_lints/src/non_expressive_names.rs b/src/tools/clippy/clippy_lints/src/non_expressive_names.rs index 6b56380edf8c0..c5f38c1f08025 100644 --- a/src/tools/clippy/clippy_lints/src/non_expressive_names.rs +++ b/src/tools/clippy/clippy_lints/src/non_expressive_names.rs @@ -1,6 +1,6 @@ use crate::utils::{span_lint, span_lint_and_then}; use rustc_ast::ast::{ - Arm, AssocItem, AssocItemKind, Attribute, Block, FnDecl, Item, ItemKind, Local, MacCall, Pat, PatKind, + Arm, AssocItem, AssocItemKind, Attribute, Block, FnDecl, Item, ItemKind, Local, Pat, PatKind, }; use rustc_ast::visit::{walk_block, walk_expr, walk_pat, Visitor}; use rustc_lint::{EarlyContext, EarlyLintPass}; From 53c1eb7a26b3e23086534cfcd94a8b3db9e2be23 Mon Sep 17 00:00:00 2001 From: Pietro Albini Date: Wed, 4 Nov 2020 18:38:49 +0100 Subject: [PATCH 10/19] ci: demote i686-unknown-freebsd to tier 2 compiler target While technically the i686-unknown-freebsd target has been a tier 2 development platform for a long time, with full toolchain tarballs available on static.rust-lang.org, due to a bug in the manifest generation the target was never available for download through rustup. The infrastructure team privately inquired the FreeBSD package maintainers, and they weren't relying on those tarballs either, so it's a fair assumption to say practically nobody is using those tarballs. 
This PR then removes the CI builder that produces full tarballs for the target, and moves the compilation of rust-std for the target in dist-various-2. The x86_64-unknown-freebsd target is *not* affected. --- .github/workflows/ci.yml | 3 -- .../host-x86_64/dist-i686-freebsd/Dockerfile | 34 ------------------- .../host-x86_64/dist-various-2/Dockerfile | 7 ++++ src/ci/github-actions/ci.yml | 3 -- src/doc/rustc/src/platform-support.md | 2 +- 5 files changed, 8 insertions(+), 41 deletions(-) delete mode 100644 src/ci/docker/host-x86_64/dist-i686-freebsd/Dockerfile diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6025808eb615d..970bcac027962 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -178,9 +178,6 @@ jobs: - name: dist-i586-gnu-i586-i686-musl os: ubuntu-latest-xl env: {} - - name: dist-i686-freebsd - os: ubuntu-latest-xl - env: {} - name: dist-i686-linux os: ubuntu-latest-xl env: {} diff --git a/src/ci/docker/host-x86_64/dist-i686-freebsd/Dockerfile b/src/ci/docker/host-x86_64/dist-i686-freebsd/Dockerfile deleted file mode 100644 index 7db6e58c4d688..0000000000000 --- a/src/ci/docker/host-x86_64/dist-i686-freebsd/Dockerfile +++ /dev/null @@ -1,34 +0,0 @@ -FROM ubuntu:18.04 - -RUN apt-get update && apt-get install -y --no-install-recommends \ - clang \ - make \ - ninja-build \ - file \ - curl \ - ca-certificates \ - python3 \ - git \ - cmake \ - sudo \ - bzip2 \ - xz-utils \ - wget \ - libssl-dev \ - pkg-config - -COPY scripts/freebsd-toolchain.sh /tmp/ -RUN /tmp/freebsd-toolchain.sh i686 - -COPY scripts/sccache.sh /scripts/ -RUN sh /scripts/sccache.sh - -ENV \ - AR_i686_unknown_freebsd=i686-unknown-freebsd11-ar \ - CC_i686_unknown_freebsd=i686-unknown-freebsd11-clang \ - CXX_i686_unknown_freebsd=i686-unknown-freebsd11-clang++ - -ENV HOSTS=i686-unknown-freebsd - -ENV RUST_CONFIGURE_ARGS --enable-extended --enable-profiler --disable-docs -ENV SCRIPT python3 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/docker/host-x86_64/dist-various-2/Dockerfile b/src/ci/docker/host-x86_64/dist-various-2/Dockerfile index 47a66f748087c..b8b81ab327b02 100644 --- a/src/ci/docker/host-x86_64/dist-various-2/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-various-2/Dockerfile @@ -48,6 +48,9 @@ ENV \ CFLAGS_x86_64_fortanix_unknown_sgx="-mlvi-hardening -mllvm -x86-experimental-lvi-inline-asm-hardening" \ CXX_x86_64_fortanix_unknown_sgx=x86_64-fortanix-unknown-sgx-clang++-11 \ CXXFLAGS_x86_64_fortanix_unknown_sgx="-mlvi-hardening -mllvm -x86-experimental-lvi-inline-asm-hardening" \ + AR_i686_unknown_freebsd=i686-unknown-freebsd11-ar \ + CC_i686_unknown_freebsd=i686-unknown-freebsd11-clang \ + CXX_i686_unknown_freebsd=i686-unknown-freebsd11-clang++ \ CC=gcc-7 \ CXX=g++-7 @@ -74,6 +77,9 @@ RUN /tmp/build-x86_64-fortanix-unknown-sgx-toolchain.sh COPY host-x86_64/dist-various-2/build-wasi-toolchain.sh /tmp/ RUN /tmp/build-wasi-toolchain.sh +COPY scripts/freebsd-toolchain.sh /tmp/ +RUN /tmp/freebsd-toolchain.sh i686 + COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh @@ -99,6 +105,7 @@ ENV TARGETS=$TARGETS,x86_64-fortanix-unknown-sgx ENV TARGETS=$TARGETS,nvptx64-nvidia-cuda ENV TARGETS=$TARGETS,armv7-unknown-linux-gnueabi ENV TARGETS=$TARGETS,armv7-unknown-linux-musleabi +ENV TARGETS=$TARGETS,i686-unknown-freebsd # As per https://bugs.launchpad.net/ubuntu/+source/gcc-defaults/+bug/1300211 # we need asm in the search path for gcc-7 (for gnux32) but not in the search path of the diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml 
index 01f15a82f0f5a..8d56da198955b 100644 --- a/src/ci/github-actions/ci.yml +++ b/src/ci/github-actions/ci.yml @@ -325,9 +325,6 @@ jobs: - name: dist-i586-gnu-i586-i686-musl <<: *job-linux-xl - - name: dist-i686-freebsd - <<: *job-linux-xl - - name: dist-i686-linux <<: *job-linux-xl diff --git a/src/doc/rustc/src/platform-support.md b/src/doc/rustc/src/platform-support.md index 85c6f91f08582..2e9c74d53de02 100644 --- a/src/doc/rustc/src/platform-support.md +++ b/src/doc/rustc/src/platform-support.md @@ -88,7 +88,7 @@ target | std | host | notes `i586-unknown-linux-gnu` | ✓ | | 32-bit Linux w/o SSE (kernel 4.4, glibc 2.23) `i586-unknown-linux-musl` | ✓ | | 32-bit Linux w/o SSE, MUSL `i686-linux-android` | ✓ | | 32-bit x86 Android -`i686-unknown-freebsd` | ✓ | ✓ | 32-bit FreeBSD +`i686-unknown-freebsd` | ✓ | | 32-bit FreeBSD `i686-unknown-linux-musl` | ✓ | | 32-bit Linux with MUSL `mips-unknown-linux-gnu` | ✓ | ✓ | MIPS Linux (kernel 4.4, glibc 2.23) `mips-unknown-linux-musl` | ✓ | | MIPS Linux with MUSL From 3863dee15988d8e2b8b33ff6ba8759342e8dd98f Mon Sep 17 00:00:00 2001 From: Joshua Nelson Date: Wed, 28 Oct 2020 23:09:41 -0400 Subject: [PATCH 11/19] Infer the default host target from the host toolchain if possible This fixes ongoing issues where x.py will detect the wrong host triple between MSVC and GNU. - Add line to changelog --- src/bootstrap/CHANGELOG.md | 2 ++ src/bootstrap/bootstrap.py | 19 +++++++++++++++++-- src/bootstrap/configure.py | 2 +- 3 files changed, 20 insertions(+), 3 deletions(-) diff --git a/src/bootstrap/CHANGELOG.md b/src/bootstrap/CHANGELOG.md index 7bb4e504275d8..a103c9fb0b78c 100644 --- a/src/bootstrap/CHANGELOG.md +++ b/src/bootstrap/CHANGELOG.md @@ -8,6 +8,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - `x.py check` needs opt-in to check tests (--all-targets) [#77473](https://github.com/rust-lang/rust/pull/77473) - The default bootstrap profiles are now located at `bootstrap/defaults/config.$PROFILE.toml` (previously they were located at `bootstrap/defaults/config.toml.$PROFILE`) [#77558](https://github.com/rust-lang/rust/pull/77558) +- If you have Rust already installed, `x.py` will now infer the host target + from the default rust toolchain. [#78513](https://github.com/rust-lang/rust/pull/78513) ## [Version 2] - 2020-09-25 diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index 87e1536381841..54d0a23dec58d 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -187,8 +187,23 @@ def format_build_time(duration): return str(datetime.timedelta(seconds=int(duration))) -def default_build_triple(): +def default_build_triple(verbose): """Build triple as in LLVM""" + # If the user already has a host build triple with an existing `rustc` + # install, use their preference. This fixes most issues with Windows builds + # being detected as GNU instead of MSVC. 
+ try: + version = subprocess.check_output(["rustc", "--version", "--verbose"]) + host = next(x for x in version.split('\n') if x.startswith("host: ")) + triple = host.split("host: ")[1] + if verbose: + print("detected default triple {}".format(triple)) + return triple + except Exception as e: + if verbose: + print("rustup not detected: {}".format(e)) + print("falling back to auto-detect") + default_encoding = sys.getdefaultencoding() required = sys.platform != 'win32' ostype = require(["uname", "-s"], exit=required) @@ -831,7 +846,7 @@ def build_triple(self): config = self.get_toml('build') if config: return config - return default_build_triple() + return default_build_triple(self.verbose) def check_submodule(self, module, slow_submodules): if not slow_submodules: diff --git a/src/bootstrap/configure.py b/src/bootstrap/configure.py index e156952d56f3d..322e9d6923295 100755 --- a/src/bootstrap/configure.py +++ b/src/bootstrap/configure.py @@ -266,7 +266,7 @@ def err(msg): def build(): if 'build' in known_args: return known_args['build'][-1][1] - return bootstrap.default_build_triple() + return bootstrap.default_build_triple(verbose=False) def set(key, value): From f78f36cdb79db07ae2b650426fd42c94a2f29ae9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Mi=C4=85sko?= Date: Sat, 7 Nov 2020 00:00:00 +0000 Subject: [PATCH 12/19] Monomorphize a type argument of size-of operation during codegen This wasn't necessary until MIR inliner started to consider drop glue as a candidate for inlining; introducing for the first time a generic use of size-of operation. No test at this point since this only happens with a custom inlining threshold. --- compiler/rustc_codegen_ssa/src/mir/rvalue.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs index 7ce110dcbfc48..40ae0a13c7295 100644 --- a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs +++ b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs @@ -502,6 +502,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } mir::Rvalue::NullaryOp(mir::NullOp::SizeOf, ty) => { + let ty = self.monomorphize(&ty); assert!(bx.cx().type_is_sized(ty)); let val = bx.cx().const_usize(bx.cx().layout_of(ty).size.bytes()); let tcx = self.cx.tcx(); From 103f7a499b2eeff908ce9234c812262a9e87a16f Mon Sep 17 00:00:00 2001 From: Bastian Kauschke Date: Sat, 7 Nov 2020 11:54:35 +0100 Subject: [PATCH 13/19] fix `super_visit_with` for `Terminator` --- .../rustc_middle/src/mir/type_foldable.rs | 31 +++++++++---------- 1 file changed, 14 insertions(+), 17 deletions(-) diff --git a/compiler/rustc_middle/src/mir/type_foldable.rs b/compiler/rustc_middle/src/mir/type_foldable.rs index 391bd8be7e4c5..0801188b27881 100644 --- a/compiler/rustc_middle/src/mir/type_foldable.rs +++ b/compiler/rustc_middle/src/mir/type_foldable.rs @@ -109,24 +109,21 @@ impl<'tcx> TypeFoldable<'tcx> for Terminator<'tcx> { args.visit_with(visitor) } Assert { ref cond, ref msg, .. 
} => {
-                if cond.visit_with(visitor).is_break() {
-                    use AssertKind::*;
-                    match msg {
-                        BoundsCheck { ref len, ref index } => {
-                            len.visit_with(visitor)?;
-                            index.visit_with(visitor)
-                        }
-                        Overflow(_, l, r) => {
-                            l.visit_with(visitor)?;
-                            r.visit_with(visitor)
-                        }
-                        OverflowNeg(op) | DivisionByZero(op) | RemainderByZero(op) => {
-                            op.visit_with(visitor)
-                        }
-                        ResumedAfterReturn(_) | ResumedAfterPanic(_) => ControlFlow::CONTINUE,
+                cond.visit_with(visitor)?;
+                use AssertKind::*;
+                match msg {
+                    BoundsCheck { ref len, ref index } => {
+                        len.visit_with(visitor)?;
+                        index.visit_with(visitor)
+                    }
+                    Overflow(_, l, r) => {
+                        l.visit_with(visitor)?;
+                        r.visit_with(visitor)
+                    }
+                    OverflowNeg(op) | DivisionByZero(op) | RemainderByZero(op) => {
+                        op.visit_with(visitor)
                     }
-                } else {
-                    ControlFlow::CONTINUE
+                    ResumedAfterReturn(_) | ResumedAfterPanic(_) => ControlFlow::CONTINUE,
                 }
             }
             InlineAsm { ref operands, .. } => operands.visit_with(visitor),
From d847299674c35d36091b166051909a2e1b965de8 Mon Sep 17 00:00:00 2001
From: Mark Rousskov
Date: Sat, 7 Nov 2020 21:22:48 -0500
Subject: [PATCH 14/19] Run tools builder on subtree changes

---
 src/ci/scripts/should-skip-this.sh | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/src/ci/scripts/should-skip-this.sh b/src/ci/scripts/should-skip-this.sh
index f945db0ada27d..36bf4368990c9 100755
--- a/src/ci/scripts/should-skip-this.sh
+++ b/src/ci/scripts/should-skip-this.sh
@@ -14,6 +14,10 @@ elif git diff HEAD^ | grep --quiet "^index .* 160000"; then
     # Submodules pseudo-files inside git have the 160000 permissions, so when
     # those files are present in the diff a submodule was updated.
     echo "Executing the job since submodules are updated"
+elif git diff --name-only HEAD^ | grep --quiet src/tools/clippy; then
+    # There is not an easy blanket search for subtrees. For now, manually list
+    # clippy.
+    echo "Executing the job since clippy subtree was updated"
 else
     echo "Not executing this job since no submodules were updated"
     ciCommandSetEnv SKIP_JOB 1
From b7f16c56d10abbd664407b4eef41b5e69d18666d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tomasz=20Mi=C4=85sko?=
Date: Fri, 6 Nov 2020 00:00:00 +0000
Subject: [PATCH 15/19] inliner: Make `inline_call` infallible

The inliner does not support inlining of diverging calls. Reject them
early on and turn `inline_call` into an infallible operation.
---
 compiler/rustc_mir/src/transform/inline.rs | 23 ++++++-----------------
 1 file changed, 6 insertions(+), 17 deletions(-)

diff --git a/compiler/rustc_mir/src/transform/inline.rs b/compiler/rustc_mir/src/transform/inline.rs
index a41304236b23d..503b40b506a6e 100644
--- a/compiler/rustc_mir/src/transform/inline.rs
+++ b/compiler/rustc_mir/src/transform/inline.rs
@@ -138,12 +138,7 @@ impl Inliner<'tcx> {
             );
 
             let start = caller_body.basic_blocks().len();
-            debug!("attempting to inline callsite {:?} - body={:?}", callsite, callee_body);
-            if !self.inline_call(callsite, caller_body, callee_body) {
-                debug!("attempting to inline callsite {:?} - failure", callsite);
-                continue;
-            }
-            debug!("attempting to inline callsite {:?} - success", callsite);
+            self.inline_call(callsite, caller_body, callee_body);
 
             // Add callsites from inlined function
             for (bb, bb_data) in caller_body.basic_blocks().iter_enumerated().skip(start) {
@@ -179,7 +174,8 @@ impl Inliner<'tcx> {
 
         // Only consider direct calls to functions
         let terminator = bb_data.terminator();
-        if let TerminatorKind::Call { func: ref op, .. 
} = terminator.kind { + // FIXME: Handle inlining of diverging calls + if let TerminatorKind::Call { func: ref op, destination: Some(_), .. } = terminator.kind { if let ty::FnDef(callee_def_id, substs) = *op.ty(caller_body, self.tcx).kind() { // To resolve an instance its substs have to be fully normalized, so // we do this here. @@ -397,12 +393,11 @@ impl Inliner<'tcx> { callsite: CallSite<'tcx>, caller_body: &mut Body<'tcx>, mut callee_body: Body<'tcx>, - ) -> bool { + ) { let terminator = caller_body[callsite.bb].terminator.take().unwrap(); match terminator.kind { - // FIXME: Handle inlining of diverging calls TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => { - debug!("inlined {:?} into {:?}", callsite.callee, caller_body.source); + debug!("inlined {} into {:?}", callsite.callee, caller_body.source.instance); // If the call is something like `a[*i] = f(i)`, where // `i : &mut usize`, then just duplicating the `a[*i]` @@ -519,14 +514,8 @@ impl Inliner<'tcx> { matches!(constant.literal.val, ConstKind::Unevaluated(_, _, _)) }), ); - - true - } - kind => { - caller_body[callsite.bb].terminator = - Some(Terminator { source_info: terminator.source_info, kind }); - false } + kind => bug!("unexpected terminator kind {:?}", kind), } } From dc4d74d149198c60ca1cf3a8513a7f3d031503eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Mi=C4=85sko?= Date: Mon, 9 Nov 2020 00:00:00 +0000 Subject: [PATCH 16/19] inliner: Break inlining cycles When examining candidates for inlining, reject those that are determined to be recursive either because of self-recursive calls or calls to any instances already inlined. --- compiler/rustc_mir/src/transform/inline.rs | 214 +++++++++--------- src/test/mir-opt/inline/inline-cycle.rs | 60 +++++ .../inline/inline_cycle.one.Inline.diff | 27 +++ .../inline/inline_cycle.two.Inline.diff | 47 ++++ 4 files changed, 242 insertions(+), 106 deletions(-) create mode 100644 src/test/mir-opt/inline/inline-cycle.rs create mode 100644 src/test/mir-opt/inline/inline_cycle.one.Inline.diff create mode 100644 src/test/mir-opt/inline/inline_cycle.two.Inline.diff diff --git a/compiler/rustc_mir/src/transform/inline.rs b/compiler/rustc_mir/src/transform/inline.rs index 503b40b506a6e..97b513445264a 100644 --- a/compiler/rustc_mir/src/transform/inline.rs +++ b/compiler/rustc_mir/src/transform/inline.rs @@ -1,6 +1,7 @@ //! Inlining pass for MIR functions use rustc_attr as attr; +use rustc_hir as hir; use rustc_index::bit_set::BitSet; use rustc_index::vec::Idx; use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs}; @@ -12,9 +13,8 @@ use rustc_target::spec::abi::Abi; use super::simplify::{remove_dead_blocks, CfgSimplifier}; use crate::transform::MirPass; -use std::collections::VecDeque; use std::iter; -use std::ops::RangeFrom; +use std::ops::{Range, RangeFrom}; const DEFAULT_THRESHOLD: usize = 50; const HINT_THRESHOLD: usize = 100; @@ -37,127 +37,128 @@ struct CallSite<'tcx> { impl<'tcx> MirPass<'tcx> for Inline { fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - if tcx.sess.opts.debugging_opts.mir_opt_level >= 2 { - if tcx.sess.opts.debugging_opts.instrument_coverage { - // The current implementation of source code coverage injects code region counters - // into the MIR, and assumes a 1-to-1 correspondence between MIR and source-code- - // based function. 
- debug!("function inlining is disabled when compiling with `instrument_coverage`"); - } else { - Inliner { - tcx, - param_env: tcx.param_env_reveal_all_normalized(body.source.def_id()), - codegen_fn_attrs: tcx.codegen_fn_attrs(body.source.def_id()), - } - .run_pass(body); - } + if tcx.sess.opts.debugging_opts.mir_opt_level < 2 { + return; + } + + if tcx.sess.opts.debugging_opts.instrument_coverage { + // The current implementation of source code coverage injects code region counters + // into the MIR, and assumes a 1-to-1 correspondence between MIR and source-code- + // based function. + debug!("function inlining is disabled when compiling with `instrument_coverage`"); + return; + } + + if inline(tcx, body) { + debug!("running simplify cfg on {:?}", body.source); + CfgSimplifier::new(body).simplify(); + remove_dead_blocks(body); } } } +fn inline(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> bool { + let def_id = body.source.def_id(); + let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local()); + + // Only do inlining into fn bodies. + if !tcx.hir().body_owner_kind(hir_id).is_fn_or_closure() { + return false; + } + if body.source.promoted.is_some() { + return false; + } + + let mut this = Inliner { + tcx, + param_env: tcx.param_env_reveal_all_normalized(body.source.def_id()), + codegen_fn_attrs: tcx.codegen_fn_attrs(body.source.def_id()), + hir_id, + history: Vec::new(), + changed: false, + }; + let blocks = BasicBlock::new(0)..body.basic_blocks().next_index(); + this.process_blocks(body, blocks); + this.changed +} + struct Inliner<'tcx> { tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>, + /// Caller codegen attributes. codegen_fn_attrs: &'tcx CodegenFnAttrs, + /// Caller HirID. + hir_id: hir::HirId, + /// Stack of inlined instances. + history: Vec>, + /// Indicates that the caller body has been modified. + changed: bool, } impl Inliner<'tcx> { - fn run_pass(&self, caller_body: &mut Body<'tcx>) { - // Keep a queue of callsites to try inlining on. We take - // advantage of the fact that queries detect cycles here to - // allow us to try and fetch the fully optimized MIR of a - // call; if it succeeds, we can inline it and we know that - // they do not call us. Otherwise, we just don't try to - // inline. - // - // We use a queue so that we inline "broadly" before we inline - // in depth. It is unclear if this is the best heuristic, - // really, but that's true of all the heuristics in this - // file. =) - - let mut callsites = VecDeque::new(); - - let def_id = caller_body.source.def_id(); - - // Only do inlining into fn bodies. 
- let self_hir_id = self.tcx.hir().local_def_id_to_hir_id(def_id.expect_local()); - if self.tcx.hir().body_owner_kind(self_hir_id).is_fn_or_closure() - && caller_body.source.promoted.is_none() - { - for (bb, bb_data) in caller_body.basic_blocks().iter_enumerated() { - if let Some(callsite) = self.get_valid_function_call(bb, bb_data, caller_body) { - callsites.push_back(callsite); - } - } - } else { - return; - } - - let mut changed = false; - while let Some(callsite) = callsites.pop_front() { - debug!("checking whether to inline callsite {:?}", callsite); + fn process_blocks(&mut self, caller_body: &mut Body<'tcx>, blocks: Range) { + for bb in blocks { + let callsite = match self.get_valid_function_call(bb, &caller_body[bb], caller_body) { + None => continue, + Some(it) => it, + }; - if let InstanceDef::Item(_) = callsite.callee.def { - if !self.tcx.is_mir_available(callsite.callee.def_id()) { - debug!("checking whether to inline callsite {:?} - MIR unavailable", callsite,); - continue; - } + if !self.is_mir_available(&callsite.callee, caller_body) { + debug!("MIR unavailable {}", callsite.callee); + continue; } - let callee_body = if let Some(callee_def_id) = callsite.callee.def_id().as_local() { - let callee_hir_id = self.tcx.hir().local_def_id_to_hir_id(callee_def_id); - // Avoid a cycle here by only using `instance_mir` only if we have - // a lower `HirId` than the callee. This ensures that the callee will - // not inline us. This trick only works without incremental compilation. - // So don't do it if that is enabled. Also avoid inlining into generators, - // since their `optimized_mir` is used for layout computation, which can - // create a cycle, even when no attempt is made to inline the function - // in the other direction. - if !self.tcx.dep_graph.is_fully_enabled() - && self_hir_id < callee_hir_id - && caller_body.generator_kind.is_none() - { - self.tcx.instance_mir(callsite.callee.def) - } else { - continue; - } - } else { - // This cannot result in a cycle since the callee MIR is from another crate - // and is already optimized. - self.tcx.instance_mir(callsite.callee.def) - }; - - if !self.consider_optimizing(callsite, &callee_body) { + let callee_body = self.tcx.instance_mir(callsite.callee.def); + if !self.should_inline(callsite, callee_body) { continue; } + if !self.tcx.consider_optimizing(|| { + format!("Inline {:?} into {}", callee_body.span, callsite.callee) + }) { + return; + } + let callee_body = callsite.callee.subst_mir_and_normalize_erasing_regions( self.tcx, self.param_env, callee_body, ); - let start = caller_body.basic_blocks().len(); + let old_blocks = caller_body.basic_blocks().next_index(); self.inline_call(callsite, caller_body, callee_body); + let new_blocks = old_blocks..caller_body.basic_blocks().next_index(); + self.changed = true; - // Add callsites from inlined function - for (bb, bb_data) in caller_body.basic_blocks().iter_enumerated().skip(start) { - if let Some(new_callsite) = self.get_valid_function_call(bb, bb_data, caller_body) { - // Don't inline the same function multiple times. - if callsite.callee != new_callsite.callee { - callsites.push_back(new_callsite); - } - } - } + self.history.push(callsite.callee); + self.process_blocks(caller_body, new_blocks); + self.history.pop(); + } + } - changed = true; + fn is_mir_available(&self, callee: &Instance<'tcx>, caller_body: &Body<'tcx>) -> bool { + if let InstanceDef::Item(_) = callee.def { + if !self.tcx.is_mir_available(callee.def_id()) { + return false; + } } - // Simplify if we inlined anything. 
- if changed { - debug!("running simplify cfg on {:?}", caller_body.source); - CfgSimplifier::new(caller_body).simplify(); - remove_dead_blocks(caller_body); + if let Some(callee_def_id) = callee.def_id().as_local() { + let callee_hir_id = self.tcx.hir().local_def_id_to_hir_id(callee_def_id); + // Avoid a cycle here by only using `instance_mir` only if we have + // a lower `HirId` than the callee. This ensures that the callee will + // not inline us. This trick only works without incremental compilation. + // So don't do it if that is enabled. Also avoid inlining into generators, + // since their `optimized_mir` is used for layout computation, which can + // create a cycle, even when no attempt is made to inline the function + // in the other direction. + !self.tcx.dep_graph.is_fully_enabled() + && self.hir_id < callee_hir_id + && caller_body.generator_kind.is_none() + } else { + // This cannot result in a cycle since the callee MIR is from another crate + // and is already optimized. + true } } @@ -196,14 +197,6 @@ impl Inliner<'tcx> { None } - fn consider_optimizing(&self, callsite: CallSite<'tcx>, callee_body: &Body<'tcx>) -> bool { - debug!("consider_optimizing({:?})", callsite); - self.should_inline(callsite, callee_body) - && self.tcx.consider_optimizing(|| { - format!("Inline {:?} into {:?}", callee_body.span, callsite) - }) - } - fn should_inline(&self, callsite: CallSite<'tcx>, callee_body: &Body<'tcx>) -> bool { debug!("should_inline({:?})", callsite); let tcx = self.tcx; @@ -323,7 +316,18 @@ impl Inliner<'tcx> { } TerminatorKind::Call { func: Operand::Constant(ref f), cleanup, .. } => { - if let ty::FnDef(def_id, _) = *f.literal.ty.kind() { + if let ty::FnDef(def_id, substs) = + *callsite.callee.subst_mir(self.tcx, &f.literal.ty).kind() + { + let substs = self.tcx.normalize_erasing_regions(self.param_env, substs); + if let Ok(Some(instance)) = + Instance::resolve(self.tcx, self.param_env, def_id, substs) + { + if callsite.callee == instance || self.history.contains(&instance) { + debug!("`callee is recursive - not inlining"); + return false; + } + } // Don't give intrinsics the extra penalty for calls let f = tcx.fn_sig(def_id); if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic { @@ -397,8 +401,6 @@ impl Inliner<'tcx> { let terminator = caller_body[callsite.bb].terminator.take().unwrap(); match terminator.kind { TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => { - debug!("inlined {} into {:?}", callsite.callee, caller_body.source.instance); - // If the call is something like `a[*i] = f(i)`, where // `i : &mut usize`, then just duplicating the `a[*i]` // Place could result in two different locations if `f` diff --git a/src/test/mir-opt/inline/inline-cycle.rs b/src/test/mir-opt/inline/inline-cycle.rs new file mode 100644 index 0000000000000..63ad57de1d464 --- /dev/null +++ b/src/test/mir-opt/inline/inline-cycle.rs @@ -0,0 +1,60 @@ +// Check that inliner handles various forms of recursion and doesn't fall into +// an infinite inlining cycle. The particular outcome of inlining is not +// crucial otherwise. +// +// Regression test for issue #78573. 
+ +fn main() { + one(); + two(); +} + +// EMIT_MIR inline_cycle.one.Inline.diff +fn one() { + ::call(); +} + +pub trait Call { + fn call(); +} + +pub struct A(T); +pub struct B(T); +pub struct C; + +impl Call for A { + #[inline] + fn call() { + as Call>::call() + } +} + + +impl Call for B { + #[inline] + fn call() { + ::call() + } +} + +impl Call for C { + #[inline] + fn call() { + A::::call() + } +} + +// EMIT_MIR inline_cycle.two.Inline.diff +fn two() { + call(f); +} + +#[inline] +fn call(f: F) { + f(); +} + +#[inline] +fn f() { + call(f); +} diff --git a/src/test/mir-opt/inline/inline_cycle.one.Inline.diff b/src/test/mir-opt/inline/inline_cycle.one.Inline.diff new file mode 100644 index 0000000000000..1b53c82788540 --- /dev/null +++ b/src/test/mir-opt/inline/inline_cycle.one.Inline.diff @@ -0,0 +1,27 @@ +- // MIR for `one` before Inline ++ // MIR for `one` after Inline + + fn one() -> () { + let mut _0: (); // return place in scope 0 at $DIR/inline-cycle.rs:13:10: 13:10 + let _1: (); // in scope 0 at $DIR/inline-cycle.rs:14:5: 14:24 ++ scope 1 (inlined ::call) { // at $DIR/inline-cycle.rs:14:5: 14:24 ++ } + + bb0: { + StorageLive(_1); // scope 0 at $DIR/inline-cycle.rs:14:5: 14:24 +- _1 = ::call() -> bb1; // scope 0 at $DIR/inline-cycle.rs:14:5: 14:24 ++ _1 = as Call>::call() -> bb1; // scope 1 at $DIR/inline-cycle.rs:14:5: 14:24 + // mir::Constant +- // + span: $DIR/inline-cycle.rs:14:5: 14:22 +- // + literal: Const { ty: fn() {::call}, val: Value(Scalar()) } ++ // + span: $DIR/inline-cycle.rs:14:5: 14:24 ++ // + literal: Const { ty: fn() { as Call>::call}, val: Value(Scalar()) } + } + + bb1: { + StorageDead(_1); // scope 0 at $DIR/inline-cycle.rs:14:24: 14:25 + _0 = const (); // scope 0 at $DIR/inline-cycle.rs:13:10: 15:2 + return; // scope 0 at $DIR/inline-cycle.rs:15:2: 15:2 + } + } + diff --git a/src/test/mir-opt/inline/inline_cycle.two.Inline.diff b/src/test/mir-opt/inline/inline_cycle.two.Inline.diff new file mode 100644 index 0000000000000..b44baca9bf497 --- /dev/null +++ b/src/test/mir-opt/inline/inline_cycle.two.Inline.diff @@ -0,0 +1,47 @@ +- // MIR for `two` before Inline ++ // MIR for `two` after Inline + + fn two() -> () { + let mut _0: (); // return place in scope 0 at $DIR/inline-cycle.rs:48:10: 48:10 + let _1: (); // in scope 0 at $DIR/inline-cycle.rs:49:5: 49:12 ++ let mut _2: fn() {f}; // in scope 0 at $DIR/inline-cycle.rs:49:5: 49:12 ++ let mut _5: (); // in scope 0 at $DIR/inline-cycle.rs:49:5: 49:12 ++ scope 1 (inlined call::) { // at $DIR/inline-cycle.rs:49:5: 49:12 ++ debug f => _2; // in scope 1 at $DIR/inline-cycle.rs:49:5: 49:12 ++ let _3: (); // in scope 1 at $DIR/inline-cycle.rs:49:5: 49:12 ++ let mut _4: fn() {f}; // in scope 1 at $DIR/inline-cycle.rs:49:5: 49:12 ++ scope 2 (inlined >::call_once - shim(fn() {f})) { // at $DIR/inline-cycle.rs:49:5: 49:12 ++ } ++ } + + bb0: { + StorageLive(_1); // scope 0 at $DIR/inline-cycle.rs:49:5: 49:12 +- _1 = call::(f) -> bb1; // scope 0 at $DIR/inline-cycle.rs:49:5: 49:12 ++ StorageLive(_2); // scope 0 at $DIR/inline-cycle.rs:49:5: 49:12 ++ _2 = f; // scope 0 at $DIR/inline-cycle.rs:49:5: 49:12 + // mir::Constant +- // + span: $DIR/inline-cycle.rs:49:5: 49:9 +- // + literal: Const { ty: fn(fn() {f}) {call::}, val: Value(Scalar()) } +- // mir::Constant + // + span: $DIR/inline-cycle.rs:49:10: 49:11 + // + literal: Const { ty: fn() {f}, val: Value(Scalar()) } ++ StorageLive(_3); // scope 1 at $DIR/inline-cycle.rs:49:5: 49:12 ++ StorageLive(_4); // scope 1 at $DIR/inline-cycle.rs:49:5: 49:12 ++ _4 = move _2; // scope 1 at 
$DIR/inline-cycle.rs:49:5: 49:12 ++ StorageLive(_5); // scope 1 at $DIR/inline-cycle.rs:49:5: 49:12 ++ _5 = const (); // scope 1 at $DIR/inline-cycle.rs:49:5: 49:12 ++ _3 = move _4() -> bb1; // scope 2 at $DIR/inline-cycle.rs:49:5: 49:12 + } + + bb1: { ++ StorageDead(_5); // scope 1 at $DIR/inline-cycle.rs:49:5: 49:12 ++ StorageDead(_4); // scope 1 at $DIR/inline-cycle.rs:49:5: 49:12 ++ StorageDead(_3); // scope 1 at $DIR/inline-cycle.rs:49:5: 49:12 ++ _1 = const (); // scope 1 at $DIR/inline-cycle.rs:49:5: 49:12 ++ StorageDead(_2); // scope 0 at $DIR/inline-cycle.rs:49:5: 49:12 + StorageDead(_1); // scope 0 at $DIR/inline-cycle.rs:49:12: 49:13 + _0 = const (); // scope 0 at $DIR/inline-cycle.rs:48:10: 50:2 + return; // scope 0 at $DIR/inline-cycle.rs:50:2: 50:2 + } + } + From 7ca6e8f767eaaaf30cec10ec6936587fc521b934 Mon Sep 17 00:00:00 2001 From: Stein Somers Date: Wed, 28 Oct 2020 11:58:06 +0100 Subject: [PATCH 17/19] BTreeMap: fix pointer provenance rules, make borrowing explicit --- library/alloc/src/collections/btree/node.rs | 572 ++++++++++-------- library/alloc/src/collections/btree/search.rs | 2 +- library/alloc/src/lib.rs | 1 + 3 files changed, 328 insertions(+), 247 deletions(-) diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs index 433074027e7f7..dbf9031620ee1 100644 --- a/library/alloc/src/collections/btree/node.rs +++ b/library/alloc/src/collections/btree/node.rs @@ -32,7 +32,6 @@ use core::cmp::Ordering; use core::marker::PhantomData; use core::mem::{self, MaybeUninit}; use core::ptr::{self, NonNull, Unique}; -use core::slice; use crate::alloc::{AllocRef, Global, Layout}; use crate::boxed::Box; @@ -120,11 +119,11 @@ struct BoxedNode { impl BoxedNode { fn from_leaf(node: Box>) -> Self { - BoxedNode { ptr: Box::into_unique(node).0 } + BoxedNode { ptr: Unique::from(Box::leak(node)) } } fn from_internal(node: Box>) -> Self { - BoxedNode { ptr: Unique::from(&mut Box::leak(node).data) } + BoxedNode { ptr: Unique::from(Box::leak(node)).cast() } } fn as_ptr(&self) -> NonNull> { @@ -189,6 +188,11 @@ impl Root { NodeRef { height: self.height, node: self.node.as_ptr(), _marker: PhantomData } } + /// Packs the reference, aware of type and height, into a type-agnostic pointer. + fn into_boxed_node(self) -> BoxedNode { + self.node + } + /// Adds a new internal node with a single edge pointing to the previous root node, /// make that new node the root node, and return it. This increases the height by 1 /// and is the opposite of `pop_internal_level`. @@ -218,15 +222,16 @@ impl Root { pub fn pop_internal_level(&mut self) { assert!(self.height > 0); - let top = self.node.ptr; + let top = BoxedNode::as_ptr(&self.node); let mut internal_node = unsafe { self.internal_node_as_mut() }; - self.node = unsafe { internal_node.as_internal_mut().edges[0].assume_init_read() }; + let internal_node = NodeRef::as_internal_mut(&mut internal_node); + self.node = unsafe { internal_node.edges[0].assume_init_read() }; self.height -= 1; - self.node_as_mut().as_leaf_mut().parent = None; + self.node_as_mut().clear_parent_link(); unsafe { - Global.dealloc(NonNull::from(top).cast(), Layout::new::>()); + Global.dealloc(top.cast(), Layout::new::>()); } } } @@ -236,21 +241,49 @@ impl Root { // internal use of `NodeRef` because we stay completely generic over `K` and `V`. // However, whenever a public type wraps `NodeRef`, make sure that it has the // correct variance. +/// /// A reference to a node. 
 ///
 /// This type has a number of parameters that controls how it acts:
-/// - `BorrowType`: This can be `Immut<'a>`, `Mut<'a>` or `ValMut<'a>' for some `'a`
-///   or `Owned`.
-///   When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`,
-///   when this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
-///   when this is `ValMut<'a>`, the `NodeRef` acts as immutable with respect
-///   to keys and tree structure, but allows mutable references to values,
-///   and when this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`.
-/// - `K` and `V`: These control what types of things are stored in the nodes.
+/// - `BorrowType`: A dummy type that describes the kind of borrow and carries a lifetime.
+///    - When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`.
+///    - When this is `ValMut<'a>`, the `NodeRef` acts roughly like `&'a Node`
+///      with respect to keys and tree structure, but also allows many
+///      mutable references to values throughout the tree to coexist.
+///    - When this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
+///      although insert methods allow a mutable pointer to a value to coexist.
+///    - When this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`,
+///      but does not have a destructor, and must be cleaned up manually.
+/// - `K` and `V`: These are the types of keys and values stored in the nodes.
 /// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
 ///   `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
 ///   `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
 ///   `NodeRef` could be pointing to either type of node.
+///   `Type` is named `NodeType` when used outside `NodeRef`.
+///
+/// Both `BorrowType` and `NodeType` restrict what methods we implement, to
+/// exploit static type safety. There are limitations in the way we can apply
+/// such restrictions:
+/// - For each type parameter, we can only define a method either generically
+///   or for one particular type. For example, we cannot define a method like
+///   `key_at` generically for all `BorrowType`, because we want to return
+///   `&'a K` for most choices of `BorrowType`, but plain `K` for `Owned`.
+///   We cannot define `key_at` once for all types that have a lifetime.
+///   Therefore, we define it only for the least powerful type `Immut<'a>`.
+/// - We cannot get implicit coercion from say `Mut<'a>` to `Immut<'a>`.
+///   Therefore, we have to explicitly call `reborrow` on a more powerful
+///   `NodeRef` in order to reach a method like `key_at`.
+/// - All methods on `NodeRef` that return some kind of reference, except
+///   `reborrow` and `reborrow_mut`, take `self` by value and not by reference.
+///   This avoids silently returning a second reference somewhere in the tree.
+///   That is irrelevant when `BorrowType` is `Immut<'a>`, but the rule does
+///   no harm because we make those `NodeRef` implicitly `Copy`.
+///   The rule also avoids implicitly returning the lifetime of `&self`,
+///   instead of the lifetime contained in `BorrowType`.
+///   An exception to this rule are the insert functions.
+/// - Given the above, we need a `reborrow_mut` to explicitly copy a `Mut<'a>`
+///   `NodeRef` whenever we want to invoke a method returning an extra reference
+///   somewhere in the tree.
 pub struct NodeRef<BorrowType, K, V, Type> {
     /// The number of levels below the node, a property of the node that cannot be
     /// entirely described by `Type` and that the node does not store itself either.
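
Editorial aside (illustration, not part of the patch): the borrow-type scheme described in the doc comment above can be sketched in isolation. The `Ref`, `Immut`, and `Mut` names below are invented for this sketch and only approximate `NodeRef`; the point is the rule stated above, namely that methods handing out interior references take `self` by value and return the lifetime carried by the marker type, so an exclusive reference must be explicitly `reborrow`ed before such methods can be reached.

```rust
#![allow(dead_code)] // the marker types are only ever used as type parameters

use std::marker::PhantomData;
use std::ptr::NonNull;

/// Marker for a shared borrow with lifetime `'a` (toy counterpart of `marker::Immut<'a>`).
struct Immut<'a>(PhantomData<&'a ()>);
/// Marker for an exclusive borrow with lifetime `'a` (toy counterpart of `marker::Mut<'a>`).
struct Mut<'a>(PhantomData<&'a mut ()>);

/// A toy `NodeRef`: a raw pointer plus a zero-sized borrow marker.
struct Ref<BorrowType, T> {
    node: NonNull<T>,
    _marker: PhantomData<BorrowType>,
}

// Shared references are freely copyable; exclusive ones are not.
impl<'a, T> Copy for Ref<Immut<'a>, T> {}
impl<'a, T> Clone for Ref<Immut<'a>, T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<BorrowType, T> Ref<BorrowType, T> {
    /// Temporarily takes out another, immutable reference to the same node.
    fn reborrow(&self) -> Ref<Immut<'_>, T> {
        Ref { node: self.node, _marker: PhantomData }
    }
}

impl<'a, T> Ref<Immut<'a>, T> {
    /// Takes `self` by value and returns the lifetime carried by the marker,
    /// not the lifetime of `&self` -- the rule described in the comment above.
    fn get(self) -> &'a T {
        unsafe { self.node.as_ref() }
    }
}

impl<'a, T> Ref<Mut<'a>, T> {
    /// Consumes the exclusive reference to hand out a plain `&'a mut T`.
    fn get_mut(mut self) -> &'a mut T {
        unsafe { self.node.as_mut() }
    }
}

fn main() {
    let mut value = 41;
    let r: Ref<Mut<'_>, i32> = Ref { node: NonNull::from(&mut value), _marker: PhantomData };
    // Reading through an exclusive reference requires an explicit reborrow.
    let shared = r.reborrow();
    assert_eq!(*shared.get(), 41);
    // `get_mut` consumes the exclusive reference; no second reference can linger.
    *r.get_mut() += 1;
    assert_eq!(value, 42);
}
```

Making only the `Immut` flavour `Copy` is what keeps by-value methods ergonomic for shared access while leaving exclusive access affine, which is the design choice the bullet list above spells out for the real `NodeRef`.
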
@@ -277,30 +310,45 @@ unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef Send for NodeRef, K, V, Type> {} unsafe impl Send for NodeRef {} +impl NodeRef { + /// Unpack a node reference that was packed by `Root::into_boxed_node`. + fn from_boxed_node(boxed_node: BoxedNode, height: usize) -> Self { + NodeRef { height, node: boxed_node.as_ptr(), _marker: PhantomData } + } +} + +impl NodeRef { + /// Unpack a node reference that was packed as `NodeRef::parent`. + fn from_internal(node: NonNull>, height: usize) -> Self { + debug_assert!(height > 0); + NodeRef { height, node: node.cast(), _marker: PhantomData } + } +} + impl NodeRef { - /// Exposes the data of an internal node for reading. + /// Exposes the data of an internal node. /// - /// Returns a raw ptr to avoid invalidating other references to this node, - /// which is possible when BorrowType is marker::ValMut. - fn as_internal_ptr(&self) -> *const InternalNode { - self.node.as_ptr() as *const InternalNode + /// Returns a raw ptr to avoid invalidating other references to this node. + fn as_internal_ptr(this: &Self) -> *mut InternalNode { + // SAFETY: the static node type is `Internal`. + this.node.as_ptr() as *mut InternalNode } } -impl<'a, K, V> NodeRef, K, V, marker::Internal> { - /// Exposes the data of an internal node for reading, - /// when we know we have exclusive access. - fn as_internal(&mut self) -> &InternalNode { - unsafe { &*self.as_internal_ptr() } +impl<'a, K, V> NodeRef, K, V, marker::Internal> { + /// Exposes the data of an internal node in an immutable tree. + fn as_internal(this: &Self) -> &'a InternalNode { + let ptr = Self::as_internal_ptr(this); + // SAFETY: there can be no mutable references into this tree borrowed as `Immut`. + unsafe { &*ptr } } } impl<'a, K, V> NodeRef, K, V, marker::Internal> { - /// Exposes the data of an internal node for writing. - /// - /// We don't need to return a raw ptr because we have unique access to the entire node. - fn as_internal_mut(&mut self) -> &mut InternalNode { - unsafe { &mut *(self.node.as_ptr() as *mut InternalNode) } + /// Offers exclusive access to the data of an internal node. + fn as_internal_mut(this: &mut Self) -> &'a mut InternalNode { + let ptr = Self::as_internal_ptr(this); + unsafe { &mut *ptr } } } @@ -312,7 +360,7 @@ impl NodeRef { pub fn len(&self) -> usize { // Crucially, we only access the `len` field here. If BorrowType is marker::ValMut, // there might be outstanding mutable references to values that we must not invalidate. - unsafe { usize::from((*self.as_leaf_ptr()).len) } + unsafe { usize::from((*Self::as_leaf_ptr(self)).len) } } /// Returns the height of this node with respect to the leaf level. Zero height means the @@ -322,48 +370,49 @@ impl NodeRef { } /// Temporarily takes out another, immutable reference to the same node. - fn reborrow(&self) -> NodeRef, K, V, Type> { + pub fn reborrow(&self) -> NodeRef, K, V, Type> { NodeRef { height: self.height, node: self.node, _marker: PhantomData } } /// Exposes the leaf portion of any leaf or internal node. /// - /// Returns a raw ptr to avoid invalidating other references to this node, - /// which is possible when BorrowType is marker::ValMut. - fn as_leaf_ptr(&self) -> *const LeafNode { + /// Returns a raw ptr to avoid invalidating other references to this node. + fn as_leaf_ptr(this: &Self) -> *mut LeafNode { // The node must be valid for at least the LeafNode portion. // This is not a reference in the NodeRef type because we don't know if // it should be unique or shared. 
- self.node.as_ptr() + this.node.as_ptr() } +} - /// Borrows a reference to one of the keys stored in the node. +impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { + /// Exposes one of the keys stored in the node. /// /// # Safety /// The node has more than `idx` initialized elements. - pub unsafe fn key_at(&self, idx: usize) -> &K { - unsafe { self.reborrow().into_key_at(idx) } + pub unsafe fn key_at(self, idx: usize) -> &'a K { + debug_assert!(idx < self.len()); + unsafe { Self::as_leaf(&self).keys.get_unchecked(idx).assume_init_ref() } } - /// Borrows a reference to one of the values stored in the node. + /// Exposes one of the values stored in the node. /// /// # Safety /// The node has more than `idx` initialized elements. - unsafe fn val_at(&self, idx: usize) -> &V { - unsafe { self.reborrow().into_val_at(idx) } + unsafe fn val_at(self, idx: usize) -> &'a V { + debug_assert!(idx < self.len()); + unsafe { Self::as_leaf(&self).vals.get_unchecked(idx).assume_init_ref() } } } -impl NodeRef { - /// Borrows a reference to the contents of one of the edges that delimit - /// the elements of the node, without invalidating other references. +impl<'a, K, V> NodeRef, K, V, marker::Internal> { + /// Exposes the contents of one of the edges in the node. /// /// # Safety /// The node has more than `idx` initialized elements. - unsafe fn edge_at(&self, idx: usize) -> &BoxedNode { + unsafe fn edge_at(self, idx: usize) -> &'a BoxedNode { debug_assert!(idx <= self.len()); - let node = self.as_internal_ptr(); - unsafe { (*node).edges.get_unchecked(idx).assume_init_ref() } + unsafe { Self::as_internal(&self).edges.get_unchecked(idx).assume_init_ref() } } } @@ -380,15 +429,11 @@ impl NodeRef { ) -> Result, marker::Edge>, Self> { // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut, // there might be outstanding mutable references to values that we must not invalidate. - let leaf_ptr = self.as_leaf_ptr(); + let leaf_ptr: *const _ = Self::as_leaf_ptr(&self); unsafe { (*leaf_ptr).parent } .as_ref() .map(|parent| Handle { - node: NodeRef { - height: self.height + 1, - node: parent.cast(), - _marker: PhantomData, - }, + node: NodeRef::from_internal(*parent, self.height + 1), idx: unsafe { usize::from((*leaf_ptr).parent_idx.assume_init()) }, _marker: PhantomData, }) @@ -420,11 +465,11 @@ impl NodeRef { } impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { - /// Exposes the data of a leaf node for reading in an immutable tree. - fn into_leaf(self) -> &'a LeafNode { - // SAFETY: we can access the entire node freely and do no need raw pointers, - // because there can be no mutable references to this Immut tree. - unsafe { &(*self.as_leaf_ptr()) } + /// Exposes the leaf portion of any leaf or internal node in an immutable tree. + fn as_leaf(this: &Self) -> &'a LeafNode { + let ptr = Self::as_leaf_ptr(this); + // SAFETY: there can be no mutable references into this tree borrowed as `Immut`. + unsafe { &*ptr } } } @@ -473,139 +518,155 @@ impl<'a, K, V, Type> NodeRef, K, V, Type> { NodeRef { height: self.height, node: self.node, _marker: PhantomData } } - /// Exposes the leaf portion of any leaf or internal node for writing. - /// - /// We don't need to return a raw ptr because we have unique access to the entire node. - fn as_leaf_mut(&mut self) -> &'a mut LeafNode { - unsafe { &mut (*self.node.as_ptr()) } + /// Offers exclusive access to the leaf portion of any leaf or internal node. 
+ fn as_leaf_mut(this: &mut Self) -> &'a mut LeafNode { + let ptr = Self::as_leaf_ptr(this); + // SAFETY: we have exclusive access to the entire node. + unsafe { &mut *ptr } } +} - /// Borrows a mutable reference to one of the keys stored in the node. +impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { + /// Offers exclusive access to a part of the key storage area. /// /// # Safety /// The node has more than `idx` initialized elements. - unsafe fn key_mut_at(&mut self, idx: usize) -> &mut K { - unsafe { self.reborrow_mut().into_key_mut_at(idx) } + unsafe fn into_key_area_mut_at(mut self, idx: usize) -> &'a mut MaybeUninit { + debug_assert!(idx < self.len()); + unsafe { Self::as_leaf_mut(&mut self).keys.get_unchecked_mut(idx) } } - /// Borrows a mutable reference to one of the values stored in the node. + /// Offers exclusive access to a part of the value storage area. /// /// # Safety /// The node has more than `idx` initialized elements. - unsafe fn val_mut_at(&mut self, idx: usize) -> &mut V { - unsafe { self.reborrow_mut().into_val_mut_at(idx) } - } - - fn keys_mut(&mut self) -> &mut [K] - where - K: 'a, - V: 'a, - { - // SAFETY: the caller will not be able to call further methods on self - // until the key slice reference is dropped, as we have unique access - // for the lifetime of the borrow. - // SAFETY: The keys of a node must always be initialized up to length. - unsafe { - slice::from_raw_parts_mut( - MaybeUninit::slice_as_mut_ptr(&mut self.as_leaf_mut().keys), - self.len(), - ) - } - } - - fn vals_mut(&mut self) -> &mut [V] - where - K: 'a, - V: 'a, - { - // SAFETY: the caller will not be able to call further methods on self - // until the value slice reference is dropped, as we have unique access - // for the lifetime of the borrow. - // SAFETY: The values of a node must always be initialized up to length. - unsafe { - slice::from_raw_parts_mut( - MaybeUninit::slice_as_mut_ptr(&mut self.as_leaf_mut().vals), - self.len(), - ) - } + unsafe fn into_val_area_mut_at(mut self, idx: usize) -> &'a mut MaybeUninit { + debug_assert!(idx < self.len()); + unsafe { Self::as_leaf_mut(&mut self).vals.get_unchecked_mut(idx) } } } -impl<'a, K, V> NodeRef, K, V, marker::Internal> { - fn edges_mut(&mut self) -> &mut [BoxedNode] { - unsafe { - slice::from_raw_parts_mut( - MaybeUninit::slice_as_mut_ptr(&mut self.as_internal_mut().edges), - self.len() + 1, - ) - } +impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Internal> { + /// Offers exclusive access to a part of the storage area for edge contents. + /// + /// # Safety + /// The node has at least `idx` initialized elements. + unsafe fn into_edge_area_mut_at(mut self, idx: usize) -> &'a mut MaybeUninit> { + debug_assert!(idx <= self.len()); + unsafe { Self::as_internal_mut(&mut self).edges.get_unchecked_mut(idx) } } } impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { - /// # Safety - /// The node has more than `idx` initialized elements. - unsafe fn into_key_at(self, idx: usize) -> &'a K { - unsafe { self.into_leaf().keys.get_unchecked(idx).assume_init_ref() } + /// Exposes the entire key storage area in the node, + /// regardless of the node's current length, + /// having exclusive access to the entire node. + unsafe fn key_area(self) -> &'a [MaybeUninit] { + Self::as_leaf(&self).keys.as_slice() } - /// # Safety - /// The node has more than `idx` initialized elements. 
- unsafe fn into_val_at(self, idx: usize) -> &'a V { - unsafe { self.into_leaf().vals.get_unchecked(idx).assume_init_ref() } + /// Exposes the entire value storage area in the node, + /// regardless of the node's current length, + /// having exclusive access to the entire node. + unsafe fn val_area(self) -> &'a [MaybeUninit] { + Self::as_leaf(&self).vals.as_slice() } } -impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { - /// # Safety - /// The node has more than `idx` initialized elements. - unsafe fn into_key_mut_at(mut self, idx: usize) -> &'a mut K { - debug_assert!(idx < self.len()); +impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Internal> { + /// Exposes the entire storage area for edge contents in the node, + /// regardless of the node's current length, + /// having exclusive access to the entire node. + unsafe fn edge_area(self) -> &'a [MaybeUninit>] { + Self::as_internal(&self).edges.as_slice() + } +} - let leaf = self.as_leaf_mut(); - unsafe { leaf.keys.get_unchecked_mut(idx).assume_init_mut() } +impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { + /// Offers exclusive access to a sized slice of key storage area in the node. + unsafe fn into_key_area_slice(mut self) -> &'a mut [MaybeUninit] { + let len = self.len(); + // SAFETY: the caller will not be able to call further methods on self + // until the key slice reference is dropped, as we have unique access + // for the lifetime of the borrow. + unsafe { Self::as_leaf_mut(&mut self).keys.get_unchecked_mut(..len) } } - /// # Safety - /// The node has more than `idx` initialized elements. - unsafe fn into_val_mut_at(mut self, idx: usize) -> &'a mut V { - debug_assert!(idx < self.len()); + /// Offers exclusive access to a sized slice of value storage area in the node. + unsafe fn into_val_area_slice(mut self) -> &'a mut [MaybeUninit] { + let len = self.len(); + // SAFETY: the caller will not be able to call further methods on self + // until the value slice reference is dropped, as we have unique access + // for the lifetime of the borrow. + unsafe { Self::as_leaf_mut(&mut self).vals.get_unchecked_mut(..len) } + } +} - let leaf = self.as_leaf_mut(); - unsafe { leaf.vals.get_unchecked_mut(idx).assume_init_mut() } +impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Internal> { + /// Offers exclusive access to a sized slice of storage area for edge contents in the node. + unsafe fn into_edge_area_slice(mut self) -> &'a mut [MaybeUninit>] { + let len = self.len(); + // SAFETY: the caller will not be able to call further methods on self + // until the edge slice reference is dropped, as we have unique access + // for the lifetime of the borrow. + unsafe { Self::as_internal_mut(&mut self).edges.get_unchecked_mut(..len + 1) } } } impl<'a, K, V, Type> NodeRef, K, V, Type> { /// # Safety - /// The node has more than `idx` initialized elements. - unsafe fn into_key_val_mut_at(self, idx: usize) -> (&'a K, &'a mut V) { + /// - The node has more than `idx` initialized elements. + /// - The keys and values of the node must be initialized up to its current length. + unsafe fn into_key_val_mut_at(mut self, idx: usize) -> (&'a K, &'a mut V) { // We only create a reference to the one element we are interested in, // to avoid aliasing with outstanding references to other elements, // in particular, those returned to the caller in earlier iterations. 
- let leaf = self.node.as_ptr(); + let leaf = Self::as_leaf_ptr(&mut self); let keys = unsafe { &raw const (*leaf).keys }; let vals = unsafe { &raw mut (*leaf).vals }; // We must coerce to unsized array pointers because of Rust issue #74679. let keys: *const [_] = keys; let vals: *mut [_] = vals; - // SAFETY: The keys and values of a node must always be initialized up to length. let key = unsafe { (&*keys.get_unchecked(idx)).assume_init_ref() }; let val = unsafe { (&mut *vals.get_unchecked_mut(idx)).assume_init_mut() }; (key, val) } } +impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { + /// Exposes exclusive access to the length of the node. + pub fn into_len_mut(mut self) -> &'a mut u16 { + &mut (*Self::as_leaf_mut(&mut self)).len + } +} + +impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { + /// Set or clear the node's link to its parent edge, + /// without invalidating other references to the node. + fn set_parent_link(&mut self, parent: NonNull>, parent_idx: usize) { + let leaf = Self::as_leaf_ptr(self); + unsafe { (*leaf).parent = Some(parent) }; + unsafe { (*leaf).parent_idx.write(parent_idx as u16) }; + } + + /// Clear the node's link to its parent edge, freeing it from its tree. + /// This only makes sense when there are no other references to the node. + fn clear_parent_link(&mut self) { + let leaf = Self::as_leaf_mut(self); + leaf.parent = None; + } +} + impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Leaf> { /// Adds a key/value pair to the end of the node. pub fn push(&mut self, key: K, val: V) { - let len = &mut self.as_leaf_mut().len; + let len = unsafe { self.reborrow_mut().into_len_mut() }; let idx = usize::from(*len); assert!(idx < CAPACITY); *len += 1; unsafe { - ptr::write(self.key_mut_at(idx), key); - ptr::write(self.val_mut_at(idx), val); + self.reborrow_mut().into_key_area_mut_at(idx).write(key); + self.reborrow_mut().into_val_area_mut_at(idx).write(val); } } @@ -614,10 +675,10 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Leaf> { assert!(self.len() < CAPACITY); unsafe { - slice_insert(self.keys_mut(), 0, key); - slice_insert(self.vals_mut(), 0, val); + *self.reborrow_mut().into_len_mut() += 1; + slice_insert(self.reborrow_mut().into_key_area_slice(), 0, key); + slice_insert(self.reborrow_mut().into_val_area_slice(), 0, val); } - self.as_leaf_mut().len += 1; } } @@ -643,14 +704,14 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Internal> { pub fn push(&mut self, key: K, val: V, edge: Root) { assert!(edge.height == self.height - 1); - let len = &mut self.as_leaf_mut().len; + let len = unsafe { self.reborrow_mut().into_len_mut() }; let idx = usize::from(*len); assert!(idx < CAPACITY); *len += 1; unsafe { - ptr::write(self.key_mut_at(idx), key); - ptr::write(self.val_mut_at(idx), val); - self.as_internal_mut().edges.get_unchecked_mut(idx + 1).write(edge.node); + self.reborrow_mut().into_key_area_mut_at(idx).write(key); + self.reborrow_mut().into_val_area_mut_at(idx).write(val); + self.reborrow_mut().into_edge_area_mut_at(idx + 1).write(edge.into_boxed_node()); Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link(); } } @@ -662,13 +723,12 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Internal> { assert!(self.len() < CAPACITY); unsafe { - slice_insert(self.keys_mut(), 0, key); - slice_insert(self.vals_mut(), 0, val); - slice_insert(self.edges_mut(), 0, edge.node); + *self.reborrow_mut().into_len_mut() += 1; + slice_insert(self.reborrow_mut().into_key_area_slice(), 0, key); + slice_insert(self.reborrow_mut().into_val_area_slice(), 0, val); 
+ slice_insert(self.reborrow_mut().into_edge_area_slice(), 0, edge.into_boxed_node()); } - self.as_leaf_mut().len += 1; - self.correct_all_childrens_parent_links(); } } @@ -683,19 +743,21 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { let idx = self.len() - 1; unsafe { - let key = ptr::read(self.key_at(idx)); - let val = ptr::read(self.val_at(idx)); + let key = ptr::read(self.reborrow().key_at(idx)); + let val = ptr::read(self.reborrow().val_at(idx)); let edge = match self.reborrow_mut().force() { ForceResult::Leaf(_) => None, ForceResult::Internal(internal) => { - let edge = ptr::read(internal.edge_at(idx + 1)); - let mut new_root = Root { node: edge, height: internal.height - 1 }; - new_root.node_as_mut().as_leaf_mut().parent = None; - Some(new_root) + let boxed_node = ptr::read(internal.reborrow().edge_at(idx + 1)); + let mut edge = Root { node: boxed_node, height: internal.height - 1 }; + // In practice, clearing the parent is a waste of time, because we will + // insert the node elsewhere and set its parent link again. + edge.node_as_mut().clear_parent_link(); + Some(edge) } }; - self.as_leaf_mut().len -= 1; + *self.reborrow_mut().into_len_mut() -= 1; (key, val, edge) } } @@ -709,29 +771,35 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { let old_len = self.len(); unsafe { - let key = slice_remove(self.keys_mut(), 0); - let val = slice_remove(self.vals_mut(), 0); + let key = slice_remove(self.reborrow_mut().into_key_area_slice(), 0); + let val = slice_remove(self.reborrow_mut().into_val_area_slice(), 0); let edge = match self.reborrow_mut().force() { ForceResult::Leaf(_) => None, ForceResult::Internal(mut internal) => { - let edge = slice_remove(internal.edges_mut(), 0); - let mut new_root = Root { node: edge, height: internal.height - 1 }; - new_root.node_as_mut().as_leaf_mut().parent = None; + let boxed_node = + slice_remove(internal.reborrow_mut().into_edge_area_slice(), 0); + let mut edge = Root { node: boxed_node, height: internal.height - 1 }; + // In practice, clearing the parent is a waste of time, because we will + // insert the node elsewhere and set its parent link again. + edge.node_as_mut().clear_parent_link(); internal.correct_childrens_parent_links(0..old_len); - Some(new_root) + Some(edge) } }; - self.as_leaf_mut().len -= 1; + *self.reborrow_mut().into_len_mut() -= 1; (key, val, edge) } } fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) { - (self.keys_mut().as_mut_ptr(), self.vals_mut().as_mut_ptr()) + let leaf = Self::as_leaf_mut(&mut self); + let keys = MaybeUninit::slice_as_mut_ptr(&mut leaf.keys); + let vals = MaybeUninit::slice_as_mut_ptr(&mut leaf.vals); + (keys, vals) } } @@ -816,7 +884,7 @@ impl NodeRef { /// Could be a public implementation of PartialEq, but only used in this module. 
fn eq(&self, other: &Self) -> bool { let Self { node, height, _marker: _ } = self; - if *node == other.node { + if node.eq(&other.node) { debug_assert_eq!(*height, other.height); true } else { @@ -924,11 +992,11 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark debug_assert!(self.node.len() < CAPACITY); unsafe { - slice_insert(self.node.keys_mut(), self.idx, key); - slice_insert(self.node.vals_mut(), self.idx, val); - self.node.as_leaf_mut().len += 1; + *self.node.reborrow_mut().into_len_mut() += 1; + slice_insert(self.node.reborrow_mut().into_key_area_slice(), self.idx, key); + slice_insert(self.node.reborrow_mut().into_val_area_slice(), self.idx, val); - self.node.val_mut_at(self.idx) + self.node.reborrow_mut().into_val_area_mut_at(self.idx).assume_init_mut() } } } @@ -964,12 +1032,12 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark impl<'a, K, V> Handle, K, V, marker::Internal>, marker::Edge> { /// Fixes the parent pointer and index in the child node below this edge. This is useful /// when the ordering of edges has been changed, such as in the various `insert` methods. - fn correct_parent_link(mut self) { - let idx = self.idx as u16; - let ptr = NonNull::new(self.node.as_internal_mut()); + fn correct_parent_link(self) { + // Create backpointer without invalidating other references to the node. + let ptr = unsafe { NonNull::new_unchecked(NodeRef::as_internal_ptr(&self.node)) }; + let idx = self.idx; let mut child = self.descend(); - child.as_leaf_mut().parent = ptr; - child.as_leaf_mut().parent_idx.write(idx); + child.set_parent_link(ptr, idx); } } @@ -981,11 +1049,12 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, debug_assert!(self.node.len() < CAPACITY); debug_assert!(edge.height == self.node.height - 1); + let boxed_node = edge.into_boxed_node(); unsafe { - slice_insert(self.node.keys_mut(), self.idx, key); - slice_insert(self.node.vals_mut(), self.idx, val); - slice_insert(self.node.edges_mut(), self.idx + 1, edge.node); - self.node.as_leaf_mut().len += 1; + *self.node.reborrow_mut().into_len_mut() += 1; + slice_insert(self.node.reborrow_mut().into_key_area_slice(), self.idx, key); + slice_insert(self.node.reborrow_mut().into_val_area_slice(), self.idx, val); + slice_insert(self.node.reborrow_mut().into_edge_area_slice(), self.idx + 1, boxed_node); self.node.correct_childrens_parent_links((self.idx + 1)..=self.node.len()); } @@ -1073,28 +1142,25 @@ impl Handle, marke // node pointer is dereferenced, we access the edges array with a // reference (Rust issue #73987) and invalidate any other references // to or inside the array, should any be around. 
- let internal_node = self.node.as_internal_ptr(); - NodeRef { - height: self.node.height - 1, - node: unsafe { (&*(*internal_node).edges.get_unchecked(self.idx).as_ptr()).as_ptr() }, - _marker: PhantomData, - } + let parent_ptr = NodeRef::as_internal_ptr(&self.node); + let boxed_node = unsafe { (*parent_ptr).edges.get_unchecked(self.idx).assume_init_read() }; + NodeRef::from_boxed_node(boxed_node, self.node.height - 1) } } impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { pub fn into_kv(self) -> (&'a K, &'a V) { - (unsafe { self.node.into_key_at(self.idx) }, unsafe { self.node.into_val_at(self.idx) }) + (unsafe { self.node.key_at(self.idx) }, unsafe { self.node.val_at(self.idx) }) } } impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { pub fn into_key_mut(self) -> &'a mut K { - unsafe { self.node.into_key_mut_at(self.idx) } + unsafe { self.node.into_key_area_mut_at(self.idx).assume_init_mut() } } pub fn into_val_mut(self) -> &'a mut V { - unsafe { self.node.into_val_mut_at(self.idx) } + unsafe { self.node.into_val_area_mut_at(self.idx).assume_init_mut() } } } @@ -1106,12 +1172,14 @@ impl<'a, K, V, NodeType> Handle, K, V, NodeType>, mar impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { pub fn kv_mut(&mut self) -> (&mut K, &mut V) { - // We cannot call into_key_mut_at and into_val_mut_at, because calling the second one + // We cannot call separate key and value methods, because calling the second one // invalidates the reference returned by the first. - let leaf = self.node.as_leaf_mut(); - let key = unsafe { leaf.keys.get_unchecked_mut(self.idx).assume_init_mut() }; - let val = unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() }; - (key, val) + unsafe { + let leaf = NodeRef::as_leaf_mut(&mut self.node.reborrow_mut()); + let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_mut(); + let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_mut(); + (key, val) + } } } @@ -1127,23 +1195,23 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType> /// by taking care of leaf data. 
fn split_leaf_data(&mut self, new_node: &mut LeafNode) -> (K, V) { let new_len = self.split_new_node_len(); + new_node.len = new_len as u16; unsafe { - let k = ptr::read(self.node.key_at(self.idx)); - let v = ptr::read(self.node.val_at(self.idx)); + let k = ptr::read(self.node.reborrow().key_at(self.idx)); + let v = ptr::read(self.node.reborrow().val_at(self.idx)); ptr::copy_nonoverlapping( - self.node.key_at(self.idx + 1), - MaybeUninit::slice_as_mut_ptr(&mut new_node.keys), + self.node.reborrow().key_area().as_ptr().add(self.idx + 1), + new_node.keys.as_mut_ptr(), new_len, ); ptr::copy_nonoverlapping( - self.node.val_at(self.idx + 1), - MaybeUninit::slice_as_mut_ptr(&mut new_node.vals), + self.node.reborrow().val_area().as_ptr().add(self.idx + 1), + new_node.vals.as_mut_ptr(), new_len, ); - self.node.as_leaf_mut().len = self.idx as u16; - new_node.len = new_len as u16; + *self.node.reborrow_mut().into_len_mut() = self.idx as u16; (k, v) } } @@ -1174,9 +1242,9 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark mut self, ) -> ((K, V), Handle, K, V, marker::Leaf>, marker::Edge>) { unsafe { - let k = slice_remove(self.node.keys_mut(), self.idx); - let v = slice_remove(self.node.vals_mut(), self.idx); - self.node.as_leaf_mut().len -= 1; + let k = slice_remove(self.node.reborrow_mut().into_key_area_slice(), self.idx); + let v = slice_remove(self.node.reborrow_mut().into_val_area_slice(), self.idx); + *self.node.reborrow_mut().into_len_mut() -= 1; ((k, v), self.left_edge()) } } @@ -1205,11 +1273,11 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, pub fn split(mut self) -> (NodeRef, K, V, marker::Internal>, K, V, Root) { unsafe { let mut new_node = Box::new(InternalNode::new()); - // Move edges out before reducing length: let new_len = self.split_new_node_len(); + // Move edges out before reducing length: ptr::copy_nonoverlapping( - self.node.edge_at(self.idx + 1), - MaybeUninit::slice_as_mut_ptr(&mut new_node.edges), + self.node.reborrow().edge_area().as_ptr().add(self.idx + 1), + new_node.edges.as_mut_ptr(), new_len + 1, ); let (k, v) = self.split_leaf_data(&mut new_node.data); @@ -1241,31 +1309,28 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, assert!(left_len + right_len < CAPACITY); unsafe { - ptr::write( - left_node.keys_mut().get_unchecked_mut(left_len), - slice_remove(self.node.keys_mut(), self.idx), - ); + *left_node.reborrow_mut().into_len_mut() += right_len as u16 + 1; + + let parent_key = slice_remove(self.node.reborrow_mut().into_key_area_slice(), self.idx); + left_node.reborrow_mut().into_key_area_mut_at(left_len).write(parent_key); ptr::copy_nonoverlapping( - right_node.key_at(0), - left_node.keys_mut().as_mut_ptr().add(left_len + 1), + right_node.reborrow().key_area().as_ptr(), + left_node.reborrow_mut().into_key_area_slice().as_mut_ptr().add(left_len + 1), right_len, ); - ptr::write( - left_node.vals_mut().get_unchecked_mut(left_len), - slice_remove(self.node.vals_mut(), self.idx), - ); + + let parent_val = slice_remove(self.node.reborrow_mut().into_val_area_slice(), self.idx); + left_node.reborrow_mut().into_val_area_mut_at(left_len).write(parent_val); ptr::copy_nonoverlapping( - right_node.val_at(0), - left_node.vals_mut().as_mut_ptr().add(left_len + 1), + right_node.reborrow().val_area().as_ptr(), + left_node.reborrow_mut().into_val_area_slice().as_mut_ptr().add(left_len + 1), right_len, ); - slice_remove(&mut self.node.edges_mut(), self.idx + 1); + slice_remove(&mut self.node.reborrow_mut().into_edge_area_slice(), self.idx + 1); let self_len = 
self.node.len(); self.node.correct_childrens_parent_links(self.idx + 1..self_len); - self.node.as_leaf_mut().len -= 1; - - left_node.as_leaf_mut().len += right_len as u16 + 1; + *self.node.reborrow_mut().into_len_mut() -= 1; if self.node.height > 1 { // SAFETY: the height of the nodes being merged is one below the height @@ -1273,8 +1338,8 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, let mut left_node = left_node.cast_to_internal_unchecked(); let right_node = right_node.cast_to_internal_unchecked(); ptr::copy_nonoverlapping( - right_node.edge_at(0), - left_node.edges_mut().as_mut_ptr().add(left_len + 1), + right_node.reborrow().edge_area().as_ptr(), + left_node.reborrow_mut().into_edge_area_slice().as_mut_ptr().add(left_len + 1), right_len + 1, ); @@ -1360,13 +1425,14 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, move_kv(left_kv, new_left_len, parent_kv, 0, 1); } - left_node.as_leaf_mut().len -= count as u16; - right_node.as_leaf_mut().len += count as u16; + *left_node.reborrow_mut().into_len_mut() -= count as u16; + *right_node.reborrow_mut().into_len_mut() += count as u16; match (left_node.force(), right_node.force()) { (ForceResult::Internal(left), ForceResult::Internal(mut right)) => { // Make room for stolen edges. - let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr(); + let left = left.reborrow(); + let right_edges = right.reborrow_mut().into_edge_area_slice().as_mut_ptr(); ptr::copy(right_edges, right_edges.add(count), right_len + 1); right.correct_childrens_parent_links(count..count + right_len + 1); @@ -1415,15 +1481,15 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, ptr::copy(right_kv.1.add(count), right_kv.1, new_right_len); } - left_node.as_leaf_mut().len += count as u16; - right_node.as_leaf_mut().len -= count as u16; + *left_node.reborrow_mut().into_len_mut() += count as u16; + *right_node.reborrow_mut().into_len_mut() -= count as u16; match (left_node.force(), right_node.force()) { (ForceResult::Internal(left), ForceResult::Internal(mut right)) => { - move_edges(right.reborrow_mut(), 0, left, left_len + 1, count); + move_edges(right.reborrow(), 0, left, left_len + 1, count); // Fix right indexing. - let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr(); + let right_edges = right.reborrow_mut().into_edge_area_slice().as_mut_ptr(); ptr::copy(right_edges.add(count), right_edges, new_right_len + 1); right.correct_childrens_parent_links(0..=new_right_len); } @@ -1448,16 +1514,16 @@ unsafe fn move_kv( } // Source and destination must have the same height. 
-unsafe fn move_edges( - mut source: NodeRef, K, V, marker::Internal>, +unsafe fn move_edges<'a, K: 'a, V: 'a>( + source: NodeRef, K, V, marker::Internal>, source_offset: usize, - mut dest: NodeRef, K, V, marker::Internal>, + mut dest: NodeRef, K, V, marker::Internal>, dest_offset: usize, count: usize, ) { - let source_ptr = source.as_internal().edges.as_ptr(); - let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr(); unsafe { + let source_ptr = source.edge_area().as_ptr(); + let dest_ptr = dest.reborrow_mut().into_edge_area_slice().as_mut_ptr(); ptr::copy_nonoverlapping(source_ptr.add(source_offset), dest_ptr.add(dest_offset), count); dest.correct_childrens_parent_links(dest_offset..dest_offset + count); } @@ -1553,11 +1619,12 @@ impl<'a, K, V> Handle, K, V, marker::LeafOrInternal>, ma move_kv(left_kv, left_new_len, right_kv, 0, right_new_len); - left_node.as_leaf_mut().len = left_new_len as u16; - right_node.as_leaf_mut().len = right_new_len as u16; + *left_node.reborrow_mut().into_len_mut() = left_new_len as u16; + *right_node.reborrow_mut().into_len_mut() = right_new_len as u16; match (left_node.force(), right_node.force()) { (ForceResult::Internal(left), ForceResult::Internal(right)) => { + let left = left.reborrow(); move_edges(left, left_new_len + 1, right, 1, right_new_len); } (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {} @@ -1606,20 +1673,33 @@ pub mod marker { pub enum Edge {} } -unsafe fn slice_insert(slice: &mut [T], idx: usize, val: T) { +/// Inserts a value into a slice of initialized elements followed by one uninitialized element. +/// +/// # Safety +/// The slice has more than `idx` elements. +unsafe fn slice_insert(slice: &mut [MaybeUninit], idx: usize, val: T) { unsafe { let len = slice.len(); + debug_assert!(len > idx); let slice_ptr = slice.as_mut_ptr(); - ptr::copy(slice_ptr.add(idx), slice_ptr.add(idx + 1), len - idx); - ptr::write(slice_ptr.add(idx), val); + if len > idx + 1 { + ptr::copy(slice_ptr.add(idx), slice_ptr.add(idx + 1), len - idx - 1); + } + (*slice_ptr.add(idx)).write(val); } } -unsafe fn slice_remove(slice: &mut [T], idx: usize) -> T { +/// Removes and returns a value from a slice of all initialized elements, leaving behind one +/// trailing uninitialized element. +/// +/// # Safety +/// The slice has more than `idx` elements. +unsafe fn slice_remove(slice: &mut [MaybeUninit], idx: usize) -> T { unsafe { let len = slice.len(); + debug_assert!(idx < len); let slice_ptr = slice.as_mut_ptr(); - let ret = ptr::read(slice_ptr.add(idx)); + let ret = (*slice_ptr.add(idx)).assume_init_read(); ptr::copy(slice_ptr.add(idx + 1), slice_ptr.add(idx), len - idx - 1); ret } diff --git a/library/alloc/src/collections/btree/search.rs b/library/alloc/src/collections/btree/search.rs index 1526c0673c691..701d5ec73e218 100644 --- a/library/alloc/src/collections/btree/search.rs +++ b/library/alloc/src/collections/btree/search.rs @@ -72,7 +72,7 @@ where // is an index -- not a reference. 
let len = node.len(); for i in 0..len { - let k = unsafe { node.key_at(i) }; + let k = unsafe { node.reborrow().key_at(i) }; match key.cmp(k.borrow()) { Ordering::Greater => {} Ordering::Equal => return (i, true), diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index 15092d463ec44..f21fc8854d05e 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -78,6 +78,7 @@ #![cfg_attr(test, feature(new_uninit))] #![feature(allocator_api)] #![feature(array_chunks)] +#![feature(array_methods)] #![feature(array_value_iter)] #![feature(array_windows)] #![feature(allow_internal_unstable)] From 8d43b3cbb91b7327b42b0da721525e7ae2911f0b Mon Sep 17 00:00:00 2001 From: David Hewitt <1939362+davidhewitt@users.noreply.github.com> Date: Sat, 25 Jul 2020 19:02:49 +0100 Subject: [PATCH 18/19] Add `#[cfg(panic = "...")]` --- compiler/rustc_feature/src/active.rs | 3 ++ compiler/rustc_feature/src/builtin_attrs.rs | 1 + compiler/rustc_session/src/config.rs | 3 ++ compiler/rustc_span/src/symbol.rs | 1 + compiler/rustc_target/src/spec/mod.rs | 8 ++++ .../src/language-features/cfg-panic.md | 38 +++++++++++++++++++ src/test/ui/cfg/cfg-panic-abort.rs | 16 ++++++++ src/test/ui/cfg/cfg-panic.rs | 18 +++++++++ ...panic.stderr => assert.const_panic.stderr} | 0 src/test/ui/consts/control-flow/assert.rs | 6 +-- .../feature-gates/feature-gate-cfg-panic.rs | 11 ++++++ .../feature-gate-cfg-panic.stderr | 21 ++++++++++ src/test/ui/fmt/format-args-capture.rs | 13 +++++-- .../issues/issue-68696-catch-during-unwind.rs | 4 +- .../ui/test-attrs/test-allow-fail-attr.rs | 3 +- 15 files changed, 136 insertions(+), 10 deletions(-) create mode 100644 src/doc/unstable-book/src/language-features/cfg-panic.md create mode 100644 src/test/ui/cfg/cfg-panic-abort.rs create mode 100644 src/test/ui/cfg/cfg-panic.rs rename src/test/ui/consts/control-flow/{assert.panic.stderr => assert.const_panic.stderr} (100%) create mode 100644 src/test/ui/feature-gates/feature-gate-cfg-panic.rs create mode 100644 src/test/ui/feature-gates/feature-gate-cfg-panic.stderr diff --git a/compiler/rustc_feature/src/active.rs b/compiler/rustc_feature/src/active.rs index 84114fc773533..0df67b63eba58 100644 --- a/compiler/rustc_feature/src/active.rs +++ b/compiler/rustc_feature/src/active.rs @@ -613,6 +613,9 @@ declare_features! ( /// Allows the use of destructuring assignments. (active, destructuring_assignment, "1.49.0", Some(71126), None), + /// Enables `#[cfg(panic = "...")]` config key. + (active, cfg_panic, "1.49.0", Some(77443), None), + // ------------------------------------------------------------------------- // feature-group-end: actual feature gates // ------------------------------------------------------------------------- diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index 57ae534590ddf..5c5cf609ac33c 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -33,6 +33,7 @@ const GATED_CFGS: &[GatedCfg] = &[ ), (sym::sanitize, sym::cfg_sanitize, cfg_fn!(cfg_sanitize)), (sym::version, sym::cfg_version, cfg_fn!(cfg_version)), + (sym::panic, sym::cfg_panic, cfg_fn!(cfg_panic)), ]; /// Find a gated cfg determined by the `pred`icate which is given the cfg's name. 
diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index b632bfbed301c..ab694ad4c5afd 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -793,6 +793,9 @@ pub fn default_configuration(sess: &Session) -> CrateConfig { } } + let panic_strategy = sess.panic_strategy(); + ret.insert((sym::panic, Some(panic_strategy.desc_symbol()))); + for s in sess.opts.debugging_opts.sanitizer { let symbol = Symbol::intern(&s.to_string()); ret.insert((sym::sanitize, Some(symbol))); diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 2324dba80f543..ad58f89d87da7 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -326,6 +326,7 @@ symbols! { cfg_attr, cfg_attr_multi, cfg_doctest, + cfg_panic, cfg_sanitize, cfg_target_feature, cfg_target_has_atomic, diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs index 895114b026e31..55d27fd8698a7 100644 --- a/compiler/rustc_target/src/spec/mod.rs +++ b/compiler/rustc_target/src/spec/mod.rs @@ -37,6 +37,7 @@ use crate::spec::abi::{lookup as lookup_abi, Abi}; use crate::spec::crt_objects::{CrtObjects, CrtObjectsFallback}; use rustc_serialize::json::{Json, ToJson}; +use rustc_span::symbol::{sym, Symbol}; use std::collections::BTreeMap; use std::ops::Deref; use std::path::{Path, PathBuf}; @@ -176,6 +177,13 @@ impl PanicStrategy { PanicStrategy::Abort => "abort", } } + + pub fn desc_symbol(&self) -> Symbol { + match *self { + PanicStrategy::Unwind => sym::unwind, + PanicStrategy::Abort => sym::abort, + } + } } impl ToJson for PanicStrategy { diff --git a/src/doc/unstable-book/src/language-features/cfg-panic.md b/src/doc/unstable-book/src/language-features/cfg-panic.md new file mode 100644 index 0000000000000..f5b73128ad6c2 --- /dev/null +++ b/src/doc/unstable-book/src/language-features/cfg-panic.md @@ -0,0 +1,38 @@ +# `cfg_panic` + +The tracking issue for this feature is: [#77443] + +[#77443]: https://github.com/rust-lang/rust/issues/77443 + +------------------------ + +The `cfg_panic` feature makes it possible to execute different code +depending on the panic strategy. + +Possible values at the moment are `"unwind"` or `"abort"`, although +it is possible that new panic strategies may be added to Rust in the +future. + +## Examples + +```rust +#![feature(cfg_panic)] + +#[cfg(panic = "unwind")] +fn a() { + // ... +} + +#[cfg(not(panic = "unwind"))] +fn a() { + // ... +} + +fn b() { + if cfg!(panic = "abort") { + // ... + } else { + // ... 
+ } +} +``` diff --git a/src/test/ui/cfg/cfg-panic-abort.rs b/src/test/ui/cfg/cfg-panic-abort.rs new file mode 100644 index 0000000000000..9b88eff12ed38 --- /dev/null +++ b/src/test/ui/cfg/cfg-panic-abort.rs @@ -0,0 +1,16 @@ +// build-pass +// compile-flags: -C panic=abort +// no-prefer-dynamic +#![feature(cfg_panic)] + +#[cfg(panic = "unwind")] +pub fn bad() -> i32 { } + +#[cfg(not(panic = "abort"))] +pub fn bad() -> i32 { } + +#[cfg(panic = "some_imaginary_future_panic_handler")] +pub fn bad() -> i32 { } + +#[cfg(panic = "abort")] +pub fn main() { } diff --git a/src/test/ui/cfg/cfg-panic.rs b/src/test/ui/cfg/cfg-panic.rs new file mode 100644 index 0000000000000..dbb5932a9bb85 --- /dev/null +++ b/src/test/ui/cfg/cfg-panic.rs @@ -0,0 +1,18 @@ +// build-pass +// compile-flags: -C panic=unwind +// ignore-emscripten no panic_unwind implementation +// ignore-wasm32 no panic_unwind implementation +// ignore-wasm64 no panic_unwind implementation +#![feature(cfg_panic)] + +#[cfg(panic = "abort")] +pub fn bad() -> i32 { } + +#[cfg(not(panic = "unwind"))] +pub fn bad() -> i32 { } + +#[cfg(panic = "some_imaginary_future_panic_handler")] +pub fn bad() -> i32 { } + +#[cfg(panic = "unwind")] +pub fn main() { } diff --git a/src/test/ui/consts/control-flow/assert.panic.stderr b/src/test/ui/consts/control-flow/assert.const_panic.stderr similarity index 100% rename from src/test/ui/consts/control-flow/assert.panic.stderr rename to src/test/ui/consts/control-flow/assert.const_panic.stderr diff --git a/src/test/ui/consts/control-flow/assert.rs b/src/test/ui/consts/control-flow/assert.rs index 30cd31ee8a734..90017fee19337 100644 --- a/src/test/ui/consts/control-flow/assert.rs +++ b/src/test/ui/consts/control-flow/assert.rs @@ -1,14 +1,14 @@ // Test that `assert` works when `const_panic` is enabled. 
-// revisions: stock panic +// revisions: stock const_panic -#![cfg_attr(panic, feature(const_panic))] +#![cfg_attr(const_panic, feature(const_panic))] const _: () = assert!(true); //[stock]~^ ERROR panicking in constants is unstable const _: () = assert!(false); //[stock]~^ ERROR panicking in constants is unstable -//[panic]~^^ ERROR any use of this value will cause an error +//[const_panic]~^^ ERROR any use of this value will cause an error fn main() {} diff --git a/src/test/ui/feature-gates/feature-gate-cfg-panic.rs b/src/test/ui/feature-gates/feature-gate-cfg-panic.rs new file mode 100644 index 0000000000000..1508374d94266 --- /dev/null +++ b/src/test/ui/feature-gates/feature-gate-cfg-panic.rs @@ -0,0 +1,11 @@ +#[cfg(panic = "unwind")] +//~^ ERROR `cfg(panic)` is experimental and subject to change +fn foo() -> bool { true } +#[cfg(not(panic = "unwind"))] +//~^ ERROR `cfg(panic)` is experimental and subject to change +fn foo() -> bool { false } + + +fn main() { + assert!(foo()); +} diff --git a/src/test/ui/feature-gates/feature-gate-cfg-panic.stderr b/src/test/ui/feature-gates/feature-gate-cfg-panic.stderr new file mode 100644 index 0000000000000..ea5cd54fa90f0 --- /dev/null +++ b/src/test/ui/feature-gates/feature-gate-cfg-panic.stderr @@ -0,0 +1,21 @@ +error[E0658]: `cfg(panic)` is experimental and subject to change + --> $DIR/feature-gate-cfg-panic.rs:1:7 + | +LL | #[cfg(panic = "unwind")] + | ^^^^^^^^^^^^^^^^ + | + = note: see issue #77443 for more information + = help: add `#![feature(cfg_panic)]` to the crate attributes to enable + +error[E0658]: `cfg(panic)` is experimental and subject to change + --> $DIR/feature-gate-cfg-panic.rs:4:11 + | +LL | #[cfg(not(panic = "unwind"))] + | ^^^^^^^^^^^^^^^^ + | + = note: see issue #77443 for more information + = help: add `#![feature(cfg_panic)]` to the crate attributes to enable + +error: aborting due to 2 previous errors + +For more information about this error, try `rustc --explain E0658`. diff --git a/src/test/ui/fmt/format-args-capture.rs b/src/test/ui/fmt/format-args-capture.rs index 7490632110c85..9348bb46dfe34 100644 --- a/src/test/ui/fmt/format-args-capture.rs +++ b/src/test/ui/fmt/format-args-capture.rs @@ -1,13 +1,16 @@ // run-pass -// ignore-wasm32 -// ignore-wasm64 #![feature(format_args_capture)] +#![feature(cfg_panic)] fn main() { named_argument_takes_precedence_to_captured(); - panic_with_single_argument_does_not_get_formatted(); - panic_with_multiple_arguments_is_formatted(); formatting_parameters_can_be_captured(); + + #[cfg(panic = "unwind")] + { + panic_with_single_argument_does_not_get_formatted(); + panic_with_multiple_arguments_is_formatted(); + } } fn named_argument_takes_precedence_to_captured() { @@ -22,6 +25,7 @@ fn named_argument_takes_precedence_to_captured() { assert_eq!(&s, "positional-named-captured"); } +#[cfg(panic = "unwind")] fn panic_with_single_argument_does_not_get_formatted() { // panic! with a single argument does not perform string formatting. // RFC #2795 suggests that this may need to change so that captured arguments are formatted. 
@@ -34,6 +38,7 @@ fn panic_with_single_argument_does_not_get_formatted() { assert_eq!(msg.downcast_ref::<&str>(), Some(&"{foo}")) } +#[cfg(panic = "unwind")] fn panic_with_multiple_arguments_is_formatted() { let foo = "captured"; diff --git a/src/test/ui/issues/issue-68696-catch-during-unwind.rs b/src/test/ui/issues/issue-68696-catch-during-unwind.rs index d042bed225d17..f25a78f59cd20 100644 --- a/src/test/ui/issues/issue-68696-catch-during-unwind.rs +++ b/src/test/ui/issues/issue-68696-catch-during-unwind.rs @@ -4,8 +4,7 @@ // entering the catch_unwind. // // run-pass -// ignore-wasm no panic support -// ignore-emscripten no panic support +#![feature(cfg_panic)] use std::panic::catch_unwind; @@ -19,6 +18,7 @@ impl Drop for Guard { } fn main() { + #[cfg(panic = "unwind")] let _ = catch_unwind(|| { let _guard = Guard::default(); panic!(); diff --git a/src/test/ui/test-attrs/test-allow-fail-attr.rs b/src/test/ui/test-attrs/test-allow-fail-attr.rs index 1a478460efc6c..29ce9f7c2e94f 100644 --- a/src/test/ui/test-attrs/test-allow-fail-attr.rs +++ b/src/test/ui/test-attrs/test-allow-fail-attr.rs @@ -1,11 +1,12 @@ // run-pass -// ignore-wasm32-bare compiled with panic=abort by default // compile-flags: --test #![feature(allow_fail)] +#![feature(cfg_panic)] #[test] #[allow_fail] fn test1() { + #[cfg(not(panic = "abort"))] panic!(); } From 61b52a33b3a5a170f7760ba3e3efaac25e5085ef Mon Sep 17 00:00:00 2001 From: SNCPlay42 Date: Tue, 8 Sep 2020 05:35:24 +0100 Subject: [PATCH 19/19] use RegionNameHighlight for async fn and closure returns --- .../borrow_check/diagnostics/region_name.rs | 152 ++++++++++++++---- .../issue-74072-lifetime-name-annotations.rs | 8 + ...sue-74072-lifetime-name-annotations.stderr | 16 +- .../issue-74497-lifetime-in-opaque.rs | 19 +++ .../issue-74497-lifetime-in-opaque.stderr | 11 ++ ...ted-closure-outlives-borrowed-value.stderr | 2 +- src/test/ui/nll/issue-58053.stderr | 4 +- 7 files changed, 173 insertions(+), 39 deletions(-) create mode 100644 src/test/ui/async-await/issue-74497-lifetime-in-opaque.rs create mode 100644 src/test/ui/async-await/issue-74497-lifetime-in-opaque.stderr diff --git a/compiler/rustc_mir/src/borrow_check/diagnostics/region_name.rs b/compiler/rustc_mir/src/borrow_check/diagnostics/region_name.rs index db5c14843b9fc..2a90fb042dd71 100644 --- a/compiler/rustc_mir/src/borrow_check/diagnostics/region_name.rs +++ b/compiler/rustc_mir/src/borrow_check/diagnostics/region_name.rs @@ -6,8 +6,8 @@ use rustc_hir::def::{DefKind, Res}; use rustc_middle::ty::print::RegionHighlightMode; use rustc_middle::ty::subst::{GenericArgKind, SubstsRef}; use rustc_middle::ty::{self, RegionVid, Ty}; -use rustc_span::symbol::kw; -use rustc_span::{symbol::Symbol, Span, DUMMY_SP}; +use rustc_span::symbol::{kw, sym, Ident, Symbol}; +use rustc_span::{Span, DUMMY_SP}; use crate::borrow_check::{nll::ToRegionVid, universal_regions::DefiningTy, MirBorrowckCtxt}; @@ -39,7 +39,7 @@ crate enum RegionNameSource { /// The region corresponding to a closure upvar. AnonRegionFromUpvar(Span, String), /// The region corresponding to the return type of a closure. - AnonRegionFromOutput(Span, String, String), + AnonRegionFromOutput(RegionNameHighlight, String), /// The region from a type yielded by a generator. AnonRegionFromYieldTy(Span, String), /// An anonymous region from an async fn. 
@@ -85,10 +85,10 @@ impl RegionName { | RegionNameSource::NamedFreeRegion(span) | RegionNameSource::SynthesizedFreeEnvRegion(span, _) | RegionNameSource::AnonRegionFromUpvar(span, _) - | RegionNameSource::AnonRegionFromOutput(span, _, _) | RegionNameSource::AnonRegionFromYieldTy(span, _) | RegionNameSource::AnonRegionFromAsyncFn(span) => Some(span), - RegionNameSource::AnonRegionFromArgument(ref highlight) => match *highlight { + RegionNameSource::AnonRegionFromArgument(ref highlight) + | RegionNameSource::AnonRegionFromOutput(ref highlight, _) => match *highlight { RegionNameHighlight::MatchedHirTy(span) | RegionNameHighlight::MatchedAdtAndSegment(span) | RegionNameHighlight::CannotMatchHirTy(span, _) @@ -117,6 +117,7 @@ impl RegionName { diag.span_label(*span, format!("has type `{}`", type_name)); } RegionNameSource::AnonRegionFromArgument(RegionNameHighlight::MatchedHirTy(span)) + | RegionNameSource::AnonRegionFromOutput(RegionNameHighlight::MatchedHirTy(span), _) | RegionNameSource::AnonRegionFromAsyncFn(span) => { diag.span_label( *span, @@ -125,6 +126,10 @@ impl RegionName { } RegionNameSource::AnonRegionFromArgument( RegionNameHighlight::MatchedAdtAndSegment(span), + ) + | RegionNameSource::AnonRegionFromOutput( + RegionNameHighlight::MatchedAdtAndSegment(span), + _, ) => { diag.span_label(*span, format!("let's call this `{}`", self)); } @@ -137,13 +142,28 @@ impl RegionName { format!("lifetime `{}` appears in the type {}", self, type_name), ); } + RegionNameSource::AnonRegionFromOutput( + RegionNameHighlight::Occluded(span, type_name), + mir_description, + ) => { + diag.span_label( + *span, + format!( + "return type{} `{}` contains a lifetime `{}`", + mir_description, type_name, self + ), + ); + } RegionNameSource::AnonRegionFromUpvar(span, upvar_name) => { diag.span_label( *span, format!("lifetime `{}` appears in the type of `{}`", self, upvar_name), ); } - RegionNameSource::AnonRegionFromOutput(span, mir_description, type_name) => { + RegionNameSource::AnonRegionFromOutput( + RegionNameHighlight::CannotMatchHirTy(span, type_name), + mir_description, + ) => { diag.span_label(*span, format!("return type{} is {}", mir_description, type_name)); } RegionNameSource::AnonRegionFromYieldTy(span, type_name) => { @@ -659,6 +679,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> { /// or be early bound (named, not in argument). fn give_name_if_anonymous_region_appears_in_output(&self, fr: RegionVid) -> Option { let tcx = self.infcx.tcx; + let hir = tcx.hir(); let return_ty = self.regioncx.universal_regions().unnormalized_output_ty; debug!("give_name_if_anonymous_region_appears_in_output: return_ty = {:?}", return_ty); @@ -666,60 +687,123 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> { return None; } - let mut highlight = RegionHighlightMode::default(); - highlight.highlighting_region_vid(fr, *self.next_region_name.try_borrow().unwrap()); - let type_name = - self.infcx.extract_inference_diagnostics_data(return_ty.into(), Some(highlight)).name; + let mir_hir_id = self.mir_hir_id(); - let (return_span, mir_description) = match tcx.hir().get(self.mir_hir_id()) { + let (return_span, mir_description, hir_ty) = match hir.get(mir_hir_id) { hir::Node::Expr(hir::Expr { kind: hir::ExprKind::Closure(_, return_ty, body_id, span, _), .. 
}) => { - let mut span = match return_ty.output { - hir::FnRetTy::DefaultReturn(_) => tcx.sess.source_map().end_point(*span), - hir::FnRetTy::Return(_) => return_ty.output.span(), + let (mut span, mut hir_ty) = match return_ty.output { + hir::FnRetTy::DefaultReturn(_) => { + (tcx.sess.source_map().end_point(*span), None) + } + hir::FnRetTy::Return(hir_ty) => (return_ty.output.span(), Some(hir_ty)), }; - let mir_description = match tcx.hir().body(*body_id).generator_kind { + let mir_description = match hir.body(*body_id).generator_kind { Some(hir::GeneratorKind::Async(gen)) => match gen { hir::AsyncGeneratorKind::Block => " of async block", hir::AsyncGeneratorKind::Closure => " of async closure", hir::AsyncGeneratorKind::Fn => { - span = tcx - .hir() - .get(tcx.hir().get_parent_item(mir_hir_id)) + let parent_item = hir.get(hir.get_parent_item(mir_hir_id)); + let output = &parent_item .fn_decl() .expect("generator lowered from async fn should be in fn") - .output - .span(); + .output; + span = output.span(); + if let hir::FnRetTy::Return(ret) = output { + hir_ty = Some(self.get_future_inner_return_ty(*ret)); + } " of async function" } }, Some(hir::GeneratorKind::Gen) => " of generator", None => " of closure", }; - (span, mir_description) + (span, mir_description, hir_ty) } - hir::Node::ImplItem(hir::ImplItem { - kind: hir::ImplItemKind::Fn(method_sig, _), - .. - }) => (method_sig.decl.output.span(), ""), - _ => (self.body.span, ""), + node => match node.fn_decl() { + Some(fn_decl) => { + let hir_ty = match fn_decl.output { + hir::FnRetTy::DefaultReturn(_) => None, + hir::FnRetTy::Return(ty) => Some(ty), + }; + (fn_decl.output.span(), "", hir_ty) + } + None => (self.body.span, "", None), + }, }; + let highlight = hir_ty + .and_then(|hir_ty| self.highlight_if_we_can_match_hir_ty(fr, return_ty, hir_ty)) + .unwrap_or_else(|| { + // `highlight_if_we_cannot_match_hir_ty` needs to know the number we will give to + // the anonymous region. If it succeeds, the `synthesize_region_name` call below + // will increment the counter, "reserving" the number we just used. + let counter = *self.next_region_name.try_borrow().unwrap(); + self.highlight_if_we_cannot_match_hir_ty(fr, return_ty, return_span, counter) + }); + Some(RegionName { - // This counter value will already have been used, so this function will increment it - // so the next value will be used next and return the region name that would have been - // used. name: self.synthesize_region_name(), - source: RegionNameSource::AnonRegionFromOutput( - return_span, - mir_description.to_string(), - type_name, - ), + source: RegionNameSource::AnonRegionFromOutput(highlight, mir_description.to_string()), }) } + /// From the [`hir::Ty`] of an async function's lowered return type, + /// retrieve the `hir::Ty` representing the type the user originally wrote. + /// + /// e.g. given the function: + /// + /// ``` + /// async fn foo() -> i32 {} + /// ``` + /// + /// this function, given the lowered return type of `foo`, an [`OpaqueDef`] that implements `Future`, + /// returns the `i32`. 
+ /// + /// [`OpaqueDef`]: hir::TyKind::OpaqueDef + fn get_future_inner_return_ty(&self, hir_ty: &'tcx hir::Ty<'tcx>) -> &'tcx hir::Ty<'tcx> { + let hir = self.infcx.tcx.hir(); + + if let hir::TyKind::OpaqueDef(id, _) = hir_ty.kind { + let opaque_ty = hir.item(id.id); + if let hir::ItemKind::OpaqueTy(hir::OpaqueTy { + bounds: + [hir::GenericBound::LangItemTrait( + hir::LangItem::Future, + _, + _, + hir::GenericArgs { + bindings: + [hir::TypeBinding { + ident: Ident { name: sym::Output, .. }, + kind: hir::TypeBindingKind::Equality { ty }, + .. + }], + .. + }, + )], + .. + }) = opaque_ty.kind + { + ty + } else { + span_bug!( + hir_ty.span, + "bounds from lowered return type of async fn did not match expected format: {:?}", + opaque_ty + ); + } + } else { + span_bug!( + hir_ty.span, + "lowered return type of async fn is not OpaqueDef: {:?}", + hir_ty + ); + } + } + fn give_name_if_anonymous_region_appears_in_yield_ty( &self, fr: RegionVid, diff --git a/src/test/ui/async-await/issue-74072-lifetime-name-annotations.rs b/src/test/ui/async-await/issue-74072-lifetime-name-annotations.rs index f8e116333592f..95683241aba26 100644 --- a/src/test/ui/async-await/issue-74072-lifetime-name-annotations.rs +++ b/src/test/ui/async-await/issue-74072-lifetime-name-annotations.rs @@ -18,6 +18,14 @@ pub fn async_closure(x: &mut i32) -> impl Future { })() } +pub fn async_closure_explicit_return_type(x: &mut i32) -> impl Future { + (async move || -> &i32 { + let y = &*x; + *x += 1; //~ ERROR cannot assign to `*x` because it is borrowed + y + })() +} + pub fn async_block(x: &mut i32) -> impl Future { async move { let y = &*x; diff --git a/src/test/ui/async-await/issue-74072-lifetime-name-annotations.stderr b/src/test/ui/async-await/issue-74072-lifetime-name-annotations.stderr index 997e36fe22cfe..123c3192cffba 100644 --- a/src/test/ui/async-await/issue-74072-lifetime-name-annotations.stderr +++ b/src/test/ui/async-await/issue-74072-lifetime-name-annotations.stderr @@ -2,7 +2,7 @@ error[E0506]: cannot assign to `*x` because it is borrowed --> $DIR/issue-74072-lifetime-name-annotations.rs:9:5 | LL | pub async fn async_fn(x: &mut i32) -> &i32 { - | ---- return type of async function is &'1 i32 + | - let's call the lifetime of this reference `'1` LL | let y = &*x; | --- borrow of `*x` occurs here LL | *x += 1; @@ -25,6 +25,18 @@ LL | })() error[E0506]: cannot assign to `*x` because it is borrowed --> $DIR/issue-74072-lifetime-name-annotations.rs:24:9 | +LL | (async move || -> &i32 { + | - let's call the lifetime of this reference `'1` +LL | let y = &*x; + | --- borrow of `*x` occurs here +LL | *x += 1; + | ^^^^^^^ assignment to borrowed `*x` occurs here +LL | y + | - returning this value requires that `*x` is borrowed for `'1` + +error[E0506]: cannot assign to `*x` because it is borrowed + --> $DIR/issue-74072-lifetime-name-annotations.rs:32:9 + | LL | let y = &*x; | --- borrow of `*x` occurs here LL | *x += 1; @@ -34,6 +46,6 @@ LL | y LL | } | - return type of async block is &'1 i32 -error: aborting due to 3 previous errors +error: aborting due to 4 previous errors For more information about this error, try `rustc --explain E0506`. 
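A note for readers who are not familiar with the async lowering that `get_future_inner_return_ty` walks back: the return type written on an `async fn` survives only as the `Output` binding of an opaque `impl Future`, which is why the diagnostic code has to dig it back out of the `OpaqueDef` bounds. Below is a rough, surface-level sketch of that relationship (illustration only, edition 2018+; `async_fn_as_written` and `async_fn_desugared` are made-up names, and the real lowering happens in the HIR rather than as a source-to-source rewrite):

```rust
use std::future::Future;

// What the user writes: the `&i32` after `->` is the span the improved
// diagnostic wants to point at.
pub async fn async_fn_as_written(x: &i32) -> &i32 {
    x
}

// Roughly what it desugars to: the written `&i32` now appears only as the
// `Output` binding of an opaque `impl Future`, so the borrow-check diagnostic
// has to look through the opaque type to recover it.
pub fn async_fn_desugared(x: &i32) -> impl Future<Output = &i32> + '_ {
    async move { x }
}

fn main() {}
```

This is also why the `AsyncGeneratorKind::Fn` arm earlier in the patch fetches the parent item's `fn_decl()` and passes its return type through `get_future_inner_return_ty`: the label should land on the `&i32` the user wrote, not on the synthesized opaque type.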
diff --git a/src/test/ui/async-await/issue-74497-lifetime-in-opaque.rs b/src/test/ui/async-await/issue-74497-lifetime-in-opaque.rs
new file mode 100644
index 0000000000000..2d765eb41be07
--- /dev/null
+++ b/src/test/ui/async-await/issue-74497-lifetime-in-opaque.rs
@@ -0,0 +1,19 @@
+// edition:2018
+
+// test that names given to anonymous lifetimes in opaque types like `impl Future` are correctly
+// introduced in error messages
+
+use std::future::Future;
+
+pub async fn foo<F, T>(_: F)
+where
+    F: Fn(&u8) -> T,
+    T: Future,
+{
+}
+
+pub async fn bar(_: &u8) {}
+
+fn main() {
+    let _ = foo(|x| bar(x)); //~ ERROR lifetime may not live long enough
+}
diff --git a/src/test/ui/async-await/issue-74497-lifetime-in-opaque.stderr b/src/test/ui/async-await/issue-74497-lifetime-in-opaque.stderr
new file mode 100644
index 0000000000000..89fe1abb3656b
--- /dev/null
+++ b/src/test/ui/async-await/issue-74497-lifetime-in-opaque.stderr
@@ -0,0 +1,11 @@
+error: lifetime may not live long enough
+  --> $DIR/issue-74497-lifetime-in-opaque.rs:18:21
+   |
+LL |     let _ = foo(|x| bar(x));
+   |                  -- ^^^^^^ returning this value requires that `'1` must outlive `'2`
+   |                  ||
+   |                  |return type of closure `impl Future` contains a lifetime `'2`
+   |                  has type `&'1 u8`
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/borrowck/issue-53432-nested-closure-outlives-borrowed-value.stderr b/src/test/ui/borrowck/issue-53432-nested-closure-outlives-borrowed-value.stderr
index e8a026cfab92a..a7c3b9eec73c5 100644
--- a/src/test/ui/borrowck/issue-53432-nested-closure-outlives-borrowed-value.stderr
+++ b/src/test/ui/borrowck/issue-53432-nested-closure-outlives-borrowed-value.stderr
@@ -4,7 +4,7 @@ error: lifetime may not live long enough
 LL |     let _action = move || {
    |                   -------
    |                   |      |
-   |                   |      return type of closure is [closure@$DIR/issue-53432-nested-closure-outlives-borrowed-value.rs:4:9: 4:15]
+   |                   |      return type of closure `[closure@$DIR/issue-53432-nested-closure-outlives-borrowed-value.rs:4:9: 4:15]` contains a lifetime `'2`
    |                   lifetime `'1` represents this closure's body
 LL |         || f() // The `nested` closure
    |         ^^^^^^ returning this value requires that `'1` must outlive `'2`
diff --git a/src/test/ui/nll/issue-58053.stderr b/src/test/ui/nll/issue-58053.stderr
index 297681ff4038a..e41ee8a89709c 100644
--- a/src/test/ui/nll/issue-58053.stderr
+++ b/src/test/ui/nll/issue-58053.stderr
@@ -2,9 +2,9 @@ error: lifetime may not live long enough
   --> $DIR/issue-58053.rs:6:33
    |
 LL |     let f = |x: &i32| -> &i32 { x };
-   |              -           ---- ^ returning this value requires that `'1` must outlive `'2`
+   |              -           -      ^ returning this value requires that `'1` must outlive `'2`
    |              |           |
-   |              |           return type of closure is &'2 i32
+   |              |           let's call the lifetime of this reference `'2`
    |              let's call the lifetime of this reference `'1`
 
 error: lifetime may not live long enough
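For reviewers cross-checking the new `.stderr` output against the diagnostic code in the first hunk: the changed labels above come from the two `format!` calls in the `RegionNameSource::AnonRegionFromOutput` arms, one for `RegionNameHighlight::Occluded` and one for `RegionNameHighlight::CannotMatchHirTy`. A small standalone sketch of how those strings are assembled (the free functions and `main` below are invented for illustration; in the compiler the same strings are built inline and attached to spans with `diag.span_label`):

```rust
// Illustration only: standalone versions of the two label formats used by the
// `RegionNameSource::AnonRegionFromOutput` arms in region_name.rs.

/// Label used when the lifetime is hidden ("occluded") inside an opaque return
/// type such as `impl Future`, so only the whole type can be named.
fn occluded_output_label(mir_description: &str, type_name: &str, region_name: &str) -> String {
    format!(
        "return type{} `{}` contains a lifetime `{}`",
        mir_description, type_name, region_name
    )
}

/// Fallback label used when the return type could not be matched against the
/// HIR at all (`RegionNameHighlight::CannotMatchHirTy`).
fn cannot_match_hir_ty_label(mir_description: &str, type_name: &str) -> String {
    format!("return type{} is {}", mir_description, type_name)
}

fn main() {
    // Matches the new label in issue-74497-lifetime-in-opaque.stderr.
    assert_eq!(
        occluded_output_label(" of closure", "impl Future", "'2"),
        "return type of closure `impl Future` contains a lifetime `'2`"
    );
    // Matches the async-block label in issue-74072-lifetime-name-annotations.stderr.
    assert_eq!(
        cannot_match_hir_ty_label(" of async block", "&'1 i32"),
        "return type of async block is &'1 i32"
    );
}
```

The point of the separate `Occluded` wording seems to be that, when the lifetime is buried inside an opaque type such as `impl Future`, there is no written `&'2 i32` the label could quote, so it names the whole return type instead.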