Auto merge of #110978 - Dylan-DPC:rollup-xclzwax, r=Dylan-DPC
Rollup of 6 pull requests

Successful merges:

 - #110614 (Clear response values for overflow in new solver)
 - #110894 (Bump libffi-sys to 2.3.0)
 - #110932 (include source error for LoadLibraryExW)
 - #110958 (Make sure that some stdlib method signatures aren't accidental refinements)
 - #110962 (Make drop_flags an IndexVec.)
 - #110965 (Don't duplicate anonymous lifetimes for async fn in traits)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors committed Apr 29, 2023
2 parents 572c0d5 + 81910a1 commit af2c7e0
Showing 21 changed files with 390 additions and 63 deletions.
4 changes: 2 additions & 2 deletions Cargo.lock
@@ -1956,9 +1956,9 @@ dependencies = [

[[package]]
name = "libffi-sys"
version = "2.2.1"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc65067b78c0fc069771e8b9a9e02df71e08858bec92c1f101377c67b9dca7c7"
checksum = "f36115160c57e8529781b4183c2bb51fdc1f6d6d1ed345591d84be7703befb3c"
dependencies = [
"cc",
]
10 changes: 9 additions & 1 deletion compiler/rustc_metadata/src/creader.rs
@@ -27,6 +27,7 @@ use rustc_span::{Span, DUMMY_SP};
use rustc_target::spec::{PanicStrategy, TargetTriple};

use proc_macro::bridge::client::ProcMacro;
use std::error::Error;
use std::ops::Fn;
use std::path::Path;
use std::time::Duration;
@@ -1094,5 +1095,12 @@ fn load_dylib(path: &Path, max_attempts: usize) -> Result<libloading::Library, S
}

debug!("Failed to load proc-macro `{}` even after {} attempts.", path.display(), max_attempts);
Err(format!("{} (retried {} times)", last_error.unwrap(), max_attempts))

let last_error = last_error.unwrap();
let message = if let Some(src) = last_error.source() {
format!("{last_error} ({src}) (retried {max_attempts} times)")
} else {
format!("{last_error} (retried {max_attempts} times)")
};
Err(message)
}
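
The new error path above threads the loader's underlying cause into the message via `std::error::Error::source()`. Below is a minimal, self-contained sketch of that pattern; the `LoadError` type and `render` helper are illustrative stand-ins, not rustc code.

```rust
use std::error::Error;
use std::fmt;

#[derive(Debug)]
struct LoadError {
    msg: String,
    cause: Option<Box<dyn Error + 'static>>,
}

impl fmt::Display for LoadError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.msg)
    }
}

impl Error for LoadError {
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        self.cause.as_deref()
    }
}

fn render(last_error: &LoadError, max_attempts: usize) -> String {
    // Same shape as the new `load_dylib` logic: append the source, if any.
    if let Some(src) = last_error.source() {
        format!("{last_error} ({src}) (retried {max_attempts} times)")
    } else {
        format!("{last_error} (retried {max_attempts} times)")
    }
}

fn main() {
    let io = std::io::Error::new(std::io::ErrorKind::NotFound, "os error 126");
    let err = LoadError { msg: "LoadLibraryExW failed".into(), cause: Some(Box::new(io)) };
    println!("{}", render(&err, 5));
}
```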
15 changes: 8 additions & 7 deletions compiler/rustc_mir_transform/src/elaborate_drops.rs
@@ -1,7 +1,7 @@
use crate::deref_separator::deref_finder;
use crate::MirPass;
use rustc_data_structures::fx::FxHashMap;
use rustc_index::bit_set::BitSet;
use rustc_index::IndexVec;
use rustc_middle::mir::patch::MirPatch;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, TyCtxt};
@@ -84,12 +84,13 @@ impl<'tcx> MirPass<'tcx> for ElaborateDrops {

let reachable = traversal::reachable_as_bitset(body);

let drop_flags = IndexVec::from_elem(None, &env.move_data.move_paths);
ElaborateDropsCtxt {
tcx,
body,
env: &env,
init_data: InitializationData { inits, uninits },
drop_flags: Default::default(),
drop_flags,
patch: MirPatch::new(body),
un_derefer: un_derefer,
reachable,
@@ -293,7 +294,7 @@ struct ElaborateDropsCtxt<'a, 'tcx> {
body: &'a Body<'tcx>,
env: &'a MoveDataParamEnv<'tcx>,
init_data: InitializationData<'a, 'tcx>,
drop_flags: FxHashMap<MovePathIndex, Local>,
drop_flags: IndexVec<MovePathIndex, Option<Local>>,
patch: MirPatch<'tcx>,
un_derefer: UnDerefer<'tcx>,
reachable: BitSet<BasicBlock>,
@@ -312,11 +313,11 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
let tcx = self.tcx;
let patch = &mut self.patch;
debug!("create_drop_flag({:?})", self.body.span);
self.drop_flags.entry(index).or_insert_with(|| patch.new_internal(tcx.types.bool, span));
self.drop_flags[index].get_or_insert_with(|| patch.new_internal(tcx.types.bool, span));
}

fn drop_flag(&mut self, index: MovePathIndex) -> Option<Place<'tcx>> {
self.drop_flags.get(&index).map(|t| Place::from(*t))
self.drop_flags[index].map(Place::from)
}

/// create a patch that elaborates all drops in the input
@@ -463,7 +464,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
}

fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
if let Some(&flag) = self.drop_flags.get(&path) {
if let Some(flag) = self.drop_flags[path] {
let span = self.patch.source_info_for_location(self.body, loc).span;
let val = self.constant_bool(span, val.value());
self.patch.add_assign(loc, Place::from(flag), val);
@@ -474,7 +475,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
let loc = Location::START;
let span = self.patch.source_info_for_location(self.body, loc).span;
let false_ = self.constant_bool(span, false);
for flag in self.drop_flags.values() {
for flag in self.drop_flags.iter().flatten() {
self.patch.add_assign(loc, Place::from(*flag), false_.clone());
}
}
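
The change above swaps an `FxHashMap<MovePathIndex, Local>` for an `IndexVec<MovePathIndex, Option<Local>>`, i.e. dense, index-keyed storage with one slot per move path. The following is a simplified, self-contained model of those access patterns using a plain `Vec<Option<_>>` and toy `MovePathIndex`/`Local` newtypes; the real code uses `rustc_index::IndexVec` and `MirPatch::new_internal`.

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct MovePathIndex(usize);

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Local(u32);

struct DropFlags {
    // One slot per move path, `None` until a flag local is created.
    flags: Vec<Option<Local>>,
    next_local: u32,
}

impl DropFlags {
    fn new(num_move_paths: usize) -> Self {
        // Mirrors `IndexVec::from_elem(None, &env.move_data.move_paths)`.
        DropFlags { flags: vec![None; num_move_paths], next_local: 0 }
    }

    fn create_drop_flag(&mut self, index: MovePathIndex) -> Local {
        let next_local = &mut self.next_local;
        // Mirrors `self.drop_flags[index].get_or_insert_with(|| patch.new_internal(..))`.
        *self.flags[index.0].get_or_insert_with(|| {
            let l = Local(*next_local);
            *next_local += 1;
            l
        })
    }

    fn drop_flag(&self, index: MovePathIndex) -> Option<Local> {
        // Mirrors `self.drop_flags[index].map(Place::from)`.
        self.flags[index.0]
    }
}

fn main() {
    let mut flags = DropFlags::new(3);
    assert_eq!(flags.drop_flag(MovePathIndex(1)), None);
    let l = flags.create_drop_flag(MovePathIndex(1));
    assert_eq!(flags.drop_flag(MovePathIndex(1)), Some(l));
}
```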
6 changes: 1 addition & 5 deletions compiler/rustc_resolve/src/late.rs
@@ -859,13 +859,9 @@ impl<'a: 'ast, 'ast, 'tcx> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast,
sig.decl.inputs.iter().map(|Param { ty, .. }| (None, &**ty)),
&sig.decl.output,
);

this.record_lifetime_params_for_async(
fn_id,
sig.header.asyncness.opt_return_id(),
);
},
);
self.record_lifetime_params_for_async(fn_id, sig.header.asyncness.opt_return_id());
return;
}
FnKind::Fn(..) => {
19 changes: 13 additions & 6 deletions compiler/rustc_trait_selection/src/solve/eval_ctxt.rs
@@ -3,7 +3,8 @@ use rustc_infer::infer::at::ToTrace;
use rustc_infer::infer::canonical::CanonicalVarValues;
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
use rustc_infer::infer::{
DefineOpaqueTypes, InferCtxt, InferOk, LateBoundRegionConversionTime, TyCtxtInferExt,
DefineOpaqueTypes, InferCtxt, InferOk, LateBoundRegionConversionTime, RegionVariableOrigin,
TyCtxtInferExt,
};
use rustc_infer::traits::query::NoSolution;
use rustc_infer::traits::ObligationCause;
@@ -223,18 +224,20 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
{
debug!("rerunning goal to check result is stable");
let (_orig_values, canonical_goal) = self.canonicalize_goal(goal);
let canonical_response =
let new_canonical_response =
EvalCtxt::evaluate_canonical_goal(self.tcx(), self.search_graph, canonical_goal)?;
if !canonical_response.value.var_values.is_identity() {
if !new_canonical_response.value.var_values.is_identity() {
bug!(
"unstable result: re-canonicalized goal={canonical_goal:#?} \
response={canonical_response:#?}"
first_response={canonical_response:#?} \
second_response={new_canonical_response:#?}"
);
}
if certainty != canonical_response.value.certainty {
if certainty != new_canonical_response.value.certainty {
bug!(
"unstable certainty: {certainty:#?} re-canonicalized goal={canonical_goal:#?} \
response={canonical_response:#?}"
first_response={canonical_response:#?} \
second_response={new_canonical_response:#?}"
);
}
}
@@ -434,6 +437,10 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
})
}

pub(super) fn next_region_infer(&self) -> ty::Region<'tcx> {
self.infcx.next_region_var(RegionVariableOrigin::MiscVariable(DUMMY_SP))
}

pub(super) fn next_const_infer(&self, ty: Ty<'tcx>) -> ty::Const<'tcx> {
self.infcx.next_const_var(
ty,
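
The first hunk of this file only renames the re-evaluated result to `new_canonical_response`, so the `bug!` messages can show both the first and the second response when the stability check fails. A tiny standalone sketch of that debug pattern; the `check_stable` helper below is illustrative, not part of rustc.

```rust
// Re-run a computation and, if the two results disagree, report both of them.
fn check_stable<T, F>(mut run: F) -> T
where
    T: PartialEq + std::fmt::Debug,
    F: FnMut() -> T,
{
    let first_response = run();
    let new_response = run();
    if first_response != new_response {
        panic!(
            "unstable result: first_response={first_response:#?} \
             second_response={new_response:#?}"
        );
    }
    first_response
}

fn main() {
    // A deterministic query: the check passes and returns the value.
    let v = check_stable(|| 2 + 2);
    assert_eq!(v, 4);
}
```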
58 changes: 55 additions & 3 deletions compiler/rustc_trait_selection/src/solve/eval_ctxt/canonical.rs
@@ -16,7 +16,7 @@ use rustc_infer::infer::canonical::query_response::make_query_region_constraints
use rustc_infer::infer::canonical::CanonicalVarValues;
use rustc_infer::infer::canonical::{CanonicalExt, QueryRegionConstraints};
use rustc_middle::traits::query::NoSolution;
use rustc_middle::traits::solve::{ExternalConstraints, ExternalConstraintsData};
use rustc_middle::traits::solve::{ExternalConstraints, ExternalConstraintsData, MaybeCause};
use rustc_middle::ty::{self, BoundVar, GenericArgKind};
use rustc_span::DUMMY_SP;
use std::iter;
@@ -60,9 +60,27 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {

let certainty = certainty.unify_with(goals_certainty);

let external_constraints = self.compute_external_query_constraints()?;
let response = match certainty {
Certainty::Yes | Certainty::Maybe(MaybeCause::Ambiguity) => {
let external_constraints = self.compute_external_query_constraints()?;
Response { var_values: self.var_values, external_constraints, certainty }
}
Certainty::Maybe(MaybeCause::Overflow) => {
// If we have overflow, it's probable that we're substituting a type
// into itself infinitely and any partial substitutions in the query
// response are probably not useful anyways, so just return an empty
// query response.
//
// This may prevent us from potentially useful inference, e.g.
// 2 candidates, one ambiguous and one overflow, which both
// have the same inference constraints.
//
// Changing this to retain some constraints in the future
// won't be a breaking change, so this is good enough for now.
return Ok(self.make_ambiguous_response_no_constraints(MaybeCause::Overflow));
}
};

let response = Response { var_values: self.var_values, external_constraints, certainty };
let canonical = Canonicalizer::canonicalize(
self.infcx,
CanonicalizeMode::Response { max_input_universe: self.max_input_universe },
@@ -72,6 +90,40 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
Ok(canonical)
}

/// Constructs a totally unconstrained, ambiguous response to a goal.
///
/// Take care when using this, since often it's useful to respond with
/// ambiguity but return constrained variables to guide inference.
pub(in crate::solve) fn make_ambiguous_response_no_constraints(
&self,
maybe_cause: MaybeCause,
) -> CanonicalResponse<'tcx> {
let unconstrained_response = Response {
var_values: CanonicalVarValues {
var_values: self.tcx().mk_substs_from_iter(self.var_values.var_values.iter().map(
|arg| -> ty::GenericArg<'tcx> {
match arg.unpack() {
GenericArgKind::Lifetime(_) => self.next_region_infer().into(),
GenericArgKind::Type(_) => self.next_ty_infer().into(),
GenericArgKind::Const(ct) => self.next_const_infer(ct.ty()).into(),
}
},
)),
},
external_constraints: self
.tcx()
.mk_external_constraints(ExternalConstraintsData::default()),
certainty: Certainty::Maybe(maybe_cause),
};

Canonicalizer::canonicalize(
self.infcx,
CanonicalizeMode::Response { max_input_universe: self.max_input_universe },
&mut Default::default(),
unconstrained_response,
)
}

#[instrument(level = "debug", skip(self), ret)]
fn compute_external_query_constraints(&self) -> Result<ExternalConstraints<'tcx>, NoSolution> {
// Cannot use `take_registered_region_obligations` as we may compute the response
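
To make the new overflow branch above easier to follow, here is a self-contained toy model of its control flow; the enums and `make_response` function are simplified stand-ins for the `rustc_middle::traits::solve` types, not the real API. On `Maybe(Overflow)` the partially computed constraints are discarded and an unconstrained ambiguous response is returned instead.

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum MaybeCause {
    Ambiguity,
    Overflow,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Certainty {
    Yes,
    Maybe(MaybeCause),
}

#[derive(Debug, PartialEq)]
struct Response {
    constraints: Vec<String>, // stand-in for external constraints / var values
    certainty: Certainty,
}

fn make_response(certainty: Certainty, computed_constraints: Vec<String>) -> Response {
    match certainty {
        // Yes or plain ambiguity: keep whatever constraints were computed.
        Certainty::Yes | Certainty::Maybe(MaybeCause::Ambiguity) => {
            Response { constraints: computed_constraints, certainty }
        }
        // Overflow: partial substitutions are probably not useful, so return
        // an empty (unconstrained) ambiguous response instead.
        Certainty::Maybe(MaybeCause::Overflow) => Response {
            constraints: Vec::new(),
            certainty: Certainty::Maybe(MaybeCause::Overflow),
        },
    }
}

fn main() {
    let r = make_response(
        Certainty::Maybe(MaybeCause::Overflow),
        vec!["?T == Vec<?T>".into()],
    );
    assert!(r.constraints.is_empty());
    assert_eq!(r.certainty, Certainty::Maybe(MaybeCause::Overflow));
}
```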
22 changes: 11 additions & 11 deletions compiler/rustc_trait_selection/src/solve/mod.rs
@@ -340,17 +340,17 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
if responses.is_empty() {
return Err(NoSolution);
}
let certainty = responses.iter().fold(Certainty::AMBIGUOUS, |certainty, response| {
certainty.unify_with(response.value.certainty)
});

let response = self.evaluate_added_goals_and_make_canonical_response(certainty);
if let Ok(response) = response {
assert!(response.has_no_inference_or_external_constraints());
Ok(response)
} else {
bug!("failed to make floundered response: {responses:?}");
}

let Certainty::Maybe(maybe_cause) = responses.iter().fold(
Certainty::AMBIGUOUS,
|certainty, response| {
certainty.unify_with(response.value.certainty)
},
) else {
bug!("expected flounder response to be ambiguous")
};

Ok(self.make_ambiguous_response_no_constraints(maybe_cause))
}
}
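
The rewritten flounder logic above folds the candidates' certainties together, expects the result to be `Certainty::Maybe(..)`, and builds an unconstrained ambiguous response from it. Below is a simplified, self-contained sketch of that flow; the toy enums and the `unify_with` rules are assumptions for illustration, the real rules live in rustc.

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum MaybeCause { Ambiguity, Overflow }

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Certainty { Yes, Maybe(MaybeCause) }

impl Certainty {
    const AMBIGUOUS: Certainty = Certainty::Maybe(MaybeCause::Ambiguity);

    // A plausible unification: `Yes` is absorbed by `Maybe`, and overflow
    // dominates plain ambiguity (assumed semantics for this sketch).
    fn unify_with(self, other: Certainty) -> Certainty {
        use Certainty::*;
        use MaybeCause::*;
        match (self, other) {
            (Yes, Yes) => Yes,
            (Maybe(Overflow), _) | (_, Maybe(Overflow)) => Maybe(Overflow),
            _ => Maybe(Ambiguity),
        }
    }
}

fn flounder(response_certainties: &[Certainty]) -> Result<MaybeCause, &'static str> {
    if response_certainties.is_empty() {
        return Err("NoSolution");
    }
    let folded = response_certainties
        .iter()
        .fold(Certainty::AMBIGUOUS, |c, &r| c.unify_with(r));
    // Mirrors the `let Certainty::Maybe(maybe_cause) = .. else { bug!(..) }` above.
    let Certainty::Maybe(maybe_cause) = folded else {
        panic!("expected flounder response to be ambiguous");
    };
    Ok(maybe_cause)
}

fn main() {
    let cause = flounder(&[Certainty::Yes, Certainty::Maybe(MaybeCause::Overflow)]).unwrap();
    assert_eq!(cause, MaybeCause::Overflow);
}
```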

60 changes: 48 additions & 12 deletions library/alloc/src/collections/btree/map.rs
@@ -1543,11 +1543,17 @@ impl<'a, K: 'a, V: 'a> Iterator for Iter<'a, K, V> {
self.next_back()
}

fn min(mut self) -> Option<(&'a K, &'a V)> {
fn min(mut self) -> Option<(&'a K, &'a V)>
where
(&'a K, &'a V): Ord,
{
self.next()
}

fn max(mut self) -> Option<(&'a K, &'a V)> {
fn max(mut self) -> Option<(&'a K, &'a V)>
where
(&'a K, &'a V): Ord,
{
self.next_back()
}
}
@@ -1612,11 +1618,17 @@ impl<'a, K, V> Iterator for IterMut<'a, K, V> {
self.next_back()
}

fn min(mut self) -> Option<(&'a K, &'a mut V)> {
fn min(mut self) -> Option<(&'a K, &'a mut V)>
where
(&'a K, &'a mut V): Ord,
{
self.next()
}

fn max(mut self) -> Option<(&'a K, &'a mut V)> {
fn max(mut self) -> Option<(&'a K, &'a mut V)>
where
(&'a K, &'a mut V): Ord,
{
self.next_back()
}
}
@@ -1779,11 +1791,17 @@ impl<'a, K, V> Iterator for Keys<'a, K, V> {
self.next_back()
}

fn min(mut self) -> Option<&'a K> {
fn min(mut self) -> Option<&'a K>
where
&'a K: Ord,
{
self.next()
}

fn max(mut self) -> Option<&'a K> {
fn max(mut self) -> Option<&'a K>
where
&'a K: Ord,
{
self.next_back()
}
}
@@ -2008,11 +2026,17 @@ impl<'a, K, V> Iterator for Range<'a, K, V> {
self.next_back()
}

fn min(mut self) -> Option<(&'a K, &'a V)> {
fn min(mut self) -> Option<(&'a K, &'a V)>
where
(&'a K, &'a V): Ord,
{
self.next()
}

fn max(mut self) -> Option<(&'a K, &'a V)> {
fn max(mut self) -> Option<(&'a K, &'a V)>
where
(&'a K, &'a V): Ord,
{
self.next_back()
}
}
@@ -2081,11 +2105,17 @@ impl<K, V, A: Allocator + Clone> Iterator for IntoKeys<K, V, A> {
self.next_back()
}

fn min(mut self) -> Option<K> {
fn min(mut self) -> Option<K>
where
K: Ord,
{
self.next()
}

fn max(mut self) -> Option<K> {
fn max(mut self) -> Option<K>
where
K: Ord,
{
self.next_back()
}
}
@@ -2204,11 +2234,17 @@ impl<'a, K, V> Iterator for RangeMut<'a, K, V> {
self.next_back()
}

fn min(mut self) -> Option<(&'a K, &'a mut V)> {
fn min(mut self) -> Option<(&'a K, &'a mut V)>
where
(&'a K, &'a mut V): Ord,
{
self.next()
}

fn max(mut self) -> Option<(&'a K, &'a mut V)> {
fn max(mut self) -> Option<(&'a K, &'a mut V)>
where
(&'a K, &'a mut V): Ord,
{
self.next_back()
}
}
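
The `BTreeMap` hunks above restore the bounds that `Iterator::min`/`max` declare (`where Self::Item: Ord`) so the iterator impls don't accidentally *refine* the trait's signatures. A toy illustration of what such a refinement looks like; the `Pick` trait and the `Loose`/`Strict` types are made up for this sketch.

```rust
trait Pick {
    type Item;
    fn first(self) -> Option<Self::Item>
    where
        Self::Item: Ord;
}

struct Loose<T>(Option<T>);
struct Strict<T>(Option<T>);

impl<T> Pick for Loose<T> {
    type Item = T;
    // Compiles even without `where T: Ord`: the impl's signature is more
    // general than the trait's, i.e. a refinement.
    fn first(self) -> Option<T> {
        self.0
    }
}

impl<T> Pick for Strict<T> {
    type Item = T;
    // Mirrors the stdlib change: restate the trait's bound verbatim.
    fn first(self) -> Option<T>
    where
        T: Ord,
    {
        self.0
    }
}

fn main() {
    // Today, calls still go through the trait's declared signature, so
    // `T: Ord` is required either way; the cleanup is about what the impl
    // could be taken to promise, not about current call sites.
    assert_eq!(Loose(Some(3)).first(), Some(3));
    assert_eq!(Strict(Some(3)).first(), Some(3));
}
```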
(The remaining changed files are not shown.)
