Auto merge of #9743 - smoelius:improve-needless-lifetimes, r=Alexendoo
Improve `needless_lifetimes`

This PR makes the following improvements to `needless_lifetimes`.

* It fixes the following false negative, where `foo` is flagged but `bar` is not:
  ```rust
    fn foo<'a>(x: &'a u8, y: &'_ u8) {}

    fn bar<'a>(x: &'a u8, y: &'_ u8, z: &'_ u8) {}
  ```
* It flags more cases generally. Previously, `needless_lifetimes` required *all* of a function's lifetimes to be used only once before flagging any of them. With these changes, each lifetime that is used only once is flagged individually, even if other lifetimes are not (see the sketch after this list).
* Finally, it tries to produce clearer error messages.
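A minimal sketch of the second point (hypothetical function names, not taken from this PR's tests): `'b` is now flagged as elidable even though `'a`, which also appears in the return type, must stay:

```rust
// `'a` ties the output to the first input, so it cannot be removed here;
// `'b` is used exactly once, so the lint now flags it individually.
fn pick<'a, 'b>(x: &'a str, _flag: &'b bool) -> &'a str {
    x
}

// The form the suggestion steers toward: `'b` elided, `'a` kept.
fn pick_elided<'a>(x: &'a str, _flag: &bool) -> &'a str {
    x
}
```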

changelog: fix `needless_lifetimes` false negative involving functions with multiple unnamed lifetimes
changelog: in `needless_lifetimes`, flag individual lifetimes used only once, rather than require all lifetimes to be used only once
changelog: in `needless_lifetimes`, emit "replace with `'_`" warnings only when applicable, and point to a generic argument
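As an illustration of the last changelog entry (a hypothetical example, assuming `Cow` from the standard library): when an elidable lifetime also appears as a generic argument, the new help note can point at that argument and suggest `'_`:

```rust
use std::borrow::Cow;

// `'a` occurs only once among the inputs, so it is elidable; because it also
// appears as the generic argument in `Cow<'a, str>`, the "replace with `'_`"
// help can point at that spot.
fn describe<'a>(x: &'a str) -> Cow<'a, str> {
    Cow::Borrowed(x)
}

// Elided form.
fn describe_elided(x: &str) -> Cow<'_, str> {
    Cow::Borrowed(x)
}
```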
bors committed Nov 1, 2022
2 parents 37d338c + c0d9285 commit 7600535
Showing 50 changed files with 457 additions and 213 deletions.
2 changes: 1 addition & 1 deletion clippy_lints/src/booleans.rs
@@ -481,7 +481,7 @@ impl<'a, 'tcx> Visitor<'tcx> for NonminimalBoolVisitor<'a, 'tcx> {
}
}

fn implements_ord<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>) -> bool {
fn implements_ord(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
let ty = cx.typeck_results().expr_ty(expr);
cx.tcx
.get_diagnostic_item(sym::Ord)
4 changes: 2 additions & 2 deletions clippy_lints/src/dereference.rs
@@ -1232,8 +1232,8 @@ fn is_mixed_projection_predicate<'tcx>(
}
}

fn referent_used_exactly_once<'a, 'tcx>(
cx: &'a LateContext<'tcx>,
fn referent_used_exactly_once<'tcx>(
cx: &LateContext<'tcx>,
possible_borrowers: &mut Vec<(LocalDefId, PossibleBorrowerMap<'tcx, 'tcx>)>,
reference: &Expr<'tcx>,
) -> bool {
6 changes: 3 additions & 3 deletions clippy_lints/src/doc.rs
@@ -336,8 +336,8 @@ impl<'tcx> LateLintPass<'tcx> for DocMarkdown {
}
}

fn lint_for_missing_headers<'tcx>(
cx: &LateContext<'tcx>,
fn lint_for_missing_headers(
cx: &LateContext<'_>,
def_id: LocalDefId,
span: impl Into<MultiSpan> + Copy,
sig: &hir::FnSig<'_>,
@@ -467,7 +467,7 @@ struct DocHeaders {
panics: bool,
}

fn check_attrs<'a>(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, attrs: &'a [Attribute]) -> DocHeaders {
fn check_attrs(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, attrs: &[Attribute]) -> DocHeaders {
use pulldown_cmark::{BrokenLink, CowStr, Options};
/// We don't want the parser to choke on intra doc links. Since we don't
/// actually care about rendering them, just pretend that all broken links are
2 changes: 1 addition & 1 deletion clippy_lints/src/fallible_impl_from.rs
@@ -64,7 +64,7 @@ impl<'tcx> LateLintPass<'tcx> for FallibleImplFrom {
}
}

fn lint_impl_body<'tcx>(cx: &LateContext<'tcx>, impl_span: Span, impl_items: &[hir::ImplItemRef]) {
fn lint_impl_body(cx: &LateContext<'_>, impl_span: Span, impl_items: &[hir::ImplItemRef]) {
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::{Expr, ImplItemKind};

4 changes: 2 additions & 2 deletions clippy_lints/src/implicit_hasher.rs
@@ -66,8 +66,8 @@ impl<'tcx> LateLintPass<'tcx> for ImplicitHasher {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
use rustc_span::BytePos;

fn suggestion<'tcx>(
cx: &LateContext<'tcx>,
fn suggestion(
cx: &LateContext<'_>,
diag: &mut Diagnostic,
generics_span: Span,
generics_suggestion_span: Span,
4 changes: 2 additions & 2 deletions clippy_lints/src/index_refutable_slice.rs
@@ -207,8 +207,8 @@ impl SliceLintInformation {
}
}

fn filter_lintable_slices<'a, 'tcx>(
cx: &'a LateContext<'tcx>,
fn filter_lintable_slices<'tcx>(
cx: &LateContext<'tcx>,
slice_lint_info: FxIndexMap<hir::HirId, SliceLintInformation>,
max_suggested_slice: u64,
scope: &'tcx hir::Expr<'tcx>,
6 changes: 1 addition & 5 deletions clippy_lints/src/indexing_slicing.rs
@@ -171,11 +171,7 @@ impl<'tcx> LateLintPass<'tcx> for IndexingSlicing {

/// Returns a tuple of options with the start and end (exclusive) values of
/// the range. If the start or end is not constant, None is returned.
fn to_const_range<'tcx>(
cx: &LateContext<'tcx>,
range: higher::Range<'_>,
array_size: u128,
) -> (Option<u128>, Option<u128>) {
fn to_const_range(cx: &LateContext<'_>, range: higher::Range<'_>, array_size: u128) -> (Option<u128>, Option<u128>) {
let s = range
.start
.map(|expr| constant(cx, cx.typeck_results(), expr).map(|(c, _)| c));
2 changes: 1 addition & 1 deletion clippy_lints/src/invalid_upcast_comparisons.rs
@@ -38,7 +38,7 @@ declare_clippy_lint! {

declare_lint_pass!(InvalidUpcastComparisons => [INVALID_UPCAST_COMPARISONS]);

fn numeric_cast_precast_bounds<'a>(cx: &LateContext<'_>, expr: &'a Expr<'_>) -> Option<(FullInt, FullInt)> {
fn numeric_cast_precast_bounds(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<(FullInt, FullInt)> {
if let ExprKind::Cast(cast_exp, _) = expr.kind {
let pre_cast_ty = cx.typeck_results().expr_ty(cast_exp);
let cast_ty = cx.typeck_results().expr_ty(expr);
131 changes: 88 additions & 43 deletions clippy_lints/src/lifetimes.rs
@@ -1,4 +1,4 @@
use clippy_utils::diagnostics::span_lint;
use clippy_utils::diagnostics::{span_lint, span_lint_and_then};
use clippy_utils::trait_ref_of_method;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir::intravisit::nested_filter::{self as hir_nested_filter, NestedFilter};
@@ -151,6 +151,7 @@ fn check_fn_inner<'tcx>(
.params
.iter()
.filter(|param| matches!(param.kind, GenericParamKind::Type { .. }));

for typ in types {
for pred in generics.bounds_for_param(cx.tcx.hir().local_def_id(typ.hir_id)) {
if pred.origin == PredicateOrigin::WhereClause {
@@ -187,15 +188,30 @@ }
}
}
}
if could_use_elision(cx, decl, body, trait_sig, generics.params) {
span_lint(

if let Some(elidable_lts) = could_use_elision(cx, decl, body, trait_sig, generics.params) {
let lts = elidable_lts
.iter()
// In principle, the result of the call to `Node::ident` could be `unwrap`ped, as `DefId` should refer to a
// `Node::GenericParam`.
.filter_map(|&(def_id, _)| cx.tcx.hir().get_by_def_id(def_id).ident())
.map(|ident| ident.to_string())
.collect::<Vec<_>>()
.join(", ");

span_lint_and_then(
cx,
NEEDLESS_LIFETIMES,
span.with_hi(decl.output.span().hi()),
"explicit lifetimes given in parameter types where they could be elided \
(or replaced with `'_` if needed by type declaration)",
&format!("the following explicit lifetimes could be elided: {lts}"),
|diag| {
if let Some(span) = elidable_lts.iter().find_map(|&(_, span)| span) {
diag.span_help(span, "replace with `'_` in generic arguments such as here");
}
},
);
}

if report_extra_lifetimes {
self::report_extra_lifetimes(cx, decl, generics);
}
@@ -226,7 +242,7 @@ fn could_use_elision<'tcx>(
body: Option<BodyId>,
trait_sig: Option<&[Ident]>,
named_generics: &'tcx [GenericParam<'_>],
) -> bool {
) -> Option<Vec<(LocalDefId, Option<Span>)>> {
// There are two scenarios where elision works:
// * no output references, all input references have different LT
// * output references, exactly one input reference with same LT
@@ -253,15 +269,15 @@ }
}

if input_visitor.abort() || output_visitor.abort() {
return false;
return None;
}

let input_lts = input_visitor.lts;
let output_lts = output_visitor.lts;

if let Some(trait_sig) = trait_sig {
if explicit_self_type(cx, func, trait_sig.first().copied()) {
return false;
return None;
}
}

@@ -270,22 +286,22 @@

let first_ident = body.params.first().and_then(|param| param.pat.simple_ident());
if explicit_self_type(cx, func, first_ident) {
return false;
return None;
}

let mut checker = BodyLifetimeChecker {
lifetimes_used_in_body: false,
};
checker.visit_expr(body.value);
if checker.lifetimes_used_in_body {
return false;
return None;
}
}

// check for lifetimes from higher scopes
for lt in input_lts.iter().chain(output_lts.iter()) {
if !allowed_lts.contains(lt) {
return false;
return None;
}
}

@@ -301,48 +317,45 @@ fn could_use_elision<'tcx>(
for lt in input_visitor.nested_elision_site_lts {
if let RefLt::Named(def_id) = lt {
if allowed_lts.contains(&cx.tcx.item_name(def_id.to_def_id())) {
return false;
return None;
}
}
}
for lt in output_visitor.nested_elision_site_lts {
if let RefLt::Named(def_id) = lt {
if allowed_lts.contains(&cx.tcx.item_name(def_id.to_def_id())) {
return false;
return None;
}
}
}
}

// no input lifetimes? easy case!
if input_lts.is_empty() {
false
} else if output_lts.is_empty() {
// no output lifetimes, check distinctness of input lifetimes
// A lifetime can be newly elided if:
// - It occurs only once among the inputs.
// - If there are multiple input lifetimes, then the newly elided lifetime does not occur among the
// outputs (because eliding such an lifetime would create an ambiguity).
let elidable_lts = named_lifetime_occurrences(&input_lts)
.into_iter()
.filter_map(|(def_id, occurrences)| {
if occurrences == 1 && (input_lts.len() == 1 || !output_lts.contains(&RefLt::Named(def_id))) {
Some((
def_id,
input_visitor
.lifetime_generic_arg_spans
.get(&def_id)
.or_else(|| output_visitor.lifetime_generic_arg_spans.get(&def_id))
.copied(),
))
} else {
None
}
})
.collect::<Vec<_>>();

// only unnamed and static, ok
let unnamed_and_static = input_lts.iter().all(|lt| *lt == RefLt::Unnamed || *lt == RefLt::Static);
if unnamed_and_static {
return false;
}
// we have no output reference, so we only need all distinct lifetimes
input_lts.len() == unique_lifetimes(&input_lts)
if elidable_lts.is_empty() {
None
} else {
// we have output references, so we need one input reference,
// and all output lifetimes must be the same
if unique_lifetimes(&output_lts) > 1 {
return false;
}
if input_lts.len() == 1 {
match (&input_lts[0], &output_lts[0]) {
(&RefLt::Named(n1), &RefLt::Named(n2)) if n1 == n2 => true,
(&RefLt::Named(_), &RefLt::Unnamed) => true,
_ => false, /* already elided, different named lifetimes
* or something static going on */
}
} else {
false
}
Some(elidable_lts)
}
}

@@ -358,10 +371,24 @@ fn allowed_lts_from(tcx: TyCtxt<'_>, named_generics: &[GenericParam<'_>]) -> FxH
allowed_lts
}

/// Number of unique lifetimes in the given vector.
/// Number of times each named lifetime occurs in the given slice. Returns a vector to preserve
/// relative order.
#[must_use]
fn unique_lifetimes(lts: &[RefLt]) -> usize {
lts.iter().collect::<FxHashSet<_>>().len()
fn named_lifetime_occurrences(lts: &[RefLt]) -> Vec<(LocalDefId, usize)> {
let mut occurrences = Vec::new();
for lt in lts {
if let &RefLt::Named(curr_def_id) = lt {
if let Some(pair) = occurrences
.iter_mut()
.find(|(prev_def_id, _)| *prev_def_id == curr_def_id)
{
pair.1 += 1;
} else {
occurrences.push((curr_def_id, 1));
}
}
}
occurrences
}

const CLOSURE_TRAIT_BOUNDS: [LangItem; 3] = [LangItem::Fn, LangItem::FnMut, LangItem::FnOnce];
@@ -370,6 +397,7 @@ const CLOSURE_TRAIT_BOUNDS: [LangItem; 3] = [LangItem::Fn, LangItem::FnMut, Lang
struct RefVisitor<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
lts: Vec<RefLt>,
lifetime_generic_arg_spans: FxHashMap<LocalDefId, Span>,
nested_elision_site_lts: Vec<RefLt>,
unelided_trait_object_lifetime: bool,
}
@@ -379,6 +407,7 @@ impl<'a, 'tcx> RefVisitor<'a, 'tcx> {
Self {
cx,
lts: Vec::new(),
lifetime_generic_arg_spans: FxHashMap::default(),
nested_elision_site_lts: Vec::new(),
unelided_trait_object_lifetime: false,
}
@@ -472,6 +501,22 @@ impl<'a, 'tcx> Visitor<'tcx> for RefVisitor<'a, 'tcx> {
_ => walk_ty(self, ty),
}
}

fn visit_generic_arg(&mut self, generic_arg: &'tcx GenericArg<'tcx>) {
if let GenericArg::Lifetime(l) = generic_arg
&& let LifetimeName::Param(def_id, _) = l.name
{
self.lifetime_generic_arg_spans.entry(def_id).or_insert(l.span);
}
// Replace with `walk_generic_arg` if/when https://github.com/rust-lang/rust/pull/103692 lands.
// walk_generic_arg(self, generic_arg);
match generic_arg {
GenericArg::Lifetime(lt) => self.visit_lifetime(lt),
GenericArg::Type(ty) => self.visit_ty(ty),
GenericArg::Const(ct) => self.visit_anon_const(&ct.value),
GenericArg::Infer(inf) => self.visit_infer(inf),
}
}
}

/// Are any lifetimes mentioned in the `where` clause? If so, we don't try to
4 changes: 2 additions & 2 deletions clippy_lints/src/loops/mut_range_bound.rs
@@ -52,8 +52,8 @@ fn check_for_mutability(cx: &LateContext<'_>, bound: &Expr<'_>) -> Option<HirId>
None
}

fn check_for_mutation<'tcx>(
cx: &LateContext<'tcx>,
fn check_for_mutation(
cx: &LateContext<'_>,
body: &Expr<'_>,
bound_id_start: Option<HirId>,
bound_id_end: Option<HirId>,
2 changes: 1 addition & 1 deletion clippy_lints/src/map_unit_fn.rs
@@ -119,7 +119,7 @@ fn is_unit_expression(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool {
/// semicolons, which causes problems when generating a suggestion. Given an
/// expression that evaluates to '()' or '!', recursively remove useless braces
/// and semi-colons until is suitable for including in the suggestion template
fn reduce_unit_expression<'a>(cx: &LateContext<'_>, expr: &'a hir::Expr<'_>) -> Option<Span> {
fn reduce_unit_expression(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> Option<Span> {
if !is_unit_expression(cx, expr) {
return None;
}
2 changes: 1 addition & 1 deletion clippy_lints/src/matches/manual_filter.rs
@@ -62,7 +62,7 @@ fn peels_blocks_incl_unsafe<'a>(expr: &'a Expr<'a>) -> &'a Expr<'a> {
// <expr>
// }
// Returns true if <expr> resolves to `Some(x)`, `false` otherwise
fn is_some_expr<'tcx>(cx: &LateContext<'_>, target: HirId, ctxt: SyntaxContext, expr: &'tcx Expr<'_>) -> bool {
fn is_some_expr(cx: &LateContext<'_>, target: HirId, ctxt: SyntaxContext, expr: &Expr<'_>) -> bool {
if let Some(inner_expr) = peels_blocks_incl_unsafe_opt(expr) {
// there can be not statements in the block as they would be removed when switching to `.filter`
if let ExprKind::Call(callee, [arg]) = inner_expr.kind {
6 changes: 3 additions & 3 deletions clippy_lints/src/matches/significant_drop_in_scrutinee.rs
@@ -83,8 +83,8 @@ fn set_diagnostic<'tcx>(diag: &mut Diagnostic, cx: &LateContext<'tcx>, expr: &'t

/// If the expression is an `ExprKind::Match`, check if the scrutinee has a significant drop that
/// may have a surprising lifetime.
fn has_significant_drop_in_scrutinee<'tcx, 'a>(
cx: &'a LateContext<'tcx>,
fn has_significant_drop_in_scrutinee<'tcx>(
cx: &LateContext<'tcx>,
scrutinee: &'tcx Expr<'tcx>,
source: MatchSource,
) -> Option<(Vec<FoundSigDrop>, &'static str)> {
@@ -377,7 +377,7 @@ impl<'a, 'tcx> ArmSigDropHelper<'a, 'tcx> {
}
}

fn has_significant_drop_in_arms<'tcx, 'a>(cx: &'a LateContext<'tcx>, arms: &'tcx [Arm<'_>]) -> FxHashSet<Span> {
fn has_significant_drop_in_arms<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'_>]) -> FxHashSet<Span> {
let mut helper = ArmSigDropHelper::new(cx);
for arm in arms {
helper.visit_expr(arm.body);
2 changes: 1 addition & 1 deletion clippy_lints/src/matches/single_match.rs
@@ -153,7 +153,7 @@ fn pat_in_candidate_enum<'a>(cx: &LateContext<'a>, ty: Ty<'a>, pat: &Pat<'_>) ->
}

/// Returns `true` if the given type is an enum we know won't be expanded in the future
fn in_candidate_enum<'a>(cx: &LateContext<'a>, ty: Ty<'_>) -> bool {
fn in_candidate_enum(cx: &LateContext<'_>, ty: Ty<'_>) -> bool {
// list of candidate `Enum`s we know will never get any more members
let candidates = [sym::Cow, sym::Option, sym::Result];
