Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

More cosmetic fixes for upcoming Clippy lints. #1771

Merged
merged 1 commit into the base branch on Jan 10, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
59 changes: 13 additions & 46 deletions src/aggregation/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1208,7 +1208,7 @@ mod tests {
text_field_many_terms => many_terms_data.choose(&mut rng).unwrap().to_string(),
text_field_few_terms => few_terms_data.choose(&mut rng).unwrap().to_string(),
score_field => val as u64,
score_field_f64 => val as f64,
score_field_f64 => val,
score_field_i64 => val as i64,
))?;
}
Expand Down Expand Up @@ -1250,10 +1250,7 @@ mod tests {
let collector = AggregationCollector::from_aggs(agg_req_1, None, index.schema());

let searcher = reader.searcher();
let agg_res: AggregationResults =
searcher.search(&term_query, &collector).unwrap().into();

agg_res
searcher.search(&term_query, &collector).unwrap()
});
}

Expand Down Expand Up @@ -1281,10 +1278,7 @@ mod tests {
let collector = AggregationCollector::from_aggs(agg_req_1, None, index.schema());

let searcher = reader.searcher();
let agg_res: AggregationResults =
searcher.search(&term_query, &collector).unwrap().into();

agg_res
searcher.search(&term_query, &collector).unwrap()
});
}

Expand Down Expand Up @@ -1312,10 +1306,7 @@ mod tests {
let collector = AggregationCollector::from_aggs(agg_req_1, None, index.schema());

let searcher = reader.searcher();
let agg_res: AggregationResults =
searcher.search(&term_query, &collector).unwrap().into();

agg_res
searcher.search(&term_query, &collector).unwrap()
});
}

Expand Down Expand Up @@ -1351,10 +1342,7 @@ mod tests {
let collector = AggregationCollector::from_aggs(agg_req_1, None, index.schema());

let searcher = reader.searcher();
let agg_res: AggregationResults =
searcher.search(&term_query, &collector).unwrap().into();

agg_res
searcher.search(&term_query, &collector).unwrap()
});
}

Expand All @@ -1380,10 +1368,7 @@ mod tests {
let collector = AggregationCollector::from_aggs(agg_req, None, index.schema());

let searcher = reader.searcher();
let agg_res: AggregationResults =
searcher.search(&AllQuery, &collector).unwrap().into();

agg_res
searcher.search(&AllQuery, &collector).unwrap()
});
}

Expand All @@ -1409,10 +1394,7 @@ mod tests {
let collector = AggregationCollector::from_aggs(agg_req, None, index.schema());

let searcher = reader.searcher();
let agg_res: AggregationResults =
searcher.search(&AllQuery, &collector).unwrap().into();

agg_res
searcher.search(&AllQuery, &collector).unwrap()
});
}

Expand Down Expand Up @@ -1446,10 +1428,7 @@ mod tests {
let collector = AggregationCollector::from_aggs(agg_req_1, None, index.schema());

let searcher = reader.searcher();
let agg_res: AggregationResults =
searcher.search(&AllQuery, &collector).unwrap().into();

agg_res
searcher.search(&AllQuery, &collector).unwrap()
});
}

Expand Down Expand Up @@ -1481,10 +1460,7 @@ mod tests {
let collector = AggregationCollector::from_aggs(agg_req_1, None, index.schema());

let searcher = reader.searcher();
let agg_res: AggregationResults =
searcher.search(&AllQuery, &collector).unwrap().into();

agg_res
searcher.search(&AllQuery, &collector).unwrap()
});
}

Expand Down Expand Up @@ -1520,10 +1496,7 @@ mod tests {
let collector = AggregationCollector::from_aggs(agg_req_1, None, index.schema());

let searcher = reader.searcher();
let agg_res: AggregationResults =
searcher.search(&AllQuery, &collector).unwrap().into();

agg_res
searcher.search(&AllQuery, &collector).unwrap()
});
}

Expand All @@ -1550,10 +1523,7 @@ mod tests {
let collector = AggregationCollector::from_aggs(agg_req_1, None, index.schema());

let searcher = reader.searcher();
let agg_res: AggregationResults =
searcher.search(&AllQuery, &collector).unwrap().into();

agg_res
searcher.search(&AllQuery, &collector).unwrap()
});
}

Expand Down Expand Up @@ -1597,7 +1567,7 @@ mod tests {
],
..Default::default()
}),
sub_aggregation: sub_agg_req_1.clone(),
sub_aggregation: sub_agg_req_1,
}),
),
]
Expand All @@ -1607,10 +1577,7 @@ mod tests {
let collector = AggregationCollector::from_aggs(agg_req_1, None, index.schema());

let searcher = reader.searcher();
let agg_res: AggregationResults =
searcher.search(&term_query, &collector).unwrap().into();

agg_res
searcher.search(&term_query, &collector).unwrap()
});
}
}
Expand Down
2 changes: 1 addition & 1 deletion src/fastfield/alive_bitset.rs
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,7 @@ mod bench {

fn get_alive() -> Vec<u32> {
let mut data = (0..1_000_000_u32).collect::<Vec<u32>>();
for _ in 0..(1_000_000) * 1 / 8 {
for _ in 0..1_000_000 / 8 {
remove_rand(&mut data);
}
data
Expand Down
2 changes: 1 addition & 1 deletion src/fastfield/multivalued/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -525,7 +525,7 @@ mod bench {
serializer.close().unwrap();
field
};
let file = directory.open_read(&path).unwrap();
let file = directory.open_read(path).unwrap();
{
let fast_fields_composite = CompositeFile::open(&file).unwrap();
let data_idx = fast_fields_composite
Expand Down
17 changes: 7 additions & 10 deletions src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,17 +1,14 @@
#![doc(html_logo_url = "http://fulmicoton.com/tantivy-logo/tantivy-logo.png")]
#![cfg_attr(all(feature = "unstable", test), feature(test))]
#![cfg_attr(
feature = "cargo-clippy",
allow(
clippy::module_inception,
clippy::needless_range_loop,
clippy::bool_assert_comparison
)
)]
#![doc(test(attr(allow(unused_variables), deny(warnings))))]
#![warn(missing_docs)]
#![allow(clippy::len_without_is_empty)]
#![allow(clippy::derive_partial_eq_without_eq)]
#![allow(
clippy::len_without_is_empty,
clippy::derive_partial_eq_without_eq,
clippy::module_inception,
clippy::needless_range_loop,
clippy::bool_assert_comparison
)]

//! # `tantivy`
//!
Expand Down
16 changes: 8 additions & 8 deletions src/postings/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -631,7 +631,7 @@ mod bench {
let mut segment_postings = segment_reader
.inverted_index(TERM_A.field())
.unwrap()
.read_postings(&*TERM_A, IndexRecordOption::Basic)
.read_postings(&TERM_A, IndexRecordOption::Basic)
.unwrap()
.unwrap();
while segment_postings.advance() != TERMINATED {}
Expand All @@ -647,25 +647,25 @@ mod bench {
let segment_postings_a = segment_reader
.inverted_index(TERM_A.field())
.unwrap()
.read_postings(&*TERM_A, IndexRecordOption::Basic)
.read_postings(&TERM_A, IndexRecordOption::Basic)
.unwrap()
.unwrap();
let segment_postings_b = segment_reader
.inverted_index(TERM_B.field())
.unwrap()
.read_postings(&*TERM_B, IndexRecordOption::Basic)
.read_postings(&TERM_B, IndexRecordOption::Basic)
.unwrap()
.unwrap();
let segment_postings_c = segment_reader
.inverted_index(TERM_C.field())
.unwrap()
.read_postings(&*TERM_C, IndexRecordOption::Basic)
.read_postings(&TERM_C, IndexRecordOption::Basic)
.unwrap()
.unwrap();
let segment_postings_d = segment_reader
.inverted_index(TERM_D.field())
.unwrap()
.read_postings(&*TERM_D, IndexRecordOption::Basic)
.read_postings(&TERM_D, IndexRecordOption::Basic)
.unwrap()
.unwrap();
let mut intersection = Intersection::new(vec![
Expand All @@ -687,7 +687,7 @@ mod bench {
let mut segment_postings = segment_reader
.inverted_index(TERM_A.field())
.unwrap()
.read_postings(&*TERM_A, IndexRecordOption::Basic)
.read_postings(&TERM_A, IndexRecordOption::Basic)
.unwrap()
.unwrap();

Expand All @@ -705,7 +705,7 @@ mod bench {
let mut segment_postings = segment_reader
.inverted_index(TERM_A.field())
.unwrap()
.read_postings(&*TERM_A, IndexRecordOption::Basic)
.read_postings(&TERM_A, IndexRecordOption::Basic)
.unwrap()
.unwrap();
for doc in &existing_docs {
Expand Down Expand Up @@ -746,7 +746,7 @@ mod bench {
let mut segment_postings = segment_reader
.inverted_index(TERM_A.field())
.unwrap()
.read_postings(&*TERM_A, IndexRecordOption::Basic)
.read_postings(&TERM_A, IndexRecordOption::Basic)
.unwrap()
.unwrap();
let mut s = 0u32;
Expand Down
5 changes: 2 additions & 3 deletions src/query/range_query/range_query_ip_fastfield.rs
Original file line number Diff line number Diff line change
Expand Up @@ -313,8 +313,7 @@ mod bench {
})
.collect();

let index = create_index_from_docs(&docs);
index
create_index_from_docs(&docs)
}

fn get_90_percent() -> RangeInclusive<Ipv6Addr> {
Expand Down Expand Up @@ -353,7 +352,7 @@ mod bench {

let query = gen_query_inclusive(ip_range.start(), ip_range.end());
let query_from_text = |text: &str| {
QueryParser::for_index(&index, vec![])
QueryParser::for_index(index, vec![])
.parse_query(text)
.unwrap()
};
Expand Down
5 changes: 2 additions & 3 deletions src/query/range_query/range_query_u64_fastfield.rs
Original file line number Diff line number Diff line change
Expand Up @@ -358,8 +358,7 @@ mod bench {
})
.collect();

let index = create_index_from_docs(&docs);
index
create_index_from_docs(&docs)
}

fn get_90_percent() -> RangeInclusive<u64> {
Expand Down Expand Up @@ -392,7 +391,7 @@ mod bench {

let query = gen_query_inclusive(id_range.start(), id_range.end());
let query_from_text = |text: &str| {
QueryParser::for_index(&index, vec![])
QueryParser::for_index(index, vec![])
.parse_query(text)
.unwrap()
};
Expand Down