Enable lints for tests only running optimized (#6664)
* enable linting optimized-only tests

* fix automatically fixable or obvious lints

* fix suspicious_open_options by removing manual options

* fix `await_holding_lock`s (see the sketch below)

* avoid failing lint due to now disabled `#[cfg(debug_assertions)]`

* reduce future sizes in tests

* fix accidentally flipped assert logic

* restore holding lock for web3signer download

* Merge branch 'unstable' into lint-opt-tests
dknopik authored Dec 17, 2024
1 parent 847c801 commit 02cb2d6
Showing 34 changed files with 576 additions and 578 deletions.
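
The `await_holding_lock` and `suspicious_open_options` fixes listed above land in files outside the portion of the diff that loaded below. For the former, Clippy flags any `std::sync` or `parking_lot` mutex guard that is still alive across an `.await`, since the task can stay suspended (and block other tasks) while the lock is held. A minimal sketch of the pattern and the usual fix, using hypothetical types rather than code from this commit:

```rust
use std::sync::Mutex;

// Hypothetical shared counter, not taken from this commit.
struct Counter {
    value: Mutex<u64>,
}

async fn do_other_work() {}

impl Counter {
    // Flagged by `clippy::await_holding_lock`: the guard stays alive across
    // `.await`, so the lock is held for as long as the task stays suspended.
    async fn bump_and_wait_bad(&self) {
        let mut guard = self.value.lock().unwrap();
        *guard += 1;
        do_other_work().await; // still holding the lock here
    }

    // Usual fix: finish with the guard (or `drop` it) before awaiting.
    async fn bump_and_wait_good(&self) {
        {
            let mut guard = self.value.lock().unwrap();
            *guard += 1;
        } // guard dropped here
        do_other_work().await;
    }
}

fn main() {
    let counter = Counter { value: Mutex::new(0) };
    // Futures are lazy; constructing them here just type-checks the sketch.
    let _bad = counter.bump_and_wait_bad();
    let _good = counter.bump_and_wait_good();
}
```

The commit message also notes that one such hold was deliberately restored for the web3signer download.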
2 changes: 1 addition & 1 deletion Makefile
@@ -204,7 +204,7 @@ test-full: cargo-fmt test-release test-debug test-ef test-exec-engine
# Lints the code for bad style and potentially unsafe arithmetic using Clippy.
# Clippy lints are opt-in per-crate for now. By default, everything is allowed except for performance and correctness lints.
lint:
cargo clippy --workspace --benches --tests $(EXTRA_CLIPPY_OPTS) --features "$(TEST_FEATURES)" -- \
RUSTFLAGS="-C debug-assertions=no $(RUSTFLAGS)" cargo clippy --workspace --benches --tests $(EXTRA_CLIPPY_OPTS) --features "$(TEST_FEATURES)" -- \
-D clippy::fn_to_numeric_cast_any \
-D clippy::manual_let_else \
-D clippy::large_stack_frames \
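
Setting `RUSTFLAGS="-C debug-assertions=no"` for the lint run is what actually "enables lints for tests only running optimized": `cfg(debug_assertions)` now evaluates to false, so Clippy compiles and checks the `#[cfg(not(debug_assertions))]` code paths instead. The trade-off called out in the commit message is that anything behind `#[cfg(debug_assertions)]` becomes invisible to this pass. A hedged sketch of that gating, with illustrative names only:

```rust
// Compiled only when debug assertions are on (the default dev/test profile).
// Under `-C debug-assertions=no` this item disappears from the lint run.
#[cfg(debug_assertions)]
fn sanity_check(values: &[u64]) {
    assert!(
        values.windows(2).all(|w| w[0] <= w[1]),
        "expected sorted input"
    );
}

// Compiled only in optimized builds; before this change, Clippy never saw
// bodies like this one because lints always ran with debug assertions on.
#[cfg(not(debug_assertions))]
fn sanity_check(_values: &[u64]) {}

fn main() {
    sanity_check(&[1, 2, 3]);
}
```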
2 changes: 1 addition & 1 deletion account_manager/src/validator/exit.rs
@@ -409,6 +409,6 @@ mod tests {
)
.unwrap();

assert_eq!(expected_pk, kp.pk.into());
assert_eq!(expected_pk, kp.pk);
}
}
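
Dropping the `.into()` on `kp.pk` looks like a `clippy::useless_conversion` fix: the blanket `impl From<T> for T` makes identity conversions compile, and the lint flags them because they do nothing. A minimal reproduction with made-up values:

```rust
fn main() {
    let port: u16 = 8080;
    // Warns with `clippy::useless_conversion`: u16 -> u16 is an identity conversion.
    let converted: u16 = port.into();
    // Fix: just use the value directly.
    let direct: u16 = port;
    assert_eq!(converted, direct);
}
```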
2 changes: 1 addition & 1 deletion beacon_node/beacon_chain/src/shuffling_cache.rs
@@ -512,7 +512,7 @@ mod test {
}

assert!(
!cache.contains(&shuffling_id_and_committee_caches.get(0).unwrap().0),
!cache.contains(&shuffling_id_and_committee_caches.first().unwrap().0),
"should not contain oldest epoch shuffling id"
);
assert_eq!(
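
The `shuffling_cache.rs` change swaps `.get(0)` for `.first()`, which is what `clippy::get_first` suggests; both return `Option<&T>`, so behaviour is unchanged. For example:

```rust
fn main() {
    let epochs = vec![10u64, 11, 12];
    // `clippy::get_first` prefers `first()` over `get(0)`; the two are equivalent.
    assert_eq!(epochs.get(0), epochs.first());
    assert_eq!(epochs.first(), Some(&10));
}
```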
4 changes: 2 additions & 2 deletions beacon_node/beacon_chain/tests/attestation_production.rs
@@ -70,12 +70,12 @@ async fn produces_attestations_from_attestation_simulator_service() {
}

// Compare the prometheus metrics that evaluates the performance of the unaggregated attestations
let hit_prometheus_metrics = vec![
let hit_prometheus_metrics = [
metrics::VALIDATOR_MONITOR_ATTESTATION_SIMULATOR_HEAD_ATTESTER_HIT_TOTAL,
metrics::VALIDATOR_MONITOR_ATTESTATION_SIMULATOR_TARGET_ATTESTER_HIT_TOTAL,
metrics::VALIDATOR_MONITOR_ATTESTATION_SIMULATOR_SOURCE_ATTESTER_HIT_TOTAL,
];
let miss_prometheus_metrics = vec![
let miss_prometheus_metrics = [
metrics::VALIDATOR_MONITOR_ATTESTATION_SIMULATOR_HEAD_ATTESTER_MISS_TOTAL,
metrics::VALIDATOR_MONITOR_ATTESTATION_SIMULATOR_TARGET_ATTESTER_MISS_TOTAL,
metrics::VALIDATOR_MONITOR_ATTESTATION_SIMULATOR_SOURCE_ATTESTER_MISS_TOTAL,
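
Replacing `vec![...]` with a plain array is the standard fix for `clippy::useless_vec`: when a fixed list is only iterated, the heap allocation buys nothing. Roughly, with hypothetical metric names:

```rust
fn main() {
    // Before: `let metrics = vec!["head_hit", "target_hit", "source_hit"];`
    // which `clippy::useless_vec` flags because the Vec never needs to grow.
    let metrics = ["head_hit", "target_hit", "source_hit"];
    for metric in &metrics {
        println!("checking {metric}");
    }
}
```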
42 changes: 25 additions & 17 deletions beacon_node/beacon_chain/tests/attestation_verification.rs
@@ -431,10 +431,12 @@ impl GossipTester {
.chain
.verify_aggregated_attestation_for_gossip(&aggregate)
.err()
.expect(&format!(
"{} should error during verify_aggregated_attestation_for_gossip",
desc
));
.unwrap_or_else(|| {
panic!(
"{} should error during verify_aggregated_attestation_for_gossip",
desc
)
});
inspect_err(&self, err);

/*
@@ -449,10 +451,12 @@ impl GossipTester {
.unwrap();

assert_eq!(results.len(), 2);
let batch_err = results.pop().unwrap().err().expect(&format!(
"{} should error during batch_verify_aggregated_attestations_for_gossip",
desc
));
let batch_err = results.pop().unwrap().err().unwrap_or_else(|| {
panic!(
"{} should error during batch_verify_aggregated_attestations_for_gossip",
desc
)
});
inspect_err(&self, batch_err);

self
@@ -475,10 +479,12 @@ impl GossipTester {
.chain
.verify_unaggregated_attestation_for_gossip(&attn, Some(subnet_id))
.err()
.expect(&format!(
"{} should error during verify_unaggregated_attestation_for_gossip",
desc
));
.unwrap_or_else(|| {
panic!(
"{} should error during verify_unaggregated_attestation_for_gossip",
desc
)
});
inspect_err(&self, err);

/*
@@ -496,10 +502,12 @@ impl GossipTester {
)
.unwrap();
assert_eq!(results.len(), 2);
let batch_err = results.pop().unwrap().err().expect(&format!(
"{} should error during batch_verify_unaggregated_attestations_for_gossip",
desc
));
let batch_err = results.pop().unwrap().err().unwrap_or_else(|| {
panic!(
"{} should error during batch_verify_unaggregated_attestations_for_gossip",
desc
)
});
inspect_err(&self, batch_err);

self
@@ -816,7 +824,7 @@ async fn aggregated_gossip_verification() {
let (index, sk) = tester.non_aggregator();
*a = SignedAggregateAndProof::from_aggregate(
index as u64,
tester.valid_aggregate.message().aggregate().clone(),
tester.valid_aggregate.message().aggregate(),
None,
&sk,
&chain.canonical_head.cached_head().head_fork(),
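
The repeated rewrite of `.err().expect(&format!(...))` into `.unwrap_or_else(|| panic!(...))` is the suggestion made by `clippy::expect_fun_call`: arguments to `expect` are evaluated eagerly, so the `format!` string was allocated even on the success path, whereas the closure only runs if the panic actually happens. A condensed sketch with a hypothetical helper:

```rust
fn expect_failure(desc: &str, outcome: Result<(), String>) -> String {
    // Before: `outcome.err().expect(&format!("{desc} should error"))`, which
    // `clippy::expect_fun_call` flags because `format!` runs unconditionally.
    // After: the message is only built if `err()` is actually `None`.
    outcome
        .err()
        .unwrap_or_else(|| panic!("{desc} should error"))
}

fn main() {
    let msg = expect_failure("invalid signature", Err("bad signature".into()));
    println!("got expected error: {msg}");
}
```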
14 changes: 3 additions & 11 deletions beacon_node/beacon_chain/tests/bellatrix.rs
@@ -82,7 +82,7 @@ async fn merge_with_terminal_block_hash_override() {

let block = &harness.chain.head_snapshot().beacon_block;

let execution_payload = block.message().body().execution_payload().unwrap().clone();
let execution_payload = block.message().body().execution_payload().unwrap();
if i == 0 {
assert_eq!(execution_payload.block_hash(), genesis_pow_block_hash);
}
@@ -133,7 +133,7 @@ async fn base_altair_bellatrix_with_terminal_block_after_fork() {
* Do the Bellatrix fork, without a terminal PoW block.
*/

harness.extend_to_slot(bellatrix_fork_slot).await;
Box::pin(harness.extend_to_slot(bellatrix_fork_slot)).await;

let bellatrix_head = &harness.chain.head_snapshot().beacon_block;
assert!(bellatrix_head.as_bellatrix().is_ok());
@@ -207,15 +207,7 @@ async fn base_altair_bellatrix_with_terminal_block_after_fork() {
harness.extend_slots(1).await;

let block = &harness.chain.head_snapshot().beacon_block;
execution_payloads.push(
block
.message()
.body()
.execution_payload()
.unwrap()
.clone()
.into(),
);
execution_payloads.push(block.message().body().execution_payload().unwrap().into());
}

verify_execution_payload_chain(execution_payloads.as_slice());
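
The `Box::pin(...)` wrappers added here, and in `capella.rs` and `store_tests.rs` below, are the "reduce future sizes in tests" item: awaiting a helper like `extend_to_slot` inline embeds its entire state machine in the caller's future, which is the kind of thing the `clippy::large_stack_frames` deny in the Makefile hunk above starts complaining about once these tests are linted. Boxing keeps only a pointer in the outer future. A hedged sketch with made-up sizes and helper names:

```rust
async fn yield_point() {}

// Stand-in for a test helper whose future holds a lot of state across awaits
// (hypothetical helper and size, not from this repository).
async fn extend_chain() -> u64 {
    let scratch = [0u8; 16 * 1024]; // large working state...
    yield_point().await; // ...kept alive across a suspension point
    scratch.iter().map(|b| u64::from(*b)).sum()
}

async fn run_test() {
    // Awaiting inline would embed the inner future (>= 16 KiB of state) in
    // `run_test`'s own future:
    // let total = extend_chain().await;

    // Boxing moves that state to the heap; the outer future stores a pointer.
    let total = Box::pin(extend_chain()).await;
    assert_eq!(total, 0);
}

fn main() {
    let outer = run_test();
    // With the boxed await this prints a small number; with the inline await
    // it would be at least 16 KiB.
    println!("outer future size: {} bytes", std::mem::size_of_val(&outer));
}
```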
8 changes: 4 additions & 4 deletions beacon_node/beacon_chain/tests/capella.rs
@@ -54,7 +54,7 @@ async fn base_altair_bellatrix_capella() {
/*
* Do the Altair fork.
*/
harness.extend_to_slot(altair_fork_slot).await;
Box::pin(harness.extend_to_slot(altair_fork_slot)).await;

let altair_head = &harness.chain.head_snapshot().beacon_block;
assert!(altair_head.as_altair().is_ok());
@@ -63,7 +63,7 @@
/*
* Do the Bellatrix fork, without a terminal PoW block.
*/
harness.extend_to_slot(bellatrix_fork_slot).await;
Box::pin(harness.extend_to_slot(bellatrix_fork_slot)).await;

let bellatrix_head = &harness.chain.head_snapshot().beacon_block;
assert!(bellatrix_head.as_bellatrix().is_ok());
@@ -81,7 +81,7 @@
/*
* Next Bellatrix block shouldn't include an exec payload.
*/
harness.extend_slots(1).await;
Box::pin(harness.extend_slots(1)).await;

let one_after_bellatrix_head = &harness.chain.head_snapshot().beacon_block;
assert!(
@@ -112,7 +112,7 @@
terminal_block.timestamp = timestamp;
}
});
harness.extend_slots(1).await;
Box::pin(harness.extend_slots(1)).await;

let two_after_bellatrix_head = &harness.chain.head_snapshot().beacon_block;
assert!(
4 changes: 2 additions & 2 deletions beacon_node/beacon_chain/tests/payload_invalidation.rs
@@ -413,7 +413,7 @@ async fn invalid_payload_invalidates_parent() {
rig.import_block(Payload::Valid).await; // Import a valid transition block.
rig.move_to_first_justification(Payload::Syncing).await;

let roots = vec![
let roots = [
rig.import_block(Payload::Syncing).await,
rig.import_block(Payload::Syncing).await,
rig.import_block(Payload::Syncing).await,
@@ -1052,7 +1052,7 @@ async fn invalid_parent() {

// Ensure the block built atop an invalid payload is invalid for gossip.
assert!(matches!(
rig.harness.chain.clone().verify_block_for_gossip(block.clone().into()).await,
rig.harness.chain.clone().verify_block_for_gossip(block.clone()).await,
Err(BlockError::ParentExecutionPayloadInvalid { parent_root: invalid_root })
if invalid_root == parent_root
));
89 changes: 48 additions & 41 deletions beacon_node/beacon_chain/tests/store_tests.rs
@@ -330,7 +330,7 @@ async fn long_skip() {
final_blocks as usize,
BlockStrategy::ForkCanonicalChainAt {
previous_slot: Slot::new(initial_blocks),
first_slot: Slot::new(initial_blocks + skip_slots as u64 + 1),
first_slot: Slot::new(initial_blocks + skip_slots + 1),
},
AttestationStrategy::AllValidators,
)
@@ -381,8 +381,7 @@ async fn randao_genesis_storage() {
.beacon_state
.randao_mixes()
.iter()
.find(|x| **x == genesis_value)
.is_some());
.any(|x| *x == genesis_value));

// Then upon adding one more block, it isn't
harness.advance_slot();
Expand All @@ -393,14 +392,13 @@ async fn randao_genesis_storage() {
AttestationStrategy::AllValidators,
)
.await;
assert!(harness
assert!(!harness
.chain
.head_snapshot()
.beacon_state
.randao_mixes()
.iter()
.find(|x| **x == genesis_value)
.is_none());
.any(|x| *x == genesis_value));

check_finalization(&harness, num_slots);
check_split_slot(&harness, store);
@@ -1062,7 +1060,7 @@ fn check_shuffling_compatible(
let current_epoch_shuffling_is_compatible = harness.chain.shuffling_is_compatible(
&block_root,
head_state.current_epoch(),
&head_state,
head_state,
);

// Check for consistency with the more expensive shuffling lookup.
@@ -1102,7 +1100,7 @@ fn check_shuffling_compatible(
let previous_epoch_shuffling_is_compatible = harness.chain.shuffling_is_compatible(
&block_root,
head_state.previous_epoch(),
&head_state,
head_state,
);
harness
.chain
@@ -1130,14 +1128,11 @@ fn check_shuffling_compatible(

// Targeting two epochs before the current epoch should always return false
if head_state.current_epoch() >= 2 {
assert_eq!(
harness.chain.shuffling_is_compatible(
&block_root,
head_state.current_epoch() - 2,
&head_state
),
false
);
assert!(!harness.chain.shuffling_is_compatible(
&block_root,
head_state.current_epoch() - 2,
head_state
));
}
}
}
@@ -1559,14 +1554,13 @@ async fn prunes_fork_growing_past_youngest_finalized_checkpoint() {
.map(Into::into)
.collect();
let canonical_state_root = canonical_state.update_tree_hash_cache().unwrap();
let (canonical_blocks, _, _, _) = rig
.add_attested_blocks_at_slots(
canonical_state,
canonical_state_root,
&canonical_slots,
&honest_validators,
)
.await;
let (canonical_blocks, _, _, _) = Box::pin(rig.add_attested_blocks_at_slots(
canonical_state,
canonical_state_root,
&canonical_slots,
&honest_validators,
))
.await;

// Postconditions
let canonical_blocks: HashMap<Slot, SignedBeaconBlockHash> = canonical_blocks_zeroth_epoch
@@ -1939,7 +1933,7 @@ async fn prune_single_block_long_skip() {
2 * slots_per_epoch,
1,
2 * slots_per_epoch,
2 * slots_per_epoch as u64,
2 * slots_per_epoch,
1,
)
.await;
@@ -1961,31 +1955,45 @@ async fn prune_shared_skip_states_mid_epoch() {
#[tokio::test]
async fn prune_shared_skip_states_epoch_boundaries() {
let slots_per_epoch = E::slots_per_epoch();
pruning_test(slots_per_epoch - 1, 1, slots_per_epoch, 2, slots_per_epoch).await;
pruning_test(slots_per_epoch - 1, 2, slots_per_epoch, 1, slots_per_epoch).await;
pruning_test(
Box::pin(pruning_test(
slots_per_epoch - 1,
1,
slots_per_epoch,
2,
slots_per_epoch,
))
.await;
Box::pin(pruning_test(
slots_per_epoch - 1,
2,
slots_per_epoch,
1,
slots_per_epoch,
))
.await;
Box::pin(pruning_test(
2 * slots_per_epoch + slots_per_epoch / 2,
slots_per_epoch as u64 / 2,
slots_per_epoch / 2,
slots_per_epoch,
slots_per_epoch as u64 / 2 + 1,
slots_per_epoch / 2 + 1,
slots_per_epoch,
)
))
.await;
pruning_test(
Box::pin(pruning_test(
2 * slots_per_epoch + slots_per_epoch / 2,
slots_per_epoch as u64 / 2,
slots_per_epoch / 2,
slots_per_epoch,
slots_per_epoch as u64 / 2 + 1,
slots_per_epoch / 2 + 1,
slots_per_epoch,
)
))
.await;
pruning_test(
Box::pin(pruning_test(
2 * slots_per_epoch - 1,
slots_per_epoch as u64,
slots_per_epoch,
1,
0,
2 * slots_per_epoch,
)
))
.await;
}

@@ -2094,7 +2102,7 @@ async fn pruning_test(
);
check_chain_dump(
&harness,
(num_initial_blocks + num_canonical_middle_blocks + num_finalization_blocks + 1) as u64,
num_initial_blocks + num_canonical_middle_blocks + num_finalization_blocks + 1,
);

let all_canonical_states = harness
@@ -2613,8 +2621,7 @@ async fn process_blocks_and_attestations_for_unaligned_checkpoint() {
harness.advance_slot();
}
harness.extend_to_slot(finalizing_slot - 1).await;
harness
.add_block_at_slot(finalizing_slot, harness.get_current_state())
Box::pin(harness.add_block_at_slot(finalizing_slot, harness.get_current_state()))
.await
.unwrap();

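
Several of the `store_tests.rs` edits are direct Clippy suggestions: `.iter().find(|x| ...).is_some()` becomes `.iter().any(|x| ...)` (`clippy::search_is_some`), `assert_eq!(expr, false)` becomes `assert!(!expr)` (`clippy::bool_assert_comparison`), and casts like `slots_per_epoch as u64` are dropped where the value already has that type (`clippy::unnecessary_cast`). A compact illustration with made-up values:

```rust
fn main() {
    let mixes = [3u64, 5, 8];
    let genesis_value = 5u64;

    // `clippy::search_is_some`: `find(..).is_some()` is just `any(..)`.
    let present = mixes.iter().any(|x| *x == genesis_value);

    // `clippy::bool_assert_comparison`: use assert!/assert!(!..) rather than
    // assert_eq!(.., true/false).
    assert!(present);
    assert!(!mixes.iter().any(|x| *x == 42));

    // `clippy::unnecessary_cast`: `slots_per_epoch as u64` is redundant once
    // the value is already a u64.
    let slots_per_epoch: u64 = 32;
    let total = 2 * slots_per_epoch;
    assert_eq!(total, 64);
}
```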
Diffs for the remaining 25 changed files are not shown here.
