Commit: Pr feedback
AshwinSekar committed Aug 30, 2023
1 parent 4059787 commit 8ee8631
Showing 5 changed files with 55 additions and 53 deletions.
core/src/window_service.rs: 26 changes (11 additions, 15 deletions)

@@ -150,22 +150,18 @@ fn run_check_duplicate(
                 // Unlike the other cases we have to wait until here to decide to handle the duplicate and store
                 // in blockstore. This is because the duplicate could have been part of the same insert batch,
                 // so we wait until the batch has been written.
-                if !blockstore.has_duplicate_shreds_in_slot(shred_slot) {
-                    if let Some(existing_shred_payload) = blockstore.is_shred_duplicate(&shred) {
-                        blockstore.store_duplicate_slot(
-                            shred_slot,
-                            existing_shred_payload.clone(),
-                            shred.clone().into_payload(),
-                        )?;
-                        (shred, existing_shred_payload)
-                    } else {
-                        // Shred is not duplicate
-                        return Ok(());
-                    }
-                } else {
-                    // Shred has already been handled
-                    return Ok(());
+                if blockstore.has_duplicate_shreds_in_slot(shred_slot) {
+                    return Ok(()); // A duplicate is already recorded
                 }
+                let Some(existing_shred_payload) = blockstore.is_shred_duplicate(&shred) else {
+                    return Ok(()); // Not a duplicate
+                };
+                blockstore.store_duplicate_slot(
+                    shred_slot,
+                    existing_shred_payload.clone(),
+                    shred.clone().into_payload(),
+                )?;
+                (shred, existing_shred_payload)
             }
         };
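The refactor above flattens a nested if/else into guard clauses: each non-duplicate case returns early, and the remaining straight-line code stores the proof. Below is a minimal sketch of the same shape against a simplified in-memory store; the Store type, its methods, and the Shred alias are illustrative stand-ins, not the real Blockstore API.

use std::collections::{HashMap, HashSet};

type Slot = u64;
type Shred = Vec<u8>;

#[derive(Default)]
struct Store {
    duplicate_slots: HashSet<Slot>,
    proofs: HashMap<Slot, (Shred, Shred)>,
    existing: HashMap<Slot, Shred>,
}

impl Store {
    fn has_duplicate_proof(&self, slot: Slot) -> bool {
        self.duplicate_slots.contains(&slot)
    }
    // Pretend any previously stored shred for the slot that differs is a conflict.
    fn conflicting_shred(&self, slot: Slot, shred: &Shred) -> Option<Shred> {
        self.existing.get(&slot).filter(|s| *s != shred).cloned()
    }
    fn store_duplicate_proof(&mut self, slot: Slot, a: Shred, b: Shred) {
        self.duplicate_slots.insert(slot);
        self.proofs.insert(slot, (a, b));
    }
}

// Guard clauses replace the nested branches: each early return handles a
// non-duplicate case, so the storing path reads top to bottom.
fn check_duplicate(store: &mut Store, slot: Slot, shred: Shred) -> Result<(), String> {
    if store.has_duplicate_proof(slot) {
        return Ok(()); // a proof is already recorded for this slot
    }
    let Some(conflict) = store.conflicting_shred(slot, &shred) else {
        return Ok(()); // not a duplicate
    };
    store.store_duplicate_proof(slot, conflict, shred);
    Ok(())
}

fn main() {
    let mut store = Store::default();
    store.existing.insert(7, vec![1, 2, 3]);
    check_duplicate(&mut store, 7, vec![9, 9, 9]).unwrap();
    assert!(store.has_duplicate_proof(7));
    assert!(store.proofs.contains_key(&7));
}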

gossip/src/cluster_info.rs: 2 changes (1 addition, 1 deletion)

@@ -271,7 +271,7 @@ pub fn make_accounts_hashes_message(
 pub(crate) type Ping = ping_pong::Ping<[u8; GOSSIP_PING_TOKEN_SIZE]>;
 
 // TODO These messages should go through the gpu pipeline for spam filtering
-#[frozen_abi(digest = "6T2sn92PMrTijsgncH3bBZL4K5GUowb442cCw4y4DuwV")]
+#[frozen_abi(digest = "EnbW8mYTsPMndq9NkHLTkHJgduXvWSfSD6bBdmqQ8TiF")]
 #[derive(Serialize, Deserialize, Debug, AbiEnumVisitor, AbiExample)]
 #[allow(clippy::large_enum_variant)]
 pub(crate) enum Protocol {
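The hard-coded digest has to change presumably because the gossip Protocol enum transitively contains DuplicateShred, and the frozen-abi digest appears to track the declared shape of a type, field names included; a pure rename of shred_index to _unused is then enough to invalidate the old value. The toy below only illustrates that kind of sensitivity and is not Solana's actual AbiDigester machinery.

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hash a type's declared (field name, field type) pairs.
fn toy_abi_digest(fields: &[(&str, &str)]) -> u64 {
    let mut hasher = DefaultHasher::new();
    for (name, ty) in fields {
        name.hash(&mut hasher);
        ty.hash(&mut hasher);
    }
    hasher.finish()
}

fn main() {
    let before = toy_abi_digest(&[("slot", "Slot"), ("shred_index", "u32"), ("shred_type", "ShredType")]);
    let after = toy_abi_digest(&[("slot", "Slot"), ("_unused", "u32"), ("shred_type", "ShredType")]);
    // The declared shape changed, so the digest changes, even though the wire
    // encoding (see the next file) is unaffected by a field rename.
    assert_ne!(before, after);
}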
gossip/src/duplicate_shred.rs: 63 changes (30 additions, 33 deletions)

@@ -29,10 +29,8 @@ pub struct DuplicateShred {
     pub(crate) from: Pubkey,
     pub(crate) wallclock: u64,
     pub(crate) slot: Slot,
 
-    #[allow(dead_code)]
-    shred_index: u32,
-
+    _unused: u32,
     shred_type: ShredType,
     // Serialized DuplicateSlotProof split into chunks.
     num_chunks: u8,
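The dead field is renamed to _unused rather than deleted, presumably to leave the serialized layout of DuplicateShred untouched: bincode writes struct fields positionally, without names, so a placeholder of the same width keeps the gossip wire format (and the serialized-size test further down) stable. A small check of that property, assuming serde with its derive feature and the bincode 1.x crate; the structs below are stand-ins, not the real DuplicateShred.

use serde::Serialize;

#[derive(Serialize)]
struct Before {
    slot: u64,
    shred_index: u32,
    num_chunks: u8,
}

#[derive(Serialize)]
struct After {
    slot: u64,
    _unused: u32,
    num_chunks: u8,
}

fn main() {
    let old = bincode::serialize(&Before { slot: 5, shred_index: 42, num_chunks: 3 }).unwrap();
    let new = bincode::serialize(&After { slot: 5, _unused: 42, num_chunks: 3 }).unwrap();
    // Identical bytes on the wire; only the Rust-side field name changed.
    assert_eq!(old, new);
}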
@@ -113,43 +111,42 @@ where
         return Err(Error::ShredTypeMismatch);
     }
 
-    if shred1.index() == shred2.index() && shred1.payload() == shred2.payload() {
-        return Err(Error::InvalidDuplicateShreds);
+    if let Some(leader_schedule) = leader_schedule {
+        let slot_leader =
+            leader_schedule(shred1.slot()).ok_or(Error::UnknownSlotLeader(shred1.slot()))?;
+        if !shred1.verify(&slot_leader) || !shred2.verify(&slot_leader) {
+            return Err(Error::InvalidSignature);
+        }
     }
 
-    if shred1.index() != shred2.index() && shred1.shred_type() == ShredType::Data {
-        match (
-            shred1.index(),
-            shred1.last_in_slot(),
-            shred2.index(),
-            shred2.last_in_slot(),
-        ) {
-            (ix1, true, ix2, false) if ix1 > ix2 => return Err(Error::InvalidLastIndexConflict),
-            (ix1, false, ix2, true) if ix1 < ix2 => return Err(Error::InvalidLastIndexConflict),
-            (_, false, _, false) => return Err(Error::InvalidLastIndexConflict),
-            _ => (),
+    if shred1.index() == shred2.index() {
+        if shred1.payload() != shred2.payload() {
+            return Ok(());
         }
+        return Err(Error::InvalidDuplicateShreds);
     }
 
-    if shred1.index() != shred2.index() && shred1.shred_type() == ShredType::Code {
-        if shred1.fec_set_index() != shred2.fec_set_index() {
-            return Err(Error::InvalidErasureMetaConflict);
+    if shred1.shred_type() == ShredType::Data {
+        if shred1.last_in_slot() && shred2.index() > shred1.index() {
+            return Ok(());
         }
-        let erasure_meta =
-            ErasureMeta::from_coding_shred(shred1).expect("Shred1 should be a coding shred");
-        if erasure_meta.check_coding_shred(shred2) {
-            return Err(Error::InvalidErasureMetaConflict);
+        if shred2.last_in_slot() && shred1.index() > shred2.index() {
+            return Ok(());
         }
+        return Err(Error::InvalidLastIndexConflict);
     }
 
-    if let Some(leader_schedule) = leader_schedule {
-        let slot_leader =
-            leader_schedule(shred1.slot()).ok_or(Error::UnknownSlotLeader(shred1.slot()))?;
-        if !shred1.verify(&slot_leader) || !shred2.verify(&slot_leader) {
-            return Err(Error::InvalidSignature);
-        }
+    // This mirrors the current logic in blockstore to detect coding shreds with conflicting
+    // erasure sets. However this is not technically exhaustive, as any 2 shreds with
+    // different but overlapping erasure sets can be considered duplicate and need not be
+    // a part of the same fec set. Further work to enhance detection is planned in
+    // https://github.com/solana-labs/solana/issues/33037
+    if shred1.fec_set_index() == shred2.fec_set_index()
+        && !ErasureMeta::check_erasure_consistency(shred1, shred2)
+    {
+        return Ok(());
     }
-    Ok(())
+    Err(Error::InvalidErasureMetaConflict)
 }
 
 pub(crate) fn from_shred<F>(
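Taken together, the rewritten checks above accept a pair of shreds as a duplicate proof only when they come from the same slot and shred type, verify against the slot leader, and then satisfy one of three conditions: same index with differing payloads, a last-in-slot data shred contradicted by a higher index, or two coding shreds that claim the same FEC set with inconsistent erasure metadata. The sketch below condenses that decision ladder over a simplified shred type; MiniShred and its erasure_config field are stand-ins for the real Shred and ErasureMeta types, and leader-signature verification is left out.

enum Kind {
    Data { last_in_slot: bool },
    Code { fec_set_index: u32, erasure_config: u32 },
}

struct MiniShred {
    slot: u64,
    index: u32,
    kind: Kind,
    payload: Vec<u8>,
}

#[derive(Debug, PartialEq)]
enum Reject {
    SlotMismatch,
    TypeMismatch,
    NotDuplicate,
    InvalidLastIndexConflict,
    InvalidErasureMetaConflict,
}

fn is_valid_duplicate_proof(s1: &MiniShred, s2: &MiniShred) -> Result<(), Reject> {
    if s1.slot != s2.slot {
        return Err(Reject::SlotMismatch);
    }
    if std::mem::discriminant(&s1.kind) != std::mem::discriminant(&s2.kind) {
        return Err(Reject::TypeMismatch);
    }
    // Same index: the payloads must differ to prove equivocation.
    if s1.index == s2.index {
        return if s1.payload != s2.payload {
            Ok(())
        } else {
            Err(Reject::NotDuplicate)
        };
    }
    match (&s1.kind, &s2.kind) {
        // Data shreds: a shred marked "last in slot" conflicts with any shred
        // at a higher index in the same slot.
        (Kind::Data { last_in_slot: l1 }, Kind::Data { last_in_slot: l2 }) => {
            if (*l1 && s2.index > s1.index) || (*l2 && s1.index > s2.index) {
                Ok(())
            } else {
                Err(Reject::InvalidLastIndexConflict)
            }
        }
        // Coding shreds: same FEC set but inconsistent erasure configuration.
        (
            Kind::Code { fec_set_index: f1, erasure_config: c1 },
            Kind::Code { fec_set_index: f2, erasure_config: c2 },
        ) => {
            if f1 == f2 && c1 != c2 {
                Ok(())
            } else {
                Err(Reject::InvalidErasureMetaConflict)
            }
        }
        _ => Err(Reject::TypeMismatch),
    }
}

fn main() {
    let last = MiniShred { slot: 3, index: 10, kind: Kind::Data { last_in_slot: true }, payload: vec![1] };
    let later = MiniShred { slot: 3, index: 12, kind: Kind::Data { last_in_slot: false }, payload: vec![2] };
    assert_eq!(is_valid_duplicate_proof(&last, &later), Ok(()));

    let c1 = MiniShred { slot: 3, index: 20, kind: Kind::Code { fec_set_index: 5, erasure_config: 32 }, payload: vec![3] };
    let c2 = MiniShred { slot: 3, index: 21, kind: Kind::Code { fec_set_index: 5, erasure_config: 48 }, payload: vec![4] };
    assert_eq!(is_valid_duplicate_proof(&c1, &c2), Ok(()));
}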
@@ -192,7 +189,7 @@
             num_chunks,
             chunk_index: i as u8,
             chunk,
-            shred_index: 0,
+            _unused: 0,
         });
     Ok(chunks)
 }
@@ -308,7 +305,7 @@ pub(crate) mod tests {
             num_chunks: u8::MAX,
             chunk_index: u8::MAX,
             chunk: Vec::default(),
-            shred_index: 0,
+            _unused: 0,
         };
         assert_eq!(
             bincode::serialize(&dup).unwrap().len(),
@@ -445,7 +442,7 @@ pub(crate) mod tests {
             num_chunks,
             chunk_index: i as u8,
             chunk,
-            shred_index: 0,
+            _unused: 0,
         });
     Ok(chunks)
 }
ledger/src/blockstore.rs: 4 changes (2 additions, 2 deletions)

@@ -6619,7 +6619,7 @@ pub mod tests {
         ));
         assert!(blockstore.has_duplicate_shreds_in_slot(0));
         assert_eq!(duplicate_shreds.len(), 1);
-        matches!(
+        assert_matches!(
             duplicate_shreds[0],
             PossibleDuplicateShred::LastIndexConflict(_, _)
         );
@@ -6650,7 +6650,7 @@
         ));
 
         assert_eq!(duplicate_shreds.len(), 1);
-        matches!(
+        assert_matches!(
            duplicate_shreds[0],
            PossibleDuplicateShred::LastIndexConflict(_, _)
        );
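The swap matters because matches! only evaluates to a bool; as a bare statement the result is discarded, so the old assertions could never fail. assert_matches! panics when the pattern does not match. A minimal sketch, assuming the assert_matches crate as a dev-dependency; the enum is a stand-in for PossibleDuplicateShred.

use assert_matches::assert_matches;

#[derive(Debug)]
enum PossibleDuplicate {
    Exists(u64),
    LastIndexConflict(u64, u64),
}

#[test]
fn bare_matches_is_not_an_assertion() {
    let got = PossibleDuplicate::Exists(3);
    // Old style: `matches!` evaluates to false here, but the value is dropped,
    // so the test still passes even though the variant is wrong.
    matches!(got, PossibleDuplicate::LastIndexConflict(_, _));

    // Correct usage on a value that really is the expected variant.
    let conflict = PossibleDuplicate::LastIndexConflict(1, 2);
    assert_matches!(conflict, PossibleDuplicate::LastIndexConflict(_, _));
}

#[test]
#[should_panic]
fn assert_matches_actually_fails() {
    let got = PossibleDuplicate::Exists(3);
    // New style: this panics with a descriptive message, so the wrong variant
    // is caught instead of silently ignored.
    assert_matches!(got, PossibleDuplicate::LastIndexConflict(_, _));
}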
ledger/src/blockstore_meta.rs: 13 changes (11 additions, 2 deletions)

@@ -322,7 +322,7 @@ impl SlotMeta {
 }
 
 impl ErasureMeta {
-    pub fn from_coding_shred(shred: &Shred) -> Option<Self> {
+    pub(crate) fn from_coding_shred(shred: &Shred) -> Option<Self> {
         match shred.shred_type() {
             ShredType::Data => None,
             ShredType::Code => {
@@ -344,14 +344,23 @@
 
     // Returns true if the erasure fields on the shred
     // are consistent with the erasure-meta.
-    pub fn check_coding_shred(&self, shred: &Shred) -> bool {
+    pub(crate) fn check_coding_shred(&self, shred: &Shred) -> bool {
         let Some(mut other) = Self::from_coding_shred(shred) else {
             return false;
         };
         other.__unused_size = self.__unused_size;
         self == &other
     }
 
+    /// Returns true if both shreds are coding shreds and have a
+    /// consistent erasure config
+    pub fn check_erasure_consistency(shred1: &Shred, shred2: &Shred) -> bool {
+        let Some(coding_shred) = Self::from_coding_shred(shred1) else {
+            return false;
+        };
+        coding_shred.check_coding_shred(shred2)
+    }
+
     pub(crate) fn config(&self) -> ErasureConfig {
         self.config
     }
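The new check_erasure_consistency wrapper stays pub while the two helpers it composes become pub(crate), presumably so the gossip-side proof check in duplicate_shred.rs goes through a single entry point. In spirit it asks: do two coding shreds that claim the same FEC set also agree on the erasure configuration they advertise? A simplified stand-in for that comparison; the field names here are illustrative rather than the real ErasureMeta layout.

#[derive(Clone, Copy, PartialEq, Eq)]
struct ToyErasureConfig {
    num_data: usize,
    num_coding: usize,
}

#[derive(Clone, Copy)]
struct ToyCodingShred {
    fec_set_index: u32,
    first_coding_index: u32,
    config: ToyErasureConfig,
}

// Mirrors the shape of ErasureMeta::check_erasure_consistency: derive the
// erasure metadata implied by one shred and check the other against it.
fn toy_check_erasure_consistency(s1: &ToyCodingShred, s2: &ToyCodingShred) -> bool {
    s1.fec_set_index == s2.fec_set_index
        && s1.first_coding_index == s2.first_coding_index
        && s1.config == s2.config
}

fn main() {
    let a = ToyCodingShred {
        fec_set_index: 10,
        first_coding_index: 10,
        config: ToyErasureConfig { num_data: 32, num_coding: 32 },
    };
    // Same FEC set, different advertised config: inconsistent, which is exactly
    // what a coding-shred duplicate proof needs to demonstrate.
    let b = ToyCodingShred {
        config: ToyErasureConfig { num_data: 32, num_coding: 48 },
        ..a
    };
    assert!(toy_check_erasure_consistency(&a, &a));
    assert!(!toy_check_erasure_consistency(&a, &b));
}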
