A0-4560: Changes in dag reconstruction to include parent round (#514)
* First working version

* fmt

* clippy

* Bump consensus crate version

* Updated readme

* Reverted changes to unit testing code

* Review

* fmt

* [WIP]

* Fix for byzantine tests

* fmt

* clippy

* Reverting changes to crypto/src/node.rs

* Fmt, again

* Now internal interfaces are unified

* fmt

* Include rounds in the combined hash

* fmt

* ControlHash keeps only info about parent rounds.

* Some more changes to naming.

* One more change to naming.

* In reconstruction tests, check that ReconstructedUnit parents have
correct rounds.

Added test for with_parents

Other minor review remarks

* Mark method which is used in tests only

* Unit test for reconstruction in which unit round is wrong

* Removed parent_round

* Moved ControlHash to separate module, added some basic tests

* Added tests in control hash module

* Added test with control hash calculation corner cases

* Moved validate to separate module

* clippy

* Review round 3 remarks
Marcin-Radecki authored Jan 17, 2025
1 parent 71f0314 commit 1a35839
Showing 17 changed files with 585 additions and 166 deletions.
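The heart of this commit is that a unit's control hash now commits to its parents' rounds as well as their hashes. Below is a minimal, self-contained sketch of that idea only; the types (`BTreeMap` instead of `NodeMap`, `u64` hashes, `DefaultHasher` instead of the crate's pluggable `Hasher`) and function names are simplifying assumptions, not the aleph-bft implementation.

```rust
use std::collections::{hash_map::DefaultHasher, BTreeMap};
use std::hash::{Hash, Hasher};

type NodeIndex = usize;
type Round = u16;
type UnitHash = u64;

// Old scheme (roughly): the combined hash covered only which parents exist and
// what their unit hashes are.
fn old_combined_hash(parents: &BTreeMap<NodeIndex, UnitHash>) -> u64 {
    let mut hasher = DefaultHasher::new();
    for (index, hash) in parents {
        (index, hash).hash(&mut hasher);
    }
    hasher.finish()
}

// New scheme (roughly): each parent's round is folded into the combined hash,
// so parent sets differing only in rounds no longer produce the same control hash.
fn new_combined_hash(parents: &BTreeMap<NodeIndex, (UnitHash, Round)>) -> u64 {
    let mut hasher = DefaultHasher::new();
    for (index, (hash, round)) in parents {
        (index, hash, round).hash(&mut hasher);
    }
    hasher.finish()
}

fn main() {
    let mut round_3_parent = BTreeMap::new();
    round_3_parent.insert(0, (0xdead_beef_u64, 3));
    let mut round_4_parent = BTreeMap::new();
    round_4_parent.insert(0, (0xdead_beef_u64, 4));

    // Same parent hash, different parent round: indistinguishable before, distinct now.
    let strip = |m: &BTreeMap<NodeIndex, (UnitHash, Round)>| {
        m.iter().map(|(k, (h, _))| (*k, *h)).collect::<BTreeMap<_, _>>()
    };
    assert_eq!(
        old_combined_hash(&strip(&round_3_parent)),
        old_combined_hash(&strip(&round_4_parent))
    );
    assert_ne!(new_combined_hash(&round_3_parent), new_combined_hash(&round_4_parent));
}
```

This is the property exercised by the `given_wrong_rounds_with_matching_hashes_when_calling_with_parents_then_err_is_returned` test added further down.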
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion README.md
@@ -60,7 +60,7 @@ More details are available [in the book][reference-link-implementation-details].
- Import AlephBFT in your crate
```toml
[dependencies]
aleph-bft = "^0.40"
aleph-bft = "^0.41"
```
- The main entry point is the `run_session` function, which returns a Future that runs the
consensus algorithm.
2 changes: 1 addition & 1 deletion consensus/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "aleph-bft"
version = "0.40.0"
version = "0.41.0"
edition = "2021"
authors = ["Cardinal Cryptography"]
categories = ["algorithms", "data-structures", "cryptography", "database"]
3 changes: 2 additions & 1 deletion consensus/src/alerts/handler.rs
@@ -277,12 +277,13 @@ impl<H: Hasher, D: Data, MK: MultiKeychain> Handler<H, D, MK> {

#[cfg(test)]
mod tests {
use crate::units::ControlHash;
use crate::{
alerts::{
handler::{Error, Handler, RmcResponse},
Alert, AlertMessage, ForkProof, ForkingNotification,
},
units::{ControlHash, FullUnit, PreUnit},
units::{FullUnit, PreUnit},
PartiallyMultisigned, Recipient, Round,
};
use aleph_bft_mock::{Data, Hasher64, Keychain, Signature};
5 changes: 1 addition & 4 deletions consensus/src/backup/loader.rs
@@ -111,11 +111,8 @@ impl<H: Hasher, D: Data, S: Signature, R: AsyncRead> BackupLoader<H, D, S, R> {
));
}

let parent_ids = &full_unit.as_pre_unit().control_hash().parents_mask;

// Sanity check: verify that all unit's parents appeared in backup before it.
for parent_id in parent_ids.elements() {
let parent = UnitCoord::new(coord.round() - 1, parent_id);
for parent in full_unit.as_pre_unit().control_hash().parents() {
if !already_loaded_coords.contains(&parent) {
return Err(LoaderError::InconsistentData(coord));
}
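The loader no longer rebuilds parent coordinates from a `parents_mask` and `coord.round() - 1`; it asks the control hash for them via `parents()`. The `ControlHash` module itself is not among the hunks shown here, so the sketch below is only a guess at what such an accessor might look like, with simplified stand-in types (a `Vec<Option<_>>` in place of `NodeMap`).

```rust
type Round = u16;
type NodeIndex = usize;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct UnitCoord {
    round: Round,
    creator: NodeIndex,
}

// Stand-in for the real ControlHash: one optional (hash, round) slot per committee member.
struct ControlHashSketch {
    parents: Vec<Option<([u8; 8], Round)>>,
}

impl ControlHashSketch {
    // Yield one coordinate per present parent, carrying the round stored for that
    // parent instead of assuming `child_round - 1`.
    fn parents(&self) -> impl Iterator<Item = UnitCoord> + '_ {
        self.parents.iter().enumerate().filter_map(|(creator, slot)| {
            slot.as_ref().map(|&(_hash, round)| UnitCoord { round, creator })
        })
    }
}

fn main() {
    let control_hash = ControlHashSketch {
        parents: vec![Some(([0; 8], 4)), None, Some(([1; 8], 3))],
    };
    let coords: Vec<_> = control_hash.parents().collect();
    assert_eq!(
        coords,
        vec![
            UnitCoord { round: 4, creator: 0 },
            UnitCoord { round: 3, creator: 2 },
        ]
    );
}
```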
21 changes: 15 additions & 6 deletions consensus/src/creation/creator.rs
@@ -86,13 +86,22 @@ impl<H: Hasher> Creator<H> {
pub fn create_unit(&self, round: Round) -> Result<PreUnit<H>> {
let parents = match round.checked_sub(1) {
None => NodeMap::with_size(self.n_members),
Some(prev_round) => self
.round_collectors
.get(usize::from(prev_round))
.ok_or(ConstraintError::NotEnoughParents)?
.prospective_parents(self.node_id)?
.clone(),
Some(prev_round) => {
let parents = self
.round_collectors
.get(usize::from(prev_round))
.ok_or(ConstraintError::NotEnoughParents)?
.prospective_parents(self.node_id)?
.clone();
let mut parents_with_rounds_and_hashes = NodeMap::with_size(parents.size());
for (parent_index, hash) in parents.into_iter() {
// we cannot have here round 0 units
parents_with_rounds_and_hashes.insert(parent_index, (hash, prev_round));
}
parents_with_rounds_and_hashes
}
};

Ok(PreUnit::new(
self.node_id,
round,
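The creator change amounts to tagging every prospective parent hash with the previous round before handing the map to `PreUnit::new`. A tiny standalone illustration of that transformation, with a hypothetical helper name and a plain `Vec<Option<_>>` standing in for `NodeMap`:

```rust
type Round = u16;

// Hypothetical helper mirroring the loop added to `create_unit`: each present
// parent hash is paired with the round it was created in (the previous round).
fn annotate_with_round<H: Clone>(parents: &[Option<H>], prev_round: Round) -> Vec<Option<(H, Round)>> {
    parents
        .iter()
        .map(|slot| slot.clone().map(|hash| (hash, prev_round)))
        .collect()
}

fn main() {
    // Three committee members; member 1 contributed no parent.
    let parents = vec![Some("hash_a"), None, Some("hash_c")];
    let annotated = annotate_with_round(&parents, 6);
    assert_eq!(annotated[0], Some(("hash_a", 6)));
    assert_eq!(annotated[1], None);
    assert_eq!(annotated[2], Some(("hash_c", 6)));
}
```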
10 changes: 6 additions & 4 deletions consensus/src/dag/reconstruction/dag.rs
@@ -121,14 +121,16 @@ mod test {
Hasher, NodeCount, NodeIndex, NodeMap,
};
use aleph_bft_mock::Hasher64;
use aleph_bft_types::Round;
use std::collections::HashSet;

fn full_parents_to_map(
parents: Vec<<Hasher64 as Hasher>::Hash>,
) -> NodeMap<<Hasher64 as Hasher>::Hash> {
parent_round: Round,
) -> NodeMap<(<Hasher64 as Hasher>::Hash, Round)> {
let mut result = NodeMap::with_size(NodeCount(parents.len()));
for (id, parent) in parents.into_iter().enumerate() {
result.insert(NodeIndex(id), parent);
result.insert(NodeIndex(id), (parent, parent_round));
}
result
}
@@ -150,8 +152,8 @@
.map(|unit| ReconstructedUnit::initial(unit.clone()))
.collect();
let mut result = vec![initial_units];
for (units, parents) in dag.iter().skip(1).zip(hashes) {
let parents = full_parents_to_map(parents);
for ((parent_round, units), parents) in dag.iter().skip(1).enumerate().zip(hashes) {
let parents = full_parents_to_map(parents, parent_round as Round);
let reconstructed = units
.iter()
.map(|unit| {
107 changes: 90 additions & 17 deletions consensus/src/dag/reconstruction/mod.rs
@@ -21,22 +21,11 @@ pub struct ReconstructedUnit<U: Unit> {

impl<U: Unit> ReconstructedUnit<U> {
/// Returns a reconstructed unit if the parents agree with the hash, errors out otherwise.
pub fn with_parents(unit: U, parents: NodeMap<HashFor<U>>) -> Result<Self, U> {
match unit.control_hash().combined_hash
== ControlHash::<U::Hasher>::combine_hashes(&parents)
pub fn with_parents(unit: U, parents: NodeMap<(HashFor<U>, Round)>) -> Result<Self, U> {
match unit.control_hash().combined_hash()
== ControlHash::<U::Hasher>::create_control_hash(&parents)
{
true => {
let unit_round = unit.round();
let mut parents_with_rounds = NodeMap::with_size(parents.size());
for (parent_index, hash) in parents.into_iter() {
// we cannot have here round 0 units
parents_with_rounds.insert(parent_index, (hash, unit_round.saturating_sub(1)));
}
Ok(ReconstructedUnit {
unit,
parents: parents_with_rounds,
})
}
true => Ok(ReconstructedUnit { unit, parents }),
false => Err(unit),
}
}
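With this change `with_parents` no longer derives parent rounds from the child's round; the caller supplies `(hash, round)` pairs and the function only checks that they reproduce the stored combined hash. A compressed, self-contained sketch of that contract, using plain types and a toy hash in place of `create_control_hash` (not the real implementation):

```rust
use std::collections::{hash_map::DefaultHasher, BTreeMap};
use std::hash::{Hash, Hasher};

type Round = u16;
type UnitHash = u64;
type Parents = BTreeMap<usize, (UnitHash, Round)>;

// Toy stand-in for ControlHash::create_control_hash.
fn create_control_hash(parents: &Parents) -> u64 {
    let mut hasher = DefaultHasher::new();
    for (index, (hash, round)) in parents {
        (index, hash, round).hash(&mut hasher);
    }
    hasher.finish()
}

struct Unit {
    combined_hash: u64, // as recorded in the unit's control hash at creation time
}

struct ReconstructedUnit {
    unit: Unit,
    parents: Parents,
}

// Mirrors the shape of ReconstructedUnit::with_parents: accept the parents map
// verbatim if it reproduces the stored combined hash, hand the unit back otherwise.
fn with_parents(unit: Unit, parents: Parents) -> Result<ReconstructedUnit, Unit> {
    if unit.combined_hash == create_control_hash(&parents) {
        Ok(ReconstructedUnit { unit, parents })
    } else {
        Err(unit)
    }
}

fn main() {
    let mut parents = Parents::new();
    parents.insert(0, (42, 5));

    // Correct rounds reconstruct fine; a wrong parent round makes the check fail.
    let unit = Unit { combined_hash: create_control_hash(&parents) };
    assert!(with_parents(unit, parents.clone()).is_ok());

    let unit = Unit { combined_hash: create_control_hash(&parents) };
    parents.insert(0, (42, 6));
    assert!(with_parents(unit, parents).is_err());
}
```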
@@ -249,13 +238,14 @@

#[cfg(test)]
mod test {
use std::collections::HashMap;

use crate::{
dag::reconstruction::{ReconstructedUnit, Reconstruction, ReconstructionResult, Request},
units::{random_full_parent_units_up_to, Unit, UnitCoord, UnitWithParents},
NodeCount, NodeIndex,
};
use aleph_bft_types::{NodeMap, Round};
use rand::Rng;
use std::collections::HashMap;

#[test]
fn reconstructs_initial_units() {
@@ -415,4 +405,87 @@
unit_hash
)
}
#[test]
fn given_wrong_rounds_with_matching_hashes_when_calling_with_parents_then_err_is_returned() {
const MAX_ROUND: Round = 7;

let mut rng = rand::thread_rng();
let node_count = NodeCount(7);
let mut reconstruction = Reconstruction::new();

let dag = random_full_parent_units_up_to(MAX_ROUND, node_count, 43);
for units in &dag {
for unit in units {
let round = unit.round();
let ReconstructionResult { units, requests } =
reconstruction.add_unit(unit.clone());
assert!(requests.is_empty());
assert_eq!(units.len(), 1);
match round {
0 => {
let mut parents_map: NodeMap<(_, _)> = NodeMap::with_size(node_count);
assert!(
ReconstructedUnit::with_parents(unit.clone(), parents_map.clone())
.is_ok(),
"Initial units should not have parents!"
);

let random_parent_index = rng.gen::<u64>() % node_count.0 as u64;
parents_map.insert(
NodeIndex(random_parent_index as usize),
(unit.hash(), 2 as Round),
);
assert_eq!(
ReconstructedUnit::with_parents(unit.clone(), parents_map),
Err(unit.clone()),
"Initial unit reconstructed with a non-empty parent!"
);
}
round => {
let mut parents_map: NodeMap<(_, _)> = NodeMap::with_size(node_count);
assert_eq!(
ReconstructedUnit::with_parents(unit.clone(), parents_map.clone()),
Err(unit.clone()),
"Non-initial rounds should have parents!"
);

let random_parent_index = rng.gen::<u64>() % node_count.0 as u64;
parents_map.insert(
NodeIndex(random_parent_index as usize),
(unit.hash(), round as Round),
);
assert_eq!(
ReconstructedUnit::with_parents(unit.clone(), parents_map.clone()),
Err(unit.clone()),
"Unit reconstructed with missing parents and wrong parent rounds!"
);

let this_unit_control_hash = unit.control_hash();
let mut parents: NodeMap<(_, _)> =
NodeMap::with_size(this_unit_control_hash.n_members());
for (node_index, &(hash, round)) in units[0].parents.iter() {
parents.insert(node_index, (hash, round));
}
assert!(
ReconstructedUnit::with_parents(unit.clone(), parents.clone()).is_ok(),
"Reconstructed unit control hash does not match unit's control hash!"
);
let random_parent_index = rng.gen::<u64>() % node_count.0 as u64;
let random_parent_index = NodeIndex(random_parent_index as usize);
let &(parent_hash, _) = parents.get(random_parent_index).unwrap();
let wrong_round = match round {
1 => MAX_ROUND,
_ => 0,
};
parents_map.insert(random_parent_index, (parent_hash, wrong_round));
assert_eq!(
ReconstructedUnit::with_parents(unit.clone(), parents_map.clone()),
Err(unit.clone()),
"Unit reconstructed with one parent having wrong round!"
);
}
}
}
}
}
}
27 changes: 15 additions & 12 deletions consensus/src/dag/reconstruction/parents.rs
@@ -3,13 +3,14 @@ use crate::{
units::{ControlHash, HashFor, Unit, UnitCoord},
NodeIndex, NodeMap,
};
use aleph_bft_types::Round;
use std::collections::{hash_map::Entry, HashMap};

/// A unit in the process of reconstructing its parents.
#[derive(Debug, PartialEq, Eq, Clone)]
enum ReconstructingUnit<U: Unit> {
/// We are trying to optimistically reconstruct the unit from potential parents we get.
Reconstructing(U, NodeMap<HashFor<U>>),
Reconstructing(U, NodeMap<(HashFor<U>, Round)>),
/// We are waiting for receiving an explicit list of unit parents.
WaitingForParents(U),
}
@@ -29,11 +30,7 @@ impl<U: Unit> ReconstructingUnit<U> {
round != 0,
"We should never try to reconstruct parents of a unit of round 0."
);
let coords = unit
.control_hash()
.parents()
.map(|parent_id| UnitCoord::new(round - 1, parent_id))
.collect();
let coords = unit.control_hash().parents().collect();
(
ReconstructingUnit::Reconstructing(unit, NodeMap::with_size(n_members)),
coords,
@@ -44,14 +41,15 @@
self,
parent_id: NodeIndex,
parent_hash: HashFor<U>,
parent_round: Round,
) -> SingleParentReconstructionResult<U> {
use ReconstructingUnit::*;
use SingleParentReconstructionResult::*;
match self {
Reconstructing(unit, mut parents) => {
parents.insert(parent_id, parent_hash);
parents.insert(parent_id, (parent_hash, parent_round));
match parents.item_count() == unit.control_hash().parents().count() {
// We have enought parents, just need to check the control hash matches.
// We have enough parents, just need to check the control hash matches.
true => match ReconstructedUnit::with_parents(unit, parents) {
Ok(unit) => Reconstructed(unit),
// If the control hash doesn't match we want to get an explicit list of parents.
@@ -85,9 +83,11 @@ impl<U: Unit> ReconstructingUnit<U> {
return Err(self);
}
let mut parents_map = NodeMap::with_size(control_hash.n_members());
for parent_id in control_hash.parents() {
match parents.get(&UnitCoord::new(self.as_unit().round() - 1, parent_id)) {
Some(parent_hash) => parents_map.insert(parent_id, *parent_hash),
for parent_coord in control_hash.parents() {
match parents.get(&parent_coord) {
Some(parent_hash) => {
parents_map.insert(parent_coord.creator(), (*parent_hash, parent_coord.round()))
}
// The parents were inconsistent with the control hash.
None => return Err(self),
}
@@ -118,10 +118,11 @@ impl<U: Unit> Reconstruction<U> {
child_hash: HashFor<U>,
parent_id: NodeIndex,
parent_hash: HashFor<U>,
parent_round: Round,
) -> ReconstructionResult<U> {
use SingleParentReconstructionResult::*;
match self.reconstructing_units.remove(&child_hash) {
Some(child) => match child.reconstruct_parent(parent_id, parent_hash) {
Some(child) => match child.reconstruct_parent(parent_id, parent_hash, parent_round) {
Reconstructed(unit) => ReconstructionResult::reconstructed(unit),
InProgress(unit) => {
self.reconstructing_units.insert(child_hash, unit);
Expand Down Expand Up @@ -161,6 +162,7 @@ impl<U: Unit> Reconstruction<U> {
child_hash,
unit_coord.creator(),
unit_hash,
unit_coord.round(),
));
}
}
@@ -178,6 +180,7 @@
unit_hash,
parent_coord.creator(),
*parent_hash,
parent_coord.round(),
)),
None => {
self.waiting_for_coord
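In the explicit-parents path above, each parent's `(hash, round)` entry is now taken from its `UnitCoord` rather than computed as `round - 1`. A simplified, hypothetical version of that resolution step (the helper name and `HashMap` lookup are assumptions, not the crate's API):

```rust
use std::collections::HashMap;

type Round = u16;
type NodeIndex = usize;
type UnitHash = u64;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct UnitCoord {
    round: Round,
    creator: NodeIndex,
}

// Build the (hash, round) parents map from the coords listed in the control hash and a
// coord -> hash lookup of already received units; fail if any coord is unknown, which
// corresponds to the "parents inconsistent with the control hash" early return above.
fn resolve_parents(
    parent_coords: &[UnitCoord],
    known_units: &HashMap<UnitCoord, UnitHash>,
) -> Option<HashMap<NodeIndex, (UnitHash, Round)>> {
    let mut parents = HashMap::new();
    for coord in parent_coords {
        let hash = *known_units.get(coord)?;
        parents.insert(coord.creator, (hash, coord.round));
    }
    Some(parents)
}

fn main() {
    let coord = UnitCoord { round: 3, creator: 1 };
    let known_units = HashMap::from([(coord, 0xfeed_u64)]);
    assert_eq!(
        resolve_parents(&[coord], &known_units),
        Some(HashMap::from([(1, (0xfeed_u64, 3))]))
    );
    // An unknown coordinate (here, the wrong round) makes resolution fail.
    assert_eq!(
        resolve_parents(&[UnitCoord { round: 4, creator: 1 }], &known_units),
        None
    );
}
```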
8 changes: 3 additions & 5 deletions consensus/src/network/mod.rs
@@ -46,20 +46,18 @@ mod tests {
member::UnitMessage,
network::NetworkDataInner::{Alert, Units},
units::{ControlHash, FullUnit, PreUnit, UncheckedSignedUnit, UnitCoord},
Hasher, NodeIndex, NodeSubset, Round, Signed,
Hasher, NodeIndex, Round, Signed,
};
use aleph_bft_mock::{Data, Hasher64, Keychain, PartialMultisignature, Signature};
use aleph_bft_types::NodeMap;
use codec::{Decode, Encode};

fn test_unchecked_unit(
creator: NodeIndex,
round: Round,
data: Data,
) -> UncheckedSignedUnit<Hasher64, Data, Signature> {
let control_hash = ControlHash {
parents_mask: NodeSubset::with_size(7.into()),
combined_hash: 0.using_encoded(Hasher64::hash),
};
let control_hash = ControlHash::new(&NodeMap::with_size(7.into()));
let pu = PreUnit::new(creator, round, control_hash);
let signable = FullUnit::new(pu, Some(data), 0);
Signed::sign(signable, &Keychain::new(0.into(), creator)).into_unchecked()
8 changes: 7 additions & 1 deletion consensus/src/testing/byzantine.rs
@@ -107,7 +107,13 @@ impl<'a> MaliciousMember<'a> {
fn create_if_possible(&mut self, round: Round) -> bool {
if let Some(parents) = self.pick_parents(round) {
debug!(target: "malicious-member", "Creating a legit unit for round {}.", round);
let control_hash = ControlHash::<Hasher64>::new(&parents);
let mut node_with_parents = NodeMap::with_size(parents.size());
if round > 0 {
for (node_index, &hash) in parents.iter() {
node_with_parents.insert(node_index, (hash, round - 1));
}
}
let control_hash = ControlHash::<Hasher64>::new(&node_with_parents);
let new_preunit = PreUnit::<Hasher64>::new(self.node_ix, round, control_hash);
if round != self.forking_round {
let full_unit = FullUnit::new(new_preunit, Some(0), self.session_id);
7 changes: 3 additions & 4 deletions consensus/src/testing/crash_recovery.rs
@@ -132,11 +132,10 @@ fn verify_backup(buf: &mut &[u8]) -> HashSet<UnitCoord> {
let unit = <UncheckedSignedUnit<Hasher64, Data, Signature>>::decode(buf).unwrap();
let full_unit = unit.as_signable();
let coord = full_unit.coord();
let parent_ids = &full_unit.as_pre_unit().control_hash().parents_mask;
let control_hash = &full_unit.as_pre_unit().control_hash();

for parent_id in parent_ids.elements() {
let parent = UnitCoord::new(coord.round() - 1, parent_id);
assert!(already_saved.contains(&parent));
for parent_coord in control_hash.parents() {
assert!(already_saved.contains(&parent_coord));
}

already_saved.insert(coord);