Global consensus threshold and other crypto improvements
timorleph committed Mar 11, 2024
1 parent ad8a26c commit 107d1a3
Showing 19 changed files with 195 additions and 231 deletions.
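The central change replaces the hand-rolled `(n * 2) / 3 + NodeCount(1)` threshold arithmetic, previously repeated in the unit creator and the election logic, with a single `consensus_threshold()` method on `NodeCount`. The method's definition sits in one of the files not rendered on this page; the following is only a minimal sketch of what it presumably looks like, inferred from the expressions it replaces (location and exact wording are assumptions):

impl NodeCount {
    // Smallest number of nodes guaranteed to exceed two thirds of the
    // committee: floor(2n/3) + 1. For n = 3f + 1 this is the classic 2f + 1
    // Byzantine quorum. Sketch only -- not copied from the commit.
    pub fn consensus_threshold(&self) -> NodeCount {
        NodeCount(self.0 * 2 / 3 + 1)
    }
}

With such a helper in place, the tests below can express "just under the threshold" directly as `n_members.consensus_threshold() - NodeCount(1)` instead of repeating the formula.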
10 changes: 5 additions & 5 deletions Cargo.lock

Some generated files are not rendered by default.

6 changes: 3 additions & 3 deletions consensus/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "aleph-bft"
version = "0.34.1"
version = "0.35.0"
edition = "2021"
authors = ["Cardinal Cryptography"]
categories = ["algorithms", "data-structures", "cryptography", "database"]
@@ -13,8 +13,8 @@ readme = "../README.md"
description = "AlephBFT is an asynchronous and Byzantine fault tolerant consensus protocol aimed at ordering arbitrary messages (transactions). It has been designed to continuously operate even in the harshest conditions: with no bounds on message-delivery delays and in the presence of malicious actors. This makes it an excellent fit for blockchain-related applications."

[dependencies]
-aleph-bft-rmc = { path = "../rmc", version = "0.11" }
-aleph-bft-types = { path = "../types", version = "0.11" }
+aleph-bft-rmc = { path = "../rmc", version = "0.12" }
+aleph-bft-types = { path = "../types", version = "0.12" }
anyhow = "1.0"
async-trait = "0.1"
codec = { package = "parity-scale-codec", version = "3.0", default-features = false, features = ["derive"] }
4 changes: 2 additions & 2 deletions consensus/src/backup/loader.rs
@@ -246,7 +246,7 @@ mod tests {
use crate::{
backup::BackupLoader,
units::{
-create_units, creator_set, preunit_to_unchecked_signed_unit, preunit_to_unit,
+create_preunits, creator_set, preunit_to_unchecked_signed_unit, preunit_to_unit,
UncheckedSignedUnit as GenericUncheckedSignedUnit,
},
NodeCount, NodeIndex, Round, SessionId,
@@ -273,7 +273,7 @@
let mut units_per_round = Vec::with_capacity(rounds);

for round in 0..rounds {
-let pre_units = create_units(creators.iter(), round as Round);
+let pre_units = create_preunits(creators.iter(), round as Round);

let units: Vec<_> = pre_units
.iter()
24 changes: 11 additions & 13 deletions consensus/src/creation/creator.rs
@@ -41,9 +41,7 @@ impl<H: Hasher> UnitsCollector<H> {
&self,
node_id: NodeIndex,
) -> Result<&NodeMap<H::Hash>, ConstraintError> {
-let threshold = (self.candidates.size() * 2) / 3 + NodeCount(1);
-
-if self.n_candidates < threshold {
+if self.n_candidates < self.candidates.size().consensus_threshold() {
return Err(ConstraintError::NotEnoughParents);
}
if self.candidates.get(node_id).is_none() {
@@ -124,7 +122,7 @@ mod tests {
use super::{Creator as GenericCreator, UnitsCollector};
use crate::{
creation::creator::ConstraintError,
-units::{create_units, creator_set, preunit_to_unit},
+units::{create_preunits, creator_set, preunit_to_unit},
NodeCount, NodeIndex,
};
use aleph_bft_mock::Hasher64;
@@ -149,7 +147,7 @@
fn creates_unit_with_all_parents() {
let n_members = NodeCount(7);
let mut creators = creator_set(n_members);
-let new_units = create_units(creators.iter(), 0);
+let new_units = create_preunits(creators.iter(), 0);
let new_units: Vec<_> = new_units
.into_iter()
.map(|(pu, _)| preunit_to_unit(pu, 0))
@@ -169,7 +167,7 @@
fn create_unit_with_minimal_parents(n_members: NodeCount) {
let n_parents = (n_members.0 * 2) / 3 + 1;
let mut creators = creator_set(n_members);
-let new_units = create_units(creators.iter().take(n_parents), 0);
+let new_units = create_preunits(creators.iter().take(n_parents), 0);
let new_units: Vec<_> = new_units
.into_iter()
.map(|(pu, _)| preunit_to_unit(pu, 0))
@@ -207,9 +205,9 @@
}

fn dont_create_unit_below_parents_threshold(n_members: NodeCount) {
-let n_parents = (n_members.0 * 2) / 3;
+let n_parents = n_members.consensus_threshold() - NodeCount(1);
let mut creators = creator_set(n_members);
-let new_units = create_units(creators.iter().take(n_parents), 0);
+let new_units = create_preunits(creators.iter().take(n_parents.0), 0);
let new_units: Vec<_> = new_units
.into_iter()
.map(|(pu, _)| preunit_to_unit(pu, 0))
@@ -247,7 +245,7 @@
let mut creators = creator_set(n_members);
let mut expected_hashes_per_round = Vec::new();
for round in 0..2 {
-let new_units = create_units(creators.iter().skip(1), round);
+let new_units = create_preunits(creators.iter().skip(1), round);
let new_units: Vec<_> = new_units
.into_iter()
.map(|(pu, _)| preunit_to_unit(pu, 0))
@@ -284,7 +282,7 @@
fn cannot_create_unit_without_predecessor() {
let n_members = NodeCount(7);
let mut creators = creator_set(n_members);
-let new_units = create_units(creators.iter().skip(1), 0);
+let new_units = create_preunits(creators.iter().skip(1), 0);
let new_units: Vec<_> = new_units
.into_iter()
.map(|(pu, _)| preunit_to_unit(pu, 0))
@@ -299,7 +297,7 @@
fn units_collector_successfully_computes_parents() {
let n_members = NodeCount(4);
let creators = creator_set(n_members);
-let new_units = create_units(creators.iter(), 0);
+let new_units = create_preunits(creators.iter(), 0);
let new_units: Vec<_> = new_units
.into_iter()
.map(|(pu, _)| preunit_to_unit(pu, 0))
@@ -324,7 +322,7 @@
fn units_collector_returns_err_when_not_enough_parents() {
let n_members = NodeCount(4);
let creators = creator_set(n_members);
-let new_units = create_units(creators.iter().take(2), 0);
+let new_units = create_preunits(creators.iter().take(2), 0);
let new_units: Vec<_> = new_units
.into_iter()
.map(|(pu, _)| preunit_to_unit(pu, 0))
@@ -346,7 +344,7 @@
fn units_collector_returns_err_when_missing_own_parent() {
let n_members = NodeCount(4);
let creators = creator_set(n_members);
-let new_units = create_units(creators.iter().take(3), 0);
+let new_units = create_preunits(creators.iter().take(3), 0);
let new_units: Vec<_> = new_units
.into_iter()
.map(|(pu, _)| preunit_to_unit(pu, 0))
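For concreteness, and assuming the floor(2n/3) + 1 formula sketched above, the boundaries these creator tests exercise work out as follows (plain arithmetic, not taken from the commit; the equality comparison assumes NodeCount derives PartialEq):

// NodeCount(4): a collector accepts 3 candidate parents and rejects 2,
// matching units_collector_returns_err_when_not_enough_parents above.
assert_eq!(NodeCount(4).consensus_threshold(), NodeCount(3));
// NodeCount(7): a unit needs candidates from at least 5 of the 7 creators,
// matching create_unit_with_minimal_parents with (7 * 2) / 3 + 1 = 5.
assert_eq!(NodeCount(7).consensus_threshold(), NodeCount(5));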
2 changes: 1 addition & 1 deletion consensus/src/extension/election.rs
@@ -66,7 +66,7 @@ impl<H: Hasher> CandidateElection<H> {
relative_round: Round,
) -> Result<bool, CandidateOutcome<H>> {
use CandidateOutcome::*;
-let threshold = (parents.size() * 2) / 3 + NodeCount(1);
+let threshold = parents.size().consensus_threshold();
// Gather parents' votes.
let (votes_for, votes_against) = self.parent_votes(parents)?;
assert!(votes_for + votes_against >= threshold);
16 changes: 9 additions & 7 deletions consensus/src/reconstruction/dag.rs
@@ -123,10 +123,10 @@ impl<H: Hasher> Dag<H> {
mod test {
use crate::{
reconstruction::{dag::Dag, ReconstructedUnit},
-units::tests::{random_full_parent_units_up_to, TestFullUnit},
+units::{random_full_parent_units_up_to, FullUnit},
Hasher, NodeCount, NodeIndex, NodeMap,
};
-use aleph_bft_mock::Hasher64;
+use aleph_bft_mock::{Data, Hasher64};
use std::collections::HashSet;

fn full_parents_to_map(
@@ -141,11 +141,13 @@ mod test {

// silly clippy, the map below doesn't work with &[..]
#[allow(clippy::ptr_arg)]
-fn unit_hashes(units: &Vec<TestFullUnit>) -> Vec<<Hasher64 as Hasher>::Hash> {
+fn unit_hashes(units: &Vec<FullUnit<Hasher64, Data>>) -> Vec<<Hasher64 as Hasher>::Hash> {
units.iter().map(|unit| unit.hash()).collect()
}

-fn reconstructed(dag: Vec<Vec<TestFullUnit>>) -> Vec<Vec<ReconstructedUnit<Hasher64>>> {
+fn reconstructed(
+dag: Vec<Vec<FullUnit<Hasher64, Data>>>,
+) -> Vec<Vec<ReconstructedUnit<Hasher64>>> {
let hashes: Vec<_> = dag.iter().map(unit_hashes).collect();
let initial_units: Vec<_> = dag
.get(0)
@@ -171,7 +173,7 @@
#[test]
fn reconstructs_initial_units() {
let mut dag = Dag::new();
-for unit in reconstructed(random_full_parent_units_up_to(0, NodeCount(4)))
+for unit in reconstructed(random_full_parent_units_up_to(0, NodeCount(4), 43))
.pop()
.expect("we have initial units")
{
@@ -183,7 +185,7 @@
#[test]
fn reconstructs_units_in_order() {
let mut dag = Dag::new();
-for units in reconstructed(random_full_parent_units_up_to(7000, NodeCount(4))) {
+for units in reconstructed(random_full_parent_units_up_to(7000, NodeCount(4), 43)) {
for unit in units {
let reconstructed = dag.add_unit(unit.clone());
assert_eq!(reconstructed, vec![unit]);
@@ -193,7 +195,7 @@

#[test]
fn reconstructs_units_in_reverse_order() {
-let full_unit_dag = random_full_parent_units_up_to(7000, NodeCount(4));
+let full_unit_dag = random_full_parent_units_up_to(7000, NodeCount(4), 43);
let mut hash_batches: Vec<_> = full_unit_dag
.iter()
.map(unit_hashes)
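A second recurring change in the test code: the `TestFullUnit` wrapper is replaced by plain `FullUnit<Hasher64, Data>`, and `random_full_parent_units_up_to` gains a third argument, an explicit seed (43 in every call in this commit), presumably so the randomly generated DAG fixtures are reproducible across runs. The helper itself is among the files not rendered here; this is only a hedged sketch of what its signature now plausibly looks like (the seed parameter's name and type are assumptions):

// Assumed new signature; the real helper lives in the units test utilities.
pub fn random_full_parent_units_up_to(
    round: Round,
    n_members: NodeCount,
    seed: u64,
) -> Vec<Vec<FullUnit<Hasher64, Data>>> {
    // For every round 0..=round, builds one unit per node whose parent map
    // covers all units of the previous round, drawing unit data from an RNG
    // seeded with `seed` so repeated runs produce identical fixtures.
    unimplemented!("body omitted -- not part of the rendered diff")
}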
21 changes: 9 additions & 12 deletions consensus/src/reconstruction/parents.rs
@@ -291,17 +291,14 @@ mod test {
parents::{Reconstruction, Request},
ReconstructedUnit,
},
-units::{
-tests::{random_full_parent_units_up_to, TestFullUnit},
-UnitCoord,
-},
+units::{random_full_parent_units_up_to, UnitCoord},
NodeCount, NodeIndex,
};

#[test]
fn reconstructs_initial_units() {
let mut reconstruction = Reconstruction::new();
-for unit in &random_full_parent_units_up_to(0, NodeCount(4))[0] {
+for unit in &random_full_parent_units_up_to(0, NodeCount(4), 43)[0] {
let unit = unit.unit();
let (mut reconstructed_units, requests) = reconstruction.add_unit(unit.clone()).into();
assert!(requests.is_empty());
@@ -315,7 +312,7 @@ mod test {
#[test]
fn reconstructs_units_coming_in_order() {
let mut reconstruction = Reconstruction::new();
-let dag = random_full_parent_units_up_to(7, NodeCount(4));
+let dag = random_full_parent_units_up_to(7, NodeCount(4), 43);
for units in &dag {
for unit in units {
let unit = unit.unit();
@@ -332,7 +329,7 @@
}
round => {
assert_eq!(reconstructed_unit.parents().item_count(), 4);
-let parents: &Vec<TestFullUnit> = dag
+let parents = dag
.get((round - 1) as usize)
.expect("the parents are there");
for (parent, reconstructed_parent) in
@@ -349,7 +346,7 @@
#[test]
fn requests_all_parents() {
let mut reconstruction = Reconstruction::new();
-let dag = random_full_parent_units_up_to(1, NodeCount(4));
+let dag = random_full_parent_units_up_to(1, NodeCount(4), 43);
let unit = dag
.get(1)
.expect("just created")
@@ -364,7 +361,7 @@
#[test]
fn requests_single_parent() {
let mut reconstruction = Reconstruction::new();
-let dag = random_full_parent_units_up_to(1, NodeCount(4));
+let dag = random_full_parent_units_up_to(1, NodeCount(4), 43);
for unit in dag.get(0).expect("just created").iter().skip(1) {
let unit = unit.unit();
reconstruction.add_unit(unit.clone());
@@ -387,7 +384,7 @@
#[test]
fn reconstructs_units_coming_in_reverse_order() {
let mut reconstruction = Reconstruction::new();
-let mut dag = random_full_parent_units_up_to(7, NodeCount(4));
+let mut dag = random_full_parent_units_up_to(7, NodeCount(4), 43);
dag.reverse();
for unit in dag.get(0).expect("we have the top units") {
let unit = unit.unit();
@@ -414,12 +411,12 @@
#[test]
fn handles_bad_hash() {
let mut reconstruction = Reconstruction::new();
-let dag = random_full_parent_units_up_to(0, NodeCount(4));
+let dag = random_full_parent_units_up_to(0, NodeCount(4), 43);
for unit in dag.get(0).expect("just created") {
let unit = unit.unit();
reconstruction.add_unit(unit.clone());
}
-let other_dag = random_full_parent_units_up_to(1, NodeCount(4));
+let other_dag = random_full_parent_units_up_to(1, NodeCount(4), 43);
let unit = other_dag
.get(1)
.expect("just created")
