Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

perf: make binary nodes compute their edges in parallel #24

Merged
merged 6 commits into from
Apr 12, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ version = "0.1.0"
default = ["std", "rocksdb"]
rocksdb = ["dep:rocksdb"]
std = ["parity-scale-codec/std", "bitvec/std", "starknet-types-core/std"]
# internal
bench = []
tdelabro marked this conversation as resolved.
Show resolved Hide resolved

[dependencies]
bitvec = { version = "1", default-features = false, features = ["alloc"] }
Expand Down Expand Up @@ -36,10 +38,17 @@ rocksdb = { optional = true, version = "0.21.0", features = [
] }

[dev-dependencies]
pprof = { version = "0.3", features = ["flamegraph"] }
pathfinder-common = { git = "https://github.com/massalabs/pathfinder.git", package = "pathfinder-common", rev = "b7b6d76a76ab0e10f92e5f84ce099b5f727cb4db" }
pathfinder-crypto = { git = "https://github.com/massalabs/pathfinder.git", package = "pathfinder-crypto", rev = "b7b6d76a76ab0e10f92e5f84ce099b5f727cb4db" }
pathfinder-merkle-tree = { git = "https://github.com/massalabs/pathfinder.git", package = "pathfinder-merkle-tree", rev = "b7b6d76a76ab0e10f92e5f84ce099b5f727cb4db" }
pathfinder-storage = { git = "https://github.com/massalabs/pathfinder.git", package = "pathfinder-storage", rev = "b7b6d76a76ab0e10f92e5f84ce099b5f727cb4db" }
rand = "0.8.5"
tempfile = "3.8.0"
rstest = "0.18.2"
criterion = "0.5.1"

[[bench]]
name = "storage"
required-features = ["bench"]
harness = false
7 changes: 7 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -153,6 +153,13 @@ fn main() {
}
```

## Build and run benchmarks

This crate uses `rayon` to parallelize hash computations. As such, benchmark results will vary depending on the number of cores of your CPU.
```
cargo bench
```

## Acknowledgements

- Shout out to [Danno Ferrin](https://github.com/shemnon) and [Karim Taam](https://github.com/matkt) for their work on Bonsai. This project is heavily inspired by their work.
Expand Down
39 changes: 39 additions & 0 deletions benches/flamegraph.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
use criterion::profiler::Profiler;
use pprof::ProfilerGuard;
use std::{fs::File, os::raw::c_int, path::Path};

/// Criterion profiler hook that samples the benchmark with `pprof` and
/// writes a flamegraph SVG when profiling stops.
pub struct FlamegraphProfiler<'a> {
    // Sampling frequency in Hz passed to `pprof::ProfilerGuard::new`.
    frequency: c_int,
    // `Some` while a profiling session is running (between `start_profiling`
    // and `stop_profiling`), `None` otherwise.
    active_profiler: Option<ProfilerGuard<'a>>,
}

impl<'a> FlamegraphProfiler<'a> {
#[allow(dead_code)]
pub fn new(frequency: c_int) -> Self {
FlamegraphProfiler {
frequency,
active_profiler: None,
}
}
}

impl<'a> Profiler for FlamegraphProfiler<'a> {
    /// Starts a `pprof` sampling session for the benchmark being run.
    fn start_profiling(&mut self, _benchmark_id: &str, _benchmark_dir: &Path) {
        self.active_profiler =
            Some(ProfilerGuard::new(self.frequency).expect("Failed to start pprof profiler"));
    }

    /// Stops sampling and writes `flamegraph.svg` into `benchmark_dir`.
    ///
    /// The directory and the SVG file are only created when a profiling
    /// session is actually active, so no empty `flamegraph.svg` is left
    /// behind when `stop_profiling` is called without a matching start.
    fn stop_profiling(&mut self, _benchmark_id: &str, benchmark_dir: &Path) {
        if let Some(profiler) = self.active_profiler.take() {
            std::fs::create_dir_all(benchmark_dir).unwrap();
            let flamegraph_path = benchmark_dir.join("flamegraph.svg");
            let flamegraph_file = File::create(&flamegraph_path)
                .expect("File system error while creating flamegraph.svg");
            profiler
                .report()
                .build()
                .unwrap()
                .flamegraph(flamegraph_file)
                .expect("Error writing flamegraph");
        }
    }
}
157 changes: 157 additions & 0 deletions benches/storage.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,157 @@
use std::hint::black_box;

use bitvec::vec::BitVec;
use bonsai_trie::{
databases::HashMapDb,
id::{BasicId, BasicIdBuilder},
BonsaiStorage, BonsaiStorageConfig,
};
use criterion::{criterion_group, criterion_main, Criterion};
use rand::{prelude::*, thread_rng};
use starknet_types_core::{
felt::Felt,
hash::{Pedersen, StarkHash},
};

mod flamegraph;

fn storage(c: &mut Criterion) {
c.bench_function("storage commit", move |b| {
let mut bonsai_storage: BonsaiStorage<BasicId, _, Pedersen> = BonsaiStorage::new(
HashMapDb::<BasicId>::default(),
BonsaiStorageConfig::default(),
)
.unwrap();
let mut rng = thread_rng();

let felt = Felt::from_hex("0x66342762FDD54D033c195fec3ce2568b62052e").unwrap();
for _ in 0..1000 {
let bitvec = BitVec::from_vec(vec![
rng.gen(),
rng.gen(),
rng.gen(),
rng.gen(),
rng.gen(),
rng.gen(),
]);
bonsai_storage.insert(&[], &bitvec, &felt).unwrap();
}

let mut id_builder = BasicIdBuilder::new();
b.iter_batched(
|| bonsai_storage.clone(),
|mut bonsai_storage| {
bonsai_storage.commit(id_builder.new_id()).unwrap();
},
criterion::BatchSize::LargeInput,
);
});
}

fn one_update(c: &mut Criterion) {
c.bench_function("one update", move |b| {
let mut bonsai_storage: BonsaiStorage<BasicId, _, Pedersen> = BonsaiStorage::new(
HashMapDb::<BasicId>::default(),
BonsaiStorageConfig::default(),
)
.unwrap();
let mut rng = thread_rng();

let felt = Felt::from_hex("0x66342762FDD54D033c195fec3ce2568b62052e").unwrap();
for _ in 0..1000 {
let bitvec = BitVec::from_vec(vec![
rng.gen(),
rng.gen(),
rng.gen(),
rng.gen(),
rng.gen(),
rng.gen(),
]);
bonsai_storage.insert(&[], &bitvec, &felt).unwrap();
}

let mut id_builder = BasicIdBuilder::new();
bonsai_storage.commit(id_builder.new_id()).unwrap();

b.iter_batched(
|| bonsai_storage.clone(),
|mut bonsai_storage| {
let bitvec = BitVec::from_vec(vec![0, 1, 2, 3, 4, 5]);
bonsai_storage.insert(&[], &bitvec, &felt).unwrap();
bonsai_storage.commit(id_builder.new_id()).unwrap();
},
criterion::BatchSize::LargeInput,
);
});
}

fn five_updates(c: &mut Criterion) {
c.bench_function("five updates", move |b| {
let mut bonsai_storage: BonsaiStorage<BasicId, _, Pedersen> = BonsaiStorage::new(
HashMapDb::<BasicId>::default(),
BonsaiStorageConfig::default(),
)
.unwrap();
let mut rng = thread_rng();

let felt = Felt::from_hex("0x66342762FDD54D033c195fec3ce2568b62052e").unwrap();
for _ in 0..1000 {
let bitvec = BitVec::from_vec(vec![
rng.gen(),
rng.gen(),
rng.gen(),
rng.gen(),
rng.gen(),
rng.gen(),
]);
bonsai_storage.insert(&[], &bitvec, &felt).unwrap();
}

let mut id_builder = BasicIdBuilder::new();
bonsai_storage.commit(id_builder.new_id()).unwrap();

b.iter_batched(
|| bonsai_storage.clone(),
|mut bonsai_storage| {
bonsai_storage
.insert(&[], &BitVec::from_vec(vec![0, 1, 2, 3, 4, 5]), &felt)
.unwrap();
bonsai_storage
.insert(&[], &BitVec::from_vec(vec![0, 2, 2, 5, 4, 5]), &felt)
.unwrap();
bonsai_storage
.insert(&[], &BitVec::from_vec(vec![0, 1, 2, 3, 3, 5]), &felt)
.unwrap();
bonsai_storage
.insert(&[], &BitVec::from_vec(vec![0, 1, 1, 3, 99, 3]), &felt)
.unwrap();
bonsai_storage
.insert(&[], &BitVec::from_vec(vec![0, 1, 2, 3, 4, 6]), &felt)
.unwrap();
bonsai_storage.commit(id_builder.new_id()).unwrap();
},
criterion::BatchSize::LargeInput,
);
});
}

/// Benchmarks one Pedersen hash of two fixed felts.
///
/// Both the inputs and the result go through `black_box` so the optimizer
/// cannot const-fold the hash of compile-time-known values and skew the
/// measurement.
fn hash(c: &mut Criterion) {
    c.bench_function("pedersen hash", move |b| {
        let felt0 =
            Felt::from_hex("0x100bd6fbfced88ded1b34bd1a55b747ce3a9fde9a914bca75571e4496b56443")
                .unwrap();
        let felt1 =
            Felt::from_hex("0x00a038cda302fedbc4f6117648c6d3faca3cda924cb9c517b46232c6316b152f")
                .unwrap();
        b.iter(|| {
            black_box(Pedersen::hash(black_box(&felt0), black_box(&felt1)));
        })
    });
}

// To profile a benchmark run, swap in the commented-out profiler config
// below; it writes a `flamegraph.svg` per benchmark (see benches/flamegraph.rs).
criterion_group! {
    name = benches;
    config = Criterion::default(); // .with_profiler(flamegraph::FlamegraphProfiler::new(100));
    targets = storage, one_update, five_updates, hash
}
criterion_main!(benches);
2 changes: 2 additions & 0 deletions src/changes.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ pub struct Change {
}

#[derive(Debug, Default)]
// `Clone` is only needed by the benchmarks (criterion's `iter_batched` clones
// the whole storage per iteration), hence the internal `bench` feature gate.
#[cfg_attr(feature = "bench", derive(Clone))]
pub struct ChangeBatch(pub(crate) HashMap<TrieKey, Change>);

const KEY_SEPARATOR: u8 = 0x00;
Expand Down Expand Up @@ -115,6 +116,7 @@ impl ChangeBatch {
}
}

#[cfg_attr(feature = "bench", derive(Clone))]
pub struct ChangeStore<ID>
where
ID: Id,
Expand Down
1 change: 1 addition & 0 deletions src/key_value_db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ use crate::{
};

/// Crate Trie <= KeyValueDB => BonsaiDatabase
#[cfg_attr(feature = "bench", derive(Clone))]
pub struct KeyValueDB<DB, ID>
where
DB: BonsaiDatabase,
Expand Down
14 changes: 14 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,20 @@ where
tries: MerkleTrees<H, DB, ChangeID>,
}

#[cfg(feature = "bench")]
// Hand-written rather than `#[derive(Clone)]`: derive would add a `Clone`
// bound on every type parameter, including the hasher `H`, which the `where`
// clause deliberately does not require. Only the benchmarks need to clone a
// `BonsaiStorage`, hence the `bench` feature gate.
impl<ChangeID, DB, H> Clone for BonsaiStorage<ChangeID, DB, H>
where
    DB: BonsaiDatabase + Clone,
    ChangeID: id::Id,
    H: StarkHash + Send + Sync,
{
    // Clones the storage by cloning its underlying tries.
    fn clone(&self) -> Self {
        Self {
            tries: self.tries.clone(),
        }
    }
}

/// Trie root hash type.
pub type BonsaiTrieHash = Felt;

Expand Down
Loading
Loading