Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(tool): forest-tool api generate-test-snapshot #5074

Open
wants to merge 27 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
f5d98d7
test(rpc): rpc unit tests with db snapshot
hanabi1224 Dec 4, 2024
4df1c11
Merge branch 'main' into hm/rpc-snaphsot-unittest
hanabi1224 Dec 4, 2024
6edcc28
Merge branch 'main' into hm/rpc-snaphsot-unittest
hanabi1224 Dec 10, 2024
008ca0d
Merge branch 'main' into hm/rpc-snaphsot-unittest
hanabi1224 Dec 11, 2024
91e4716
feat(tool): forest-tool api generate-test-snapshot
hanabi1224 Dec 11, 2024
2f72b9b
fix build errors
hanabi1224 Dec 12, 2024
895236d
Merge remote-tracking branch 'origin/hm/rpc-snaphsot-unittest' into h…
hanabi1224 Dec 12, 2024
fd80f98
cc
hanabi1224 Dec 12, 2024
bd33685
update test cases
hanabi1224 Dec 12, 2024
0c7e42e
track EthMappingsStore
hanabi1224 Dec 12, 2024
35c273a
for_each_rpc_method
hanabi1224 Dec 12, 2024
75c7a6e
Merge branch 'main' into hm/forest-tool-api-generate-test-snapshot
hanabi1224 Dec 13, 2024
e54bc5e
Merge branch 'main' into hm/forest-tool-api-generate-test-snapshot
hanabi1224 Dec 16, 2024
18089eb
switch to foreset CAR db
hanabi1224 Dec 16, 2024
f69ad6e
Merge branch 'main' into hm/forest-tool-api-generate-test-snapshot
hanabi1224 Dec 16, 2024
a70ee05
Merge remote-tracking branch 'origin/main' into hm/forest-tool-api-ge…
hanabi1224 Dec 17, 2024
1f2f7ff
set_heaviest_tipset
hanabi1224 Dec 17, 2024
b4282ce
Merge remote-tracking branch 'origin/main' into hm/forest-tool-api-ge…
hanabi1224 Jan 7, 2025
fb552dc
Merge remote-tracking branch 'origin/main' into hm/forest-tool-api-ge…
hanabi1224 Jan 7, 2025
376ab9e
fix copyright headers
hanabi1224 Jan 7, 2025
9a410e2
Merge remote-tracking branch 'origin/main' into hm/forest-tool-api-ge…
hanabi1224 Jan 9, 2025
f20cb1d
fix hackhack
hanabi1224 Jan 10, 2025
8ccbdd3
construct ChainConfig from NetworkChain
hanabi1224 Jan 10, 2025
0e21f0f
code docs
hanabi1224 Jan 10, 2025
b98c0ae
test_export_forest_car
hanabi1224 Jan 10, 2025
e3a7ebf
local cache for rpc test snapshots
hanabi1224 Jan 10, 2025
c469b88
switch to a more popular md5 crate
hanabi1224 Jan 10, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -238,6 +238,7 @@ glob = "0.3"
http-range-header = "0.4"
insta = { version = "1", features = ["yaml"] }
libp2p-swarm-test = { workspace = true }
md5 = { package = "md-5", version = "0.10" }
num-bigint = { version = "0.4", features = ['quickcheck'] }
petgraph = "0.7"
predicates = "3"
Expand Down
2 changes: 1 addition & 1 deletion src/chain/store/chain_store.rs
Original file line number Diff line number Diff line change
Expand Up @@ -234,7 +234,7 @@ where
&self
.settings
.require_obj::<TipsetKey>(HEAD_KEY)
.expect("failed to load heaviest tipset"),
.expect("failed to load heaviest tipset key"),
)
.expect("failed to load heaviest tipset")
}
Expand Down
13 changes: 8 additions & 5 deletions src/chain/store/index.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ use crate::beacon::{BeaconEntry, IGNORE_DRAND_VAR};
use crate::blocks::{Tipset, TipsetKey};
use crate::metrics;
use crate::shim::clock::ChainEpoch;
use crate::utils::misc::env::is_env_truthy;
use fvm_ipld_blockstore::Blockstore;
use itertools::Itertools;
use lru::LruCache;
Expand Down Expand Up @@ -47,11 +48,13 @@ impl<DB: Blockstore> ChainIndex<DB> {
/// Loads a tipset from memory given the tipset keys and cache. Semantically
/// identical to [`Tipset::load`] but the result is cached.
pub fn load_tipset(&self, tsk: &TipsetKey) -> Result<Option<Arc<Tipset>>, Error> {
if let Some(ts) = self.ts_cache.lock().get(tsk) {
metrics::LRU_CACHE_HIT
.get_or_create(&metrics::values::TIPSET)
.inc();
return Ok(Some(ts.clone()));
if !is_env_truthy("FOREST_TIPSET_CACHE_DISABLED") {
if let Some(ts) = self.ts_cache.lock().get(tsk) {
metrics::LRU_CACHE_HIT
.get_or_create(&metrics::values::TIPSET)
.inc();
return Ok(Some(ts.clone()));
}
}

let ts_opt = Tipset::load(&self.db, tsk)?.map(Arc::new);
Expand Down
3 changes: 3 additions & 0 deletions src/cli_shared/snapshot.rs
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,9 @@ fn parse_content_disposition(value: &reqwest::header::HeaderValue) -> Option<Str

/// Download the file at `url` with a private HTTP client, returning the path to the downloaded file
async fn download_http(url: &Url, directory: &Path, filename: &str) -> anyhow::Result<PathBuf> {
if !directory.is_dir() {
std::fs::create_dir_all(directory)?;
}
let dst_path = directory.join(filename);
let destination = dst_path.display();
event!(target: "forest::snapshot", tracing::Level::INFO, %url, %destination, "downloading snapshot");
Expand Down
119 changes: 88 additions & 31 deletions src/db/memory.rs
Original file line number Diff line number Diff line change
@@ -1,33 +1,63 @@
// Copyright 2019-2025 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

use super::{EthMappingsStore, SettingsStore, SettingsStoreExt};
use crate::blocks::TipsetKey;
use crate::cid_collections::CidHashSet;
use crate::db::{GarbageCollectable, PersistentStore};
use crate::libp2p_bitswap::{BitswapStoreRead, BitswapStoreReadWrite};
use crate::rpc::eth::types::EthHash;
use crate::utils::db::car_stream::CarBlock;
use crate::utils::multihash::prelude::*;
use ahash::HashMap;
use anyhow::Context as _;
use cid::Cid;
use fvm_ipld_blockstore::Blockstore;
use itertools::Itertools;
use parking_lot::RwLock;

use super::{EthMappingsStore, SettingsStore};

#[derive(Debug, Default)]
pub struct MemoryDB {
blockchain_db: RwLock<HashMap<Vec<u8>, Vec<u8>>>,
blockchain_persistent_db: RwLock<HashMap<Vec<u8>, Vec<u8>>>,
blockchain_db: RwLock<HashMap<Cid, Vec<u8>>>,
blockchain_persistent_db: RwLock<HashMap<Cid, Vec<u8>>>,
settings_db: RwLock<HashMap<String, Vec<u8>>>,
eth_mappings_db: RwLock<HashMap<EthHash, Vec<u8>>>,
}

impl MemoryDB {
    /// Serializes the contents of this in-memory store into `writer` as a
    /// forest-flavored CAR archive.
    ///
    /// The CAR roots are the CIDs of the tipset key recorded under `HEAD_KEY`;
    /// an error is returned when no chain head has been tracked. Records from
    /// both the volatile and the persistent block maps are exported.
    pub async fn export_forest_car<W: tokio::io::AsyncWrite + Unpin>(
        &self,
        writer: &mut W,
    ) -> anyhow::Result<()> {
        // Resolve the CAR roots from the tracked chain head, failing early when absent.
        let head =
            SettingsStoreExt::read_obj::<TipsetKey>(self, crate::db::setting_keys::HEAD_KEY)?
                .context("chain head is not tracked and cannot be exported")?;
        let roots = head.into_cids();

        // Snapshot every record while both read locks are held, releasing them
        // before any awaiting takes place.
        let records: Vec<_> = {
            let volatile = self.blockchain_db.read();
            let persistent = self.blockchain_persistent_db.read();
            volatile
                .iter()
                .chain(persistent.iter())
                .map(|(&cid, data)| {
                    anyhow::Ok(CarBlock {
                        cid,
                        data: data.clone(),
                    })
                })
                .collect()
        };

        // Compress the records into forest CAR frames and stream them out.
        let frames = crate::db::car::forest::Encoder::compress_stream_default(
            futures::stream::iter(records),
        );
        crate::db::car::forest::Encoder::write(writer, roots, frames).await
    }
}
Comment on lines +28 to +54
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we contrive a unit test for this functionality?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Sure, unit test added.


impl GarbageCollectable<CidHashSet> for MemoryDB {
fn get_keys(&self) -> anyhow::Result<CidHashSet> {
let mut set = CidHashSet::new();
for key in self.blockchain_db.read().keys() {
let cid = Cid::try_from(key.as_slice())?;
set.insert(cid);
for &key in self.blockchain_db.read().keys() {
set.insert(key);
}
Ok(set)
}
Expand All @@ -36,17 +66,11 @@ impl GarbageCollectable<CidHashSet> for MemoryDB {
let mut db = self.blockchain_db.write();
let mut deleted = 0;
db.retain(|key, _| {
let cid = Cid::try_from(key.as_slice());
match cid {
Ok(cid) => {
let retain = !keys.contains(&cid);
if !retain {
deleted += 1;
}
retain
}
_ => true,
let retain = !keys.contains(key);
if !retain {
deleted += 1;
}
retain
});
Ok(deleted)
}
Expand Down Expand Up @@ -111,22 +135,15 @@ impl EthMappingsStore for MemoryDB {

impl Blockstore for MemoryDB {
fn get(&self, k: &Cid) -> anyhow::Result<Option<Vec<u8>>> {
Ok(self
.blockchain_db
Ok(self.blockchain_db.read().get(k).cloned().or(self
.blockchain_persistent_db
.read()
.get(&k.to_bytes())
.cloned()
.or(self
.blockchain_persistent_db
.read()
.get(&k.to_bytes())
.cloned()))
.get(k)
.cloned()))
}

fn put_keyed(&self, k: &Cid, block: &[u8]) -> anyhow::Result<()> {
self.blockchain_db
.write()
.insert(k.to_bytes(), block.to_vec());
self.blockchain_db.write().insert(*k, block.to_vec());
Ok(())
}
}
Expand All @@ -135,14 +152,14 @@ impl PersistentStore for MemoryDB {
fn put_keyed_persistent(&self, k: &Cid, block: &[u8]) -> anyhow::Result<()> {
self.blockchain_persistent_db
.write()
.insert(k.to_bytes(), block.to_vec());
.insert(*k, block.to_vec());
Ok(())
}
}

impl BitswapStoreRead for MemoryDB {
fn contains(&self, cid: &Cid) -> anyhow::Result<bool> {
Ok(self.blockchain_db.read().contains_key(&cid.to_bytes()))
Ok(self.blockchain_db.read().contains_key(cid))
}

fn get(&self, cid: &Cid) -> anyhow::Result<Option<Vec<u8>>> {
Expand All @@ -157,3 +174,43 @@ impl BitswapStoreReadWrite for MemoryDB {
self.put_keyed(block.cid(), block.data())
}
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::db::{car::ForestCar, setting_keys::HEAD_KEY};
    use fvm_ipld_encoding::DAG_CBOR;
    use multihash_codetable::Code::Blake2b256;
    use nunny::vec as nonempty;

    /// Round-trips a `MemoryDB` through `export_forest_car` and checks that
    /// records from both the volatile and the persistent maps end up in the
    /// resulting CAR, rooted at the tracked chain head.
    #[tokio::test]
    async fn test_export_forest_car() {
        let db = MemoryDB::default();

        // One record in the volatile store...
        let volatile_payload = b"non-persistent";
        let volatile_cid = Cid::new_v1(DAG_CBOR, Blake2b256.digest(volatile_payload.as_slice()));
        db.put_keyed(&volatile_cid, volatile_payload.as_slice())
            .unwrap();

        // ...and one in the persistent store.
        let persistent_payload = b"persistent";
        let persistent_cid =
            Cid::new_v1(DAG_CBOR, Blake2b256.digest(persistent_payload.as_slice()));
        db.put_keyed_persistent(&persistent_cid, persistent_payload.as_slice())
            .unwrap();

        // Without a tracked chain head the export must fail with a clear message.
        let mut buffer = vec![];
        let err = db.export_forest_car(&mut buffer).await.unwrap_err();
        assert!(err
            .to_string()
            .contains("chain head is not tracked and cannot be exported"));

        // After recording a chain head, the export should succeed.
        db.write_obj(HEAD_KEY, &TipsetKey::from(nonempty![volatile_cid]))
            .unwrap();
        buffer.clear();
        db.export_forest_car(&mut buffer).await.unwrap();

        // The produced CAR exposes the head as its root and contains both records.
        let car = ForestCar::new(buffer).unwrap();
        assert_eq!(car.roots(), &nonempty![volatile_cid]);
        assert!(car.has(&volatile_cid).unwrap());
        assert!(car.has(&persistent_cid).unwrap());
    }
}
2 changes: 1 addition & 1 deletion src/rpc/auth_layer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ static METHOD_NAME2REQUIRED_PERMISSION: Lazy<HashMap<&str, Permission>> = Lazy::
}
};
}
super::for_each_method!(insert);
super::for_each_rpc_method!(insert);

access.insert(chain::CHAIN_NOTIFY, Permission::Read);
access.insert(CANCEL_METHOD_NAME, Permission::Read);
Expand Down
Loading
Loading