Skip to content

Commit

Permalink
refactor: class fetch
Browse files Browse the repository at this point in the history
  • Loading branch information
jbcaron committed Aug 22, 2024
1 parent 4ee716b commit ff2c85a
Show file tree
Hide file tree
Showing 21 changed files with 1,044 additions and 722 deletions.
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 4 additions & 1 deletion crates/client/block_import/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@
//! A signature verification mode should be added to allow the skipping of block validation entirely if the block is signed.
use dc_db::{DeoxysBackend, DeoxysStorageError};
use dp_class::{class_hash::ComputeClassHashError, compile::ClassCompilationError};
use starknet_core::types::Felt;
use std::{borrow::Cow, sync::Arc};

Expand Down Expand Up @@ -82,7 +83,9 @@ pub enum BlockImportError {
#[error("Compiled class hash mismatch for class hash {class_hash:#x}: expected {expected:#x}, got {got:#x}")]
CompiledClassHash { class_hash: Felt, got: Felt, expected: Felt },
#[error("Class with hash {class_hash:#x} failed to compile: {error}")]
CompilationClassError { class_hash: Felt, error: String },
CompilationClassError { class_hash: Felt, error: ClassCompilationError },
#[error("Failed to compute class hash {class_hash:#x}: {error}")]
ComputeClassHash { class_hash: Felt, error: ComputeClassHashError },

#[error("Block hash mismatch: expected {expected:#x}, got {got:#x}")]
BlockHash { got: Felt, expected: Felt },
Expand Down
51 changes: 35 additions & 16 deletions crates/client/block_import/src/pre_validate.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ use crate::{
};
use bitvec::vec::BitVec;
use dp_chain_config::StarknetVersion;
use dp_class::{ClassInfo, ConvertedClass, ToCompiledClass};
use dp_class::{ConvertedClass, LegacyClassInfo, LegacyConvertedClass, SierraClassInfo, SierraConvertedClass};
use dp_convert::ToFelt;
use dp_receipt::TransactionReceipt;
use dp_transactions::Transaction;
Expand Down Expand Up @@ -137,21 +137,40 @@ fn convert_classes(
}

/// Converts a [`DeclaredClass`] coming from the sync pipeline into a [`ConvertedClass`],
/// verifying the declared hashes along the way.
///
/// For sierra classes this recomputes the class hash and compiles the class to casm,
/// rejecting the class when either the class hash or the compiled class hash does not
/// match the values declared on-chain. Legacy classes are passed through as-is
/// (hash verification is still a TODO).
fn class_conversion(_validation: &Validation, class: DeclaredClass) -> Result<ConvertedClass, BlockImportError> {
    match class {
        DeclaredClass::Sierra(sierra) => {
            // Recompute the class hash from the contract class and check it against the
            // hash the block declared.
            let class_hash = sierra
                .contract_class
                .compute_class_hash()
                .map_err(|e| BlockImportError::ComputeClassHash { class_hash: sierra.class_hash, error: e })?;
            if class_hash != sierra.class_hash {
                return Err(BlockImportError::ClassHash { got: sierra.class_hash, expected: class_hash });
            }
            // Compile to casm; the compilation also yields the compiled class hash, which
            // must match the one declared on-chain.
            let (compiled_class_hash, compiled_class) = sierra
                .contract_class
                .compile_to_casm()
                .map_err(|e| BlockImportError::CompilationClassError { class_hash: sierra.class_hash, error: e })?;
            if compiled_class_hash != sierra.compiled_class_hash {
                return Err(BlockImportError::CompiledClassHash {
                    class_hash: sierra.class_hash,
                    got: sierra.compiled_class_hash,
                    expected: compiled_class_hash,
                });
            }
            Ok(ConvertedClass::Sierra(SierraConvertedClass {
                class_hash: sierra.class_hash,
                info: SierraClassInfo { contract_class: sierra.contract_class, compiled_class_hash },
                compiled: compiled_class,
            }))
        }
        DeclaredClass::Legacy(legacy) => {
            // TODO: verify that the class hash is correct
            Ok(ConvertedClass::Legacy(LegacyConvertedClass {
                class_hash: legacy.class_hash,
                info: LegacyClassInfo { contract_class: legacy.contract_class },
            }))
        }
    }
}

fn transaction_hashes(
Expand Down
46 changes: 43 additions & 3 deletions crates/client/block_import/src/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,10 @@ use dp_block::{
Header,
};
use dp_chain_config::StarknetVersion;
use dp_class::{ContractClass, ConvertedClass};
use dp_class::{
class_update::{ClassUpdate, LegacyClassUpdate, SierraClassUpdate},
CompressedLegacyContractClass, ConvertedClass, FlattenedSierraClass,
};
use dp_receipt::TransactionReceipt;
use dp_state_update::StateDiff;
use dp_transactions::Transaction;
Expand Down Expand Up @@ -37,12 +40,49 @@ pub struct Validation {
}

#[derive(Clone, Debug, Eq, PartialEq)]
pub struct DeclaredClass {
pub enum DeclaredClass {
Legacy(LegacyDeclaredClass),
Sierra(SierraDeclaredClass),
}

impl From<ClassUpdate> for DeclaredClass {
    /// Maps a class update from the sync pipeline onto its declared-class
    /// counterpart, preserving the legacy/sierra distinction.
    fn from(value: ClassUpdate) -> Self {
        match value {
            ClassUpdate::Legacy(legacy) => Self::Legacy(legacy.into()),
            ClassUpdate::Sierra(sierra) => Self::Sierra(sierra.into()),
        }
    }
}

// A Cairo 0 (legacy) class declaration as seen in a block.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct LegacyDeclaredClass {
    // Hash under which the class was declared on-chain.
    pub class_hash: Felt,
    // The legacy contract class definition, in compressed form.
    pub contract_class: CompressedLegacyContractClass,
}

impl From<LegacyClassUpdate> for LegacyDeclaredClass {
    /// Builds a legacy declared class from a legacy class update, converting
    /// the contract class into its compressed representation.
    fn from(value: LegacyClassUpdate) -> Self {
        let contract_class = value.contract_class.into();
        Self { class_hash: value.class_hash, contract_class }
    }
}

#[derive(Clone, Debug, Eq, PartialEq)]
pub struct SierraDeclaredClass {
pub class_hash: Felt,
pub contract_class: ContractClass,
pub contract_class: FlattenedSierraClass,
pub compiled_class_hash: Felt,
}

impl From<SierraClassUpdate> for SierraDeclaredClass {
    /// Builds a sierra declared class from a sierra class update, converting
    /// the contract class into its flattened representation.
    fn from(value: SierraClassUpdate) -> Self {
        let contract_class = value.contract_class.into();
        Self {
            class_hash: value.class_hash,
            contract_class,
            compiled_class_hash: value.compiled_class_hash,
        }
    }
}

#[derive(Clone, Debug, Eq, PartialEq, Default)]
pub struct UnverifiedCommitments {
pub transaction_count: Option<u64>,
Expand Down
121 changes: 50 additions & 71 deletions crates/client/db/src/class_db.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
use std::collections::HashSet;

use dp_class::{ClassInfo, CompiledClass};
use dp_class::{ClassInfo, CompiledSierra, ConvertedClass};
use rayon::{iter::ParallelIterator, slice::ParallelSlice};
use rocksdb::WriteOptions;
use starknet_types_core::felt::Felt;
Expand Down Expand Up @@ -85,91 +83,81 @@ impl DeoxysBackend {
Ok(self.get_class_info(id, class_hash)?.is_some())
}

pub fn get_class(
pub fn get_sierra_compiled(
&self,
id: &impl DbBlockIdResolvable,
class_hash: &Felt,
) -> Result<Option<(ClassInfo, CompiledClass)>, DeoxysStorageError> {
let Some(id) = id.resolve_db_block_id(self)? else { return Ok(None) };
let Some(info) = self.get_class_info(&id, class_hash)? else { return Ok(None) };

log::debug!("get_class {:?} {:#x}", id, class_hash);
let compiled_class = self
.class_db_get_encoded_kv::<CompiledClass>(
id.is_pending(),
class_hash,
Column::PendingClassCompiled,
Column::ClassCompiled,
)?
.ok_or(DeoxysStorageError::InconsistentStorage("Class compiled not found while class info is".into()))?;

Ok(Some((info, compiled_class)))
) -> Result<Option<CompiledSierra>, DeoxysStorageError> {
let Some(requested_id) = id.resolve_db_block_id(self)? else { return Ok(None) };

log::debug!("sierra compiled {requested_id:?} {class_hash:#x}");

let Some(compiled) = self.class_db_get_encoded_kv::<CompiledSierra>(
requested_id.is_pending(),
class_hash,
Column::PendingClassCompiled,
Column::ClassCompiled,
)?
else {
return Ok(None);
};

log::debug!("valid");

Ok(Some(compiled))
}

/// NB: This functions needs to run on the rayon thread pool
pub(crate) fn store_classes(
&self,
block_id: DbBlockId,
class_infos: &[(Felt, ClassInfo)],
class_compiled: &[(Felt, CompiledClass)],
converted_classes: &[ConvertedClass],
col_info: Column,
col_compiled: Column,
) -> Result<(), DeoxysStorageError> {
let mut writeopts = WriteOptions::new();
writeopts.disable_wal(true);

// Check if the class is already in the db, if so, skip it
// This check is needed because blocks are fetched and converted in parallel
// TODO(merge): this should be removed after block import refactor
let ignore_class: HashSet<_> = if let DbBlockId::BlockN(block_n) = block_id {
class_infos
.iter()
.filter_map(|(key, _)| match self.get_class_info(&DbBlockId::BlockN(block_n), key) {
Ok(Some(_)) => Some(*key),
_ => None,
})
.collect()
} else {
HashSet::new()
};

class_infos.par_chunks(DB_UPDATES_BATCH_SIZE).try_for_each_init(
converted_classes.par_chunks(DB_UPDATES_BATCH_SIZE).try_for_each_init(
|| self.db.get_column(col_info),
|col, chunk| {
let mut batch = WriteBatchWithTransaction::default();
for (key, value) in chunk {
if ignore_class.contains(key) {
continue;
}
let key_bin = bincode::serialize(key)?;
for converted_class in chunk {
let class_hash = converted_class.class_hash();
let key_bin = bincode::serialize(&class_hash)?;
// TODO: find a way to avoid this allocation
batch.put_cf(
col,
&key_bin,
bincode::serialize(&ClassInfoWithBlockNumber { class_info: value.clone(), block_id })?,
bincode::serialize(&ClassInfoWithBlockNumber { class_info: converted_class.info(), block_id })?,
);
}
self.db.write_opt(batch, &writeopts)?;
Ok::<_, DeoxysStorageError>(())
},
)?;

class_compiled.par_chunks(DB_UPDATES_BATCH_SIZE).try_for_each_init(
|| self.db.get_column(col_compiled),
|col, chunk| {
let mut batch = WriteBatchWithTransaction::default();
for (key, value) in chunk {
if ignore_class.contains(key) {
continue;
converted_classes
.iter()
.filter_map(|converted_class| match converted_class {
ConvertedClass::Sierra(sierra) => Some((sierra.class_hash, sierra.compiled.clone())),
_ => None,
})
.collect::<Vec<_>>()
.par_chunks(DB_UPDATES_BATCH_SIZE)
.try_for_each_init(
|| self.db.get_column(col_compiled),
|col, chunk| {
let mut batch = WriteBatchWithTransaction::default();
for (key, value) in chunk {
let key_bin = bincode::serialize(key)?;
// TODO: find a way to avoid this allocation
batch.put_cf(col, &key_bin, bincode::serialize(&value)?);
}
let key_bin = bincode::serialize(key)?;
// TODO: find a way to avoid this allocation
batch.put_cf(col, &key_bin, bincode::serialize(&value)?);
}
self.db.write_opt(batch, &writeopts)?;
Ok::<_, DeoxysStorageError>(())
},
)?;
self.db.write_opt(batch, &writeopts)?;
Ok::<_, DeoxysStorageError>(())
},
)?;

Ok(())
}
Expand All @@ -178,28 +166,19 @@ impl DeoxysBackend {
pub(crate) fn class_db_store_block(
&self,
block_number: u64,
class_infos: &[(Felt, ClassInfo)],
class_compiled: &[(Felt, CompiledClass)],
converted_classes: &[ConvertedClass],
) -> Result<(), DeoxysStorageError> {
self.store_classes(
DbBlockId::BlockN(block_number),
class_infos,
class_compiled,
Column::ClassInfo,
Column::ClassCompiled,
)
self.store_classes(DbBlockId::BlockN(block_number), converted_classes, Column::ClassInfo, Column::ClassCompiled)
}

/// NB: This functions needs to run on the rayon thread pool
pub(crate) fn class_db_store_pending(
&self,
class_infos: &[(Felt, ClassInfo)],
class_compiled: &[(Felt, CompiledClass)],
converted_classes: &[ConvertedClass],
) -> Result<(), DeoxysStorageError> {
self.store_classes(
DbBlockId::Pending,
class_infos,
class_compiled,
converted_classes,
Column::PendingClassInfo,
Column::PendingClassCompiled,
)
Expand Down
20 changes: 3 additions & 17 deletions crates/client/db/src/storage_updates.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,17 +5,9 @@ use dp_class::ConvertedClass;
use dp_state_update::{
ContractStorageDiffItem, DeployedContractItem, NonceUpdate, ReplacedClassItem, StateDiff, StorageEntry,
};
use starknet_core::types::ContractClass;
use starknet_types_core::felt::Felt;
use std::collections::HashMap;

#[derive(Clone, Debug)]
pub struct DbClassUpdate {
pub class_hash: Felt,
pub contract_class: ContractClass,
pub compiled_class_hash: Felt,
}

impl DeoxysBackend {
/// NB: This functions needs to run on the rayon thread pool
pub fn store_block(
Expand Down Expand Up @@ -76,15 +68,9 @@ impl DeoxysBackend {
}
};

let task_class_db = || {
let (class_info_updates, compiled_class_updates): (Vec<_>, Vec<_>) = converted_classes
.into_iter()
.map(|ConvertedClass { class_infos, class_compiled }| (class_infos, class_compiled))
.unzip();
match block_n {
None => self.class_db_store_pending(&class_info_updates, &compiled_class_updates),
Some(block_n) => self.class_db_store_block(block_n, &class_info_updates, &compiled_class_updates),
}
let task_class_db = || match block_n {
None => self.class_db_store_pending(&converted_classes),
Some(block_n) => self.class_db_store_block(block_n, &converted_classes),
};

let ((r1, r2), r3) = rayon::join(|| rayon::join(task_block_db, task_contract_db), task_class_db);
Expand Down
Loading

0 comments on commit ff2c85a

Please sign in to comment.