From 237f31c51891e1dfa624e9a41d8802aa804ae316 Mon Sep 17 00:00:00 2001 From: cchudant Date: Tue, 17 Dec 2024 13:26:44 +0100 Subject: [PATCH] fix(block_production): continue pending block now reexecutes the previous transactions (#411) Co-authored-by: antiyro <74653697+antiyro@users.noreply.github.com> --- CHANGELOG.md | 1 + configs/chain_config.example.yaml | 5 +- configs/presets/devnet.yaml | 2 +- configs/presets/integration.yaml | 2 +- configs/presets/mainnet.yaml | 2 +- configs/presets/sepolia.yaml | 2 +- crates/client/analytics/src/lib.rs | 2 - .../src/tests/block_import_utils.rs | 10 +- crates/client/block_import/src/types.rs | 4 +- .../src/finalize_execution_state.rs | 40 +-- crates/client/block_production/src/lib.rs | 124 ++++----- .../src/re_add_finalized_to_blockifier.rs | 82 ++++++ crates/client/db/src/block_db.rs | 2 +- crates/client/db/src/class_db.rs | 42 ++- crates/client/db/src/error.rs | 6 + crates/client/devnet/src/lib.rs | 18 +- crates/client/exec/src/block_context.rs | 2 +- .../exec/src/blockifier_state_adapter.rs | 33 +-- crates/client/exec/src/execution.rs | 6 +- crates/client/exec/src/lib.rs | 6 +- crates/client/exec/src/transaction.rs | 69 ++--- crates/client/mempool/src/header.rs | 8 +- crates/client/mempool/src/inner/limits.rs | 24 +- crates/client/mempool/src/inner/mod.rs | 28 +- crates/client/mempool/src/inner/proptest.rs | 2 +- crates/client/mempool/src/lib.rs | 52 ++-- crates/client/rpc/src/test_utils.rs | 4 +- crates/client/rpc/src/utils/mod.rs | 3 - crates/client/rpc/src/utils/transaction.rs | 90 ------- .../methods/read/get_block_with_receipts.rs | 11 +- .../methods/read/get_block_with_tx_hashes.rs | 4 +- .../v0_7_1/methods/read/get_block_with_txs.rs | 4 +- .../methods/trace/trace_block_transactions.rs | 3 +- .../v0_7_1/methods/trace/trace_transaction.rs | 3 +- crates/client/sync/src/fetch/fetchers.rs | 2 +- crates/client/sync/src/l2.rs | 2 +- crates/node/src/cli/chain_config_overrides.rs | 9 +- crates/primitives/block/src/header.rs | 47 +++- crates/primitives/block/src/lib.rs | 21 +- .../chain_config/src/chain_config.rs | 8 +- crates/primitives/class/src/compile.rs | 27 +- crates/primitives/class/src/lib.rs | 23 +- crates/primitives/gateway/src/block.rs | 10 +- crates/primitives/transactions/src/lib.rs | 43 +++- ...sted_to_blockifier.rs => to_blockifier.rs} | 95 +++++-- crates/primitives/utils/src/serde.rs | 21 ++ crates/tests/src/devnet.rs | 239 ++++++++++++++++++ crates/tests/src/lib.rs | 207 +-------------- 48 files changed, 802 insertions(+), 648 deletions(-) create mode 100644 crates/client/block_production/src/re_add_finalized_to_blockifier.rs delete mode 100644 crates/client/rpc/src/utils/transaction.rs rename crates/primitives/transactions/src/{broadcasted_to_blockifier.rs => to_blockifier.rs} (64%) create mode 100644 crates/tests/src/devnet.rs diff --git a/CHANGELOG.md b/CHANGELOG.md index 1d830c8b4..a4fcaf2a6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,7 @@ ## Next release +- fix(block_production): continue pending block now reexecutes the previous transactions - feat(services): reworked Madara services for better cancellation control - feat: fetch eth/strk price and sync strk gas price - feat(block_production): continue pending block on restart diff --git a/configs/chain_config.example.yaml b/configs/chain_config.example.yaml index 5e4d35143..fd8d7df2f 100644 --- a/configs/chain_config.example.yaml +++ b/configs/chain_config.example.yaml @@ -74,5 +74,6 @@ sequencer_address: "0x0" mempool_tx_limit: 10000 # Transaction limit in the 
mempool, additional limit for declare transactions. mempool_declare_tx_limit: 20 -# Max age of a transaction in the mempool. -mempool_tx_max_age: "5h" +# Max age of a transaction in the mempool. Null for no age limit. +# mempool_tx_max_age: "5h" +mempool_tx_max_age: null diff --git a/configs/presets/devnet.yaml b/configs/presets/devnet.yaml index 00a580175..4e562cbc9 100644 --- a/configs/presets/devnet.yaml +++ b/configs/presets/devnet.yaml @@ -31,4 +31,4 @@ eth_core_contract_address: "0xe7f1725e7734ce288f8367e1bb143e90bb3f0512" eth_gps_statement_verifier: "0xf294781D719D2F4169cE54469C28908E6FA752C1" mempool_tx_limit: 10000 mempool_declare_tx_limit: 20 -mempool_tx_max_age: "5h" +mempool_tx_max_age: null diff --git a/configs/presets/integration.yaml b/configs/presets/integration.yaml index a1ecbdc42..4688a9bdc 100644 --- a/configs/presets/integration.yaml +++ b/configs/presets/integration.yaml @@ -31,4 +31,4 @@ eth_core_contract_address: "0x4737c0c1B4D5b1A687B42610DdabEE781152359c" eth_gps_statement_verifier: "0x2046B966994Adcb88D83f467a41b75d64C2a619F" mempool_tx_limit: 10000 mempool_declare_tx_limit: 20 -mempool_tx_max_age: "5h" +mempool_tx_max_age: null diff --git a/configs/presets/mainnet.yaml b/configs/presets/mainnet.yaml index 1d0323598..6d6d387e7 100644 --- a/configs/presets/mainnet.yaml +++ b/configs/presets/mainnet.yaml @@ -31,4 +31,4 @@ eth_core_contract_address: "0xc662c410C0ECf747543f5bA90660f6ABeBD9C8c4" eth_gps_statement_verifier: "0x47312450B3Ac8b5b8e247a6bB6d523e7605bDb60" mempool_tx_limit: 10000 mempool_declare_tx_limit: 20 -mempool_tx_max_age: "5h" +mempool_tx_max_age: null diff --git a/configs/presets/sepolia.yaml b/configs/presets/sepolia.yaml index da1b25f0b..7c61fd632 100644 --- a/configs/presets/sepolia.yaml +++ b/configs/presets/sepolia.yaml @@ -31,4 +31,4 @@ eth_core_contract_address: "0xE2Bb56ee936fd6433DC0F6e7e3b8365C906AA057" eth_gps_statement_verifier: "0xf294781D719D2F4169cE54469C28908E6FA752C1" mempool_tx_limit: 10000 mempool_declare_tx_limit: 20 -mempool_tx_max_age: "5h" +mempool_tx_max_age: null diff --git a/crates/client/analytics/src/lib.rs b/crates/client/analytics/src/lib.rs index 45e570d9a..b75a90b68 100644 --- a/crates/client/analytics/src/lib.rs +++ b/crates/client/analytics/src/lib.rs @@ -52,8 +52,6 @@ impl Analytics { let layer = OpenTelemetryTracingBridge::new(&logger_provider); tracing_subscriber.with(OpenTelemetryLayer::new(tracer)).with(layer).init(); - - tracing::info!("OTEL initialized"); Ok(()) } diff --git a/crates/client/block_import/src/tests/block_import_utils.rs b/crates/client/block_import/src/tests/block_import_utils.rs index 9de8d8e3b..5d5d5ec76 100644 --- a/crates/client/block_import/src/tests/block_import_utils.rs +++ b/crates/client/block_import/src/tests/block_import_utils.rs @@ -1,4 +1,4 @@ -use mp_block::header::{GasPrices, L1DataAvailabilityMode}; +use mp_block::header::{BlockTimestamp, GasPrices, L1DataAvailabilityMode}; use mp_block::Header; use mp_chain_config::StarknetVersion; use mp_state_update::StateDiff; @@ -18,7 +18,7 @@ pub fn create_dummy_unverified_header() -> UnverifiedHeader { UnverifiedHeader { parent_block_hash: Some(felt!("0x1")), sequencer_address: felt!("0x2"), - block_timestamp: 12345, + block_timestamp: BlockTimestamp(12345), protocol_version: StarknetVersion::new(0, 13, 2, 0), l1_gas_price: GasPrices { eth_l1_gas_price: 14, @@ -77,7 +77,7 @@ pub fn create_dummy_header() -> Header { state_diff_length: Some(0), state_diff_commitment: Some(felt!("0x0")), receipt_commitment: Some(felt!("0x0")), - 
block_timestamp: 12345, + block_timestamp: BlockTimestamp(12345), protocol_version: StarknetVersion::new(0, 13, 2, 0), l1_gas_price: GasPrices { eth_l1_gas_price: 14, @@ -117,7 +117,7 @@ pub fn create_dummy_unverified_full_block() -> UnverifiedFullBlock { header: UnverifiedHeader { parent_block_hash: Some(Felt::ZERO), sequencer_address: Felt::ZERO, - block_timestamp: 0, + block_timestamp: BlockTimestamp(0), protocol_version: StarknetVersion::default(), l1_gas_price: GasPrices::default(), l1_da_mode: L1DataAvailabilityMode::Blob, @@ -139,7 +139,7 @@ pub fn create_dummy_pending_block() -> PreValidatedPendingBlock { header: UnverifiedHeader { parent_block_hash: Some(felt!("0x1")), sequencer_address: felt!("0x2"), - block_timestamp: 12345, + block_timestamp: BlockTimestamp(12345), protocol_version: StarknetVersion::new(0, 13, 2, 0), l1_gas_price: GasPrices { eth_l1_gas_price: 14, diff --git a/crates/client/block_import/src/types.rs b/crates/client/block_import/src/types.rs index 462169220..5808e502c 100644 --- a/crates/client/block_import/src/types.rs +++ b/crates/client/block_import/src/types.rs @@ -2,7 +2,7 @@ //! Step 2. verify_apply: [`PreValidatedBlock`] ====[`crate::verify_apply`]===> [`BlockImportResult`] use mp_block::{ - header::{GasPrices, L1DataAvailabilityMode}, + header::{BlockTimestamp, GasPrices, L1DataAvailabilityMode}, Header, VisitedSegments, }; use mp_chain_config::StarknetVersion; @@ -24,7 +24,7 @@ pub struct UnverifiedHeader { /// The Starknet address of the sequencer that created this block. pub sequencer_address: Felt, /// The time the sequencer created this block before executing transactions - pub block_timestamp: u64, + pub block_timestamp: BlockTimestamp, /// The version of the Starknet protocol used when creating this block pub protocol_version: StarknetVersion, /// Gas prices for this block diff --git a/crates/client/block_production/src/finalize_execution_state.rs b/crates/client/block_production/src/finalize_execution_state.rs index 601933e30..50a0b2935 100644 --- a/crates/client/block_production/src/finalize_execution_state.rs +++ b/crates/client/block_production/src/finalize_execution_state.rs @@ -12,51 +12,13 @@ use mp_state_update::{ ContractStorageDiffItem, DeclaredClassItem, DeployedContractItem, NonceUpdate, ReplacedClassItem, StateDiff, StorageEntry, }; -use starknet_api::core::{ClassHash, CompiledClassHash, ContractAddress, Nonce}; +use starknet_api::core::ContractAddress; use std::collections::{hash_map, HashMap}; #[derive(Debug, thiserror::Error)] #[error("Error converting state diff to state map")] pub struct StateDiffToStateMapError; -pub fn state_diff_to_state_map(diff: StateDiff) -> Result { - let nonces = diff - .nonces - .into_iter() - .map(|entry| Ok((entry.contract_address.try_into().map_err(|_| StateDiffToStateMapError)?, Nonce(entry.nonce)))) - .collect::>()?; - let class_hashes = diff - .deployed_contracts - .into_iter() - .map(|entry| Ok((entry.address.try_into().map_err(|_| StateDiffToStateMapError)?, ClassHash(entry.class_hash)))) - .chain(diff.replaced_classes.into_iter().map(|entry| { - Ok((entry.contract_address.try_into().map_err(|_| StateDiffToStateMapError)?, ClassHash(entry.class_hash))) - })) - .collect::>()?; - let storage = diff - .storage_diffs - .into_iter() - .flat_map(|d| { - d.storage_entries.into_iter().map(move |e| { - Ok(( - ( - d.address.try_into().map_err(|_| StateDiffToStateMapError)?, - e.key.try_into().map_err(|_| StateDiffToStateMapError)?, - ), - e.value, - )) - }) - }) - .collect::>()?; - let declared_contracts = 
diff.declared_classes.iter().map(|d| (ClassHash(d.class_hash), true)).collect(); - let compiled_class_hashes = diff - .declared_classes - .into_iter() - .map(|d| (ClassHash(d.class_hash), CompiledClassHash(d.compiled_class_hash))) - .collect(); - Ok(StateMaps { nonces, class_hashes, storage, declared_contracts, compiled_class_hashes }) -} - pub(crate) fn state_map_to_state_diff( backend: &MadaraBackend, on_top_of: &Option, diff --git a/crates/client/block_production/src/lib.rs b/crates/client/block_production/src/lib.rs index 623d9117d..688b17826 100644 --- a/crates/client/block_production/src/lib.rs +++ b/crates/client/block_production/src/lib.rs @@ -19,25 +19,23 @@ use crate::close_block::close_block; use crate::metrics::BlockProductionMetrics; use blockifier::blockifier::transaction_executor::{TransactionExecutor, BLOCK_STATE_ACCESS_ERR}; use blockifier::bouncer::{BouncerWeights, BuiltinCount}; -use blockifier::state::state_api::UpdatableState; use blockifier::transaction::errors::TransactionExecutionError; -use finalize_execution_state::{state_diff_to_state_map, StateDiffToStateMapError}; +use finalize_execution_state::StateDiffToStateMapError; use mc_block_import::{BlockImportError, BlockImporter}; use mc_db::db_block_id::DbBlockId; use mc_db::{MadaraBackend, MadaraStorageError}; use mc_exec::{BlockifierStateAdapter, ExecutionContext}; use mc_mempool::header::make_pending_header; use mc_mempool::{L1DataProvider, MempoolProvider}; -use mp_block::{BlockId, BlockTag, MadaraMaybePendingBlockInfo, MadaraPendingBlock, VisitedSegments}; +use mp_block::{BlockId, BlockTag, MadaraPendingBlock, VisitedSegments}; use mp_class::compile::ClassCompilationError; -use mp_class::{ConvertedClass, LegacyConvertedClass, SierraConvertedClass}; +use mp_class::ConvertedClass; use mp_convert::ToFelt; use mp_receipt::from_blockifier_execution_info; use mp_state_update::{ContractStorageDiffItem, StateDiff, StorageEntry}; use mp_transactions::TransactionWithHash; use mp_utils::service::ServiceContext; use opentelemetry::KeyValue; -use starknet_api::core::ClassHash; use starknet_types_core::felt::Felt; use std::borrow::Cow; use std::collections::VecDeque; @@ -48,6 +46,7 @@ use std::time::Instant; mod close_block; mod finalize_execution_state; pub mod metrics; +mod re_add_finalized_to_blockifier; #[derive(Default, Clone)] struct ContinueBlockStats { @@ -103,6 +102,29 @@ impl BlockProductionTask { self.current_pending_tick = n; } + /// Continue the pending block state by re-adding all of its transactions back into the mempool. + /// This function will always clear the pending block in db, even if the transactions could not be added to the mempool. + pub fn re_add_pending_block_txs_to_mempool( + backend: &MadaraBackend, + mempool: &Mempool, + ) -> Result<(), Cow<'static, str>> { + let Some(current_pending_block) = + backend.get_block(&DbBlockId::Pending).map_err(|err| format!("Getting pending block: {err:#}"))? 
+ else { + // No pending block + return Ok(()); + }; + backend.clear_pending_block().map_err(|err| format!("Clearing pending block: {err:#}"))?; + + let n_txs = re_add_finalized_to_blockifier::re_add_txs_to_mempool(current_pending_block, mempool, backend) + .map_err(|err| format!("Re-adding transactions to mempool: {err:#}"))?; + + if n_txs > 0 { + tracing::info!("šŸ” Re-added {n_txs} transactions from the pending block back into the mempool"); + } + Ok(()) + } + pub fn new( backend: Arc, importer: Arc, @@ -110,78 +132,24 @@ impl BlockProductionTask { metrics: Arc, l1_data_provider: Arc, ) -> Result { - let (pending_block, state_diff, pcs) = match backend.get_block(&DbBlockId::Pending)? { - Some(pending) => { - let MadaraMaybePendingBlockInfo::Pending(info) = pending.info else { - return Err(Error::Unexpected("Get a pending block".into())); - }; - let pending_state_update = backend.get_pending_block_state_update()?; - (MadaraPendingBlock { info, inner: pending.inner }, pending_state_update, Default::default()) - } - None => { - let parent_block_hash = backend - .get_block_hash(&BlockId::Tag(BlockTag::Latest))? - .unwrap_or(/* genesis block's parent hash */ Felt::ZERO); - - ( - MadaraPendingBlock::new_empty(make_pending_header( - parent_block_hash, - backend.chain_config(), - l1_data_provider.as_ref(), - )), - StateDiff::default(), - Default::default(), - ) - } - }; + if let Err(err) = Self::re_add_pending_block_txs_to_mempool(&backend, &mempool) { + // This error should not stop block production from working. If it happens, that's too bad. We drop the pending state and start from + // a fresh one. + tracing::error!("Failed to continue the pending block state: {err:#}"); + } - let declared_classes: Vec = state_diff - .declared_classes - .iter() - .map(|item| { - let class_info = backend.get_class_info(&DbBlockId::Pending, &item.class_hash)?.ok_or_else(|| { - Error::Unexpected(format!("No class info for declared class {:#x}", item.class_hash).into()) - })?; - let converted_class = match class_info { - mp_class::ClassInfo::Sierra(info) => { - let compiled = - backend.get_sierra_compiled(&DbBlockId::Pending, &item.class_hash)?.ok_or_else(|| { - Error::Unexpected( - format!("No compiled class for declared class {:#x}", item.class_hash).into(), - ) - })?; - let compiled = Arc::new(compiled); - ConvertedClass::Sierra(SierraConvertedClass { class_hash: item.class_hash, info, compiled }) - } - mp_class::ClassInfo::Legacy(info) => { - ConvertedClass::Legacy(LegacyConvertedClass { class_hash: item.class_hash, info }) - } - }; - - Ok(converted_class) - }) - .collect::>()?; - - let class_hash_to_class = declared_classes - .iter() - .map(|c| { - Ok(( - ClassHash(c.class_hash()), - match c { - ConvertedClass::Legacy(class) => class.info.contract_class.to_blockifier_class()?, - ConvertedClass::Sierra(class) => class.compiled.to_blockifier_class()?, - }, - )) - }) - .collect::>()?; - - let mut executor = - ExecutionContext::new_in_block(Arc::clone(&backend), &pending_block.info.clone().into())?.tx_executor(); - let block_state = - executor.block_state.as_mut().expect("Block state can not be None unless we take ownership of it"); + let parent_block_hash = backend + .get_block_hash(&BlockId::Tag(BlockTag::Latest))? 
+            .unwrap_or(/* genesis block's parent hash */ Felt::ZERO);
 
-        // Apply pending state
-        block_state.apply_writes(&state_diff_to_state_map(state_diff)?, &class_hash_to_class, &pcs);
+        let pending_block = MadaraPendingBlock::new_empty(make_pending_header(
+            parent_block_hash,
+            backend.chain_config(),
+            l1_data_provider.as_ref(),
+        ));
+
+        let executor =
+            ExecutionContext::new_in_block(Arc::clone(&backend), &pending_block.info.clone().into())?.tx_executor();
 
         Ok(Self {
             importer,
@@ -190,7 +158,7 @@ impl BlockProductionTask {
             executor,
             current_pending_tick: 0,
             block: pending_block,
-            declared_classes,
+            declared_classes: Default::default(),
             l1_data_provider,
             metrics,
         })
@@ -294,7 +262,9 @@ impl BlockProductionTask {
 
         // Add back the unexecuted transactions to the mempool.
         stats.n_re_added_to_mempool = txs_to_process.len();
-        self.mempool.re_add_txs(txs_to_process, executed_txs);
+        self.mempool
+            .re_add_txs(txs_to_process, executed_txs)
+            .map_err(|err| Error::Unexpected(format!("Mempool error: {err:#}").into()))?;
 
         tracing::debug!(
            "Finished tick with {} new transactions, now at {} - re-adding {} txs to mempool",
diff --git a/crates/client/block_production/src/re_add_finalized_to_blockifier.rs b/crates/client/block_production/src/re_add_finalized_to_blockifier.rs
new file mode 100644
index 000000000..1895bf27d
--- /dev/null
+++ b/crates/client/block_production/src/re_add_finalized_to_blockifier.rs
@@ -0,0 +1,82 @@
+use std::time::{Duration, SystemTime};
+
+use mc_db::{MadaraBackend, MadaraStorageError};
+use mc_mempool::{MempoolProvider, MempoolTransaction};
+use mp_block::{header::BlockTimestamp, BlockId, BlockTag, MadaraMaybePendingBlock};
+use mp_transactions::{ToBlockifierError, TransactionWithHash};
+use starknet_core::types::Felt;
+
+#[derive(Debug, thiserror::Error)]
+pub enum ReAddTxsToMempoolError {
+    #[error(
+        "Converting transaction with hash {tx_hash:#x}: Error when getting class with hash {class_hash:#x}: {err:#}"
+    )]
+    GettingConvertedClass { tx_hash: Felt, class_hash: Felt, err: MadaraStorageError },
+    #[error("Converting transaction with hash {tx_hash:#x}: No class found for class with hash {class_hash:#x}")]
+    NoClassFound { tx_hash: Felt, class_hash: Felt },
+
+    #[error("Converting transaction with hash {tx_hash:#x}: Blockifier conversion error: {err:#}")]
+    ToBlockifierError { tx_hash: Felt, err: ToBlockifierError },
+
+    /// This error should never happen unless we are running on a platform where SystemTime cannot represent the timestamp we are making.
+    #[error("Converting transaction with hash {tx_hash:#x}: Could not create arrived_at timestamp with block_timestamp={block_timestamp} and tx_index={tx_index}")]
+    MakingArrivedAtTimestamp { tx_hash: Felt, block_timestamp: BlockTimestamp, tx_index: usize },
+}
+
+/// Take a block that was already executed and saved, extract the transactions and re-add them to the mempool.
+/// This is useful to re-execute a pending block without losing any transaction when restarting block production,
+/// but it could also be useful to avoid dropping transactions when a reorg happens in the future.
+/// Returns the number of transactions.
+pub fn re_add_txs_to_mempool(
+    block: MadaraMaybePendingBlock,
+    mempool: &impl MempoolProvider,
+    backend: &MadaraBackend,
+) -> Result<usize, ReAddTxsToMempoolError> {
+    let block_timestamp = block.info.block_timestamp();
+
+    let txs_to_reexec: Vec<_> = block
+        .inner
+        .transactions
+        .into_iter()
+        .zip(block.info.tx_hashes())
+        .enumerate()
+        .map(|(tx_index, (tx, &tx_hash))| {
+            let converted_class = if let Some(tx) = tx.as_declare() {
+                let class_hash = *tx.class_hash();
+                Some(
+                    backend
+                        .get_converted_class(&BlockId::Tag(BlockTag::Pending), &class_hash)
+                        .map_err(|err| ReAddTxsToMempoolError::GettingConvertedClass { tx_hash, class_hash, err })?
+                        .ok_or_else(|| ReAddTxsToMempoolError::NoClassFound { tx_hash, class_hash })?,
+                )
+            } else {
+                None
+            };
+
+            let tx = TransactionWithHash::new(tx, tx_hash)
+                .into_blockifier(converted_class.as_ref())
+                .map_err(|err| ReAddTxsToMempoolError::ToBlockifierError { tx_hash, err })?;
+
+            // HACK: we hack the order a little bit - this is because we don't have the arrived_at timestamp for the
+            // transaction. This hack ensures these transactions take priority and will be re-executed first.
+            fn make_arrived_at(block_timestamp: BlockTimestamp, tx_index: usize) -> Option<SystemTime> {
+                let duration =
+                    Duration::from_secs(block_timestamp.0).checked_add(Duration::from_micros(tx_index as _))?;
+                SystemTime::UNIX_EPOCH.checked_add(duration)
+            }
+
+            let arrived_at = make_arrived_at(block_timestamp, tx_index).ok_or_else(|| {
+                ReAddTxsToMempoolError::MakingArrivedAtTimestamp { tx_hash, block_timestamp, tx_index }
+            })?;
+            Ok(MempoolTransaction { tx, arrived_at, converted_class })
+        })
+        .collect::<Result<_, _>>()?;
+
+    let n = txs_to_reexec.len();
+
+    mempool
+        .insert_txs_no_validation(txs_to_reexec, /* force insertion */ true)
+        .expect("Mempool force insertion should never fail");
+
+    Ok(n)
+}
diff --git a/crates/client/db/src/block_db.rs b/crates/client/db/src/block_db.rs
index 827634be1..382d73581 100644
--- a/crates/client/db/src/block_db.rs
+++ b/crates/client/db/src/block_db.rs
@@ -142,7 +142,7 @@ impl MadaraBackend {
             // genesis has not been loaded yet will return an error. That probably fine because the ERC20 fee contracts are not even deployed yet - it
             // will error somewhere else anyway.
             sequencer_address: **self.chain_config().sequencer_address,
-            block_timestamp: 0, // Junk timestamp: unix epoch
+            block_timestamp: Default::default(), // Junk timestamp: unix epoch
             protocol_version: self.chain_config.latest_protocol_version,
             l1_gas_price: GasPrices {
                 eth_l1_gas_price: 1,
diff --git a/crates/client/db/src/class_db.rs b/crates/client/db/src/class_db.rs
index 376c36318..f3a3914ed 100644
--- a/crates/client/db/src/class_db.rs
+++ b/crates/client/db/src/class_db.rs
@@ -1,4 +1,6 @@
-use mp_class::{ClassInfo, CompiledSierra, ConvertedClass};
+use std::sync::Arc;
+
+use mp_class::{ClassInfo, CompiledSierra, ConvertedClass, LegacyConvertedClass, SierraConvertedClass};
 use rayon::{iter::ParallelIterator, slice::ParallelSlice};
 use rocksdb::WriteOptions;
 use starknet_types_core::felt::Felt;
@@ -110,6 +112,44 @@ impl MadaraBackend {
         Ok(Some(compiled))
     }
 
+    /// Get class info + sierra compiled when it's a sierra class.
+    // Note/TODO: "ConvertedClass" is the name of the type that has info + sierra compiled, and it is used for blockifier
+    // conversion & storage. We should rename it, as this feels like undecipherable madara-specific jargon at this point.
+    #[tracing::instrument(skip(self, id), fields(module = "ClassDB"))]
+    pub fn get_converted_class(
+        &self,
+        id: &impl DbBlockIdResolvable,
+        class_hash: &Felt,
+    ) -> Result<Option<ConvertedClass>, MadaraStorageError> {
+        let Some(id) = id.resolve_db_block_id(self)? else {
+            // Block not found
+            return Ok(None);
+        };
+
+        let Some(class_info) = self.get_class_info(&id, class_hash)? else {
+            // No class found.
+            return Ok(None);
+        };
+
+        match class_info {
+            ClassInfo::Sierra(info) => {
+                let compiled_class_hash = info.compiled_class_hash;
+                let compiled_class = self
+                    .get_sierra_compiled(&id, &info.compiled_class_hash)?
+                    .ok_or(MadaraStorageError::MissingCompiledClass { class_hash: *class_hash, compiled_class_hash })?;
+                Ok(Some(ConvertedClass::Sierra(SierraConvertedClass {
+                    class_hash: *class_hash,
+                    info,
+                    // TODO(perf): we should do global memoization for these Arcs.
+                    compiled: Arc::new(compiled_class),
+                })))
+            }
+            ClassInfo::Legacy(info) => {
+                Ok(Some(ConvertedClass::Legacy(LegacyConvertedClass { class_hash: *class_hash, info })))
+            }
+        }
+    }
+
     /// NB: This functions needs to run on the rayon thread pool
     #[tracing::instrument(skip(self, converted_classes, col_info, col_compiled), fields(module = "ClassDB"))]
     pub(crate) fn store_classes(
diff --git a/crates/client/db/src/error.rs b/crates/client/db/src/error.rs
index 44c59931a..c0fcbaeef 100644
--- a/crates/client/db/src/error.rs
+++ b/crates/client/db/src/error.rs
@@ -1,3 +1,5 @@
+use starknet_types_core::felt::Felt;
+
 use crate::Column;
 use std::borrow::Cow;
@@ -21,6 +23,10 @@ pub enum MadaraStorageError {
     InconsistentStorage(Cow<'static, str>),
     #[error("Cannot create a pending block of the genesis block of a chain")]
     PendingCreationNoGenesis,
+    #[error(
+        "Missing compiled class for class with hash {class_hash:#x} (compiled_class_hash={compiled_class_hash:#x})"
+    )]
+    MissingCompiledClass { class_hash: Felt, compiled_class_hash: Felt },
 }
 
 pub type BonsaiStorageError = bonsai_trie::BonsaiStorageError;
diff --git a/crates/client/devnet/src/lib.rs b/crates/client/devnet/src/lib.rs
index 7f0dae113..0f71a655b 100644
--- a/crates/client/devnet/src/lib.rs
+++ b/crates/client/devnet/src/lib.rs
@@ -1,7 +1,7 @@
 use anyhow::Context;
 use blockifier::abi::abi_utils::get_storage_var_address;
 use mc_block_import::{UnverifiedFullBlock, UnverifiedHeader};
-use mp_block::header::GasPrices;
+use mp_block::header::{BlockTimestamp, GasPrices};
 use mp_chain_config::ChainConfig;
 use mp_convert::ToFelt;
 use mp_state_update::{ContractStorageDiffItem, StateDiff, StorageEntry};
@@ -11,7 +11,7 @@ use starknet_types_core::{
     felt::Felt,
     hash::{Poseidon, StarkHash},
 };
-use std::{collections::HashMap, time::SystemTime};
+use std::collections::HashMap;
 
 mod balances;
 mod classes;
@@ -154,10 +154,7 @@ impl ChainGenesisDescription {
             header: UnverifiedHeader {
                 parent_block_hash: Some(Felt::ZERO),
                 sequencer_address: chain_config.sequencer_address.to_felt(),
-                block_timestamp: SystemTime::now()
-                    .duration_since(SystemTime::UNIX_EPOCH)
-                    .expect("Current time is before unix epoch!")
-                    .as_secs(),
+                block_timestamp: BlockTimestamp::now(),
                 protocol_version: chain_config.latest_protocol_version,
                 l1_gas_price: GasPrices {
                     eth_l1_gas_price: 5,
@@ -641,7 +638,7 @@ mod tests {
     #[rstest]
     fn test_mempool_tx_limit() {
         let chain = chain_with_mempool_limits(MempoolLimits {
-            max_age: Duration::from_millis(1000000),
+            max_age: None,
             max_declare_transactions: 2,
             max_transactions: 5,
         });
@@ -717,8 +714,11 @@ mod tests {
     #[rstest]
     fn test_mempool_age_limit() {
         let max_age = Duration::from_millis(1000);
- let mut chain = - chain_with_mempool_limits(MempoolLimits { max_age, max_declare_transactions: 2, max_transactions: 5 }); + let mut chain = chain_with_mempool_limits(MempoolLimits { + max_age: Some(max_age), + max_declare_transactions: 2, + max_transactions: 5, + }); tracing::info!("{}", chain.contracts); let contract_0 = &chain.contracts.0[0]; diff --git a/crates/client/exec/src/block_context.rs b/crates/client/exec/src/block_context.rs index 7cf7db67c..8bcb4acfd 100644 --- a/crates/client/exec/src/block_context.rs +++ b/crates/client/exec/src/block_context.rs @@ -91,7 +91,7 @@ impl ExecutionContext { }; let block_info = blockifier::blockifier::block::BlockInfo { block_number: BlockNumber(block_number), - block_timestamp: BlockTimestamp(block_timestamp), + block_timestamp: BlockTimestamp(block_timestamp.0), sequencer_address: sequencer_address .try_into() .map_err(|_| Error::InvalidSequencerAddress(sequencer_address))?, diff --git a/crates/client/exec/src/blockifier_state_adapter.rs b/crates/client/exec/src/blockifier_state_adapter.rs index 3cc3a6d72..31216e8ee 100644 --- a/crates/client/exec/src/blockifier_state_adapter.rs +++ b/crates/client/exec/src/blockifier_state_adapter.rs @@ -111,8 +111,8 @@ impl StateReader for BlockifierStateAdapter { return Err(StateError::UndeclaredClassHash(class_hash)); }; - let Some(class_info) = - self.backend.get_class_info(&on_top_of_block_id, &class_hash.to_felt()).map_err(|err| { + let Some(converted_class) = + self.backend.get_converted_class(&on_top_of_block_id, &class_hash.to_felt()).map_err(|err| { tracing::warn!("Failed to retrieve class {class_hash:#}: {err:#}"); StateError::StateReadError(format!("Failed to retrieve class {class_hash:#}")) })? @@ -120,31 +120,10 @@ impl StateReader for BlockifierStateAdapter { return Err(StateError::UndeclaredClassHash(class_hash)); }; - match class_info { - ClassInfo::Sierra(info) => { - let compiled_class = self - .backend - .get_sierra_compiled(&on_top_of_block_id, &info.compiled_class_hash) - .map_err(|err| { - tracing::warn!("Failed to retrieve sierra compiled class {:#x}: {err:#}", class_hash.to_felt()); - StateError::StateReadError(format!( - "Failed to retrieve compiled class {:#x}", - class_hash.to_felt() - )) - })? - .ok_or(StateError::StateReadError(format!( - "Inconsistent state: compiled sierra class {:#x} not found", - class_hash.to_felt() - )))?; - - // TODO: convert ClassCompilationError to StateError - Ok(compiled_class.to_blockifier_class().map_err(|e| StateError::StateReadError(e.to_string()))?) - } - ClassInfo::Legacy(info) => { - // TODO: convert ClassCompilationError to StateError - Ok(info.contract_class.to_blockifier_class().map_err(|e| StateError::StateReadError(e.to_string()))?) 
- } - } + converted_class.to_blockifier_class().map_err(|err| { + tracing::warn!("Failed to convert class {class_hash:#} to blockifier format: {err:#}"); + StateError::StateReadError(format!("Failed to convert class {class_hash:#}")) + }) } fn get_compiled_class_hash(&self, class_hash: ClassHash) -> StateResult { diff --git a/crates/client/exec/src/execution.rs b/crates/client/exec/src/execution.rs index cc136ec37..81ed634f5 100644 --- a/crates/client/exec/src/execution.rs +++ b/crates/client/exec/src/execution.rs @@ -9,7 +9,7 @@ use blockifier::transaction::transaction_types::TransactionType; use blockifier::transaction::transactions::{ExecutableTransaction, ExecutionFlags}; use starknet_api::transaction::TransactionHash; -use crate::{Error, ExecutionContext, ExecutionResult, TxFeeEstimationError, TxReexecError}; +use crate::{Error, ExecutionContext, ExecutionResult, TxExecError, TxFeeEstimationError}; impl ExecutionContext { /// Execute transactions. The returned `ExecutionResult`s are the results of the `transactions_to_trace`. The results of `transactions_before` are discarded. @@ -27,7 +27,7 @@ impl ExecutionContext { for (index, tx) in transactions_before.into_iter().enumerate() { let hash = tx.tx_hash(); tracing::debug!("executing {hash:#}"); - tx.execute(&mut cached_state, &self.block_context, charge_fee, validate).map_err(|err| TxReexecError { + tx.execute(&mut cached_state, &self.block_context, charge_fee, validate).map_err(|err| TxExecError { block_n: self.db_id, hash, index, @@ -56,7 +56,7 @@ impl ExecutionContext { }; let make_reexec_error = - |err| TxReexecError { block_n: self.db_id, hash, index: executed_prev + index, err }; + |err| TxExecError { block_n: self.db_id, hash, index: executed_prev + index, err }; let mut transactional_state = TransactionalState::create_transactional(&mut cached_state); let execution_flags = ExecutionFlags { charge_fee, validate, concurrency_mode: false }; diff --git a/crates/client/exec/src/lib.rs b/crates/client/exec/src/lib.rs index 05b4ba805..5506b7eca 100644 --- a/crates/client/exec/src/lib.rs +++ b/crates/client/exec/src/lib.rs @@ -27,7 +27,7 @@ pub enum Error { #[error(transparent)] UnsupportedProtocolVersion(#[from] mp_chain_config::UnsupportedProtocolVersion), #[error(transparent)] - Reexecution(#[from] TxReexecError), + Reexecution(#[from] TxExecError), #[error(transparent)] FeeEstimation(#[from] TxFeeEstimationError), #[error(transparent)] @@ -41,8 +41,8 @@ pub enum Error { } #[derive(thiserror::Error, Debug)] -#[error("Reexecuting tx {hash:#} (index {index}) on top of {block_n}: {err:#}")] -pub struct TxReexecError { +#[error("Executing tx {hash:#} (index {index}) on top of {block_n}: {err:#}")] +pub struct TxExecError { block_n: DbBlockId, hash: TransactionHash, index: usize, diff --git a/crates/client/exec/src/transaction.rs b/crates/client/exec/src/transaction.rs index 0f3522060..57b294bb1 100644 --- a/crates/client/exec/src/transaction.rs +++ b/crates/client/exec/src/transaction.rs @@ -1,12 +1,12 @@ -use std::{borrow::Cow, sync::Arc}; - -use blockifier::execution::{contract_class::ClassInfo, errors::ContractClassError}; +use blockifier::execution::errors::ContractClassError; use blockifier::transaction::transaction_execution as btx; use mc_db::{MadaraBackend, MadaraStorageError}; use mp_block::BlockId; use mp_class::compile::ClassCompilationError; use mp_convert::ToFelt; -use starknet_api::transaction::{Transaction, TransactionHash}; +use mp_transactions::TransactionWithHash; +use starknet_api::transaction::TransactionHash; +use 
std::{borrow::Cow, sync::Arc}; #[derive(Debug, thiserror::Error)] pub enum Error { @@ -26,53 +26,26 @@ pub enum Error { /// /// **note:** this function does not support deploy transaction /// because it is not supported by blockifier -pub fn to_blockifier_transactions( +pub fn to_blockifier_transaction( backend: Arc, block_id: BlockId, transaction: mp_transactions::Transaction, tx_hash: &TransactionHash, ) -> Result { - let transaction: Transaction = transaction - .try_into() - .map_err(|err| Error::Internal(format!("Converting to starknet api transaction {:#}", err).into()))?; - - let paid_fee_on_l1 = match transaction { - Transaction::L1Handler(_) => Some(starknet_api::transaction::Fee(1_000_000_000_000)), - _ => None, - }; - - let class_info = match &transaction { - Transaction::Declare(declare_tx) => { - let class_hash = declare_tx.class_hash(); - let class_info = backend.get_class_info(&block_id, &class_hash.to_felt())?.ok_or(Error::ClassNotFound)?; - - match class_info { - mp_class::ClassInfo::Sierra(info) => { - let compiled_class = - backend.get_sierra_compiled(&block_id, &info.compiled_class_hash)?.ok_or_else(|| { - Error::Internal( - "Inconsistent state: compiled sierra class from class_hash '{class_hash}' not found" - .into(), - ) - })?; - - let blockifier_class = compiled_class.to_blockifier_class()?; - Some(ClassInfo::new( - &blockifier_class, - info.contract_class.program_length(), - info.contract_class.abi_length(), - )?) - } - mp_class::ClassInfo::Legacy(info) => { - let blockifier_class = info.contract_class.to_blockifier_class()?; - Some(ClassInfo::new(&blockifier_class, 0, 0)?) - } - } - } - _ => None, - }; - - btx::Transaction::from_api(transaction.clone(), *tx_hash, class_info, paid_fee_on_l1, None, false).map_err(|err| { - Error::Internal(format!("Failed to convert transaction to blockifier transaction {:#}", err).into()) - }) + if transaction.as_deploy().is_some() { + return Err(Error::Internal("Unsupported deploy transaction type".to_string().into())); + } + + let class = + if let Some(tx) = transaction.as_declare() { + Some(backend.get_converted_class(&block_id, tx.class_hash())?.ok_or_else(|| { + Error::Internal(format!("No class found for class_hash={:#x}", tx.class_hash()).into()) + })?) 
+ } else { + None + }; + + TransactionWithHash::new(transaction, tx_hash.to_felt()) + .into_blockifier(class.as_ref()) + .map_err(|err| Error::Internal(format!("Error converting class to blockifier format: {err:#}").into())) } diff --git a/crates/client/mempool/src/header.rs b/crates/client/mempool/src/header.rs index 11f60efc0..fd756ac8e 100644 --- a/crates/client/mempool/src/header.rs +++ b/crates/client/mempool/src/header.rs @@ -1,8 +1,7 @@ use crate::L1DataProvider; -use mp_block::header::PendingHeader; +use mp_block::header::{BlockTimestamp, PendingHeader}; use mp_chain_config::ChainConfig; use starknet_types_core::felt::Felt; -use std::time::SystemTime; pub fn make_pending_header( parent_block_hash: Felt, @@ -12,10 +11,7 @@ pub fn make_pending_header( PendingHeader { parent_block_hash, sequencer_address: **chain_config.sequencer_address, - block_timestamp: SystemTime::now() - .duration_since(SystemTime::UNIX_EPOCH) - .expect("Current system time is before the UNIX epoch") - .as_secs(), + block_timestamp: BlockTimestamp::now(), protocol_version: chain_config.latest_protocol_version, l1_gas_price: l1_info.get_gas_prices(), l1_da_mode: l1_info.get_da_mode(), diff --git a/crates/client/mempool/src/inner/limits.rs b/crates/client/mempool/src/inner/limits.rs index 4749a0caa..d21627560 100644 --- a/crates/client/mempool/src/inner/limits.rs +++ b/crates/client/mempool/src/inner/limits.rs @@ -10,7 +10,7 @@ use crate::MempoolTransaction; pub struct MempoolLimits { pub max_transactions: usize, pub max_declare_transactions: usize, - pub max_age: Duration, + pub max_age: Option, } impl MempoolLimits { @@ -23,11 +23,7 @@ impl MempoolLimits { } #[cfg(any(test, feature = "testing"))] pub fn for_testing() -> Self { - Self { - max_age: Duration::from_secs(10000000), - max_declare_transactions: usize::MAX, - max_transactions: usize::MAX, - } + Self { max_age: None, max_declare_transactions: usize::MAX, max_transactions: usize::MAX } } } @@ -111,18 +107,20 @@ impl MempoolLimiter { } // age - if self.tx_age_exceeded(to_check) { - return Err(MempoolLimitReached::Age { max: self.config.max_age }); + if let Some(max_age) = self.config.max_age { + if self.tx_age_exceeded(to_check) { + return Err(MempoolLimitReached::Age { max: max_age }); + } } Ok(()) } pub fn tx_age_exceeded(&self, to_check: &TransactionCheckedLimits) -> bool { + let Some(max_age) = self.config.max_age else { return false }; if to_check.check_age { let current_time = SystemTime::now(); - if to_check.tx_arrived_at < current_time.checked_sub(self.config.max_age).unwrap_or(SystemTime::UNIX_EPOCH) - { + if to_check.tx_arrived_at < current_time.checked_sub(max_age).unwrap_or(SystemTime::UNIX_EPOCH) { return true; } } @@ -139,9 +137,11 @@ impl MempoolLimiter { pub fn mark_removed(&mut self, to_update: &TransactionCheckedLimits) { // These should not overflow unless block prod marks transactions as consumed even though they have not been popped. 
-        self.current_transactions -= 1;
+        debug_assert!(self.current_transactions > 0);
+        self.current_transactions = self.current_transactions.saturating_sub(1);
         if to_update.check_declare_limit {
-            self.current_declare_transactions -= 1;
+            debug_assert!(self.current_declare_transactions > 0);
+            self.current_declare_transactions = self.current_declare_transactions.saturating_sub(1);
         }
     }
 }
diff --git a/crates/client/mempool/src/inner/mod.rs b/crates/client/mempool/src/inner/mod.rs
index 7e55313d9..7beb301c0 100644
--- a/crates/client/mempool/src/inner/mod.rs
+++ b/crates/client/mempool/src/inner/mod.rs
@@ -95,7 +95,13 @@ impl MempoolInner {
     }
 
     /// When `force` is `true`, this function should never return any error.
-    pub fn insert_tx(&mut self, mempool_tx: MempoolTransaction, force: bool) -> Result<(), TxInsersionError> {
+    /// `update_limits` is `false` when the transaction has been removed from the mempool in the past without updating the limits.
+    pub fn insert_tx(
+        &mut self,
+        mempool_tx: MempoolTransaction,
+        force: bool,
+        update_limits: bool,
+    ) -> Result<(), TxInsersionError> {
         // delete age-exceeded txs from the mempool
         // todo(perf): this may want to limit this check once every few seconds to avoid it being in the hot path?
         self.remove_age_exceeded_txs();
@@ -165,7 +171,9 @@ impl MempoolInner {
         }
 
         // Update transaction limits
-        self.limiter.update_tx_limits(&limits_for_tx);
+        if update_limits {
+            self.limiter.update_tx_limits(&limits_for_tx);
+        }
 
         Ok(())
     }
@@ -235,6 +243,7 @@ impl MempoolInner {
                 break mempool_tx;
             }
 
+            // transaction age exceeded, remove the tx from mempool.
             self.limiter.mark_removed(&limits);
         };
 
@@ -258,10 +267,23 @@ impl MempoolInner {
         }
         for tx in txs {
             let force = true;
-            self.insert_tx(tx, force).expect("Force insert tx should not error");
+            self.insert_tx(tx, force, false).expect("Force insert tx should not error");
         }
     }
 
+    /// Insert a batch of transactions into the mempool.
+    /// This is used by block production to put the transactions of a saved pending block back into the mempool.
+ pub fn insert_txs( + &mut self, + txs: impl IntoIterator, + force: bool, + ) -> Result<(), TxInsersionError> { + for tx in txs { + self.insert_tx(tx, force, true)?; + } + Ok(()) + } + #[cfg(any(test, feature = "testing"))] pub fn is_empty(&self) -> bool { self.tx_queue.is_empty() diff --git a/crates/client/mempool/src/inner/proptest.rs b/crates/client/mempool/src/inner/proptest.rs index 501f46e08..e448e07a5 100644 --- a/crates/client/mempool/src/inner/proptest.rs +++ b/crates/client/mempool/src/inner/proptest.rs @@ -210,7 +210,7 @@ impl MempoolInvariantsProblem { Operation::Insert(insert) => { let force = insert.1; tracing::trace!("Insert {:?}", insert); - let res = mempool.insert_tx(insert.0.clone(), insert.1); + let res = mempool.insert_tx(insert.0.clone(), insert.1, true); let expected = if !force && inserted_contract_nonce_pairs.contains(&(insert.0.nonce(), insert.0.contract_address())) diff --git a/crates/client/mempool/src/lib.rs b/crates/client/mempool/src/lib.rs index d5bddc425..6103be0cd 100644 --- a/crates/client/mempool/src/lib.rs +++ b/crates/client/mempool/src/lib.rs @@ -14,10 +14,10 @@ use mp_block::{BlockId, BlockTag, MadaraPendingBlockInfo}; use mp_class::ConvertedClass; use mp_convert::ToFelt; use mp_transactions::BroadcastedDeclareTransactionV0; -use mp_transactions::BroadcastedToBlockifierError; use mp_transactions::BroadcastedTransactionExt; use mp_transactions::L1HandlerTransaction; use mp_transactions::L1HandlerTransactionResult; +use mp_transactions::ToBlockifierError; use starknet_api::core::{ContractAddress, Nonce}; use starknet_api::transaction::TransactionHash; use starknet_types_core::felt::Felt; @@ -53,7 +53,7 @@ pub enum Error { #[error(transparent)] Exec(#[from] mc_exec::Error), #[error("Preprocessing transaction: {0:#}")] - BroadcastedToBlockifier(#[from] BroadcastedToBlockifierError), + BroadcastedToBlockifier(#[from] ToBlockifierError), } impl Error { pub fn is_internal(&self) -> bool { @@ -79,14 +79,15 @@ pub trait MempoolProvider: Send + Sync { where Self: Sized; fn take_tx(&self) -> Option; - fn re_add_txs< - I: IntoIterator + 'static, - CI: IntoIterator + 'static, - >( + fn re_add_txs + 'static>( &self, txs: I, - consumed_txs: CI, - ) where + consumed_txs: Vec, + ) -> Result<(), Error> + where + Self: Sized; + fn insert_txs_no_validation(&self, txs: Vec, force: bool) -> Result<(), Error> + where Self: Sized; fn chain_id(&self) -> Felt; } @@ -180,10 +181,11 @@ impl Mempool { // Add it to the inner mempool let force = false; - self.inner - .write() - .expect("Poisoned lock") - .insert_tx(MempoolTransaction { tx, arrived_at, converted_class }, force)?; + self.inner.write().expect("Poisoned lock").insert_tx( + MempoolTransaction { tx, arrived_at, converted_class }, + force, + true, + )?; self.metrics.accepted_transaction_counter.add(1, &[]); } @@ -305,13 +307,31 @@ impl MempoolProvider for Mempool { /// This is called by the block production after a batch of transaction is executed. /// Mark the consumed txs as consumed, and re-add the transactions that are not consumed in the mempool. 
#[tracing::instrument(skip(self, txs, consumed_txs), fields(module = "Mempool"))] - fn re_add_txs, CI: IntoIterator>( + fn re_add_txs>( &self, txs: I, - consumed_txs: CI, - ) { + consumed_txs: Vec, + ) -> Result<(), Error> { + let mut inner = self.inner.write().expect("Poisoned lock"); + let hashes = consumed_txs.iter().map(|tx| tx.tx_hash()).collect::>(); + inner.re_add_txs(txs, consumed_txs); + drop(inner); + for tx_hash in hashes { + self.backend.remove_mempool_transaction(&tx_hash.to_felt())?; + } + Ok(()) + } + + #[tracing::instrument(skip(self, txs), fields(module = "Mempool"))] + fn insert_txs_no_validation(&self, txs: Vec, force: bool) -> Result<(), Error> { + for tx in &txs { + let saved_tx = blockifier_to_saved_tx(&tx.tx, tx.arrived_at); + // save to db + self.backend.save_mempool_transaction(&saved_tx, tx.tx_hash().to_felt(), &tx.converted_class)?; + } let mut inner = self.inner.write().expect("Poisoned lock"); - inner.re_add_txs(txs, consumed_txs) + inner.insert_txs(txs, force)?; + Ok(()) } fn chain_id(&self) -> Felt { diff --git a/crates/client/rpc/src/test_utils.rs b/crates/client/rpc/src/test_utils.rs index 7f4d32a0e..79a786e08 100644 --- a/crates/client/rpc/src/test_utils.rs +++ b/crates/client/rpc/src/test_utils.rs @@ -1,7 +1,7 @@ use jsonrpsee::core::{async_trait, RpcResult}; use mc_db::MadaraBackend; use mp_block::{ - header::{GasPrices, L1DataAvailabilityMode, PendingHeader}, + header::{BlockTimestamp, GasPrices, L1DataAvailabilityMode, PendingHeader}, Header, MadaraBlockInfo, MadaraBlockInner, MadaraMaybePendingBlock, MadaraMaybePendingBlockInfo, MadaraPendingBlockInfo, }; @@ -204,7 +204,7 @@ pub fn make_sample_chain_for_block_getters(backend: &MadaraBackend) -> SampleCha transaction_count: 1, global_state_root: Felt::from_hex_unchecked("0x88912"), sequencer_address: Felt::from_hex_unchecked("0xbabaa"), - block_timestamp: 43, + block_timestamp: BlockTimestamp(43), transaction_commitment: Felt::from_hex_unchecked("0xbabaa0"), event_count: 0, event_commitment: Felt::from_hex_unchecked("0xb"), diff --git a/crates/client/rpc/src/utils/mod.rs b/crates/client/rpc/src/utils/mod.rs index 6326dd7d3..82fbf6188 100644 --- a/crates/client/rpc/src/utils/mod.rs +++ b/crates/client/rpc/src/utils/mod.rs @@ -1,9 +1,6 @@ -pub(crate) mod transaction; - use std::fmt; use crate::StarknetRpcApiError; -pub use transaction::to_blockifier_transaction; pub fn display_internal_server_error(err: impl fmt::Display) { tracing::error!(target: "rpc_errors", "{:#}", err); diff --git a/crates/client/rpc/src/utils/transaction.rs b/crates/client/rpc/src/utils/transaction.rs deleted file mode 100644 index bbc7c114f..000000000 --- a/crates/client/rpc/src/utils/transaction.rs +++ /dev/null @@ -1,90 +0,0 @@ -use std::sync::Arc; - -use blockifier::execution::contract_class::ClassInfo; -use blockifier::transaction::transaction_execution as btx; -use mc_db::MadaraBackend; -use mp_block::BlockId; -use mp_convert::ToFelt; -use starknet_api::transaction::{Transaction, TransactionHash}; - -use crate::errors::{StarknetRpcApiError, StarknetRpcResult}; - -/// Convert an starknet-api Transaction to a blockifier Transaction -/// -/// **note:** this function does not support deploy transaction -/// because it is not supported by blockifier -pub fn to_blockifier_transaction( - backend: Arc, - block_id: BlockId, - transaction: mp_transactions::Transaction, - tx_hash: &TransactionHash, -) -> StarknetRpcResult { - let transaction: Transaction = transaction.try_into().map_err(|_| StarknetRpcApiError::InternalServerError)?; - 
- let paid_fee_on_l1 = match transaction { - Transaction::L1Handler(_) => Some(starknet_api::transaction::Fee(1_000_000_000_000)), - _ => None, - }; - - let class_info = match transaction { - Transaction::Declare(ref declare_tx) => { - let class_hash = declare_tx.class_hash(); - - let Ok(Some(class_info)) = backend.get_class_info(&block_id, &class_hash.to_felt()) else { - tracing::error!("Failed to retrieve class from class_hash '{class_hash}'"); - return Err(StarknetRpcApiError::ContractNotFound); - }; - - match class_info { - mp_class::ClassInfo::Sierra(info) => { - let compiled_class = backend - .get_sierra_compiled(&block_id, &info.compiled_class_hash) - .map_err(|e| { - tracing::error!( - "Failed to retrieve sierra compiled class from class_hash '{class_hash}': {e}" - ); - StarknetRpcApiError::InternalServerError - })? - .ok_or_else(|| { - tracing::error!( - "Inconsistent state: compiled sierra class from class_hash '{class_hash}' not found" - ); - StarknetRpcApiError::InternalServerError - })?; - - let blockifier_class = compiled_class.to_blockifier_class().map_err(|e| { - tracing::error!("Failed to convert contract class to blockifier contract class: {e}"); - StarknetRpcApiError::InternalServerError - })?; - Some( - ClassInfo::new( - &blockifier_class, - info.contract_class.program_length(), - info.contract_class.abi_length(), - ) - .map_err(|_| { - tracing::error!("Mismatch between the length of the sierra program and the class version"); - StarknetRpcApiError::InternalServerError - })?, - ) - } - mp_class::ClassInfo::Legacy(info) => { - let blockifier_class = info.contract_class.to_blockifier_class().map_err(|e| { - tracing::error!("Failed to convert contract class to blockifier contract class: {e}"); - StarknetRpcApiError::InternalServerError - })?; - Some(ClassInfo::new(&blockifier_class, 0, 0).map_err(|_| { - tracing::error!("Mismatch between the length of the legacy program and the class version"); - StarknetRpcApiError::InternalServerError - })?) 
- } - } - } - _ => None, - }; - - btx::Transaction::from_api(transaction.clone(), *tx_hash, class_info, paid_fee_on_l1, None, false).map_err(|_| { - tracing::error!("Failed to convert transaction to blockifier transaction"); - StarknetRpcApiError::InternalServerError - }) -} diff --git a/crates/client/rpc/src/versions/user/v0_7_1/methods/read/get_block_with_receipts.rs b/crates/client/rpc/src/versions/user/v0_7_1/methods/read/get_block_with_receipts.rs index 6cd0a0027..3438dca3a 100644 --- a/crates/client/rpc/src/versions/user/v0_7_1/methods/read/get_block_with_receipts.rs +++ b/crates/client/rpc/src/versions/user/v0_7_1/methods/read/get_block_with_receipts.rs @@ -37,7 +37,7 @@ pub fn get_block_with_receipts( transactions: transactions_with_receipts, pending_block_header: PendingBlockHeader { parent_hash: block.header.parent_block_hash, - timestamp: block.header.block_timestamp, + timestamp: block.header.block_timestamp.0, sequencer_address: block.header.sequencer_address, l1_gas_price: block.header.l1_gas_price.l1_gas_price(), l1_data_gas_price: block.header.l1_gas_price.l1_data_gas_price(), @@ -56,7 +56,7 @@ pub fn get_block_with_receipts( parent_hash: block.header.parent_block_hash, block_number: block.header.block_number, new_root: block.header.global_state_root, - timestamp: block.header.block_timestamp, + timestamp: block.header.block_timestamp.0, sequencer_address: block.header.sequencer_address, l1_gas_price: block.header.l1_gas_price.l1_gas_price(), l1_data_gas_price: block.header.l1_gas_price.l1_data_gas_price(), @@ -76,7 +76,10 @@ mod tests { test_utils::{rpc_test_setup, sample_chain_for_block_getters, SampleChainForBlockGetters}, }; use mc_db::MadaraBackend; - use mp_block::{header::GasPrices, BlockTag, Header, MadaraBlockInfo, MadaraBlockInner, MadaraMaybePendingBlock}; + use mp_block::{ + header::{BlockTimestamp, GasPrices}, + BlockTag, Header, MadaraBlockInfo, MadaraBlockInner, MadaraMaybePendingBlock, + }; use mp_chain_config::StarknetVersion; use mp_receipt::{ ExecutionResources, ExecutionResult, FeePayment, InvokeTransactionReceipt, PriceUnit, TransactionReceipt, @@ -209,7 +212,7 @@ mod tests { transaction_count: 1, global_state_root: Felt::from_hex_unchecked("0x88912"), sequencer_address: Felt::from_hex_unchecked("0xbabaa"), - block_timestamp: 43, + block_timestamp: BlockTimestamp(43), transaction_commitment: Felt::from_hex_unchecked("0xbabaa0"), event_count: 0, event_commitment: Felt::from_hex_unchecked("0xb"), diff --git a/crates/client/rpc/src/versions/user/v0_7_1/methods/read/get_block_with_tx_hashes.rs b/crates/client/rpc/src/versions/user/v0_7_1/methods/read/get_block_with_tx_hashes.rs index 0d19896e4..9b951fa26 100644 --- a/crates/client/rpc/src/versions/user/v0_7_1/methods/read/get_block_with_tx_hashes.rs +++ b/crates/client/rpc/src/versions/user/v0_7_1/methods/read/get_block_with_tx_hashes.rs @@ -34,7 +34,7 @@ pub fn get_block_with_tx_hashes( transactions: block_txs_hashes, pending_block_header: PendingBlockHeader { parent_hash: block.header.parent_block_hash, - timestamp: block.header.block_timestamp, + timestamp: block.header.block_timestamp.0, sequencer_address: block.header.sequencer_address, l1_gas_price: block.header.l1_gas_price.l1_gas_price(), l1_data_gas_price: block.header.l1_gas_price.l1_data_gas_price(), @@ -57,7 +57,7 @@ pub fn get_block_with_tx_hashes( parent_hash: block.header.parent_block_hash, block_number: block.header.block_number, new_root: block.header.global_state_root, - timestamp: block.header.block_timestamp, + timestamp: 
block.header.block_timestamp.0, sequencer_address: block.header.sequencer_address, l1_gas_price: block.header.l1_gas_price.l1_gas_price(), l1_data_gas_price: block.header.l1_gas_price.l1_data_gas_price(), diff --git a/crates/client/rpc/src/versions/user/v0_7_1/methods/read/get_block_with_txs.rs b/crates/client/rpc/src/versions/user/v0_7_1/methods/read/get_block_with_txs.rs index ae12b33ec..014bedec1 100644 --- a/crates/client/rpc/src/versions/user/v0_7_1/methods/read/get_block_with_txs.rs +++ b/crates/client/rpc/src/versions/user/v0_7_1/methods/read/get_block_with_txs.rs @@ -38,7 +38,7 @@ pub fn get_block_with_txs(starknet: &Starknet, block_id: BlockId) -> RpcResult RpcResult>()?; diff --git a/crates/client/rpc/src/versions/user/v0_7_1/methods/trace/trace_transaction.rs b/crates/client/rpc/src/versions/user/v0_7_1/methods/trace/trace_transaction.rs index 1dfd170d0..3040d34a8 100644 --- a/crates/client/rpc/src/versions/user/v0_7_1/methods/trace/trace_transaction.rs +++ b/crates/client/rpc/src/versions/user/v0_7_1/methods/trace/trace_transaction.rs @@ -1,9 +1,9 @@ use crate::errors::StarknetRpcApiError; use crate::errors::StarknetRpcResult; -use crate::utils::transaction::to_blockifier_transaction; use crate::utils::{OptionExt, ResultExt}; use crate::Starknet; use mc_exec::execution_result_to_tx_trace; +use mc_exec::transaction::to_blockifier_transaction; use mc_exec::ExecutionContext; use mp_chain_config::StarknetVersion; use starknet_api::transaction::TransactionHash; @@ -33,6 +33,7 @@ pub async fn trace_transaction( let mut block_txs = Iterator::zip(block.inner.transactions.into_iter(), block.info.tx_hashes()).map(|(tx, hash)| { to_blockifier_transaction(starknet.clone_backend(), block.info.as_block_id(), tx, &TransactionHash(*hash)) + .or_internal_server_error("Failed to convert transaction to blockifier format") }); // takes up until not including last tx diff --git a/crates/client/sync/src/fetch/fetchers.rs b/crates/client/sync/src/fetch/fetchers.rs index b26f3a801..5be886e28 100644 --- a/crates/client/sync/src/fetch/fetchers.rs +++ b/crates/client/sync/src/fetch/fetchers.rs @@ -373,7 +373,7 @@ mod test_l2_fetchers { felt!("0x1176a1bd84444c89232ec27754698e5d2e7e1a7f1539f12027f28b23ec9f3d8"), "Sequencer address should match" ); - assert_eq!(pending_block.header.block_timestamp, 1725950824, "Block timestamp should match"); + assert_eq!(pending_block.header.block_timestamp.0, 1725950824, "Block timestamp should match"); assert_eq!( pending_block.header.protocol_version, StarknetVersion::new(0, 13, 2, 1), diff --git a/crates/client/sync/src/l2.rs b/crates/client/sync/src/l2.rs index dad900f43..b4ae85ab7 100644 --- a/crates/client/sync/src/l2.rs +++ b/crates/client/sync/src/l2.rs @@ -407,7 +407,7 @@ mod tests { let applied_block = MadaraBlock::try_from(applied_block.unwrap()).unwrap(); assert_eq!(applied_block.info.header.block_number, 0, "Block number does not match"); - assert_eq!(applied_block.info.header.block_timestamp, 0, "Block timestamp does not match"); + assert_eq!(applied_block.info.header.block_timestamp.0, 0, "Block timestamp does not match"); assert_eq!(applied_block.info.header.parent_block_hash, Felt::ZERO, "Parent block hash does not match"); assert!(applied_block.inner.transactions.is_empty(), "Block should not contain any transactions"); assert_eq!( diff --git a/crates/node/src/cli/chain_config_overrides.rs b/crates/node/src/cli/chain_config_overrides.rs index 6e1f3fdd9..7d86413db 100644 --- a/crates/node/src/cli/chain_config_overrides.rs +++ 
b/crates/node/src/cli/chain_config_overrides.rs @@ -14,7 +14,10 @@ use mp_chain_config::{ ChainConfig, StarknetVersion, }; use mp_utils::parsers::parse_key_value_yaml; -use mp_utils::serde::{deserialize_duration, deserialize_private_key, serialize_duration}; +use mp_utils::serde::{ + deserialize_duration, deserialize_optional_duration, deserialize_private_key, serialize_duration, + serialize_optional_duration, +}; use url::Url; /// Override chain config parameters. @@ -51,8 +54,8 @@ pub struct ChainConfigOverridesInner { pub private_key: ZeroingPrivateKey, pub mempool_tx_limit: usize, pub mempool_declare_tx_limit: usize, - #[serde(deserialize_with = "deserialize_duration", serialize_with = "serialize_duration")] - pub mempool_tx_max_age: Duration, + #[serde(deserialize_with = "deserialize_optional_duration", serialize_with = "serialize_optional_duration")] + pub mempool_tx_max_age: Option, } impl ChainConfigOverrideParams { diff --git a/crates/primitives/block/src/header.rs b/crates/primitives/block/src/header.rs index 7de360e39..2a1682bd3 100644 --- a/crates/primitives/block/src/header.rs +++ b/crates/primitives/block/src/header.rs @@ -1,14 +1,18 @@ use core::num::NonZeroU128; use mp_chain_config::StarknetVersion; +use serde::Deserialize; +use serde::Serialize; use starknet_types_core::felt::Felt; use starknet_types_core::hash::Pedersen; use starknet_types_core::hash::Poseidon; use starknet_types_core::hash::StarkHash as StarkHashTrait; +use std::fmt; +use std::time::SystemTime; /// Block status. /// /// The status of the block. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, serde::Serialize, serde::Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] pub enum BlockStatus { Pending, @@ -29,14 +33,31 @@ impl From for starknet_types_rpc::BlockStatus { } } -#[derive(Clone, Debug, Default, PartialEq, Eq, serde::Serialize, serde::Deserialize)] +#[derive(Clone, Copy, Default, Debug, PartialEq, PartialOrd, Ord, Eq, Serialize, Deserialize)] +#[serde(transparent)] +pub struct BlockTimestamp(pub u64); +impl BlockTimestamp { + pub fn now() -> Self { + Self( + SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).expect("SystemTime::now() < Unix epoch").as_secs(), + ) + } +} + +impl fmt::Display for BlockTimestamp { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] pub struct PendingHeader { /// The hash of this blockā€™s parent. pub parent_block_hash: Felt, /// The Starknet address of the sequencer who created this block. pub sequencer_address: Felt, /// Unix timestamp (seconds) when the block was produced -- before executing any transaction. - pub block_timestamp: u64, + pub block_timestamp: BlockTimestamp, /// The version of the Starknet protocol used when creating this block pub protocol_version: StarknetVersion, /// Gas prices for this block @@ -45,7 +66,7 @@ pub struct PendingHeader { pub l1_da_mode: L1DataAvailabilityMode, } -#[derive(Clone, Debug, Default, PartialEq, Eq, serde::Serialize, serde::Deserialize)] +#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] /// Starknet header definition. pub struct Header { /// The hash of this blockā€™s parent. @@ -56,8 +77,8 @@ pub struct Header { pub global_state_root: Felt, /// The Starknet address of the sequencer who created this block. 
pub sequencer_address: Felt, - /// The time the sequencer created this block before executing transactions - pub block_timestamp: u64, + /// Unix timestamp (seconds) when the block was produced -- before executing any transaction. + pub block_timestamp: BlockTimestamp, /// The number of transactions in a block pub transaction_count: u64, /// A commitment to the transactions included in the block @@ -80,7 +101,7 @@ pub struct Header { pub l1_da_mode: L1DataAvailabilityMode, } -#[derive(Clone, Debug, Default, PartialEq, Eq, serde::Serialize, serde::Deserialize)] +#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] pub struct GasPrices { pub eth_l1_gas_price: u128, pub strk_l1_gas_price: u128, @@ -117,7 +138,7 @@ impl GasPrices { } } -#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, serde::Serialize, serde::Deserialize)] +#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "UPPERCASE")] pub enum L1DataAvailabilityMode { #[serde(alias = "Calldata")] @@ -153,7 +174,7 @@ impl Header { block_number: u64, global_state_root: Felt, sequencer_address: Felt, - block_timestamp: u64, + block_timestamp: BlockTimestamp, transaction_count: u64, transaction_commitment: Felt, event_count: u64, @@ -193,7 +214,7 @@ impl Header { Felt::from(self.block_number), self.global_state_root, self.sequencer_address, - Felt::from(self.block_timestamp), + Felt::from(self.block_timestamp.0), Felt::from(self.transaction_count), self.transaction_commitment, Felt::from(self.event_count), @@ -209,7 +230,7 @@ impl Header { Felt::from(self.block_number), self.global_state_root, self.sequencer_address, - Felt::from(self.block_timestamp), + Felt::from(self.block_timestamp.0), concat_counts( self.transaction_count, self.event_count, @@ -291,7 +312,7 @@ mod tests { 2, Felt::from(3), Felt::from(4), - 5, + BlockTimestamp(5), 6, Felt::from(7), 8, @@ -345,7 +366,7 @@ mod tests { block_number: 2, global_state_root: Felt::from(3), sequencer_address: Felt::from(4), - block_timestamp: 5, + block_timestamp: BlockTimestamp(5), transaction_count: 6, transaction_commitment: Felt::from(7), event_count: 8, diff --git a/crates/primitives/block/src/lib.rs b/crates/primitives/block/src/lib.rs index 3f97be8ee..4db2aa545 100644 --- a/crates/primitives/block/src/lib.rs +++ b/crates/primitives/block/src/lib.rs @@ -1,17 +1,15 @@ //! Starknet block primitives. 
-pub mod header; - -pub use header::Header; -use header::{L1DataAvailabilityMode, PendingHeader}; +use crate::header::GasPrices; +use header::{BlockTimestamp, L1DataAvailabilityMode, PendingHeader}; use mp_chain_config::StarknetVersion; use mp_receipt::TransactionReceipt; use mp_transactions::Transaction; -pub use primitive_types::{H160, U256}; use starknet_types_core::felt::Felt; -use crate::header::GasPrices; - +pub mod header; +pub use header::Header; +pub use primitive_types::{H160, U256}; pub type BlockId = starknet_types_rpc::BlockId; pub type BlockTag = starknet_types_rpc::BlockTag; @@ -72,6 +70,13 @@ impl MadaraMaybePendingBlockInfo { MadaraMaybePendingBlockInfo::Pending(block) => &block.header.protocol_version, } } + + pub fn block_timestamp(&self) -> BlockTimestamp { + match self { + MadaraMaybePendingBlockInfo::NotPending(block) => block.header.block_timestamp, + MadaraMaybePendingBlockInfo::Pending(block) => block.header.block_timestamp, + } + } } impl From for MadaraMaybePendingBlockInfo { @@ -130,7 +135,7 @@ impl From for starknet_types_rpc::BlockHeader { } else { protocol_version.to_string() }, - timestamp, + timestamp: timestamp.0, } } } diff --git a/crates/primitives/chain_config/src/chain_config.rs b/crates/primitives/chain_config/src/chain_config.rs index 043649077..409ff9bb5 100644 --- a/crates/primitives/chain_config/src/chain_config.rs +++ b/crates/primitives/chain_config/src/chain_config.rs @@ -25,7 +25,7 @@ use starknet_api::core::{ChainId, ContractAddress, PatriciaKey}; use starknet_types_core::felt::Felt; use url::Url; -use mp_utils::serde::{deserialize_duration, deserialize_private_key}; +use mp_utils::serde::{deserialize_duration, deserialize_optional_duration, deserialize_private_key}; use crate::StarknetVersion; @@ -136,8 +136,8 @@ pub struct ChainConfig { /// Transaction limit in the mempool, we have an additional limit for declare transactions. pub mempool_declare_tx_limit: usize, /// Max age of a transaction in the mempool. - #[serde(deserialize_with = "deserialize_duration")] - pub mempool_tx_max_age: Duration, + #[serde(deserialize_with = "deserialize_optional_duration")] + pub mempool_tx_max_age: Option, } impl ChainConfig { @@ -247,7 +247,7 @@ impl ChainConfig { mempool_tx_limit: 10_000, mempool_declare_tx_limit: 20, - mempool_tx_max_age: Duration::from_secs(60 * 60), // an hour? + mempool_tx_max_age: Some(Duration::from_secs(60 * 60)), // an hour? 
} } diff --git a/crates/primitives/class/src/compile.rs b/crates/primitives/class/src/compile.rs index 851989b5f..219ce0f81 100644 --- a/crates/primitives/class/src/compile.rs +++ b/crates/primitives/class/src/compile.rs @@ -1,3 +1,10 @@ +use crate::{CompiledSierra, CompressedLegacyContractClass, FlattenedSierraClass, LegacyContractAbiEntry}; +use blockifier::execution::{ + contract_class::{ + ContractClass as BContractClass, ContractClassV0 as BContractClassV0, ContractClassV1 as BContractClassV1, + }, + errors::ContractClassError as BContractClassError, +}; use num_bigint::{BigInt, BigUint, Sign}; use starknet_types_core::felt::Felt; use std::{ @@ -5,10 +12,10 @@ use std::{ io::{Cursor, Read}, }; -use crate::{CompiledSierra, CompressedLegacyContractClass, FlattenedSierraClass, LegacyContractAbiEntry}; - #[derive(Debug, thiserror::Error)] pub enum ClassCompilationError { + #[error("Error while converting class to blockifier format: {0}")] + ContractClassError(#[from] BContractClassError), #[error("Failed to decompress program: {0}")] DecompressionFailed(#[from] std::io::Error), #[error("Failed to parse program JSON: {0}")] @@ -72,13 +79,9 @@ impl CompressedLegacyContractClass { Ok(serde_json::to_string(&json)?) } - pub fn to_blockifier_class( - &self, - ) -> Result { + pub fn to_blockifier_class(&self) -> Result { let class_json = self.serialize_to_json()?; - Ok(blockifier::execution::contract_class::ContractClass::V0( - blockifier::execution::contract_class::ContractClassV0::try_from_json_string(&class_json)?, - )) + Ok(BContractClass::V0(BContractClassV0::try_from_json_string(&class_json)?)) } } @@ -114,12 +117,8 @@ impl FlattenedSierraClass { } impl CompiledSierra { - pub fn to_blockifier_class( - &self, - ) -> Result { - Ok(blockifier::execution::contract_class::ContractClass::V1( - blockifier::execution::contract_class::ContractClassV1::try_from_json_string(&self.0)?, - )) + pub fn to_blockifier_class(&self) -> Result { + Ok(BContractClass::V1(BContractClassV1::try_from_json_string(&self.0)?)) } } diff --git a/crates/primitives/class/src/lib.rs b/crates/primitives/class/src/lib.rs index 185d10d62..4ed54ec31 100644 --- a/crates/primitives/class/src/lib.rs +++ b/crates/primitives/class/src/lib.rs @@ -1,6 +1,7 @@ -use std::{collections::HashMap, sync::Arc}; - +use blockifier::execution::contract_class::{ClassInfo as BClassInfo, ContractClass as BContractClass}; +use compile::ClassCompilationError; use starknet_types_core::felt::Felt; +use std::{collections::HashMap, sync::Arc}; pub mod class_hash; pub mod class_update; @@ -29,6 +30,24 @@ impl ConvertedClass { ConvertedClass::Sierra(sierra) => ClassInfo::Sierra(sierra.info.clone()), } } + + pub fn to_blockifier_class(&self) -> Result { + Ok(match self { + ConvertedClass::Legacy(class) => class.info.contract_class.to_blockifier_class()?, + ConvertedClass::Sierra(class) => class.compiled.to_blockifier_class()?, + }) + } + + pub fn to_blockifier_class_info(&self) -> Result { + Ok(match self { + ConvertedClass::Legacy(class) => BClassInfo::new(&class.info.contract_class.to_blockifier_class()?, 0, 0)?, + ConvertedClass::Sierra(class) => BClassInfo::new( + &class.compiled.to_blockifier_class()?, + class.info.contract_class.sierra_program.len(), + class.info.contract_class.abi.len(), + )?, + }) + } } #[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)] diff --git a/crates/primitives/gateway/src/block.rs b/crates/primitives/gateway/src/block.rs index 383654b7c..cf682fc59 100644 --- a/crates/primitives/gateway/src/block.rs 
+++ b/crates/primitives/gateway/src/block.rs @@ -1,5 +1,5 @@ use anyhow::Context; -use mp_block::header::L1DataAvailabilityMode; +use mp_block::header::{BlockTimestamp, L1DataAvailabilityMode}; use mp_chain_config::StarknetVersion; use mp_convert::hex_serde::U128AsHex; use serde::{Deserialize, Serialize}; @@ -121,7 +121,7 @@ impl ProviderBlock { block_hash: block.info.block_hash, block_number: block.info.header.block_number, parent_block_hash: block.info.header.parent_block_hash, - timestamp: block.info.header.block_timestamp, + timestamp: block.info.header.block_timestamp.0, sequencer_address, state_root: block.info.header.global_state_root, transaction_commitment: block.info.header.transaction_commitment, @@ -149,7 +149,7 @@ impl ProviderBlock { Ok(mc_block_import::UnverifiedHeader { parent_block_hash: Some(self.parent_block_hash), sequencer_address: self.sequencer_address.unwrap_or_default(), - block_timestamp: self.timestamp, + block_timestamp: BlockTimestamp(self.timestamp), protocol_version: self .starknet_version .as_deref() @@ -217,7 +217,7 @@ impl ProviderBlockPending { price_in_fri: block.info.header.l1_gas_price.strk_l1_data_gas_price, }, transactions, - timestamp: block.info.header.block_timestamp, + timestamp: block.info.header.block_timestamp.0, sequencer_address: block.info.header.sequencer_address, transaction_receipts, starknet_version, @@ -228,7 +228,7 @@ impl ProviderBlockPending { Ok(mc_block_import::UnverifiedHeader { parent_block_hash: Some(self.parent_block_hash), sequencer_address: self.sequencer_address, - block_timestamp: self.timestamp, + block_timestamp: BlockTimestamp(self.timestamp), protocol_version: self .starknet_version .as_deref() diff --git a/crates/primitives/transactions/src/lib.rs b/crates/primitives/transactions/src/lib.rs index ab5c27d99..df75e949d 100644 --- a/crates/primitives/transactions/src/lib.rs +++ b/crates/primitives/transactions/src/lib.rs @@ -6,18 +6,18 @@ use starknet_api::transaction::TransactionVersion; use starknet_types_core::{felt::Felt, hash::StarkHash}; use std::sync::Arc; -mod broadcasted_to_blockifier; mod from_blockifier; mod from_broadcasted_transaction; mod from_starknet_types; mod into_starknet_api; +mod to_blockifier; mod to_starknet_types; // pub mod broadcasted; pub mod compute_hash; pub mod utils; -pub use broadcasted_to_blockifier::{BroadcastedToBlockifierError, BroadcastedTransactionExt}; +pub use to_blockifier::{BroadcastedTransactionExt, ToBlockifierError}; const SIMULATE_TX_VERSION_OFFSET: Felt = Felt::from_hex_unchecked("0x100000000000000000000000000000000"); @@ -218,6 +218,37 @@ impl Transaction { blockifier::transaction::objects::FeeType::Strk } } + + pub fn as_invoke(&self) -> Option<&InvokeTransaction> { + match self { + Transaction::Invoke(tx) => Some(tx), + _ => None, + } + } + pub fn as_declare(&self) -> Option<&DeclareTransaction> { + match self { + Transaction::Declare(tx) => Some(tx), + _ => None, + } + } + pub fn as_l1_handler(&self) -> Option<&L1HandlerTransaction> { + match self { + Transaction::L1Handler(tx) => Some(tx), + _ => None, + } + } + pub fn as_deploy(&self) -> Option<&DeployTransaction> { + match self { + Transaction::Deploy(tx) => Some(tx), + _ => None, + } + } + pub fn as_deploy_account(&self) -> Option<&DeployAccountTransaction> { + match self { + Transaction::DeployAccount(tx) => Some(tx), + _ => None, + } + } } #[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)] @@ -418,6 +449,14 @@ impl DeclareTransaction { DeclareTransaction::V3(tx) => &tx.sender_address, } } 
+ pub fn class_hash(&self) -> &Felt { + match self { + DeclareTransaction::V0(tx) => &tx.class_hash, + DeclareTransaction::V1(tx) => &tx.class_hash, + DeclareTransaction::V2(tx) => &tx.class_hash, + DeclareTransaction::V3(tx) => &tx.class_hash, + } + } pub fn signature(&self) -> &[Felt] { match self { DeclareTransaction::V0(tx) => &tx.signature, diff --git a/crates/primitives/transactions/src/broadcasted_to_blockifier.rs b/crates/primitives/transactions/src/to_blockifier.rs similarity index 64% rename from crates/primitives/transactions/src/broadcasted_to_blockifier.rs rename to crates/primitives/transactions/src/to_blockifier.rs index 4cbf41556..64d9b989b 100644 --- a/crates/primitives/transactions/src/broadcasted_to_blockifier.rs +++ b/crates/primitives/transactions/src/to_blockifier.rs @@ -16,12 +16,60 @@ use starknet_types_core::felt::Felt; use starknet_types_rpc::{BroadcastedDeclareTxn, BroadcastedTxn}; use std::sync::Arc; +impl TransactionWithHash { + /// Very important note: When the transaction is an L1HandlerTransaction, the paid_fee_on_l1 field will be set to + /// a very high value, as it is not stored in the transaction. This field does not affect the execution except + /// that it may lead to a rejection on L2 (L1HandlerTransactions are not revertible). This means that this + /// implementation is fine as long as the transaction has been checked beforehand. + /// TODO: check that this is always true. + /// + /// Callers of this function must make sure that the transaction has originally been executed with the correct + /// paid_fee_on_l1 field. + /// + /// In Madara, there are currently two places where this function is executed: + /// - in RPCs, to replay (trace) transactions. + /// - in block production, to replay the pending block when restarting sequencing. + /// + /// In the first case, we can't always get the paid_fees_on_l1 field, but since the sequencer supposedly has + /// executed this transaction before, it's fine to assume that it's valid. + /// In the second case, this transaction has already been validated by the mempool and block production. + pub fn into_blockifier(self, class: Option<&ConvertedClass>) -> Result<BTransaction, ToBlockifierError> { + let class_info = match &self.transaction { + Transaction::Declare(_txn) => { + let class = class.ok_or(ToBlockifierError::MissingClass)?; + Some(class.to_blockifier_class_info()?) + } + _ => None, + }; + + // see doc comment + let paid_fee_on_l1 = + self.transaction.as_l1_handler().map(|_| starknet_api::transaction::Fee(1_000_000_000_000)); + + let deployed_address = match &self.transaction { + // TODO: this shouldn't be computed here... + Transaction::DeployAccount(tx) => Some(tx.calculate_contract_address()), + _ => None, + }; + let transaction: starknet_api::transaction::Transaction = self.transaction.try_into()?; + + Ok(BTransaction::from_api( + transaction, + TransactionHash(self.hash), + class_info, + paid_fee_on_l1, + deployed_address.map(|address| address.try_into().expect("Address conversion should never fail")), + /* is_query */ false, + )?)
+ } +} + pub trait BroadcastedTransactionExt { fn into_blockifier( self, chain_id: Felt, starknet_version: StarknetVersion, - ) -> Result<(BTransaction, Option), BroadcastedToBlockifierError>; + ) -> Result<(BTransaction, Option), ToBlockifierError>; } impl BroadcastedTransactionExt for BroadcastedTxn { @@ -29,7 +77,7 @@ impl BroadcastedTransactionExt for BroadcastedTxn { self, chain_id: Felt, starknet_version: StarknetVersion, - ) -> Result<(BTransaction, Option), BroadcastedToBlockifierError> { + ) -> Result<(BTransaction, Option), ToBlockifierError> { let (class_info, converted_class, class_hash) = match &self { BroadcastedTxn::Declare(tx) => match tx { BroadcastedDeclareTxn::V1(tx) | BroadcastedDeclareTxn::QueryV1(tx) => { @@ -74,7 +122,7 @@ impl L1HandlerTransaction { chain_id: Felt, _starknet_version: StarknetVersion, paid_fees_on_l1: u128, - ) -> Result<(BTransaction, Option), BroadcastedToBlockifierError> { + ) -> Result<(BTransaction, Option), ToBlockifierError> { let transaction = Transaction::L1Handler(self.clone()); // TODO: check self.version let hash = self.compute_hash(chain_id, false, false); @@ -92,7 +140,7 @@ impl BroadcastedDeclareTransactionV0 { self, chain_id: Felt, starknet_version: StarknetVersion, - ) -> Result<(BTransaction, Option), BroadcastedToBlockifierError> { + ) -> Result<(BTransaction, Option), ToBlockifierError> { let (class_info, converted_class, class_hash) = handle_class_legacy(Arc::clone(&self.contract_class))?; let is_query = self.is_query; @@ -111,7 +159,7 @@ impl BroadcastedDeclareTransactionV0 { } #[derive(thiserror::Error, Debug)] -pub enum BroadcastedToBlockifierError { +pub enum ToBlockifierError { #[error("Failed to compile contract class: {0}")] CompilationFailed(#[from] ClassCompilationError), #[error("Failed to convert program: {0}")] @@ -130,47 +178,38 @@ pub enum BroadcastedToBlockifierError { CompiledClassHashMismatch { expected: Felt, compilation: Felt }, #[error("Failed to convert base64 program to cairo program: {0}")] Base64ToCairoError(#[from] base64::DecodeError), + #[error("Missing class")] + MissingClass, } #[allow(clippy::type_complexity)] fn handle_class_legacy( contract_class: Arc, -) -> Result<(Option, Option, Option), BroadcastedToBlockifierError> { +) -> Result<(Option, Option, Option), ToBlockifierError> { let class_hash = contract_class.compute_class_hash()?; tracing::debug!("Computed legacy class hash: {:?}", class_hash); - let class_blockifier = - contract_class.to_blockifier_class().map_err(BroadcastedToBlockifierError::CompilationFailed)?; - Ok(( - Some(BClassInfo::new(&class_blockifier, 0, 0)?), - Some(ConvertedClass::Legacy(LegacyConvertedClass { class_hash, info: LegacyClassInfo { contract_class } })), - Some(class_hash), - )) + let converted_class = + ConvertedClass::Legacy(LegacyConvertedClass { class_hash, info: LegacyClassInfo { contract_class } }); + Ok((Some(converted_class.to_blockifier_class_info()?), Some(converted_class), Some(class_hash))) } #[allow(clippy::type_complexity)] fn handle_class_sierra( contract_class: Arc, expected_compiled_class_hash: Felt, -) -> Result<(Option, Option, Option), BroadcastedToBlockifierError> { +) -> Result<(Option, Option, Option), ToBlockifierError> { let class_hash = contract_class.compute_class_hash()?; let (compiled_class_hash, compiled) = contract_class.compile_to_casm()?; if expected_compiled_class_hash != compiled_class_hash { - return Err(BroadcastedToBlockifierError::CompiledClassHashMismatch { + return Err(ToBlockifierError::CompiledClassHashMismatch { expected: 
expected_compiled_class_hash, compilation: compiled_class_hash, }); } - Ok(( - Some(BClassInfo::new( - &compiled.to_blockifier_class()?, - contract_class.sierra_program.len(), - contract_class.abi.len(), - )?), - Some(ConvertedClass::Sierra(SierraConvertedClass { - class_hash, - info: SierraClassInfo { contract_class, compiled_class_hash }, - compiled: Arc::new(compiled), - })), - Some(class_hash), - )) + let converted_class = ConvertedClass::Sierra(SierraConvertedClass { + class_hash, + info: SierraClassInfo { contract_class, compiled_class_hash }, + compiled: Arc::new(compiled), + }); + Ok((Some(converted_class.to_blockifier_class_info()?), Some(converted_class), Some(class_hash))) } diff --git a/crates/primitives/utils/src/serde.rs b/crates/primitives/utils/src/serde.rs index aea7816bb..54eb45863 100644 --- a/crates/primitives/utils/src/serde.rs +++ b/crates/primitives/utils/src/serde.rs @@ -13,6 +13,27 @@ where parse_duration(&s).map_err(serde::de::Error::custom) } +pub fn deserialize_optional_duration<'de, D>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + let Some(s) = Option::::deserialize(deserializer)? else { + return Ok(None); + }; + parse_duration(&s).map_err(serde::de::Error::custom).map(Some) +} + +pub fn serialize_optional_duration(duration: &Option, serializer: S) -> Result +where + S: serde::Serializer, +{ + if let Some(duration) = duration { + serialize_duration(duration, serializer) + } else { + serializer.serialize_none() + } +} + pub fn serialize_duration(duration: &Duration, serializer: S) -> Result where S: serde::Serializer, diff --git a/crates/tests/src/devnet.rs b/crates/tests/src/devnet.rs new file mode 100644 index 000000000..2f5c94530 --- /dev/null +++ b/crates/tests/src/devnet.rs @@ -0,0 +1,239 @@ +use rstest::rstest; +use starknet::accounts::{Account, ExecutionEncoding, SingleOwnerAccount}; +use starknet::signers::{LocalWallet, SigningKey}; +use starknet_core::types::{BlockId, BlockTag, Call, Felt, ReceiptBlock}; +use starknet_core::utils::starknet_keccak; +use starknet_providers::Provider; +use std::time::Duration; + +use crate::{wait_for_cond, MadaraCmdBuilder}; + +const ERC20_STRK_CONTRACT_ADDRESS: Felt = + Felt::from_hex_unchecked("0x04718f5a0fc34cc1af16a1cdee98ffb20c31f5cd61d6ab07201858f4287c938d"); +#[allow(unused)] +const ERC20_ETH_CONTRACT_ADDRESS: Felt = + Felt::from_hex_unchecked("0x049d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7"); + +const ACCOUNT_SECRET: Felt = + Felt::from_hex_unchecked("0x077e56c6dc32d40a67f6f7e6625c8dc5e570abe49c0a24e9202e4ae906abcc07"); +const ACCOUNT_ADDRESS: Felt = + Felt::from_hex_unchecked("0x055be462e718c4166d656d11f89e341115b8bc82389c3762a10eade04fcb225d"); + +#[rstest] +#[tokio::test] +async fn madara_devnet_add_transaction() { + let _ = tracing_subscriber::fmt().with_test_writer().try_init(); + + let args = &[ + "--devnet", + "--no-l1-sync", + "--gas-price", + "0", + // only produce blocks no pending txs + "--chain-config-override", + "block_time=1s,pending_block_update_time=1s", + ]; + + let cmd_builder = MadaraCmdBuilder::new().args(*args); + let mut node = cmd_builder.run(); + node.wait_for_ready().await; + + tokio::time::sleep(Duration::from_secs(3)).await; + + let chain_id = node.json_rpc().chain_id().await.unwrap(); + + let signer = LocalWallet::from_signing_key(SigningKey::from_secret_scalar(ACCOUNT_SECRET)); + let mut account = + SingleOwnerAccount::new(node.json_rpc(), signer, ACCOUNT_ADDRESS, chain_id, ExecutionEncoding::New); + 
account.set_block_id(BlockId::Tag(BlockTag::Latest)); + + let res = account + .execute_v3(vec![Call { + to: ERC20_STRK_CONTRACT_ADDRESS, + selector: starknet_keccak(b"transfer"), + calldata: vec![ACCOUNT_ADDRESS, 15.into(), Felt::ZERO], + }]) + .send() + .await + .unwrap(); + + wait_for_cond( + || async { + let receipt = node.json_rpc().get_transaction_receipt(res.transaction_hash).await?; + assert!(receipt.block.is_block()); + Ok(()) + }, + Duration::from_millis(500), + 60, + ) + .await; + + tokio::time::sleep(Duration::from_secs(2)).await; + + let res = account + .execute_v3(vec![Call { + to: ERC20_STRK_CONTRACT_ADDRESS, + selector: starknet_keccak(b"transfer"), + calldata: vec![ACCOUNT_ADDRESS, 40.into(), Felt::ZERO], + }]) + .send() + .await + .unwrap(); + + wait_for_cond( + || async { + let receipt = node.json_rpc().get_transaction_receipt(res.transaction_hash).await?; + assert!(receipt.block.is_block()); + Ok(()) + }, + Duration::from_millis(500), + 60, + ) + .await; +} + +#[rstest] +#[tokio::test] +async fn madara_devnet_mempool_saving() { + let _ = tracing_subscriber::fmt().with_test_writer().try_init(); + + let cmd_builder = MadaraCmdBuilder::new().args([ + "--devnet", + "--no-l1-sync", + "--gas-price", + "0", + // never produce blocks & pending txs + "--chain-config-path", + "test_devnet.yaml", + "--chain-config-override", + "block_time=5min,pending_block_update_time=5min", + ]); + let mut node = cmd_builder.clone().run(); + node.wait_for_ready().await; + + let chain_id = node.json_rpc().chain_id().await.unwrap(); + + let signer = LocalWallet::from_signing_key(SigningKey::from_secret_scalar(ACCOUNT_SECRET)); + let mut account = + SingleOwnerAccount::new(node.json_rpc(), signer, ACCOUNT_ADDRESS, chain_id, ExecutionEncoding::New); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + let res = account + .execute_v3(vec![Call { + to: ERC20_STRK_CONTRACT_ADDRESS, + selector: starknet_keccak(b"transfer"), + calldata: vec![ACCOUNT_ADDRESS, 15.into(), Felt::ZERO], + }]) + .send() + .await + .unwrap(); + + drop(node); + + // tx should be in saved mempool + + let cmd_builder = cmd_builder.args([ + "--devnet", + "--no-l1-sync", + "--gas-price", + "0", + // never produce blocks but produce pending txs + "--chain-config-path", + "test_devnet.yaml", + "--chain-config-override", + "block_time=5min,pending_block_update_time=500ms", + ]); + let mut node = cmd_builder.clone().run(); + node.wait_for_ready().await; + + // tx should be in mempool + + wait_for_cond( + || async { + let receipt = node.json_rpc().get_transaction_receipt(res.transaction_hash).await?; + assert_eq!(receipt.block, ReceiptBlock::Pending); + Ok(()) + }, + Duration::from_millis(500), + 60, + ) + .await; +} + +#[rstest] +#[tokio::test] +async fn madara_devnet_continue_pending() { + let _ = tracing_subscriber::fmt().with_test_writer().try_init(); + + let cmd_builder = MadaraCmdBuilder::new().args([ + "--devnet", + "--no-l1-sync", + "--gas-price", + "0", + // never produce blocks but produce pending txs + "--chain-config-path", + "test_devnet.yaml", + "--chain-config-override", + "block_time=5min,pending_block_update_time=500ms", + ]); + let mut node = cmd_builder.clone().run(); + node.wait_for_ready().await; + + let chain_id = node.json_rpc().chain_id().await.unwrap(); + + let signer = LocalWallet::from_signing_key(SigningKey::from_secret_scalar(ACCOUNT_SECRET)); + let mut account = + SingleOwnerAccount::new(node.json_rpc(), signer, ACCOUNT_ADDRESS, chain_id, ExecutionEncoding::New); + 
account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + let res = account + .execute_v3(vec![Call { + to: ERC20_STRK_CONTRACT_ADDRESS, + selector: starknet_keccak(b"transfer"), + calldata: vec![ACCOUNT_ADDRESS, 15.into(), Felt::ZERO], + }]) + .send() + .await + .unwrap(); + + wait_for_cond( + || async { + let receipt = node.json_rpc().get_transaction_receipt(res.transaction_hash).await?; + assert_eq!(receipt.block, ReceiptBlock::Pending); + Ok(()) + }, + Duration::from_millis(500), + 60, + ) + .await; + + drop(node); + + // tx should appear in saved pending block + + let cmd_builder = cmd_builder.args([ + "--devnet", + "--no-l1-sync", + "--gas-price", + "0", + // never produce blocks but produce pending txs + "--chain-config-path", + "test_devnet.yaml", + "--chain-config-override", + "block_time=5min,pending_block_update_time=500ms", + ]); + let mut node = cmd_builder.clone().run(); + node.wait_for_ready().await; + + // should find receipt + wait_for_cond( + || async { + let receipt = node.json_rpc().get_transaction_receipt(res.transaction_hash).await?; + assert_eq!(receipt.block, ReceiptBlock::Pending); + Ok(()) + }, + Duration::from_millis(500), + 60, + ) + .await; +} diff --git a/crates/tests/src/lib.rs b/crates/tests/src/lib.rs index 4deda5c74..7fd9b7d6c 100644 --- a/crates/tests/src/lib.rs +++ b/crates/tests/src/lib.rs @@ -1,15 +1,12 @@ //! End to end tests for madara. #![cfg(test)] +mod devnet; mod rpc; mod storage_proof; use anyhow::bail; use rstest::rstest; -use starknet::accounts::{Account, ExecutionEncoding, SingleOwnerAccount}; -use starknet::signers::{LocalWallet, SigningKey}; -use starknet_core::types::{BlockId, BlockTag, Call, Felt}; -use starknet_core::utils::starknet_keccak; use starknet_providers::Provider; use starknet_providers::{jsonrpc::HttpTransport, JsonRpcClient, Url}; use std::ops::{Deref, Range}; @@ -76,7 +73,7 @@ impl MadaraCmd { anyhow::Ok(()) }, Duration::from_millis(500), - 20, + 50, ) .await; self.ready = true; @@ -168,6 +165,11 @@ pub fn get_port() -> MadaraPortNum { MadaraPortNum(port) } +/// Note: the builder is [`Clone`]able. When cloned, it will keep the same tempdir. +/// This is useful for tests that need to restart the node using the same DB: they +/// can just make a builder, clone() it and call [`MadaraCmdBuilder::run`] to launch +/// the node. They can then [`drop`] the [`MadaraCmd`] instance to kill the node, and +/// restart the node using the same db by reusing the earlier builder. 
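+/// A rough usage sketch (illustrative only; the flags and the exact flow are assumptions based
+/// on the tests in this crate, not a prescribed API):
+/// ```ignore
+/// let builder = MadaraCmdBuilder::new().args(["--devnet", "--no-l1-sync"]);
+/// let node = builder.clone().run(); // first run, creates the tempdir-backed DB
+/// // ... interact with the node, e.g. node.wait_for_ready().await ...
+/// drop(node); // kill the node; the builder keeps the tempdir (and thus the DB) alive
+/// let node = builder.run(); // restart on the same DB
+/// ```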
#[derive(Clone)] pub struct MadaraCmdBuilder { args: Vec, @@ -284,198 +286,3 @@ async fn madara_can_sync_a_few_blocks() { } ); } - -const ERC20_STRK_CONTRACT_ADDRESS: Felt = - Felt::from_hex_unchecked("0x04718f5a0fc34cc1af16a1cdee98ffb20c31f5cd61d6ab07201858f4287c938d"); -#[allow(unused)] -const ERC20_ETH_CONTRACT_ADDRESS: Felt = - Felt::from_hex_unchecked("0x049d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7"); - -const ACCOUNT_SECRET: Felt = - Felt::from_hex_unchecked("0x077e56c6dc32d40a67f6f7e6625c8dc5e570abe49c0a24e9202e4ae906abcc07"); -const ACCOUNT_ADDRESS: Felt = - Felt::from_hex_unchecked("0x055be462e718c4166d656d11f89e341115b8bc82389c3762a10eade04fcb225d"); - -#[rstest] -#[tokio::test] -async fn madara_devnet_add_transaction() { - let _ = tracing_subscriber::fmt().with_test_writer().try_init(); - - let args = &[ - "--devnet", - "--no-l1-sync", - "--gas-price", - "0", - // only produce blocks no pending txs - "--chain-config-override", - "block_time=1s,pending_block_update_time=1s", - ]; - - let cmd_builder = MadaraCmdBuilder::new().args(*args); - let mut node = cmd_builder.run(); - node.wait_for_ready().await; - - tokio::time::sleep(Duration::from_secs(3)).await; - - let chain_id = node.json_rpc().chain_id().await.unwrap(); - - let signer = LocalWallet::from_signing_key(SigningKey::from_secret_scalar(ACCOUNT_SECRET)); - let mut account = - SingleOwnerAccount::new(node.json_rpc(), signer, ACCOUNT_ADDRESS, chain_id, ExecutionEncoding::New); - account.set_block_id(BlockId::Tag(BlockTag::Latest)); - - let res = account - .execute_v3(vec![Call { - to: ERC20_STRK_CONTRACT_ADDRESS, - selector: starknet_keccak(b"transfer"), - calldata: vec![ACCOUNT_ADDRESS, 15.into(), Felt::ZERO], - }]) - .send() - .await - .unwrap(); - - wait_for_cond( - || async { - let _receipt = node.json_rpc().get_transaction_receipt(res.transaction_hash).await?; - Ok(()) - }, - Duration::from_millis(500), - 60, - ) - .await; -} - -#[rstest] -#[tokio::test] -async fn madara_devnet_mempool_saving() { - let _ = tracing_subscriber::fmt().with_test_writer().try_init(); - - let cmd_builder = MadaraCmdBuilder::new().args([ - "--devnet", - "--no-l1-sync", - "--gas-price", - "0", - // never produce blocks & pending txs - "--chain-config-path", - "test_devnet.yaml", - "--chain-config-override", - "block_time=5min,pending_block_update_time=5min", - ]); - let mut node = cmd_builder.clone().run(); - node.wait_for_ready().await; - - let chain_id = node.json_rpc().chain_id().await.unwrap(); - - let signer = LocalWallet::from_signing_key(SigningKey::from_secret_scalar(ACCOUNT_SECRET)); - let mut account = - SingleOwnerAccount::new(node.json_rpc(), signer, ACCOUNT_ADDRESS, chain_id, ExecutionEncoding::New); - account.set_block_id(BlockId::Tag(BlockTag::Pending)); - - let res = account - .execute_v3(vec![Call { - to: ERC20_STRK_CONTRACT_ADDRESS, - selector: starknet_keccak(b"transfer"), - calldata: vec![ACCOUNT_ADDRESS, 15.into(), Felt::ZERO], - }]) - .send() - .await - .unwrap(); - - drop(node); - - // tx should be in saved mempool - - let cmd_builder = cmd_builder.args([ - "--devnet", - "--no-l1-sync", - "--gas-price", - "0", - // never produce blocks but produce pending txs - "--chain-config-path", - "test_devnet.yaml", - "--chain-config-override", - "block_time=5min,pending_block_update_time=500ms", - ]); - let mut node = cmd_builder.clone().run(); - node.wait_for_ready().await; - - // tx should be in mempool - - wait_for_cond( - || async { - let _receipt = 
node.json_rpc().get_transaction_receipt(res.transaction_hash).await?; - Ok(()) - }, - Duration::from_millis(500), - 60, - ) - .await; -} - -#[rstest] -#[tokio::test] -async fn madara_devnet_continue_pending() { - let _ = tracing_subscriber::fmt().with_test_writer().try_init(); - - let cmd_builder = MadaraCmdBuilder::new().args([ - "--devnet", - "--no-l1-sync", - "--gas-price", - "0", - // never produce blocks but produce pending txs - "--chain-config-path", - "test_devnet.yaml", - "--chain-config-override", - "block_time=5min,pending_block_update_time=500ms", - ]); - let mut node = cmd_builder.clone().run(); - node.wait_for_ready().await; - - let chain_id = node.json_rpc().chain_id().await.unwrap(); - - let signer = LocalWallet::from_signing_key(SigningKey::from_secret_scalar(ACCOUNT_SECRET)); - let mut account = - SingleOwnerAccount::new(node.json_rpc(), signer, ACCOUNT_ADDRESS, chain_id, ExecutionEncoding::New); - account.set_block_id(BlockId::Tag(BlockTag::Pending)); - - let res = account - .execute_v3(vec![Call { - to: ERC20_STRK_CONTRACT_ADDRESS, - selector: starknet_keccak(b"transfer"), - calldata: vec![ACCOUNT_ADDRESS, 15.into(), Felt::ZERO], - }]) - .send() - .await - .unwrap(); - - wait_for_cond( - || async { - let _receipt = node.json_rpc().get_transaction_receipt(res.transaction_hash).await?; - Ok(()) - }, - Duration::from_millis(500), - 60, - ) - .await; - - drop(node); - - // tx should appear in saved pending block - - let cmd_builder = cmd_builder.args([ - "--devnet", - "--no-l1-sync", - "--gas-price", - "0", - // never produce blocks never produce pending txs - "--chain-config-path", - "test_devnet.yaml", - "--chain-config-override", - "block_time=5min,pending_block_update_time=5min", - ]); - let mut node = cmd_builder.clone().run(); - node.wait_for_ready().await; - - // should find receipt - let _receipt = node.json_rpc().get_transaction_receipt(res.transaction_hash).await.unwrap(); -}
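
Note on `TransactionWithHash::into_blockifier` (added in to_blockifier.rs above): its doc comment says that block production uses it to replay the transactions of a saved pending block when sequencing restarts. The sketch below shows what such a caller could look like. It is illustrative only, not the actual block-production code; the crate path `mp_class`, the `BTransaction` alias and the `declared_classes` map are assumptions.

    use std::collections::HashMap;
    use blockifier::transaction::transaction_execution::Transaction as BTransaction; // assumed alias
    use mp_class::ConvertedClass; // assumed crate path for crates/primitives/class
    use mp_transactions::{ToBlockifierError, TransactionWithHash};
    use starknet_types_core::felt::Felt;

    /// Convert previously executed transactions back into blockifier transactions so they can
    /// be re-executed. Declare transactions need their `ConvertedClass` back; every other kind
    /// passes `None`.
    fn replay_txs(
        txs: Vec<TransactionWithHash>,
        declared_classes: &HashMap<Felt, ConvertedClass>,
    ) -> Result<Vec<BTransaction>, ToBlockifierError> {
        txs.into_iter()
            .map(|tx| {
                // Look up the class by the hash recorded in the declare transaction.
                let class = tx
                    .transaction
                    .as_declare()
                    .map(|declare| *declare.class_hash())
                    .and_then(|hash| declared_classes.get(&hash));
                tx.into_blockifier(class)
            })
            .collect()
    }

If a declare's class is missing from the map, `into_blockifier` itself returns `ToBlockifierError::MissingClass`, which the final `collect` propagates.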